code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import viewsets
from rest_framework.response import Response
from .serializers import UserSerializer, PostSerializer, \
PostImageSerializer, CommentSerializer, LikeSerializer
from .models import User, Post, Like, Comment
import json
# Create your views here.
class UserViewSet(viewsets.ViewSet):
    """Endpoints for users: list all, look up by device ID, create."""

    def list(self, request, **kwargs):
        """Return all users, or the single user matching kwargs['deviceID'].

        Fix: DRF ViewSet actions are instance methods; the original
        ``@classmethod`` decorator on a ``self``-parameter method only
        worked by accident (``self`` silently received the class).
        """
        device_id = kwargs.get('deviceID')
        if device_id is None:
            queryset = User.objects.all()
            serializer = UserSerializer(queryset, many=True)
            return Response(serializer.data, status=200)
        try:
            query = User.objects.get(deviceID=device_id)
        except ObjectDoesNotExist:
            return Response(f'The user who has device ID '
                            f'[{device_id}] '
                            f'doesn\'t exist.', status=400)
        serializer = UserSerializer(query)
        return Response(serializer.data, status=200)

    def create(self, request):
        """Create a user from POSTed fields; 201 on success, 400 on invalid data."""
        requestData = {"userID": request.POST.get("userID"),
                       "deviceID": request.POST.get("deviceID"),
                       "username": request.POST.get("username"),
                       # .get() instead of ['picture']: a missing upload now
                       # yields a 400 serializer error, not an uncaught KeyError.
                       "picture": request.FILES.get('picture')}
        serializer = UserSerializer(data=requestData)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=201)
        return Response(serializer.errors, status=400)
class PostViewSet(viewsets.ViewSet):
    """Endpoints for posts: list (optionally by author) and create with images."""

    def list(self, request, **kwargs):
        """Return all posts, or only those by kwargs['author'].

        Fix: dropped the accidental ``@classmethod`` decorator (DRF actions
        are instance methods) and merged the duplicated serializer setup.
        """
        author = kwargs.get('author')
        if author is None:
            queryset = Post.objects.all()
        else:
            queryset = Post.objects.filter(author=author)
        serializer = PostSerializer(queryset, many=True)
        return Response(serializer.data)

    def create(self, request):
        """Create a post plus any uploaded images; 201 on success.

        'location' is sent as a JSON string and decoded here.
        NOTE(review): a missing 'location' field makes ``json.loads(None)``
        raise — presumably callers always send it; confirm upstream.
        """
        requestData = {"contents": request.POST.get("contents"),
                       "location": json.loads(request.POST.get("location"))}
        postSerializer = PostSerializer(data=requestData)
        if postSerializer.is_valid():
            try:
                # Resolve the writer FK from the posted user id.
                postSerializer.validated_data['writer'] = \
                    User.objects.get(id=request.POST.get("writer"))
            except ObjectDoesNotExist:
                return Response(f'The user with id '
                                f'[{request.POST.get("writer")}] '
                                f'doesn\'t exist.', status=400)
            postSerializer.save()
            # Attach every uploaded image to the freshly created post.
            for image in request.FILES.getlist("postImage"):
                requestData_Image = {"post": postSerializer.data["postID"],
                                     "image": image}
                postimageSerializer = \
                    PostImageSerializer(data=requestData_Image)
                # Invalid images are silently skipped (original behavior kept).
                if postimageSerializer.is_valid():
                    postimageSerializer.save()
            return Response(postSerializer.data, status=201)
        return Response(postSerializer.errors, status=400)
class CommentViewSet(viewsets.ViewSet):
    """Endpoints for comments on a post (postID comes from the URL kwargs)."""

    def create(self, request, **kwargs):
        """Create a comment; 400 if the post or writer does not exist.

        Fix: dropped the accidental ``@classmethod`` decorator — DRF
        ViewSet actions are instance methods.
        """
        postid = kwargs.get('postID')
        requestData = {"writer": request.POST.get("writer"),
                       "post": postid,
                       "contents": request.POST.get("contents")}
        try:
            postquery = Post.objects.get(postID=postid)
            userquery = User.objects.get(id=request.POST.get("writer"))
        except ObjectDoesNotExist as e:
            # The ORM message names the missing model, so it is returned as-is.
            return Response(str(e), status=400)
        serializer = CommentSerializer(data=requestData)
        if serializer.is_valid():
            # Overwrite with the resolved model instances.
            serializer.validated_data['writer'] = userquery
            serializer.validated_data['post'] = postquery
            serializer.save()
            return Response(serializer.data, status=201)
        return Response(serializer.errors, status=400)

    def list(self, request, **kwargs):
        """Return all comments belonging to kwargs['postID']."""
        commentQuery = Comment.objects.filter(post=kwargs.get('postID'))
        serializer = CommentSerializer(commentQuery, many=True)
        return Response(serializer.data, status=200)
class LikeViewSet(viewsets.ViewSet):
    """Endpoints for likes on a post (at most one like per user per post)."""

    def list(self, request, **kwargs):
        """Return all likes belonging to kwargs['postID']."""
        likeQuery = Like.objects.filter(post=kwargs.get('postID'))
        serializer = LikeSerializer(likeQuery, many=True)
        return Response(serializer.data, status=200)

    def create(self, request, **kwargs):
        """Create a like; 400 if this user already liked the post.

        Fixes: dropped the accidental ``@classmethod`` decorator (DRF
        actions are instance methods) and repaired the garbled error
        message ("User Aleady liked to this post.").
        """
        requestData = {"liker": request.POST.get("userID"),
                       "post": kwargs.get('postID')}
        likerID = int(requestData["liker"])
        already_liked = Like.objects.filter(
            post=kwargs.get("postID"), liker=likerID).exists()
        if already_liked:
            return Response("User already liked this post.", status=400)
        serializer = LikeSerializer(data=requestData)
        if serializer.is_valid():
            serializer.save()
            # NOTE(review): 201 would be conventional for creation; 200 kept
            # for backward compatibility with existing clients.
            return Response(serializer.data, status=200)
        return Response(serializer.errors, status=400)
@classmethod
def destroy(self, request, **kwargs):
requestData = {"liker": request.POST.get("userID"),
"post": kwargs.get('postID')}
likerID = int(requestData["liker"])
posts = Like.objects.filter(post=kwargs.get("postID"))
posts_liker = posts.filter(liker=likerID)
if not posts_liker.exists():
return Response(f'Can\'t find post {requestData["post"]}'
f' or userID {likerID}.', status=400)
posts_liker.delete()
return Response("deleted.", status=200) | server/sns/views.py | from django.core.exceptions import ObjectDoesNotExist
from rest_framework import viewsets
from rest_framework.response import Response
from .serializers import UserSerializer, PostSerializer, \
PostImageSerializer, CommentSerializer, LikeSerializer
from .models import User, Post, Like, Comment
import json
# Create your views here.
class UserViewSet(viewsets.ViewSet):
@classmethod
def list(self, request, **kwargs):
if kwargs.get('deviceID') is None:
queryset = User.objects.all()
serializer = UserSerializer(queryset, many=True)
return Response(serializer.data, status=200)
else:
try:
query = User.objects.get(deviceID=kwargs.get('deviceID'))
except ObjectDoesNotExist:
return Response(f'The user who has device ID '
f'[{kwargs.get("deviceID")}] '
f'doesn\'t exist.', status=400)
serializer = UserSerializer(query)
return Response(serializer.data, status=200)
@classmethod
def create(self, request):
requestData = {"userID": request.POST.get("userID"),
"deviceID": request.POST.get("deviceID"),
"username": request.POST.get("username"),
"picture": request.FILES['picture']}
serializer = UserSerializer(data=requestData)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=201)
return Response(serializer.errors, status=400)
class PostViewSet(viewsets.ViewSet):
@classmethod
def list(self, request, **kwargs):
if kwargs.get('author') is None:
queryset = Post.objects.all()
serializer = PostSerializer(queryset, many=True)
else:
Posts = Post.objects.filter(author=kwargs.get('author'))
serializer = PostSerializer(Posts, many=True)
return Response(serializer.data)
@classmethod
def create(self, request):
requestData = {"contents": request.POST.get("contents"),
"location": json.loads(request.POST.get("location"))}
postSerializer = PostSerializer(data=requestData)
if postSerializer.is_valid():
try:
postSerializer.validated_data['writer'] = \
User.objects.get(id=request.POST.get("writer"))
except ObjectDoesNotExist:
return Response(f'The user with id '
f'[{request.POST.get("writer")}] '
f'doesn\'t exist.', status=400)
postSerializer.save()
for image in request.FILES.getlist("postImage"):
requestData_Image = {"post": postSerializer.data["postID"],
"image": image}
postimageSerializer = \
PostImageSerializer(data=requestData_Image)
if postimageSerializer.is_valid():
postimageSerializer.save()
return Response(postSerializer.data, status=201)
return Response(postSerializer.errors, status=400)
class CommentViewSet(viewsets.ViewSet):
@classmethod
def create(self, request, **kwargs):
requestData = {"writer": request.POST.get("writer"),
"post": kwargs.get('postID'),
"contents": request.POST.get("contents")}
postid = kwargs.get('postID')
try:
postquery = Post.objects.get(postID=postid)
userquery = User.objects.get(id=request.POST.get("writer"))
except ObjectDoesNotExist as e:
return Response(str(e), status=400)
serializer = CommentSerializer(data=requestData)
if serializer.is_valid():
serializer.validated_data['writer'] = userquery
serializer.validated_data['post'] = postquery
serializer.save()
return Response(serializer.data, status=201)
return Response(serializer.errors, status=400)
@classmethod
def list(self, request, **kwargs):
postid = kwargs.get('postID')
commentQuery = Comment.objects.filter(post=postid)
serializer = CommentSerializer(commentQuery, many=True)
return Response(serializer.data, status=200)
class LikeViewSet(viewsets.ViewSet):
@classmethod
def list(self, request, **kwargs):
postId = kwargs.get('postID')
likeQuery = Like.objects.filter(post=postId)
serializer = LikeSerializer(likeQuery, many=True)
return Response(serializer.data, status=200)
@classmethod
def create(self, request, **kwargs):
requestData = {"liker": request.POST.get("userID"),
"post": kwargs.get('postID')}
likerID = int(requestData["liker"])
posts = Like.objects.filter(post=kwargs.get("postID"))
posts_liker = posts.filter(liker=likerID)
if posts_liker.exists():
return Response("User Aleady liked to this post.", status=400)
serializer = LikeSerializer(data=requestData)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=200)
return Response(serializer.errors, status=400)
@classmethod
def destroy(self, request, **kwargs):
requestData = {"liker": request.POST.get("userID"),
"post": kwargs.get('postID')}
likerID = int(requestData["liker"])
posts = Like.objects.filter(post=kwargs.get("postID"))
posts_liker = posts.filter(liker=likerID)
if not posts_liker.exists():
return Response(f'Can\'t find post {requestData["post"]}'
f' or userID {likerID}.', status=400)
posts_liker.delete()
return Response("deleted.", status=200) | 0.548432 | 0.084909 |
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.SignerBean import SignerBean
class SignFieldBean(object):
    """Value object describing one signable field: its type, signer and key."""

    def __init__(self):
        self._sign_field_type = None
        self._signer = None
        self._struct_key = None

    @property
    def sign_field_type(self):
        """Type identifier of the sign field."""
        return self._sign_field_type

    @sign_field_type.setter
    def sign_field_type(self, value):
        self._sign_field_type = value

    @property
    def signer(self):
        """Signer of this field; plain dicts are coerced to ``SignerBean``."""
        return self._signer

    @signer.setter
    def signer(self, value):
        if isinstance(value, SignerBean):
            self._signer = value
        else:
            self._signer = SignerBean.from_alipay_dict(value)

    @property
    def struct_key(self):
        """Key locating this field within the signed structure."""
        return self._struct_key

    @struct_key.setter
    def struct_key(self, value):
        self._struct_key = value

    def to_alipay_dict(self):
        """Serialize every truthy attribute into an alipay-style dict.

        Values exposing ``to_alipay_dict`` are serialized recursively;
        everything else is stored as-is.
        """
        params = dict()
        pairs = (('sign_field_type', self.sign_field_type),
                 ('signer', self.signer),
                 ('struct_key', self.struct_key))
        for key, value in pairs:
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                params[key] = value.to_alipay_dict()
            else:
                params[key] = value
        return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = SignFieldBean()
if 'sign_field_type' in d:
o.sign_field_type = d['sign_field_type']
if 'signer' in d:
o.signer = d['signer']
if 'struct_key' in d:
o.struct_key = d['struct_key']
return o | alipay/aop/api/domain/SignFieldBean.py | import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.SignerBean import SignerBean
class SignFieldBean(object):
def __init__(self):
self._sign_field_type = None
self._signer = None
self._struct_key = None
@property
def sign_field_type(self):
return self._sign_field_type
@sign_field_type.setter
def sign_field_type(self, value):
self._sign_field_type = value
@property
def signer(self):
return self._signer
@signer.setter
def signer(self, value):
if isinstance(value, SignerBean):
self._signer = value
else:
self._signer = SignerBean.from_alipay_dict(value)
@property
def struct_key(self):
return self._struct_key
@struct_key.setter
def struct_key(self, value):
self._struct_key = value
def to_alipay_dict(self):
params = dict()
if self.sign_field_type:
if hasattr(self.sign_field_type, 'to_alipay_dict'):
params['sign_field_type'] = self.sign_field_type.to_alipay_dict()
else:
params['sign_field_type'] = self.sign_field_type
if self.signer:
if hasattr(self.signer, 'to_alipay_dict'):
params['signer'] = self.signer.to_alipay_dict()
else:
params['signer'] = self.signer
if self.struct_key:
if hasattr(self.struct_key, 'to_alipay_dict'):
params['struct_key'] = self.struct_key.to_alipay_dict()
else:
params['struct_key'] = self.struct_key
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = SignFieldBean()
if 'sign_field_type' in d:
o.sign_field_type = d['sign_field_type']
if 'signer' in d:
o.signer = d['signer']
if 'struct_key' in d:
o.struct_key = d['struct_key']
return o | 0.552057 | 0.114467 |
from __future__ import division
import numpy as np
from sklearn import datasets
from sklearn import metrics
from sklearn.model_selection import StratifiedKFold, train_test_split
from neupy import algorithms
from neupy.exceptions import NotTrained
from base import BaseTestCase
class PNNTestCase(BaseTestCase):
    """Unit tests for neupy's PNN (probabilistic neural network) estimator."""
    def test_handle_errors(self):
        """Invalid shapes must raise ValueError; untrained predict raises NotTrained."""
        with self.assertRaises(ValueError):
            # size of target data not the same as
            # size of input data.
            pnnet = algorithms.PNN(verbose=False)
            pnnet.train(np.array([[0], [0]]), np.array([0]))
        with self.assertRaises(ValueError):
            # 2-D target vector (must be 1-D)
            pnnet = algorithms.PNN(verbose=False)
            pnnet.train(np.array([[0]]), np.array([[0, 0]]))
        with self.assertRaises(ValueError):
            # invalid feature size for prediction data
            pnnet = algorithms.PNN(verbose=False)
            pnnet.train(np.array([[0], [0]]), np.array([0]))
            pnnet.predict(np.array([[0]]))
        msg = "hasn't been trained"
        with self.assertRaisesRegexp(NotTrained, msg):
            # predict without training
            pnnet = algorithms.PNN(verbose=False)
            pnnet.predict(np.array([[0]]))
        with self.assertRaises(ValueError):
            # different number of features for
            # train and test data
            grnet = algorithms.PNN(verbose=False)
            grnet.train(np.array([[0]]), np.array([0]))
            grnet.predict(np.array([[0, 0]]))
    def test_simple_pnn(self):
        """Cross-validated accuracy on iris; pickling round-trip is also checked."""
        dataset = datasets.load_iris()
        data = dataset.data
        target = dataset.target
        test_data_size = 10
        skfold = StratifiedKFold(n_splits=test_data_size)
        avarage_result = 0
        for train, test in skfold.split(data, target):
            x_train, x_test = data[train], data[test]
            y_train, y_test = target[train], target[test]
            nw = algorithms.PNN(verbose=False, std=0.1)
            nw.train(x_train, y_train)
            result = nw.predict(x_test)
            avarage_result += sum(y_test == result)
        # 14.4 = mean correct predictions per 15-sample fold, pinned by the
        # original authors; presumably dataset/split dependent — do not retune.
        self.assertEqual(avarage_result / test_data_size, 14.4)
        self.assertPickledNetwork(nw, x_test)
    def test_digit_prediction(self):
        """Accuracy on the digits dataset with a wide kernel (std=10)."""
        dataset = datasets.load_digits()
        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data, dataset.target, train_size=0.7
        )
        nw = algorithms.PNN(verbose=False, std=10)
        nw.train(x_train, y_train)
        result = nw.predict(x_test)
        accuracy = metrics.accuracy_score(y_test, result)
        # NOTE(review): exact value depends on train_test_split's RNG state;
        # presumably BaseTestCase fixes the seed — confirm if this flakes.
        self.assertAlmostEqual(accuracy, 0.9889, places=4)
    def test_predict_probability(self):
        """predict_proba returns one row per sample, rows summing to 1."""
        dataset = datasets.load_digits()
        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data, dataset.target, train_size=0.7)
        x_train_before = x_train.copy()
        x_test_before = x_test.copy()
        y_train_before = y_train.copy()
        number_of_classes = len(np.unique(dataset.target))
        pnnet = algorithms.PNN(verbose=False, std=10)
        pnnet.train(x_train, y_train)
        result = pnnet.predict_proba(x_test)
        n_test_inputs = x_test.shape[0]
        self.assertEqual(result.shape, (n_test_inputs, number_of_classes))
        # Probabilities per sample must sum to 1 (rounded to dodge float noise).
        total_classes_prob = np.round(result.sum(axis=1), 10)
        np.testing.assert_array_equal(
            total_classes_prob,
            np.ones(n_test_inputs))
        old_result = result.copy()
        # Test problem with variable links
        np.testing.assert_array_equal(x_train, x_train_before)
        np.testing.assert_array_equal(x_test, x_test_before)
        np.testing.assert_array_equal(y_train, y_train_before)
        # Zeroing the caller's array must not change predictions — the net
        # presumably keeps its own copy of the training data (asserted below).
        x_train[:, :] = 0
        result = pnnet.predict_proba(x_test)
        total_classes_prob = np.round(result.sum(axis=1), 10)
        np.testing.assert_array_almost_equal(result, old_result)
    def test_train_different_inputs(self):
        """1-D training vectors must be accepted (helper from BaseTestCase)."""
        self.assertInvalidVectorTrain(
            algorithms.PNN(verbose=False),
            np.array([1, 2, 3]),
            np.array([1, 0, 1]))
    def test_predict_different_inputs(self):
        """1-D prediction vectors must be accepted (helper from BaseTestCase)."""
        pnnet = algorithms.PNN(verbose=False)
        data = np.array([[1, 2, 3]]).T
        target = np.array([[1, 0, 1]]).T
        pnnet.train(data, target)
        self.assertInvalidVectorPred(
            pnnet, data.ravel(), target.ravel(), decimal=2)
    def test_pnn_mini_batches(self):
        """Output shapes are unchanged when predicting with batch_size set."""
        dataset = datasets.load_digits()
        n_classes = len(np.unique(dataset.target))
        x_train, x_test, y_train, y_test = train_test_split(
            dataset.data, dataset.target, train_size=0.7)
        pnnet = algorithms.PNN(verbose=False, batch_size=100)
        pnnet.train(x_train, y_train)
        y_predicted = pnnet.predict(x_test)
        self.assertEqual(y_predicted.shape, y_test.shape)
        y_predicted = pnnet.predict_proba(x_test)
        self.assertEqual(y_predicted.shape,
                         (y_test.shape[0], n_classes))
    def test_pnn_repr(self):
        """String form mentions the class name and its key options."""
        pnn = algorithms.PNN()
        self.assertIn('PNN', str(pnn))
        self.assertIn('std', str(pnn))
        self.assertIn('batch_size', str(pnn))
    def test_pnn_non_trivial_class_names(self):
        """Class labels need not be 0..k-1 integers."""
        # Issue #177: https://github.com/itdxer/neupy/issues/177
        x = np.array([10] * 10 + [20] * 10 + [30] * 10)
        y = np.array([1] * 10 + [2] * 10 + [3] * 10)
        pnn = algorithms.PNN(std=1)
        pnn.train(x, y)
        y_predicted = pnn.predict(x)
        np.testing.assert_array_almost_equal(y, y_predicted)
        self.assertEqual(sorted(pnn.classes), [1, 2, 3])
def test_pnn_non_trivial_class_names_as_strings(self):
# Issue #177: https://github.com/itdxer/neupy/issues/177
x = np.array([10] * 10 + [20] * 10 + [30] * 10)
y = np.array(['cat'] * 10 + ['dog'] * 10 + ['horse'] * 10)
pnn = algorithms.PNN(std=1)
pnn.train(x, y)
y_predicted = pnn.predict(x)
np.testing.assert_array_equal(y, y_predicted)
self.assertEqual(sorted(pnn.classes), ['cat', 'dog', 'horse']) | tests/algorithms/rbfn/test_pnn.py | from __future__ import division
import numpy as np
from sklearn import datasets
from sklearn import metrics
from sklearn.model_selection import StratifiedKFold, train_test_split
from neupy import algorithms
from neupy.exceptions import NotTrained
from base import BaseTestCase
class PNNTestCase(BaseTestCase):
def test_handle_errors(self):
with self.assertRaises(ValueError):
# size of target data not the same as
# size of input data.
pnnet = algorithms.PNN(verbose=False)
pnnet.train(np.array([[0], [0]]), np.array([0]))
with self.assertRaises(ValueError):
# 2-D target vector (must be 1-D)
pnnet = algorithms.PNN(verbose=False)
pnnet.train(np.array([[0]]), np.array([[0, 0]]))
with self.assertRaises(ValueError):
# invalid feature size for prediction data
pnnet = algorithms.PNN(verbose=False)
pnnet.train(np.array([[0], [0]]), np.array([0]))
pnnet.predict(np.array([[0]]))
msg = "hasn't been trained"
with self.assertRaisesRegexp(NotTrained, msg):
# predict without training
pnnet = algorithms.PNN(verbose=False)
pnnet.predict(np.array([[0]]))
with self.assertRaises(ValueError):
# different number of features for
# train and test data
grnet = algorithms.PNN(verbose=False)
grnet.train(np.array([[0]]), np.array([0]))
grnet.predict(np.array([[0, 0]]))
def test_simple_pnn(self):
dataset = datasets.load_iris()
data = dataset.data
target = dataset.target
test_data_size = 10
skfold = StratifiedKFold(n_splits=test_data_size)
avarage_result = 0
for train, test in skfold.split(data, target):
x_train, x_test = data[train], data[test]
y_train, y_test = target[train], target[test]
nw = algorithms.PNN(verbose=False, std=0.1)
nw.train(x_train, y_train)
result = nw.predict(x_test)
avarage_result += sum(y_test == result)
self.assertEqual(avarage_result / test_data_size, 14.4)
self.assertPickledNetwork(nw, x_test)
def test_digit_prediction(self):
dataset = datasets.load_digits()
x_train, x_test, y_train, y_test = train_test_split(
dataset.data, dataset.target, train_size=0.7
)
nw = algorithms.PNN(verbose=False, std=10)
nw.train(x_train, y_train)
result = nw.predict(x_test)
accuracy = metrics.accuracy_score(y_test, result)
self.assertAlmostEqual(accuracy, 0.9889, places=4)
def test_predict_probability(self):
dataset = datasets.load_digits()
x_train, x_test, y_train, y_test = train_test_split(
dataset.data, dataset.target, train_size=0.7)
x_train_before = x_train.copy()
x_test_before = x_test.copy()
y_train_before = y_train.copy()
number_of_classes = len(np.unique(dataset.target))
pnnet = algorithms.PNN(verbose=False, std=10)
pnnet.train(x_train, y_train)
result = pnnet.predict_proba(x_test)
n_test_inputs = x_test.shape[0]
self.assertEqual(result.shape, (n_test_inputs, number_of_classes))
total_classes_prob = np.round(result.sum(axis=1), 10)
np.testing.assert_array_equal(
total_classes_prob,
np.ones(n_test_inputs))
old_result = result.copy()
# Test problem with variable links
np.testing.assert_array_equal(x_train, x_train_before)
np.testing.assert_array_equal(x_test, x_test_before)
np.testing.assert_array_equal(y_train, y_train_before)
x_train[:, :] = 0
result = pnnet.predict_proba(x_test)
total_classes_prob = np.round(result.sum(axis=1), 10)
np.testing.assert_array_almost_equal(result, old_result)
def test_train_different_inputs(self):
self.assertInvalidVectorTrain(
algorithms.PNN(verbose=False),
np.array([1, 2, 3]),
np.array([1, 0, 1]))
def test_predict_different_inputs(self):
pnnet = algorithms.PNN(verbose=False)
data = np.array([[1, 2, 3]]).T
target = np.array([[1, 0, 1]]).T
pnnet.train(data, target)
self.assertInvalidVectorPred(
pnnet, data.ravel(), target.ravel(), decimal=2)
def test_pnn_mini_batches(self):
dataset = datasets.load_digits()
n_classes = len(np.unique(dataset.target))
x_train, x_test, y_train, y_test = train_test_split(
dataset.data, dataset.target, train_size=0.7)
pnnet = algorithms.PNN(verbose=False, batch_size=100)
pnnet.train(x_train, y_train)
y_predicted = pnnet.predict(x_test)
self.assertEqual(y_predicted.shape, y_test.shape)
y_predicted = pnnet.predict_proba(x_test)
self.assertEqual(y_predicted.shape,
(y_test.shape[0], n_classes))
def test_pnn_repr(self):
pnn = algorithms.PNN()
self.assertIn('PNN', str(pnn))
self.assertIn('std', str(pnn))
self.assertIn('batch_size', str(pnn))
def test_pnn_non_trivial_class_names(self):
# Issue #177: https://github.com/itdxer/neupy/issues/177
x = np.array([10] * 10 + [20] * 10 + [30] * 10)
y = np.array([1] * 10 + [2] * 10 + [3] * 10)
pnn = algorithms.PNN(std=1)
pnn.train(x, y)
y_predicted = pnn.predict(x)
np.testing.assert_array_almost_equal(y, y_predicted)
self.assertEqual(sorted(pnn.classes), [1, 2, 3])
def test_pnn_non_trivial_class_names_as_strings(self):
# Issue #177: https://github.com/itdxer/neupy/issues/177
x = np.array([10] * 10 + [20] * 10 + [30] * 10)
y = np.array(['cat'] * 10 + ['dog'] * 10 + ['horse'] * 10)
pnn = algorithms.PNN(std=1)
pnn.train(x, y)
y_predicted = pnn.predict(x)
np.testing.assert_array_equal(y, y_predicted)
self.assertEqual(sorted(pnn.classes), ['cat', 'dog', 'horse']) | 0.877411 | 0.61806 |
import csv
import zipfile
import requests
import StringIO, datetime, re
from lxml import etree
from util import fred_countries_currencies, oecd_countries_currencies
FRED_RATES = "https://fred.stlouisfed.org/categories/94/downloaddata/INTLFXD_csv_2.zip"
OECD_RATES = "http://stats.oecd.org/restsdmx/sdmx.ashx/GetData/MEI_FIN/CCUS.AUS+AUT+BEL+CAN+CHL+CZE+DNK+EST+FIN+FRA+DEU+GRC+HUN+ISL+IRL+ISR+ITA+JPN+KOR+LVA+LUX+MEX+NLD+NZL+NOR+POL+PRT+SVK+SVN+ESP+SWE+CHE+TUR+GBR+USA+EA19+SDR+NMEC+BRA+CHN+COL+CRI+IND+IDN+RUS+ZAF.M/all?startTime=1950-01"
def get_fred_rates(outfp, writer):
def extract_file(zfo, id_, from_currency, to_currency, freq):
fp = 'INTLFXD_csv_2/data/%s' % id_
fo = zfo.open(fp)
reader = csv.reader(fo)
reader.next()
country = {True: to_currency,
False: from_currency}[from_currency == "U.S."]
for row in reader:
if row[1] == ".": continue
if from_currency == "U.S.": row[1] = round(1/float(row[1]),4)
outrow = row + [fred_countries_currencies[country], freq, "FRED"]
writer.writerow(outrow)
fo.close()
def read_files(zfo):
fo = zfo.open('INTLFXD_csv_2/README_SERIES_ID_SORT.txt')
for line in fo.readlines():
if not line.startswith("DEX"): continue
columns = line.split(';')
id_ = columns[0].strip()
from_currency, to_currency = re.match(
"(.*) / (.*) Foreign Exchange Rate", columns[1]
).groups()
freq = columns[3].strip()
try:
extract_file(zfo, id_, from_currency, to_currency, freq)
except Exception, inst:
print id_, country
raise
r = requests.get(FRED_RATES, stream=True)
zfo = zipfile.ZipFile(StringIO.StringIO(r.content))
read_files(zfo)
def get_oecd_rates(outfp, writer):
    """Append OECD monthly FX rows to *writer*, but only for dates earlier
    than the earliest FRED observation already written to *outfp* — the
    OECD data backfills history, it never overrides FRED. (Python 2 code.)
    """
    def make_date(value):
        # Dates in the consolidated CSV are ISO "YYYY-MM-DD".
        return datetime.datetime.strptime(value, "%Y-%m-%d")
    # Find earliest data for each currency from the St Louis Fed data
    def get_earliest_dates():
        outfp_file = open(outfp, "r")
        reader = csv.DictReader(outfp_file)
        indata = list(map(lambda row: row, reader))
        outfp_file.close()
        # Start with {currency: None} for every currency seen in the file.
        currencies = dict(map(lambda currency:
            (currency, None),
            list(set(map(lambda row: row["Currency"], indata)))))
        for currency in currencies:
            currency_dates = list(map(lambda y:
                make_date(y["Date"]),
                filter(lambda x: x['Currency'] == currency, indata)
                ))
            currencies[currency] = min(currency_dates)
        return currencies
    def get_OECD_data(writer, currencies_dates):
        r = requests.get(OECD_RATES, stream=True)
        fp_doc = etree.fromstring(r.content)
        nsmap = {
            "ns":"http://www.SDMX.org/resources/SDMXML/schemas/v2_0/generic"
        }
        series = fp_doc.findall("ns:DataSet/ns:Series", namespaces=nsmap)
        for serie in series:
            # SDMX LOCATION is a country code; map it to a currency code.
            currency = serie.find("ns:SeriesKey/ns:Value[@concept='LOCATION']", namespaces=nsmap).get("value")
            # Currencies absent from the FRED file get "now" as cutoff,
            # i.e. their whole OECD history is written.
            min_currency_date = currencies_dates.get(
                oecd_countries_currencies.get(currency),
                datetime.datetime.utcnow())
            for obs in serie.findall("ns:Obs", namespaces=nsmap):
                # Monthly observations carry "YYYY-MM"; pin to the 1st.
                date = "{}-01".format(obs.find("ns:Time", namespaces=nsmap).text)
                value = obs.find("ns:ObsValue", namespaces=nsmap).get("value")
                if make_date(date) < min_currency_date:
                    writer.writerow([date, value, oecd_countries_currencies.get(currency), "M", "OECD"])
    currencies_dates = get_earliest_dates()
    get_OECD_data(writer, currencies_dates)
def update_rates(out_filename):
    """Rebuild the consolidated FX-rate CSV: FRED rows first, then OECD
    backfill rows appended behind them. (Python 2 code.)
    """
    outfp = out_filename
    outfp_f = open(outfp, 'w')
    writer = csv.writer(outfp_f)
    writer.writerow(['Date', 'Rate', 'Currency', 'Frequency', 'Source'])
    get_fred_rates(outfp, writer)
    outfp_f.close()
    # Reopened in append mode so get_oecd_rates can first re-read the FRED
    # rows from disk, then append behind them. NOTE(review): 'w' then 'ab'
    # mixes text and binary mode — harmless on py2/Unix, verify elsewhere.
    outfp_f = open(outfp, 'ab')
    writer = csv.writer(outfp_f)
    get_oecd_rates(outfp, writer)
    outfp_f.close()
if __name__ == "__main__":
update_rates('data/consolidated_rates.csv') | exchangerates/get_rates.py | import csv
import zipfile
import requests
import StringIO, datetime, re
from lxml import etree
from util import fred_countries_currencies, oecd_countries_currencies
FRED_RATES = "https://fred.stlouisfed.org/categories/94/downloaddata/INTLFXD_csv_2.zip"
OECD_RATES = "http://stats.oecd.org/restsdmx/sdmx.ashx/GetData/MEI_FIN/CCUS.AUS+AUT+BEL+CAN+CHL+CZE+DNK+EST+FIN+FRA+DEU+GRC+HUN+ISL+IRL+ISR+ITA+JPN+KOR+LVA+LUX+MEX+NLD+NZL+NOR+POL+PRT+SVK+SVN+ESP+SWE+CHE+TUR+GBR+USA+EA19+SDR+NMEC+BRA+CHN+COL+CRI+IND+IDN+RUS+ZAF.M/all?startTime=1950-01"
def get_fred_rates(outfp, writer):
def extract_file(zfo, id_, from_currency, to_currency, freq):
fp = 'INTLFXD_csv_2/data/%s' % id_
fo = zfo.open(fp)
reader = csv.reader(fo)
reader.next()
country = {True: to_currency,
False: from_currency}[from_currency == "U.S."]
for row in reader:
if row[1] == ".": continue
if from_currency == "U.S.": row[1] = round(1/float(row[1]),4)
outrow = row + [fred_countries_currencies[country], freq, "FRED"]
writer.writerow(outrow)
fo.close()
def read_files(zfo):
fo = zfo.open('INTLFXD_csv_2/README_SERIES_ID_SORT.txt')
for line in fo.readlines():
if not line.startswith("DEX"): continue
columns = line.split(';')
id_ = columns[0].strip()
from_currency, to_currency = re.match(
"(.*) / (.*) Foreign Exchange Rate", columns[1]
).groups()
freq = columns[3].strip()
try:
extract_file(zfo, id_, from_currency, to_currency, freq)
except Exception, inst:
print id_, country
raise
r = requests.get(FRED_RATES, stream=True)
zfo = zipfile.ZipFile(StringIO.StringIO(r.content))
read_files(zfo)
def get_oecd_rates(outfp, writer):
def make_date(value):
return datetime.datetime.strptime(value, "%Y-%m-%d")
# Find earliest data for each currency from the St Louis Fed data
def get_earliest_dates():
outfp_file = open(outfp, "r")
reader = csv.DictReader(outfp_file)
indata = list(map(lambda row: row, reader))
outfp_file.close()
currencies = dict(map(lambda currency:
(currency, None),
list(set(map(lambda row: row["Currency"], indata)))))
for currency in currencies:
currency_dates = list(map(lambda y:
make_date(y["Date"]),
filter(lambda x: x['Currency'] == currency, indata)
))
currencies[currency] = min(currency_dates)
return currencies
def get_OECD_data(writer, currencies_dates):
r = requests.get(OECD_RATES, stream=True)
fp_doc = etree.fromstring(r.content)
nsmap = {
"ns":"http://www.SDMX.org/resources/SDMXML/schemas/v2_0/generic"
}
series = fp_doc.findall("ns:DataSet/ns:Series", namespaces=nsmap)
for serie in series:
currency = serie.find("ns:SeriesKey/ns:Value[@concept='LOCATION']", namespaces=nsmap).get("value")
min_currency_date = currencies_dates.get(
oecd_countries_currencies.get(currency),
datetime.datetime.utcnow())
for obs in serie.findall("ns:Obs", namespaces=nsmap):
date = "{}-01".format(obs.find("ns:Time", namespaces=nsmap).text)
value = obs.find("ns:ObsValue", namespaces=nsmap).get("value")
if make_date(date) < min_currency_date:
writer.writerow([date, value, oecd_countries_currencies.get(currency), "M", "OECD"])
currencies_dates = get_earliest_dates()
get_OECD_data(writer, currencies_dates)
def update_rates(out_filename):
outfp = out_filename
outfp_f = open(outfp, 'w')
writer = csv.writer(outfp_f)
writer.writerow(['Date', 'Rate', 'Currency', 'Frequency', 'Source'])
get_fred_rates(outfp, writer)
outfp_f.close()
outfp_f = open(outfp, 'ab')
writer = csv.writer(outfp_f)
get_oecd_rates(outfp, writer)
outfp_f.close()
if __name__ == "__main__":
update_rates('data/consolidated_rates.csv') | 0.370909 | 0.197948 |
# "申报时间", "证券代码", "证券名称", "操作", "委托状态", "委托数量", "成交数量", "撤消数量", , "委托价格", "成交均价", "合同编号", "委托子业务", "约定号", "对方账户", "参考汇率", "结算币种", "交易币种", "证券中文名", "出错信息
from QUANTAXIS.QAUtil.QAParameter import ORDER_DIRECTION, TRADE_STATUS, ORDER_STATUS
# Maps Chinese broker/terminal field names to internal English field names.
# Fix: the original literal defined three keys twice ('成本价', '未成交', '');
# Python keeps only the LAST value for a duplicate key, so the entries kept
# below are the ones that were actually in effect — runtime mapping unchanged.
cn_en_compare = {'明细': 'id',
                 '证券代码': 'code',
                 '市场代码': 'market_code',
                 '证券名称': 'name',
                 '股票余额': 'amount',
                 '证券数量': 'amount',  # TDX terminal
                 '持仓量': 'hold',
                 '报价方式': 'order_model',
                 '可卖数量': 'sell_available',
                 '可用余额': 'sell_available',
                 '冻结数量': 'frozen',
                 '最新市值': 'lastest_mv',  # TDX terminal
                 # duplicate '成本价': 'cost' removed — 'hold_price' (below) won
                 '投保标志': 'tb_signal',
                 '买卖标志': 'towards',
                 '撤消数量': 'cancel_amount',
                 '撤单数量': 'cancel_amount',
                 '今买数量': 'today_buy_amount',
                 '今卖数量': 'today_sell_amount',
                 '买持仓': 'buy_holding',
                 '卖持仓': 'sell_holding',
                 '昨日结算价': 'yesterday_price',
                 '保证金': 'margin',
                 '订单类型': 'order_type',
                 '操作': 'towards',  # buy/sell flag in simulated trading
                 '委托价格': 'order_price',
                 '委托数量': 'order_amount',
                 '成交价格': 'trade_price',
                 '成交日期': 'trade_date',
                 '成交状态': 'status',
                 '成交数量': 'trade_amount',
                 '状态说明': 'status',
                 '备注': 'status',  # status field in simulated trading
                 '废弃': 'dropped',
                 '当前价': 'price',
                 '场外撤单': 'cancel_outside',
                 '场内撤单': 'cancel_inside',
                 '未成交': 'pending',
                 '全部撤单': 'cancel_all',
                 '委托时间': 'order_time',
                 '合同编号': 'realorder_id',  # order id in simulated trading
                 '撤销数量': 'cancel_amount',
                 '委托编号': 'realorder_id',
                 '批次号': 'pc_id',
                 '盈亏': 'pnl',
                 # duplicate '': 'None' removed — 'other' (below) won
                 '成本金额': 'cost',
                 '盈亏估算': 'pnl_prob',
                 '成本价': 'hold_price',
                 '实现盈亏': 'pnl_money_already',
                 '浮动盈亏': 'pnl_money',
                 '盈亏比例(%)': 'pnl_ratio',
                 '市价': 'price',
                 '市值': 'market_value',
                 '交易市场': 'SSE',
                 '交易所名称': 'SSE',
                 '股东帐户': 'shareholders',
                 '股东代码': 'shareholders',
                 '资金帐号': 'assetsholders',  # tdx
                 '实际数量': 'total_amount',
                 '可申赎数量': 'redemption_number',
                 '资讯': 'message',
                 '汇率': 'exchange_rate',
                 '沪港深港市场': 'hkmarket',
                 '成本价港币': 'hold_price_hk',
                 '买入成本价港币': 'buy_price_hk',
                 '买入在途数量': 'buy_onway',
                 '卖出在途数量': 'sell_onway',
                 '场内废单': 'failled',
                 '已成交': 'finished',
                 '成交均价': 'trade_price',  # executed price
                 '成交金额': 'trade_money',
                 '成交编号': 'trade_id',
                 '委托状态': 'status',
                 '申报时间': 'order_time',
                 '委托日期': 'order_date',
                 '委托子业务': 'order_subjob',
                 '约定号': 'yd_id',
                 '对方账户': 'other_account',
                 '参考汇率': 'refer_exchange',
                 '币种': 'trade_currency',
                 '结算币种': 'settlement_currency',
                 '交易币种': 'trade_currency',
                 '证券中文名': 'CNname',
                 '出错信息': 'error',
                 '': 'other',
                 '成交时间': 'trade_time'}
# Maps Chinese trade-direction labels (incl. single-character short forms)
# to ORDER_DIRECTION constants; the empty string falls back to OTHER.
trade_towards_cn_en = {
    '买入': ORDER_DIRECTION.BUY,
    '买': ORDER_DIRECTION.BUY,
    '卖出': ORDER_DIRECTION.SELL,
    '卖': ORDER_DIRECTION.SELL,
    '申购': ORDER_DIRECTION.ASK,  # subscription (e.g. new-share purchase)
    '申': ORDER_DIRECTION.ASK,
    '证券买入': ORDER_DIRECTION.BUY,
    '证券卖出': ORDER_DIRECTION.SELL,
    '派息': ORDER_DIRECTION.XDXR,  # dividend / XD-XR event
    '': ORDER_DIRECTION.OTHER
}
# Maps Chinese broker order-status labels to ORDER_STATUS constants;
# an empty status string is treated as FAILED.
order_status_cn_en = {
    '已报': ORDER_STATUS.QUEUED,  # the order has been accepted by the exchange side
    '全部申报': ORDER_STATUS.QUEUED,
    '未成交': ORDER_STATUS.QUEUED,
    '已确认': ORDER_STATUS.QUEUED,  # new-share subscription accepted by the exchange side
    '场内废单': ORDER_STATUS.FAILED,
    '废单': ORDER_STATUS.FAILED,  # order violated trading rules and was rejected
    '未报': ORDER_STATUS.FAILED,  # order was never accepted by the exchange side
    '场外废单': ORDER_STATUS.FAILED,
    '已成交': ORDER_STATUS.SUCCESS_ALL,
    '已成': ORDER_STATUS.SUCCESS_ALL,
    '全部成交': ORDER_STATUS.SUCCESS_ALL,
    '部成': ORDER_STATUS.SUCCESS_PART,  # order partially filled
    '已撤单': ORDER_STATUS.CANCEL_ALL,
    '全部撤单': ORDER_STATUS.CANCEL_ALL,
    '已撤': ORDER_STATUS.CANCEL_ALL,
    '已报待撤': ORDER_STATUS.QUEUED,  # cancel requested and accepted, still pending (e.g. outside trading hours)
    '场内撤单': ORDER_STATUS.CANCEL_ALL,
    '': ORDER_STATUS.FAILED
}
if __name__ == '__main__':
a=order_status_cn_en
print(a.keys())
b=cn_en_compare
print(b.keys()) | QUANTAXIS/QAMarket/common.py |
# "申报时间", "证券代码", "证券名称", "操作", "委托状态", "委托数量", "成交数量", "撤消数量", , "委托价格", "成交均价", "合同编号", "委托子业务", "约定号", "对方账户", "参考汇率", "结算币种", "交易币种", "证券中文名", "出错信息
from QUANTAXIS.QAUtil.QAParameter import ORDER_DIRECTION, TRADE_STATUS, ORDER_STATUS
cn_en_compare = {'明细': 'id',
'证券代码': 'code',
'市场代码': 'market_code',
'证券名称': 'name',
'股票余额': 'amount',
'证券数量': 'amount', # 通达信
'持仓量': 'hold',
'报价方式': 'order_model',
'可卖数量': 'sell_available',
'可用余额': 'sell_available',
'冻结数量': 'frozen',
'最新市值': 'lastest_mv', # 通达信
'成本价': 'cost',
'投保标志': 'tb_signal',
'买卖标志': 'towards',
'撤消数量': 'cancel_amount',
'撤单数量': 'cancel_amount',
'今买数量': 'today_buy_amount',
'今卖数量': 'today_sell_amount',
'买持仓': 'buy_holding',
'卖持仓': 'sell_holding',
'昨日结算价': 'yesterday_price',
'保证金': 'margin',
'订单类型': 'order_type',
'操作': 'towards', # 这个是模拟交易的买卖标志
'委托价格': 'order_price',
'委托数量': 'order_amount',
'成交价格': 'trade_price',
'成交日期': 'trade_date',
'成交状态':'status',
'成交数量': 'trade_amount',
'状态说明': 'status',
'备注': 'status', # 这个是模拟交易的status
'废弃': 'dropped',
'当前价': 'price',
'场外撤单': 'cancel_outside',
'场内撤单': 'cancel_inside',
'未成交': 'pending',
'全部撤单': 'cancel_all',
'委托时间': 'order_time',
'合同编号': 'realorder_id', # 模拟交易的委托编号
'撤销数量': 'cancel_amount',
'委托编号': 'realorder_id',
'批次号': 'pc_id',
'盈亏': 'pnl',
"": 'None',
'成本金额': 'cost',
'盈亏估算': 'pnl_prob',
'成本价': 'hold_price',
'实现盈亏': 'pnl_money_already',
'浮动盈亏': 'pnl_money',
'盈亏比例(%)': 'pnl_ratio',
'市价': 'price',
'市值': 'market_value',
'交易市场': 'SSE',
'交易所名称': 'SSE',
'股东帐户': 'shareholders',
'股东代码': 'shareholders',
'资金帐号': 'assetsholders', # tdx
'实际数量': 'total_amount',
'可申赎数量': 'redemption_number',
'资讯': 'message',
'汇率': 'exchange_rate',
'沪港深港市场': 'hkmarket',
'成本价港币': 'hold_price_hk',
'买入成本价港币': 'buy_price_hk',
'买入在途数量': 'buy_onway',
'卖出在途数量': 'sell_onway',
'场内废单': 'failled',
'未成交': 'pending',
'已成交': 'finished',
'成交均价': 'trade_price', # 成交价
'成交金额': 'trade_money',
'成交编号': 'trade_id',
'委托状态': 'status',
'申报时间': 'order_time',
'委托日期': 'order_date',
'委托子业务': 'order_subjob',
'约定号': 'yd_id',
'对方账户': 'other_account',
'参考汇率': 'refer_exchange',
'币种': 'trade_currency',
'结算币种': 'settlement_currency',
'交易币种': 'trade_currency',
'证券中文名': 'CNname',
'出错信息': 'error',
'':'other',
'成交时间': 'trade_time'}
trade_towards_cn_en = {
'买入': ORDER_DIRECTION.BUY,
'买': ORDER_DIRECTION.BUY,
'卖出': ORDER_DIRECTION.SELL,
'卖': ORDER_DIRECTION.SELL,
'申购': ORDER_DIRECTION.ASK,
'申': ORDER_DIRECTION.ASK,
'证券买入': ORDER_DIRECTION.BUY,
'证券卖出': ORDER_DIRECTION.SELL,
'派息': ORDER_DIRECTION.XDXR,
'': ORDER_DIRECTION.OTHER
}
order_status_cn_en = {
'已报': ORDER_STATUS.QUEUED, # 委托已经被交易端接受了
'全部申报': ORDER_STATUS.QUEUED,
'未成交': ORDER_STATUS.QUEUED,
'已确认': ORDER_STATUS.QUEUED, # 新股申购已经被交易端接受
'场内废单': ORDER_STATUS.FAILED,
'废单': ORDER_STATUS.FAILED, # 委托不符合交易规则,被交易端拒绝了
'未报': ORDER_STATUS.FAILED, # 委托还没有被交易端接受
'场外废单': ORDER_STATUS.FAILED,
'已成交': ORDER_STATUS.SUCCESS_ALL,
'已成': ORDER_STATUS.SUCCESS_ALL,
'全部成交': ORDER_STATUS.SUCCESS_ALL,
'部成': ORDER_STATUS.SUCCESS_PART, # 委托已经成交了一部份
'已撤单': ORDER_STATUS.CANCEL_ALL,
'全部撤单': ORDER_STATUS.CANCEL_ALL,
'已撤': ORDER_STATUS.CANCEL_ALL,
'已报待撤': ORDER_STATUS.QUEUED, # 已经申报了撤单,交易端也已接受,但目前可能因为还没在交易时间段,所以还在等待撤消
'场内撤单': ORDER_STATUS.CANCEL_ALL,
'': ORDER_STATUS.FAILED
}
if __name__ == '__main__':
a=order_status_cn_en
print(a.keys())
b=cn_en_compare
print(b.keys()) | 0.226612 | 0.245847 |
import argparse
import sys, os, subprocess
from env_backup.src.kernel import kernel_exists, get_kernel_location, install_new_ipykernel, \
set_userbase, get_userbase, install_launch_script
from env_backup.src.util import fmt_now, backup_local, valid_label, remove_local, \
_kernel_name_description, prompt_proceed, prompt_proceed_exit, remove_dir
def new_kernel(args):
if not valid_label(args.name):
print(_kernel_name_description, file=sys.stderr)
sys.exit(1)
kernel_name = f'python3_{args.name}'
display_name = f'Python 3 ({args.name})'
local_backup_dir = os.path.expanduser(f'~/.local_backups/{kernel_name}')
if kernel_exists(kernel_name):
print(f'Kernel {kernel_name} already exists.')
if not args.yes: prompt_proceed_exit(f'-> Replace the content in {kernel_name}? (y/n) ')
if os.path.exists(local_backup_dir):
print(f'Backup in {local_backup_dir} already exists.')
if not args.yes: prompt_proceed_exit(f'-> Replace the content in {local_backup_dir}? (y/n) ')
print('\n'.join([
'New kernel information:',
f' Kernel name: "{kernel_name}"',
f' Display name: "{display_name}"',
f' Install location: "~/.local/shared/jupyter/kernels/{kernel_name}"',
f' .local backup location: "{local_backup_dir}"'
]))
if not args.yes: prompt_proceed_exit("-> Ready to proceed? (y/n) ")
print()
install_new_ipykernel(kernel_name, display_name)
backup_local(dest=local_backup_dir)
set_userbase(kernel_name, local_backup_dir)
install_launch_script(kernel_name)
if args.yes or prompt_proceed("-> Remove python dependencies in ~/.local/lib? (y/n) "):
remove_local()
print("Python dependencies in ~/.local/lib are removed")
print('Done')
def list_kernel(args):
cmd = subprocess.run([
'jupyter', 'kernelspec', 'list'
], capture_output=True)
print(cmd.stdout.decode(), end='')
def remove_kernel(args):
kernel_name = args.kernel_name
if not kernel_exists(kernel_name):
print(f'"{kernel_name}" does not exist', sys.stderr)
sys.exit(1)
# remove .local backup
userbase_path = get_userbase(kernel_name)
remove_dir(userbase_path)
# remove kernelspec
cmd = subprocess.run([
'jupyter', 'kernelspec', 'remove', '-f', kernel_name
], capture_output=True)
assert cmd.returncode == 0, cmd.stderr.decode()
print('Done')
def get_parser():
parser = argparse.ArgumentParser(
prog='env-backup',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog='\n'.join([
'----------',
'env-activate, env-deactivate for switching between environments:',
' usage: source env-activate <KERNEL_NAME>',
' source env-deactivate',
' Manually verify by checking if `echo $PYTHONUSEBASE` is present'
])
)
subparsers = parser.add_subparsers(help='Commands to choose from')
# Parser for command `new`
parser_a = subparsers.add_parser('new', help='Create a new pip environment backup')
parser_a.add_argument('--name', '-n', default=fmt_now(), help='Set name for new environment')
parser_a.add_argument('--yes', '-y', action='store_true', help='automatically set every option to true')
parser_a.set_defaults(func=new_kernel)
# Parser for command `list`
parser_b = subparsers.add_parser('list', help='List current environments (jupyter kernels)')
parser_b.set_defaults(func=list_kernel)
# Parser for command `remove`
parser_c = subparsers.add_parser('remove', help='Remove envionment by name')
parser_c.add_argument('kernel_name')
parser_c.set_defaults(func=remove_kernel)
return parser
def main():
parser = get_parser()
if len(sys.argv)==1:
parser.print_help()
sys.exit(0)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main() | env_backup/command_line.py | import argparse
import sys, os, subprocess
from env_backup.src.kernel import kernel_exists, get_kernel_location, install_new_ipykernel, \
set_userbase, get_userbase, install_launch_script
from env_backup.src.util import fmt_now, backup_local, valid_label, remove_local, \
_kernel_name_description, prompt_proceed, prompt_proceed_exit, remove_dir
def new_kernel(args):
if not valid_label(args.name):
print(_kernel_name_description, file=sys.stderr)
sys.exit(1)
kernel_name = f'python3_{args.name}'
display_name = f'Python 3 ({args.name})'
local_backup_dir = os.path.expanduser(f'~/.local_backups/{kernel_name}')
if kernel_exists(kernel_name):
print(f'Kernel {kernel_name} already exists.')
if not args.yes: prompt_proceed_exit(f'-> Replace the content in {kernel_name}? (y/n) ')
if os.path.exists(local_backup_dir):
print(f'Backup in {local_backup_dir} already exists.')
if not args.yes: prompt_proceed_exit(f'-> Replace the content in {local_backup_dir}? (y/n) ')
print('\n'.join([
'New kernel information:',
f' Kernel name: "{kernel_name}"',
f' Display name: "{display_name}"',
f' Install location: "~/.local/shared/jupyter/kernels/{kernel_name}"',
f' .local backup location: "{local_backup_dir}"'
]))
if not args.yes: prompt_proceed_exit("-> Ready to proceed? (y/n) ")
print()
install_new_ipykernel(kernel_name, display_name)
backup_local(dest=local_backup_dir)
set_userbase(kernel_name, local_backup_dir)
install_launch_script(kernel_name)
if args.yes or prompt_proceed("-> Remove python dependencies in ~/.local/lib? (y/n) "):
remove_local()
print("Python dependencies in ~/.local/lib are removed")
print('Done')
def list_kernel(args):
cmd = subprocess.run([
'jupyter', 'kernelspec', 'list'
], capture_output=True)
print(cmd.stdout.decode(), end='')
def remove_kernel(args):
kernel_name = args.kernel_name
if not kernel_exists(kernel_name):
print(f'"{kernel_name}" does not exist', sys.stderr)
sys.exit(1)
# remove .local backup
userbase_path = get_userbase(kernel_name)
remove_dir(userbase_path)
# remove kernelspec
cmd = subprocess.run([
'jupyter', 'kernelspec', 'remove', '-f', kernel_name
], capture_output=True)
assert cmd.returncode == 0, cmd.stderr.decode()
print('Done')
def get_parser():
parser = argparse.ArgumentParser(
prog='env-backup',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog='\n'.join([
'----------',
'env-activate, env-deactivate for switching between environments:',
' usage: source env-activate <KERNEL_NAME>',
' source env-deactivate',
' Manually verify by checking if `echo $PYTHONUSEBASE` is present'
])
)
subparsers = parser.add_subparsers(help='Commands to choose from')
# Parser for command `new`
parser_a = subparsers.add_parser('new', help='Create a new pip environment backup')
parser_a.add_argument('--name', '-n', default=fmt_now(), help='Set name for new environment')
parser_a.add_argument('--yes', '-y', action='store_true', help='automatically set every option to true')
parser_a.set_defaults(func=new_kernel)
# Parser for command `list`
parser_b = subparsers.add_parser('list', help='List current environments (jupyter kernels)')
parser_b.set_defaults(func=list_kernel)
# Parser for command `remove`
parser_c = subparsers.add_parser('remove', help='Remove envionment by name')
parser_c.add_argument('kernel_name')
parser_c.set_defaults(func=remove_kernel)
return parser
def main():
parser = get_parser()
if len(sys.argv)==1:
parser.print_help()
sys.exit(0)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main() | 0.23855 | 0.085633 |
import ast
import tempfile
from datetime import datetime
from http import HTTPStatus
from os import remove
from fastapi import APIRouter, Request, Query
from fastapi.concurrency import run_in_threadpool
from kfp import Client as kfclient
import mlrun.api.crud
import mlrun.api.schemas
from mlrun.api.api.utils import log_and_raise
from mlrun.config import config
from mlrun.k8s_utils import get_k8s_helper
from mlrun.utils import logger
router = APIRouter()
@router.get(
"/projects/{project}/pipelines", response_model=mlrun.api.schemas.PipelinesOutput
)
def list_pipelines(
project: str,
namespace: str = None,
sort_by: str = "",
page_token: str = "",
filter_: str = Query("", alias="filter"),
format_: mlrun.api.schemas.Format = Query(
mlrun.api.schemas.Format.metadata_only, alias="format"
),
page_size: int = Query(None, gt=0, le=200),
):
total_size, next_page_token, runs = None, None, None
if get_k8s_helper(silent=True).is_running_inside_kubernetes_cluster():
total_size, next_page_token, runs = mlrun.api.crud.list_pipelines(
project, namespace, sort_by, page_token, filter_, format_, page_size,
)
return mlrun.api.schemas.PipelinesOutput(
runs=runs or [],
total_size=total_size or 0,
next_page_token=next_page_token or None,
)
# curl -d@/path/to/pipe.yaml http://localhost:8080/submit_pipeline
@router.post("/submit_pipeline")
@router.post("/submit_pipeline/")
async def submit_pipeline(
request: Request,
namespace: str = config.namespace,
experiment_name: str = Query("Default", alias="experiment"),
run_name: str = Query("", alias="run"),
):
run_name = run_name or experiment_name + " " + datetime.now().strftime(
"%Y-%m-%d %H-%M-%S"
)
data = await request.body()
if not data:
log_and_raise(HTTPStatus.BAD_REQUEST.value, reason="post data is empty")
run = await run_in_threadpool(
_submit_pipeline, request, data, namespace, experiment_name, run_name
)
return {
"id": run.id,
"name": run.name,
}
# curl http://localhost:8080/pipelines/:id
@router.get("/pipelines/{run_id}")
@router.get("/pipelines/{run_id}/")
def get_pipeline(run_id, namespace: str = Query(config.namespace)):
client = kfclient(namespace=namespace)
try:
run = client.get_run(run_id)
if run:
run = run.to_dict()
except Exception as e:
log_and_raise(
HTTPStatus.INTERNAL_SERVER_ERROR.value, reason=f"get kfp error: {e}"
)
return run
def _submit_pipeline(request, data, namespace, experiment_name, run_name):
arguments = {}
arguments_data = request.headers.get("pipeline-arguments")
if arguments_data:
arguments = ast.literal_eval(arguments_data)
logger.info(f"pipeline arguments {arguments_data}")
ctype = request.headers.get("content-type", "")
if "/yaml" in ctype:
ctype = ".yaml"
elif " /zip" in ctype:
ctype = ".zip"
else:
log_and_raise(
HTTPStatus.BAD_REQUEST.value, reason=f"unsupported pipeline type {ctype}",
)
logger.info(f"writing file {ctype}")
print(str(data))
pipe_tmp = tempfile.mktemp(suffix=ctype)
with open(pipe_tmp, "wb") as fp:
fp.write(data)
run = None
try:
client = kfclient(namespace=namespace)
experiment = client.create_experiment(name=experiment_name)
run = client.run_pipeline(experiment.id, run_name, pipe_tmp, params=arguments)
except Exception as e:
remove(pipe_tmp)
log_and_raise(HTTPStatus.BAD_REQUEST.value, reason=f"kfp err: {e}")
remove(pipe_tmp)
return run | mlrun/api/api/endpoints/pipelines.py | import ast
import tempfile
from datetime import datetime
from http import HTTPStatus
from os import remove
from fastapi import APIRouter, Request, Query
from fastapi.concurrency import run_in_threadpool
from kfp import Client as kfclient
import mlrun.api.crud
import mlrun.api.schemas
from mlrun.api.api.utils import log_and_raise
from mlrun.config import config
from mlrun.k8s_utils import get_k8s_helper
from mlrun.utils import logger
router = APIRouter()
@router.get(
"/projects/{project}/pipelines", response_model=mlrun.api.schemas.PipelinesOutput
)
def list_pipelines(
project: str,
namespace: str = None,
sort_by: str = "",
page_token: str = "",
filter_: str = Query("", alias="filter"),
format_: mlrun.api.schemas.Format = Query(
mlrun.api.schemas.Format.metadata_only, alias="format"
),
page_size: int = Query(None, gt=0, le=200),
):
total_size, next_page_token, runs = None, None, None
if get_k8s_helper(silent=True).is_running_inside_kubernetes_cluster():
total_size, next_page_token, runs = mlrun.api.crud.list_pipelines(
project, namespace, sort_by, page_token, filter_, format_, page_size,
)
return mlrun.api.schemas.PipelinesOutput(
runs=runs or [],
total_size=total_size or 0,
next_page_token=next_page_token or None,
)
# curl -d@/path/to/pipe.yaml http://localhost:8080/submit_pipeline
@router.post("/submit_pipeline")
@router.post("/submit_pipeline/")
async def submit_pipeline(
request: Request,
namespace: str = config.namespace,
experiment_name: str = Query("Default", alias="experiment"),
run_name: str = Query("", alias="run"),
):
run_name = run_name or experiment_name + " " + datetime.now().strftime(
"%Y-%m-%d %H-%M-%S"
)
data = await request.body()
if not data:
log_and_raise(HTTPStatus.BAD_REQUEST.value, reason="post data is empty")
run = await run_in_threadpool(
_submit_pipeline, request, data, namespace, experiment_name, run_name
)
return {
"id": run.id,
"name": run.name,
}
# curl http://localhost:8080/pipelines/:id
@router.get("/pipelines/{run_id}")
@router.get("/pipelines/{run_id}/")
def get_pipeline(run_id, namespace: str = Query(config.namespace)):
client = kfclient(namespace=namespace)
try:
run = client.get_run(run_id)
if run:
run = run.to_dict()
except Exception as e:
log_and_raise(
HTTPStatus.INTERNAL_SERVER_ERROR.value, reason=f"get kfp error: {e}"
)
return run
def _submit_pipeline(request, data, namespace, experiment_name, run_name):
arguments = {}
arguments_data = request.headers.get("pipeline-arguments")
if arguments_data:
arguments = ast.literal_eval(arguments_data)
logger.info(f"pipeline arguments {arguments_data}")
ctype = request.headers.get("content-type", "")
if "/yaml" in ctype:
ctype = ".yaml"
elif " /zip" in ctype:
ctype = ".zip"
else:
log_and_raise(
HTTPStatus.BAD_REQUEST.value, reason=f"unsupported pipeline type {ctype}",
)
logger.info(f"writing file {ctype}")
print(str(data))
pipe_tmp = tempfile.mktemp(suffix=ctype)
with open(pipe_tmp, "wb") as fp:
fp.write(data)
run = None
try:
client = kfclient(namespace=namespace)
experiment = client.create_experiment(name=experiment_name)
run = client.run_pipeline(experiment.id, run_name, pipe_tmp, params=arguments)
except Exception as e:
remove(pipe_tmp)
log_and_raise(HTTPStatus.BAD_REQUEST.value, reason=f"kfp err: {e}")
remove(pipe_tmp)
return run | 0.327238 | 0.073763 |
from pysmt.shortcuts import Symbol, And, Equals, BVAdd, BVMul, Bool, Ite, BV, BVURem, BVExtract, ForAll, Exists, Portfolio, Solver
from pysmt.typing import BVType
from pysmt.logics import BV as logicBV
from frail import IteExtend, BVAddExtend, BVSubExtend, BVMulExtend, BVEqualsExtend
import time
op_design_free_vars = {}
op_design_scans = []
op_design_scans_results = []
def scan_const10_f(scan_var_10):
if 16 not in op_design_free_vars:
op_design_free_vars[16] = Symbol("x_max_op_design", BVType(16))
x16 = op_design_free_vars[16]
x40 = BV(1,16)
x41 = BVSubExtend(x16, x40)
x42 = BVEqualsExtend(scan_var_10, x41)
x43 = BV(0,16)
x44 = BV(1,16)
x45 = BVAddExtend(scan_var_10, x44)
x46 = IteExtend(x42, x43, x45)
return x46
op_design_scans.append(scan_const10_f)
op_design_scans_results.append("scan_const10")
scan_const10 = BV(0, 16)
def scan_const14_f(scan_var_14):
x12 = scan_const10
if 16 not in op_design_free_vars:
op_design_free_vars[16] = Symbol("x_max_op_design", BVType(16))
x16 = op_design_free_vars[16]
x34 = BV(1,16)
x35 = BVSubExtend(x16, x34)
x36 = BVEqualsExtend(x12, x35)
if 19 not in op_design_free_vars:
op_design_free_vars[19] = Symbol("y_stride_op_design", BVType(16))
x19 = op_design_free_vars[19]
if 18 not in op_design_free_vars:
op_design_free_vars[18] = Symbol("x_stride_op_design", BVType(16))
x18 = op_design_free_vars[18]
x37 = IteExtend(x36, x19, x18)
x38 = BVAddExtend(scan_var_14, x37)
return x38
op_design_scans.append(scan_const14_f)
op_design_scans_results.append("scan_const14")
scan_const14 = BV(0, 16)
def scan_const15_f(scan_var_15):
x31 = scan_const14
if 4 not in op_design_free_vars:
op_design_free_vars[4] = Symbol("offset_op_design", BVType(16))
x4 = op_design_free_vars[4]
x32 = BVAddExtend(x31, x4)
return x32
op_design_scans.append(scan_const15_f)
op_design_scans_results.append("scan_const15")
scan_const15 = BV(0, 16)
from pysmt.shortcuts import Symbol, And, Equals, BVAdd, BVMul, Bool, Ite, BV, BVURem, BVExtract, ForAll, Exists, Portfolio, Solver
from pysmt.typing import BVType
from pysmt.logics import BV as logicBV
from frail import IteExtend, BVAddExtend, BVSubExtend, BVMulExtend, BVEqualsExtend
import time
og_design_free_vars = {}
og_design_scans = []
og_design_scans_results = []
def scan_const5_f(scan_var_5):
if 16 not in og_design_free_vars:
og_design_free_vars[16] = Symbol("x_max_og_design", BVType(16))
x16 = og_design_free_vars[16]
x76 = BV(1,16)
x77 = BVSubExtend(x16, x76)
x78 = BVEqualsExtend(scan_var_5, x77)
x79 = BV(0,16)
x80 = BV(1,16)
x81 = BVAddExtend(scan_var_5, x80)
x82 = IteExtend(x78, x79, x81)
return x82
og_design_scans.append(scan_const5_f)
og_design_scans_results.append("scan_const5")
scan_const5 = BV(0, 16)
def scan_const7_f(scan_var_7):
x53 = scan_const5
if 16 not in og_design_free_vars:
og_design_free_vars[16] = Symbol("x_max_og_design", BVType(16))
x16 = og_design_free_vars[16]
x54 = BV(1,16)
x55 = BVSubExtend(x16, x54)
x56 = BVEqualsExtend(x53, x55)
x57 = BV(0,16)
if 18 not in og_design_free_vars:
og_design_free_vars[18] = Symbol("x_stride_og_design", BVType(16))
x18 = og_design_free_vars[18]
x58 = BVAddExtend(scan_var_7, x18)
x59 = IteExtend(x56, x57, x58)
return x59
og_design_scans.append(scan_const7_f)
og_design_scans_results.append("scan_const7")
scan_const7 = BV(0, 16)
def scan_const8_f(scan_var_8):
x69 = scan_const5
if 16 not in og_design_free_vars:
og_design_free_vars[16] = Symbol("x_max_og_design", BVType(16))
x16 = og_design_free_vars[16]
x70 = BV(1,16)
x71 = BVSubExtend(x16, x70)
x72 = BVEqualsExtend(x69, x71)
if 19 not in og_design_free_vars:
og_design_free_vars[19] = Symbol("y_stride_og_design", BVType(16))
x19 = og_design_free_vars[19]
x73 = BVAddExtend(scan_var_8, x19)
x74 = IteExtend(x72, x73, scan_var_8)
return x74
og_design_scans.append(scan_const8_f)
og_design_scans_results.append("scan_const8")
scan_const8 = BV(0, 16)
def scan_const9_f(scan_var_9):
x48 = scan_const7
x49 = scan_const8
x50 = BVAddExtend(x48, x49)
if 4 not in og_design_free_vars:
og_design_free_vars[4] = Symbol("offset_og_design", BVType(16))
x4 = og_design_free_vars[4]
x51 = BVAddExtend(x50, x4)
return x51
og_design_scans.append(scan_const9_f)
og_design_scans_results.append("scan_const9")
scan_const9 = BV(0, 16)
with Solver("cvc4",
logic=logicBV,
incremental=True) as s:
per_step_constraints = []
for step in range(2):
print("handling step " + str(step))
for i in range(len(op_design_scans)):
globals()[op_design_scans_results[i]] = op_design_scans[i](globals()[op_design_scans_results[i]])
for i in range(len(og_design_scans)):
globals()[og_design_scans_results[i]] = og_design_scans[i](globals()[og_design_scans_results[i]])
per_step_constraints.append(Equals(globals()[op_design_scans_results[len(op_design_scans_results)-1]], globals()[og_design_scans_results[len(og_design_scans_results)-1]]))
final_constraint = per_step_constraints[0]
for c in per_step_constraints[1:]:
final_constraint = And(final_constraint, c)
s.add_assertion(ForAll(op_design_free_vars.values(), Exists(og_design_free_vars.values(), final_constraint)))
start = time.time()
res = s.solve()
assert res
end = time.time()
print("time: " + str(end - start)) | run/gen_smt/example_smt.py | from pysmt.shortcuts import Symbol, And, Equals, BVAdd, BVMul, Bool, Ite, BV, BVURem, BVExtract, ForAll, Exists, Portfolio, Solver
from pysmt.typing import BVType
from pysmt.logics import BV as logicBV
from frail import IteExtend, BVAddExtend, BVSubExtend, BVMulExtend, BVEqualsExtend
import time
op_design_free_vars = {}
op_design_scans = []
op_design_scans_results = []
def scan_const10_f(scan_var_10):
if 16 not in op_design_free_vars:
op_design_free_vars[16] = Symbol("x_max_op_design", BVType(16))
x16 = op_design_free_vars[16]
x40 = BV(1,16)
x41 = BVSubExtend(x16, x40)
x42 = BVEqualsExtend(scan_var_10, x41)
x43 = BV(0,16)
x44 = BV(1,16)
x45 = BVAddExtend(scan_var_10, x44)
x46 = IteExtend(x42, x43, x45)
return x46
op_design_scans.append(scan_const10_f)
op_design_scans_results.append("scan_const10")
scan_const10 = BV(0, 16)
def scan_const14_f(scan_var_14):
x12 = scan_const10
if 16 not in op_design_free_vars:
op_design_free_vars[16] = Symbol("x_max_op_design", BVType(16))
x16 = op_design_free_vars[16]
x34 = BV(1,16)
x35 = BVSubExtend(x16, x34)
x36 = BVEqualsExtend(x12, x35)
if 19 not in op_design_free_vars:
op_design_free_vars[19] = Symbol("y_stride_op_design", BVType(16))
x19 = op_design_free_vars[19]
if 18 not in op_design_free_vars:
op_design_free_vars[18] = Symbol("x_stride_op_design", BVType(16))
x18 = op_design_free_vars[18]
x37 = IteExtend(x36, x19, x18)
x38 = BVAddExtend(scan_var_14, x37)
return x38
op_design_scans.append(scan_const14_f)
op_design_scans_results.append("scan_const14")
scan_const14 = BV(0, 16)
def scan_const15_f(scan_var_15):
x31 = scan_const14
if 4 not in op_design_free_vars:
op_design_free_vars[4] = Symbol("offset_op_design", BVType(16))
x4 = op_design_free_vars[4]
x32 = BVAddExtend(x31, x4)
return x32
op_design_scans.append(scan_const15_f)
op_design_scans_results.append("scan_const15")
scan_const15 = BV(0, 16)
from pysmt.shortcuts import Symbol, And, Equals, BVAdd, BVMul, Bool, Ite, BV, BVURem, BVExtract, ForAll, Exists, Portfolio, Solver
from pysmt.typing import BVType
from pysmt.logics import BV as logicBV
from frail import IteExtend, BVAddExtend, BVSubExtend, BVMulExtend, BVEqualsExtend
import time
og_design_free_vars = {}
og_design_scans = []
og_design_scans_results = []
def scan_const5_f(scan_var_5):
if 16 not in og_design_free_vars:
og_design_free_vars[16] = Symbol("x_max_og_design", BVType(16))
x16 = og_design_free_vars[16]
x76 = BV(1,16)
x77 = BVSubExtend(x16, x76)
x78 = BVEqualsExtend(scan_var_5, x77)
x79 = BV(0,16)
x80 = BV(1,16)
x81 = BVAddExtend(scan_var_5, x80)
x82 = IteExtend(x78, x79, x81)
return x82
og_design_scans.append(scan_const5_f)
og_design_scans_results.append("scan_const5")
scan_const5 = BV(0, 16)
def scan_const7_f(scan_var_7):
x53 = scan_const5
if 16 not in og_design_free_vars:
og_design_free_vars[16] = Symbol("x_max_og_design", BVType(16))
x16 = og_design_free_vars[16]
x54 = BV(1,16)
x55 = BVSubExtend(x16, x54)
x56 = BVEqualsExtend(x53, x55)
x57 = BV(0,16)
if 18 not in og_design_free_vars:
og_design_free_vars[18] = Symbol("x_stride_og_design", BVType(16))
x18 = og_design_free_vars[18]
x58 = BVAddExtend(scan_var_7, x18)
x59 = IteExtend(x56, x57, x58)
return x59
og_design_scans.append(scan_const7_f)
og_design_scans_results.append("scan_const7")
scan_const7 = BV(0, 16)
def scan_const8_f(scan_var_8):
x69 = scan_const5
if 16 not in og_design_free_vars:
og_design_free_vars[16] = Symbol("x_max_og_design", BVType(16))
x16 = og_design_free_vars[16]
x70 = BV(1,16)
x71 = BVSubExtend(x16, x70)
x72 = BVEqualsExtend(x69, x71)
if 19 not in og_design_free_vars:
og_design_free_vars[19] = Symbol("y_stride_og_design", BVType(16))
x19 = og_design_free_vars[19]
x73 = BVAddExtend(scan_var_8, x19)
x74 = IteExtend(x72, x73, scan_var_8)
return x74
og_design_scans.append(scan_const8_f)
og_design_scans_results.append("scan_const8")
scan_const8 = BV(0, 16)
def scan_const9_f(scan_var_9):
x48 = scan_const7
x49 = scan_const8
x50 = BVAddExtend(x48, x49)
if 4 not in og_design_free_vars:
og_design_free_vars[4] = Symbol("offset_og_design", BVType(16))
x4 = og_design_free_vars[4]
x51 = BVAddExtend(x50, x4)
return x51
og_design_scans.append(scan_const9_f)
og_design_scans_results.append("scan_const9")
scan_const9 = BV(0, 16)
with Solver("cvc4",
logic=logicBV,
incremental=True) as s:
per_step_constraints = []
for step in range(2):
print("handling step " + str(step))
for i in range(len(op_design_scans)):
globals()[op_design_scans_results[i]] = op_design_scans[i](globals()[op_design_scans_results[i]])
for i in range(len(og_design_scans)):
globals()[og_design_scans_results[i]] = og_design_scans[i](globals()[og_design_scans_results[i]])
per_step_constraints.append(Equals(globals()[op_design_scans_results[len(op_design_scans_results)-1]], globals()[og_design_scans_results[len(og_design_scans_results)-1]]))
final_constraint = per_step_constraints[0]
for c in per_step_constraints[1:]:
final_constraint = And(final_constraint, c)
s.add_assertion(ForAll(op_design_free_vars.values(), Exists(og_design_free_vars.values(), final_constraint)))
start = time.time()
res = s.solve()
assert res
end = time.time()
print("time: " + str(end - start)) | 0.319758 | 0.245808 |
entryPoints = [
"autocomplete",
"autocomplete/testing",
"badge",
"badge/testing",
"bottom-sheet",
"bottom-sheet/testing",
"button",
"button/testing",
"button-toggle",
"button-toggle/testing",
"card",
"checkbox",
"checkbox/testing",
"chips",
"core",
"core/testing",
"datepicker",
"dialog",
"dialog/testing",
"divider",
"divider/testing",
"expansion",
"expansion/testing",
"form-field",
"grid-list",
"icon",
"input",
"list",
"list/testing",
"menu",
"menu/testing",
"paginator",
"paginator/testing",
"progress-bar",
"progress-bar/testing",
"progress-spinner",
"progress-spinner/testing",
"radio",
"radio/testing",
"select",
"sidenav",
"sidenav/testing",
"slide-toggle",
"slide-toggle/testing",
"slider",
"slider/testing",
"snack-bar",
"snack-bar/testing",
"sort",
"sort/testing",
"stepper",
"table",
"table/testing",
"tabs",
"tabs/testing",
"toolbar",
"tooltip",
"tree",
"form-field/testing",
"form-field/testing/control",
"input/testing",
"select/testing",
]
# List of all non-testing entry-points of the Angular Material package.
MATERIAL_ENTRYPOINTS = [
ep
for ep in entryPoints
if not "/testing" in ep
]
# List of all testing entry-points of the Angular Material package.
MATERIAL_TESTING_ENTRYPOINTS = [
ep
for ep in entryPoints
if not ep in MATERIAL_ENTRYPOINTS
]
# List of all non-testing entry-point targets of the Angular Material package.
MATERIAL_TARGETS = ["//src/material"] + \
["//src/material/%s" % ep for ep in MATERIAL_ENTRYPOINTS]
# List of all testing entry-point targets of the Angular Material package.
MATERIAL_TESTING_TARGETS = ["//src/material/%s" % ep for ep in MATERIAL_TESTING_ENTRYPOINTS]
# List that references the sass libraries for each Material non-testing entry-point. This
# can be used to specify dependencies for the "all-theme.scss" file in core.
MATERIAL_SCSS_LIBS = [
"//src/material/%s:%s_scss_lib" % (ep, ep.replace("-", "_"))
for ep in MATERIAL_ENTRYPOINTS
] | src/material/config.bzl | entryPoints = [
"autocomplete",
"autocomplete/testing",
"badge",
"badge/testing",
"bottom-sheet",
"bottom-sheet/testing",
"button",
"button/testing",
"button-toggle",
"button-toggle/testing",
"card",
"checkbox",
"checkbox/testing",
"chips",
"core",
"core/testing",
"datepicker",
"dialog",
"dialog/testing",
"divider",
"divider/testing",
"expansion",
"expansion/testing",
"form-field",
"grid-list",
"icon",
"input",
"list",
"list/testing",
"menu",
"menu/testing",
"paginator",
"paginator/testing",
"progress-bar",
"progress-bar/testing",
"progress-spinner",
"progress-spinner/testing",
"radio",
"radio/testing",
"select",
"sidenav",
"sidenav/testing",
"slide-toggle",
"slide-toggle/testing",
"slider",
"slider/testing",
"snack-bar",
"snack-bar/testing",
"sort",
"sort/testing",
"stepper",
"table",
"table/testing",
"tabs",
"tabs/testing",
"toolbar",
"tooltip",
"tree",
"form-field/testing",
"form-field/testing/control",
"input/testing",
"select/testing",
]
# List of all non-testing entry-points of the Angular Material package.
MATERIAL_ENTRYPOINTS = [
ep
for ep in entryPoints
if not "/testing" in ep
]
# List of all testing entry-points of the Angular Material package.
MATERIAL_TESTING_ENTRYPOINTS = [
ep
for ep in entryPoints
if not ep in MATERIAL_ENTRYPOINTS
]
# List of all non-testing entry-point targets of the Angular Material package.
MATERIAL_TARGETS = ["//src/material"] + \
["//src/material/%s" % ep for ep in MATERIAL_ENTRYPOINTS]
# List of all testing entry-point targets of the Angular Material package.
MATERIAL_TESTING_TARGETS = ["//src/material/%s" % ep for ep in MATERIAL_TESTING_ENTRYPOINTS]
# List that references the sass libraries for each Material non-testing entry-point. This
# can be used to specify dependencies for the "all-theme.scss" file in core.
MATERIAL_SCSS_LIBS = [
"//src/material/%s:%s_scss_lib" % (ep, ep.replace("-", "_"))
for ep in MATERIAL_ENTRYPOINTS
] | 0.443841 | 0.419291 |
import os
import requests
import ujson as json
from . import errors as err
class Config(object):
    """Load and merge application configuration.

    Configuration is resolved in layers:
      1. built-in defaults taken from the ``const`` object,
      2. the first JSON config file found on the search path,
      3. environment-variable overrides for the remote-config settings,
      4. an optional remote configuration provider.
    """

    # (key in self.param, environment variable that overrides it)
    _ENV_OVERRIDES = (
        ("remote_config_provider", "~#UPROJECT#~_REMOTECONFIGPROVIDER"),
        ("remote_config_endpoint", "~#UPROJECT#~_REMOTECONFIGENDPOINT"),
        ("remote_config_path", "~#UPROJECT#~_REMOTECONFIGPATH"),
        ("remote_config_secret_keyring", "~#UPROJECT#~_REMOTECONFIGSECRETKEYRING"),
    )

    def __init__(self, const):
        """Initialize default configuration parameters.

        :const: Constant class with default values.
        """
        self.config_file = const.CONFIG_FILE_NAME
        # Copy the search-path list: get_local_config_params() mutates it
        # with insert(); aliasing const.CONFIG_PATH (as the code previously
        # did) would permanently modify the shared constant and leak the
        # extra directory into every other Config instance.
        self.config_paths = list(const.CONFIG_PATH)
        self.param = {
            "remote_config_provider": const.REMOTE_CONFIG_PROVIDER,
            "remote_config_endpoint": const.REMOTE_CONFIG_ENDPOINT,
            "remote_config_path": const.REMOTE_CONFIG_PATH,
            "remote_config_secret_keyring": const.REMOTE_CONFIG_SECRET_KEYRING,
            "log": {
                "level": const.LOG_LEVEL,
                "network": const.LOG_NETWORK,
                "address": const.LOG_ADDRESS,
            },
            "stats": {
                "prefix": const.STATS_PREFIX,
                "host": const.STATS_HOST,
                "port": const.STATS_PORT,
            },
        }

    def empty_remote_config(self):
        """Return True if any mandatory remote-config setting is empty.

        The secret keyring is deliberately not required.
        """
        required = (
            "remote_config_provider",
            "remote_config_endpoint",
            "remote_config_path",
        )
        return any(self.param[key] == "" for key in required)

    def get_local_config_params(self, config_dir=""):
        """Load configuration from the first config file found on disk.

        :config_dir: Local configuration directory (if any); searched first.
        :raises Exception: if config_dir is given but contains no config file.
        """
        if config_dir:
            if not os.path.isfile(os.path.join(config_dir, self.config_file)):
                raise Exception(
                    "Unable to find configuration file in: {0}".format(config_dir)
                )
            self.config_paths.insert(0, config_dir)
        for path in self.config_paths:
            cfgfile = os.path.join(path, self.config_file)
            if os.path.isfile(cfgfile):
                with open(cfgfile, "r") as fileobj:
                    self.param.update(json.loads(fileobj.read()))
                break
        # Environment variables take precedence over file/default values.
        for key, env_name in self._ENV_OVERRIDES:
            self.param[key] = os.getenv(env_name, self.param[key])

    def get_remote_config(self, provider, endpoint, path, key):
        """Load the remote configuration using the specified provider.

        Dispatches to the method named "get_config_<provider>".

        :provider: Type of remote provider (identifies the retrieval method).
        :endpoint: Base URL of the remote configuration system.
        :path: Path of the configuration directory relative to the endpoint.
        :key: Secret to add as URL query or another secret key depending on the provider type.
        :raises AttributeError: if no handler exists for the provider.
        """
        handler = getattr(self, "get_config_" + str(provider))
        return handler(endpoint, path, key)

    def get_remote_config_params(self):
        """Load the remote configuration; no-op when not fully configured."""
        if self.empty_remote_config():
            return None
        return self.get_remote_config(
            self.param["remote_config_provider"],
            self.param["remote_config_endpoint"],
            self.param["remote_config_path"],
            self.param["remote_config_secret_keyring"],
        )

    def get_config_url(self, endpoint, path, key):
        """Load the config from a remote URL and merge it into self.param.

        :endpoint: Base URL of the remote configuration system.
        :path: Path of the configuration directory relative to the endpoint.
        :key: Secret to add as URL query (e.g. token=123456).
        """
        # NOTE(review): an empty key yields a trailing "?" on the URL;
        # preserved for backward compatibility with existing deployments.
        url = "/".join(
            (endpoint.strip("/"), path.strip("/"), self.config_file + "?" + key)
        )
        req = requests.get(url)
        req.raise_for_status()
        self.param.update(req.json())

    def get_config_params(self, opt):
        """Load the configuration data from all sources.

        :opt: Dictionary containing the command-line arguments.
        """
        self.get_local_config_params(opt["--config-dir"])
        self.get_remote_config_params()
        # Command-line log level wins over every other source.
        if opt["--log-level"]:
            self.param["log"]["level"] = opt["--log-level"]

    def check_config_params(self):
        """Check the validity of configuration parameters.

        :raises err.InvalidConfigError: on the first empty mandatory value.
        """
        checks = (
            (self.param["log"]["level"], "log.level is empty"),
            (self.param["stats"]["prefix"], "stats.prefix is empty"),
            (self.param["stats"]["host"], "stats.host is empty"),
            (self.param["stats"]["port"], "stats.port is empty"),
        )
        for value, message in checks:
            if not value:
                raise err.InvalidConfigError(message)
import os
import requests
import ujson as json
from . import errors as err
class Config(object):
    """Configuration loader: defaults, local file, env vars, remote source."""

    def __init__(self, const):
        """Seed the configuration with the defaults supplied by *const*.

        :const: Constant class with default values.
        """
        self.config_file = const.CONFIG_FILE_NAME
        self.config_paths = const.CONFIG_PATH
        log_defaults = {
            "level": const.LOG_LEVEL,
            "network": const.LOG_NETWORK,
            "address": const.LOG_ADDRESS,
        }
        stats_defaults = {
            "prefix": const.STATS_PREFIX,
            "host": const.STATS_HOST,
            "port": const.STATS_PORT,
        }
        self.param = {
            "remote_config_provider": const.REMOTE_CONFIG_PROVIDER,
            "remote_config_endpoint": const.REMOTE_CONFIG_ENDPOINT,
            "remote_config_path": const.REMOTE_CONFIG_PATH,
            "remote_config_secret_keyring": const.REMOTE_CONFIG_SECRET_KEYRING,
            "log": log_defaults,
            "stats": stats_defaults,
        }

    def empty_remote_config(self):
        """Tell whether any required remote-config setting is blank."""
        return any(
            self.param[name] == ""
            for name in (
                "remote_config_provider",
                "remote_config_endpoint",
                "remote_config_path",
            )
        )

    def get_local_config_params(self, config_dir=""):
        """Read the first configuration file found on the search path.

        :config_dir: Local configuration directory (if any).
        """
        if config_dir:
            candidate = os.path.join(config_dir, self.config_file)
            if not os.path.isfile(candidate):
                raise Exception(
                    "Unable to find configuration file in: {0}".format(config_dir)
                )
            self.config_paths.insert(0, config_dir)
        for directory in self.config_paths:
            cfgfile = os.path.join(directory, self.config_file)
            if not os.path.isfile(cfgfile):
                continue
            with open(cfgfile, "r") as handle:
                self.param.update(json.loads(handle.read()))
            break
        # overwrite remote config with environment variables
        overrides = (
            ("remote_config_provider", "~#UPROJECT#~_REMOTECONFIGPROVIDER"),
            ("remote_config_endpoint", "~#UPROJECT#~_REMOTECONFIGENDPOINT"),
            ("remote_config_path", "~#UPROJECT#~_REMOTECONFIGPATH"),
            ("remote_config_secret_keyring", "~#UPROJECT#~_REMOTECONFIGSECRETKEYRING"),
        )
        for name, variable in overrides:
            self.param[name] = os.getenv(variable, self.param[name])

    def get_remote_config(self, provider, endpoint, path, key):
        """Dispatch to the provider-specific loader ("get_config_<provider>").

        :provider: Type of remote provider (identifies the retrieval method).
        :endpoint: Base URL of the remote configuration system.
        :path: Path of the configuration directory relative to the endpoint.
        :key: Secret to add as URL query or another secret key depending on the provider type.
        """
        loader = getattr(self, "get_config_" + str(provider))
        return loader(endpoint, path, key)

    def get_remote_config_params(self):
        """Fetch the remote configuration, if fully configured."""
        if self.empty_remote_config():
            return None
        settings = self.param
        return self.get_remote_config(
            settings["remote_config_provider"],
            settings["remote_config_endpoint"],
            settings["remote_config_path"],
            settings["remote_config_secret_keyring"],
        )

    def get_config_url(self, endpoint, path, key):
        """Download the configuration file from a remote URL.

        :endpoint: Base URL of the remote configuration system.
        :path: Path of the configuration directory relative to the endpoint.
        :key: Secret to add as URL query (e.g. token=123456).
        """
        pieces = (endpoint.strip("/"), path.strip("/"), self.config_file + "?" + key)
        response = requests.get("/".join(pieces))
        response.raise_for_status()
        self.param.update(response.json())

    def get_config_params(self, opt):
        """Assemble the configuration from every source.

        :opt: Dictionary containing the command-line arguments.
        """
        self.get_local_config_params(opt["--config-dir"])
        self.get_remote_config_params()
        if opt["--log-level"]:
            self.param["log"]["level"] = opt["--log-level"]

    def check_config_params(self):
        """Validate mandatory values; raise on the first empty one."""
        required = (
            ("log.level is empty", self.param["log"]["level"]),
            ("stats.prefix is empty", self.param["stats"]["prefix"]),
            ("stats.host is empty", self.param["stats"]["host"]),
            ("stats.port is empty", self.param["stats"]["port"]),
        )
        for message, value in required:
            if not value:
                raise err.InvalidConfigError(message)
import unittest2
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from helpers.district_manipulator import DistrictManipulator
from helpers.event_helper import EventHelper
from models.district import District
class TestEventGetShortName(unittest2.TestCase):
    """Exercises EventHelper.getShortName against historical event names.

    All assertions use assertEqual: the assertEquals spelling is a
    deprecated alias (removed from unittest in Python 3.12), and the rest
    of this test already used assertEqual.
    """

    def setUp(self):
        # Stub out the App Engine services the helper touches so District
        # entities can be stored without a real datastore.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        # Create districts
        districts = []
        for code in ['mar', 'isr', 'nc', 'ne', 'pnw', 'pch', 'chs', 'in', 'ont', 'fim', 'tx']:
            year = 2017
            districts.append(District(
                id=District.renderKeyName(year, code),
                year=year,
                abbreviation=code,
            ))
        DistrictManipulator.createOrUpdate(districts)

    def tearDown(self):
        self.testbed.deactivate()

    def test_event_get_short_name(self):
        # Edge cases.
        self.assertEqual(EventHelper.getShortName("  {   Random 2.718 stuff!  }  "), "{   Random 2.718 stuff!  }")
        self.assertEqual(EventHelper.getShortName("IN District -Bee's Knee's LX  "), "Bee's Knee's LX")
        self.assertEqual(EventHelper.getShortName("MAR District - Brussels Int'l Event sponsored by Sprouts"), "Brussels Int'l")
        self.assertEqual(EventHelper.getShortName("FIM District - Brussels Int'l Eventapalooza sponsored by TBA"), "Brussels Int'l")
        self.assertEqual(EventHelper.getShortName("NE District - ReallyBigEvent Scaling Up Every Year"), "ReallyBig")
        self.assertEqual(EventHelper.getShortName("PNW District -  Event!  "), "Event!")
        self.assertEqual(EventHelper.getShortName("FRC Detroit FIRST Robotics District Competition"), "Detroit")
        self.assertEqual(EventHelper.getShortName("FIRST Robotics Detroit FRC State Championship"), "Detroit")
        self.assertEqual(EventHelper.getShortName("Maui FIRST Robotics Regional and Luau"), "Maui")
        self.assertEqual(EventHelper.getShortName("California State Surf and Turf sponsored by TBA"), "California")
        self.assertEqual(EventHelper.getShortName("CarTalk Plaza Tournament"), "CarTalk Plaza")
        self.assertEqual(EventHelper.getShortName("IRI FRC Be-all and End-all"), "IRI")
        self.assertEqual(EventHelper.getShortName("  Ada Field  "), "Ada")
        self.assertEqual(EventHelper.getShortName("  FIRST Robotics Einstein Field Equations "), "Einstein")
        self.assertEqual(EventHelper.getShortName("FRC Martin Luther King Jr. Region Championship"), "Martin Luther King Jr.")
        self.assertEqual(EventHelper.getShortName("PNW Ada Lovelace Tournament of Software  "), "Ada Lovelace")
        self.assertEqual(EventHelper.getShortName("\tPNW Ada Lovelace Tournament of Software "), "Ada Lovelace")
        self.assertEqual(EventHelper.getShortName(" MAR FIRST Robotics Rosa Parks FRC Tournament of Roses "), "Rosa Parks")
        self.assertEqual(EventHelper.getShortName("Washington D.C. FIRST Robotics Region"), "Washington D.C.")
        self.assertEqual(EventHelper.getShortName("Washington D.C. FIRST Robotics Region."), "Washington D.C.")
        self.assertEqual(EventHelper.getShortName("Washington D.C. FIRST Robotics Regiontonian"), "Washington D.C. FIRST Robotics Regiontonian")  # Does not match "Region\b"

        # Tests from various years
        self.assertEqual(EventHelper.getShortName("FIRST Robotics Competition"), "FIRST Robotics Competition")
        self.assertEqual(EventHelper.getShortName("National Championship"), "National Championship")
        self.assertEqual(EventHelper.getShortName("New England Tournament"), "New England")
        self.assertEqual(EventHelper.getShortName("FIRST National Championship"), "FIRST National Championship")
        self.assertEqual(EventHelper.getShortName("Motorola Midwest Regional"), "Motorola Midwest")
        self.assertEqual(EventHelper.getShortName("DEKA New England Regional"), "DEKA New England")
        self.assertEqual(EventHelper.getShortName("Johnson & Johnson Mid-Atlantic Regional"), "Johnson & Johnson Mid-Atlantic")
        self.assertEqual(EventHelper.getShortName("Great Lakes Regional"), "Great Lakes")
        self.assertEqual(EventHelper.getShortName("New England Regional"), "New England")
        self.assertEqual(EventHelper.getShortName("Southwest Regional"), "Southwest")
        self.assertEqual(EventHelper.getShortName("NASA Ames Regional"), "NASA Ames")
        self.assertEqual(EventHelper.getShortName("Kennedy Space Center Regional"), "Kennedy Space Center")
        self.assertEqual(EventHelper.getShortName("UTC New England Regional"), "UTC New England")
        self.assertEqual(EventHelper.getShortName("Philadelphia Alliance Regional"), "Philadelphia Alliance")
        self.assertEqual(EventHelper.getShortName("Kennedy Space Center Southeast Regional"), "Kennedy Space Center Southeast")
        self.assertEqual(EventHelper.getShortName("Long Island Regional"), "Long Island")
        self.assertEqual(EventHelper.getShortName("Lone Star Regional"), "Lone Star")
        self.assertEqual(EventHelper.getShortName("NASA Langley/VCU Regional"), "NASA Langley/VCU")
        self.assertEqual(EventHelper.getShortName("Archimedes Field"), "Archimedes")
        self.assertEqual(EventHelper.getShortName("Southern California Regional"), "Southern California")
        self.assertEqual(EventHelper.getShortName("Silicon Valley Regional"), "Silicon Valley")
        self.assertEqual(EventHelper.getShortName("UTC/New England Regional"), "UTC/New England")
        self.assertEqual(EventHelper.getShortName("Curie Field"), "Curie")
        self.assertEqual(EventHelper.getShortName("NASA KSC Southeast Regional"), "NASA KSC Southeast")
        self.assertEqual(EventHelper.getShortName("Galileo Field"), "Galileo")
        self.assertEqual(EventHelper.getShortName("West Michigan Regional"), "West Michigan")
        self.assertEqual(EventHelper.getShortName("Newton Field"), "Newton")
        self.assertEqual(EventHelper.getShortName("J&J Mid-Atlantic Regional"), "J&J Mid-Atlantic")
        self.assertEqual(EventHelper.getShortName("New York City Regional"), "New York City")
        self.assertEqual(EventHelper.getShortName("NASA Langley Regional"), "NASA Langley")
        self.assertEqual(EventHelper.getShortName("SBPLI Long Island Regional"), "SBPLI Long Island")
        self.assertEqual(EventHelper.getShortName("Western Michigan Regional"), "Western Michigan")
        self.assertEqual(EventHelper.getShortName("St. Louis Regional"), "St. Louis")
        self.assertEqual(EventHelper.getShortName("J&J Mid Atlantic Regional"), "J&J Mid Atlantic")
        self.assertEqual(EventHelper.getShortName("Buckeye Regional"), "Buckeye")
        self.assertEqual(EventHelper.getShortName("Canadian Regional"), "Canadian")
        self.assertEqual(EventHelper.getShortName("NASA Langley / VCU Regional"), "NASA Langley / VCU")
        self.assertEqual(EventHelper.getShortName("Pacific Northwest Regional"), "Pacific Northwest")
        self.assertEqual(EventHelper.getShortName("Arizona Regional"), "Arizona")
        self.assertEqual(EventHelper.getShortName("Einstein Field"), "Einstein")
        self.assertEqual(EventHelper.getShortName("Central Florida Regional"), "Central Florida")
        self.assertEqual(EventHelper.getShortName("Peachtree Regional"), "Peachtree")
        self.assertEqual(EventHelper.getShortName("Midwest Regional"), "Midwest")
        self.assertEqual(EventHelper.getShortName("Chesapeake Regional"), "Chesapeake")
        self.assertEqual(EventHelper.getShortName("BAE SYSTEMS Granite State Regional"), "BAE SYSTEMS Granite State")
        self.assertEqual(EventHelper.getShortName("Philadelphia Regional"), "Philadelphia")
        self.assertEqual(EventHelper.getShortName("Pittsburgh Regional"), "Pittsburgh")
        self.assertEqual(EventHelper.getShortName("Sacramento Regional"), "Sacramento")
        self.assertEqual(EventHelper.getShortName("NASA / VCU Regional"), "NASA / VCU")
        self.assertEqual(EventHelper.getShortName("Colorado Regional"), "Colorado")
        self.assertEqual(EventHelper.getShortName("Detroit Regional"), "Detroit")
        self.assertEqual(EventHelper.getShortName("Florida Regional"), "Florida")
        self.assertEqual(EventHelper.getShortName("New Jersey Regional"), "New Jersey")
        self.assertEqual(EventHelper.getShortName("Greater Toronto Regional"), "Greater Toronto")
        self.assertEqual(EventHelper.getShortName("Palmetto Regional"), "Palmetto")
        self.assertEqual(EventHelper.getShortName("Boilermaker Regional"), "Boilermaker")
        self.assertEqual(EventHelper.getShortName("GM/Technion University Israel Pilot Regional"), "GM/Technion University Israel Pilot")
        self.assertEqual(EventHelper.getShortName("Las Vegas Regional"), "Las Vegas")
        self.assertEqual(EventHelper.getShortName("Finger Lakes Regional"), "Finger Lakes")
        self.assertEqual(EventHelper.getShortName("Waterloo Regional"), "Waterloo")
        self.assertEqual(EventHelper.getShortName("GM/Technion Israel Regional"), "GM/Technion Israel")
        self.assertEqual(EventHelper.getShortName("Boston Regional"), "Boston")
        self.assertEqual(EventHelper.getShortName("Davis Sacramento Regional"), "Davis Sacramento")
        self.assertEqual(EventHelper.getShortName("Wisconsin Regional"), "Wisconsin")
        self.assertEqual(EventHelper.getShortName("Brazil Pilot"), "Brazil Pilot")
        self.assertEqual(EventHelper.getShortName("Los Angeles Regional"), "Los Angeles")
        self.assertEqual(EventHelper.getShortName("UTC Connecticut Regional"), "UTC Connecticut")
        self.assertEqual(EventHelper.getShortName("Greater Kansas City Regional"), "Greater Kansas City")
        self.assertEqual(EventHelper.getShortName("Bayou Regional"), "Bayou")
        self.assertEqual(EventHelper.getShortName("San Diego Regional"), "San Diego")
        self.assertEqual(EventHelper.getShortName("Brazil Regional"), "Brazil")
        self.assertEqual(EventHelper.getShortName("Connecticut Regional"), "Connecticut")
        self.assertEqual(EventHelper.getShortName("Hawaii Regional"), "Hawaii")
        self.assertEqual(EventHelper.getShortName("Israel Regional"), "Israel")
        self.assertEqual(EventHelper.getShortName("Minnesota Regional"), "Minnesota")
        self.assertEqual(EventHelper.getShortName("BAE Systems Granite State Regional"), "BAE Systems Granite State")
        self.assertEqual(EventHelper.getShortName("Oklahoma City Regional"), "Oklahoma City")
        self.assertEqual(EventHelper.getShortName("Oregon Regional"), "Oregon")
        self.assertEqual(EventHelper.getShortName("UC Davis Sacramento Regional"), "UC Davis Sacramento")
        self.assertEqual(EventHelper.getShortName("Microsoft Seattle Regional"), "Microsoft Seattle")
        self.assertEqual(EventHelper.getShortName("Dallas Regional, Sponsored by JCPenney and the JCPenney Afterschool Fund"), "Dallas")
        self.assertEqual(EventHelper.getShortName("Washington DC Regional"), "Washington DC")
        self.assertEqual(EventHelper.getShortName("Detroit FIRST Robotics District Competition"), "Detroit")
        self.assertEqual(EventHelper.getShortName("Cass Tech FIRST Robotics District Competition"), "Cass Tech")
        self.assertEqual(EventHelper.getShortName("Kettering University FIRST Robotics District Competition"), "Kettering University")
        self.assertEqual(EventHelper.getShortName("Michigan FIRST Robotics Competition State Championship"), "Michigan")
        self.assertEqual(EventHelper.getShortName("Lansing FIRST Robotics District Competition"), "Lansing")
        self.assertEqual(EventHelper.getShortName("Traverse City FIRST Robotics District Competition"), "Traverse City")
        self.assertEqual(EventHelper.getShortName("West Michigan FIRST Robotics District Competition"), "West Michigan")
        self.assertEqual(EventHelper.getShortName("Minnesota 10000 Lakes Regional"), "Minnesota 10000 Lakes")
        self.assertEqual(EventHelper.getShortName("Minnesota North Star Regional"), "Minnesota North Star")
        self.assertEqual(EventHelper.getShortName("BAE Granite State Regional"), "BAE Granite State")
        self.assertEqual(EventHelper.getShortName("Troy FIRST Robotics District Competition"), "Troy")
        self.assertEqual(EventHelper.getShortName("NASA VCU Regional"), "NASA VCU")
        self.assertEqual(EventHelper.getShortName("Northeast Utilities FIRST Connecticut Regional"), "Northeast Utilities FIRST Connecticut")
        self.assertEqual(EventHelper.getShortName("Dallas Regional sponsored by JCPenney and the JCPenney Afterschool Fund"), "Dallas")
        self.assertEqual(EventHelper.getShortName("Hawaii Regional sponsored by BAE Systems"), "Hawaii")
        self.assertEqual(EventHelper.getShortName("North Carolina Regional"), "North Carolina")
        self.assertEqual(EventHelper.getShortName("Oklahoma Regional"), "Oklahoma")
        self.assertEqual(EventHelper.getShortName("Autodesk Oregon Regional"), "Autodesk Oregon")
        self.assertEqual(EventHelper.getShortName("Silicon Valley Regional sponsored by Google and BAE Systems"), "Silicon Valley")
        self.assertEqual(EventHelper.getShortName("Utah Regional sponsored by NASA & Platt"), "Utah")
        self.assertEqual(EventHelper.getShortName("Virginia Regional"), "Virginia")
        self.assertEqual(EventHelper.getShortName("Ann Arbor FIRST Robotics District Competition"), "Ann Arbor")
        self.assertEqual(EventHelper.getShortName("WPI Regional"), "WPI")
        self.assertEqual(EventHelper.getShortName("Dallas Regional sponsored by jcpenney"), "Dallas")
        self.assertEqual(EventHelper.getShortName("Lake Superior Regional"), "Lake Superior")
        self.assertEqual(EventHelper.getShortName("Michigan FIRST Robotics District Competition State Championship"), "Michigan")
        self.assertEqual(EventHelper.getShortName("BAE Systems/Granite State Regional"), "BAE Systems/Granite State")
        self.assertEqual(EventHelper.getShortName("Waterford FIRST Robotics District Competition"), "Waterford")
        self.assertEqual(EventHelper.getShortName("Greater Toronto East Regional"), "Greater Toronto East")
        self.assertEqual(EventHelper.getShortName("Greater Toronto West Regional"), "Greater Toronto West")
        self.assertEqual(EventHelper.getShortName("Alamo Regional"), "Alamo")
        self.assertEqual(EventHelper.getShortName("Niles FIRST Robotics District Competition"), "Niles")
        self.assertEqual(EventHelper.getShortName("Smoky Mountain Regional"), "Smoky Mountain")
        self.assertEqual(EventHelper.getShortName("Utah Regional co-sponsored by NASA and Platt"), "Utah")
        self.assertEqual(EventHelper.getShortName("Seattle Olympic Regional"), "Seattle Olympic")
        self.assertEqual(EventHelper.getShortName("Seattle Cascade Regional"), "Seattle Cascade")
        self.assertEqual(EventHelper.getShortName("Livonia FIRST Robotics District Competition"), "Livonia")
        self.assertEqual(EventHelper.getShortName("Central Valley Regional"), "Central Valley")
        self.assertEqual(EventHelper.getShortName("Dallas East Regional sponsored by jcpenney"), "Dallas East")
        self.assertEqual(EventHelper.getShortName("Dallas West Regional sponsored by jcpenney"), "Dallas West")
        self.assertEqual(EventHelper.getShortName("Orlando Regional"), "Orlando")
        self.assertEqual(EventHelper.getShortName("Michigan FRC State Championship"), "Michigan")
        self.assertEqual(EventHelper.getShortName("Gull Lake FIRST Robotics District Competition"), "Gull Lake")
        self.assertEqual(EventHelper.getShortName("Rutgers University FIRST Robotics District Competition"), "Rutgers University")
        self.assertEqual(EventHelper.getShortName("Mount Olive FIRST Robotics District Competition"), "Mount Olive")
        self.assertEqual(EventHelper.getShortName("Lenape FIRST Robotics District Competition"), "Lenape")
        self.assertEqual(EventHelper.getShortName("Queen City Regional"), "Queen City")
        self.assertEqual(EventHelper.getShortName("Mid-Atlantic Robotics FRC Region Championship"), "Mid-Atlantic Robotics")
        self.assertEqual(EventHelper.getShortName("Hatboro-Horsham FIRST Robotics District Competition"), "Hatboro-Horsham")
        self.assertEqual(EventHelper.getShortName("Chestnut Hill FIRST Robotics District Competition"), "Chestnut Hill")
        self.assertEqual(EventHelper.getShortName("Festival de Robotique FRC a Montreal Regional"), "Festival de Robotique")
        self.assertEqual(EventHelper.getShortName("South Florida Regional"), "South Florida")
        self.assertEqual(EventHelper.getShortName("Smoky Mountains Regional"), "Smoky Mountains")
        self.assertEqual(EventHelper.getShortName("Spokane Regional"), "Spokane")
        self.assertEqual(EventHelper.getShortName("Northville FIRST Robotics District Competition"), "Northville")
        self.assertEqual(EventHelper.getShortName("Western Canadian FRC Regional"), "Western Canadian")
        self.assertEqual(EventHelper.getShortName("Razorback Regional"), "Razorback")
        self.assertEqual(EventHelper.getShortName("Phoenix Regional"), "Phoenix")
        self.assertEqual(EventHelper.getShortName("Los Angeles Regional sponsored by The Roddenberry Foundation"), "Los Angeles")
        self.assertEqual(EventHelper.getShortName("Inland Empire Regional"), "Inland Empire")
        self.assertEqual(EventHelper.getShortName("Connecticut Regional sponsored by UTC"), "Connecticut")
        self.assertEqual(EventHelper.getShortName("Crossroads Regional"), "Crossroads")
        self.assertEqual(EventHelper.getShortName("Pine Tree Regional"), "Pine Tree")
        self.assertEqual(EventHelper.getShortName("Bedford FIRST Robotics District Competition"), "Bedford")
        self.assertEqual(EventHelper.getShortName("Grand Blanc FIRST Robotics District Competition"), "Grand Blanc")
        self.assertEqual(EventHelper.getShortName("St Joseph FIRST Robotics District Competition"), "St Joseph")
        self.assertEqual(EventHelper.getShortName("Northern Lights Regional"), "Northern Lights")
        self.assertEqual(EventHelper.getShortName("Bridgewater-Raritan FIRST Robotics District Competition"), "Bridgewater-Raritan")
        self.assertEqual(EventHelper.getShortName("TCNJ FIRST Robotics District Competition"), "TCNJ")
        self.assertEqual(EventHelper.getShortName("Lenape Seneca FIRST Robotics District Competition"), "Lenape Seneca")
        self.assertEqual(EventHelper.getShortName("Springside - Chestnut Hill FIRST Robotics District Competition"), "Springside - Chestnut Hill")
        self.assertEqual(EventHelper.getShortName("Festival de Robotique FRC de Montreal Regional"), "Festival de Robotique")
        self.assertEqual(EventHelper.getShortName("Dallas Regional"), "Dallas")
        self.assertEqual(EventHelper.getShortName("Hub City Regional"), "Hub City")
        self.assertEqual(EventHelper.getShortName("Alamo Regional sponsored by Rackspace Hosting"), "Alamo")
        # NOTE(review): "L<NAME>" below looks like a redacted sponsor name —
        # confirm against the upstream event list before relying on it.
        self.assertEqual(EventHelper.getShortName("Utah Regional co-sponsored by the L<NAME> Group & Platt"), "Utah")
        self.assertEqual(EventHelper.getShortName("Seattle Regional"), "Seattle")
        self.assertEqual(EventHelper.getShortName("Central Washington Regional"), "Central Washington")
        self.assertEqual(EventHelper.getShortName("Western Canada Regional"), "Western Canada")
        self.assertEqual(EventHelper.getShortName("Arkansas Regional"), "Arkansas")
        self.assertEqual(EventHelper.getShortName("Groton District Event"), "Groton")
        self.assertEqual(EventHelper.getShortName("Hartford District Event"), "Hartford")
        self.assertEqual(EventHelper.getShortName("Southington District Event"), "Southington")
        self.assertEqual(EventHelper.getShortName("Greater DC Regional"), "Greater DC")
        self.assertEqual(EventHelper.getShortName("Central Illinois Regional"), "Central Illinois")
        self.assertEqual(EventHelper.getShortName("Northeastern University District Event"), "Northeastern University")
        self.assertEqual(EventHelper.getShortName("WPI District Event"), "WPI")
        self.assertEqual(EventHelper.getShortName("Pine Tree District Event"), "Pine Tree")
        self.assertEqual(EventHelper.getShortName("Center Line FIRST Robotics District Competition"), "Center Line")
        self.assertEqual(EventHelper.getShortName("Escanaba FIRST Robotics District Competition"), "Escanaba")
        self.assertEqual(EventHelper.getShortName("Howell FIRST Robotics District Competition"), "Howell")
        self.assertEqual(EventHelper.getShortName("St. Joseph FIRST Robotics District Competition"), "St. Joseph")
        self.assertEqual(EventHelper.getShortName("Southfield FIRST Robotics District Competition"), "Southfield")
        self.assertEqual(EventHelper.getShortName("Mexico City Regional"), "Mexico City")
        self.assertEqual(EventHelper.getShortName("New England FRC Region Championship"), "New England")
        self.assertEqual(EventHelper.getShortName("UNH District Event"), "UNH")
        self.assertEqual(EventHelper.getShortName("Granite State District Event"), "Granite State")
        self.assertEqual(EventHelper.getShortName("MAR FIRST Robotics Bridgewater-Raritan District Competition"), "Bridgewater-Raritan")
        self.assertEqual(EventHelper.getShortName("MAR FIRST Robotics Clifton District Competition"), "Clifton")
        self.assertEqual(EventHelper.getShortName("MAR FIRST Robotics Mt. Olive District Competition"), "Mt. Olive")
        self.assertEqual(EventHelper.getShortName("MAR FIRST Robotics Lenape-Seneca District Competition"), "Lenape-Seneca")
        self.assertEqual(EventHelper.getShortName("New York Tech Valley Regional"), "New York Tech Valley")
        self.assertEqual(EventHelper.getShortName("North Bay Regional"), "North Bay")
        self.assertEqual(EventHelper.getShortName("Windsor Essex Great Lakes Regional"), "Windsor Essex Great Lakes")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Oregon City District Event"), "Oregon City")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Oregon State University District Event"), "Oregon State University")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Wilsonville District Event"), "Wilsonville")
        self.assertEqual(EventHelper.getShortName("MAR FIRST Robotics Hatboro-Horsham District Competition"), "Hatboro-Horsham")
        self.assertEqual(EventHelper.getShortName("MAR FIRST Robotics Springside Chestnut Hill District Competition"), "Springside Chestnut Hill")
        self.assertEqual(EventHelper.getShortName("Greater Pittsburgh Regional"), "Greater Pittsburgh")
        self.assertEqual(EventHelper.getShortName("Autodesk PNW FRC Championship"), "Autodesk PNW")
        self.assertEqual(EventHelper.getShortName("Rhode Island District Event"), "Rhode Island")
        self.assertEqual(EventHelper.getShortName("Utah Regional"), "Utah")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Auburn District Event"), "Auburn")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Auburn Mountainview District Event"), "Auburn Mountainview")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Eastern Washington University District Event"), "Eastern Washington University")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Central Washington University District Event"), "Central Washington University")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Mt. Vernon District Event"), "Mt. Vernon")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Shorewood District Event"), "Shorewood")
        self.assertEqual(EventHelper.getShortName("PNW FIRST Robotics Glacier Peak District Event"), "Glacier Peak")

        # 2015 edge cases
        self.assertEqual(EventHelper.getShortName("FIM District - Howell Event"), "Howell")
        self.assertEqual(EventHelper.getShortName("NE District - Granite State Event"), "Granite State")
        self.assertEqual(EventHelper.getShortName("PNW District - Oregon City Event"), "Oregon City")
        self.assertEqual(EventHelper.getShortName("IN District -Indianapolis"), "Indianapolis")
        self.assertEqual(EventHelper.getShortName("MAR District - Mt. Olive Event"), "Mt. Olive")
        self.assertEqual(EventHelper.getShortName("Israel Regional - see Site Info for additional information"), "Israel")
        self.assertEqual(EventHelper.getShortName("IN District - Kokomo City of Firsts Event sponsored by AndyMark"), "Kokomo City of Firsts")

        # 2017 edge cases
        self.assertEqual(EventHelper.getShortName("ONT District - McMaster University Event"), "McMaster University")
        self.assertEqual(EventHelper.getShortName("FIRST Ontario Provincial Championship"), "Ontario")
        self.assertEqual(EventHelper.getShortName("FIM District - Kettering University Event #1"), "Kettering University #1")
        self.assertEqual(EventHelper.getShortName("ISR District Event #1"), "ISR #1")

        # 2018 edge cases
        self.assertEqual(EventHelper.getShortName("PNW District Clackamas Academy Event"), "Clackamas Academy")

        # 2019 edge cases
        self.assertEqual(EventHelper.getShortName("FMA District Hatboro-Horsham Event"), "Hatboro-Horsham")
        self.assertEqual(EventHelper.getShortName("FIT District Austin Event"), "Austin")

        # 2020 edge cases
        self.assertEqual(EventHelper.getShortName("***SUSPENDED*** Silicon Valley Regional"), "Silicon Valley")
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from helpers.district_manipulator import DistrictManipulator
from helpers.event_helper import EventHelper
from models.district import District
class TestEventGetShortName(unittest2.TestCase):
    """Exercises EventHelper.getShortName against historical event names.

    Fixes vs. previous revision: the deprecated ``assertEquals`` alias is
    replaced with ``assertEqual``, and the repeated assertion lines are
    collapsed into a data-driven (full name, expected short name) table.
    """

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(root_path=".")
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        # Create District entities so district-prefix names (e.g.
        # "FIM District - ...") have real districts to resolve against.
        year = 2017  # Hoisted: constant for every district created below.
        districts = []
        for code in ['mar', 'isr', 'nc', 'ne', 'pnw', 'pch', 'chs', 'in', 'ont', 'fim', 'tx']:
            districts.append(District(
                id=District.renderKeyName(year, code),
                year=year,
                abbreviation=code,
            ))
        DistrictManipulator.createOrUpdate(districts)

    def tearDown(self):
        self.testbed.deactivate()

    def test_event_get_short_name(self):
        """Each (full event name, expected short name) pair must hold."""
        cases = [
            # Edge cases.
            (" { Random 2.718 stuff! } ", "{ Random 2.718 stuff! }"),
            ("IN District -Bee's Knee's LX ", "Bee's Knee's LX"),
            ("MAR District - Brussels Int'l Event sponsored by Sprouts", "Brussels Int'l"),
            ("FIM District - Brussels Int'l Eventapalooza sponsored by TBA", "Brussels Int'l"),
            ("NE District - ReallyBigEvent Scaling Up Every Year", "ReallyBig"),
            ("PNW District - Event! ", "Event!"),
            ("FRC Detroit FIRST Robotics District Competition", "Detroit"),
            ("FIRST Robotics Detroit FRC State Championship", "Detroit"),
            ("Maui FIRST Robotics Regional and Luau", "Maui"),
            ("California State Surf and Turf sponsored by TBA", "California"),
            ("CarTalk Plaza Tournament", "CarTalk Plaza"),
            ("IRI FRC Be-all and End-all", "IRI"),
            (" Ada Field ", "Ada"),
            (" FIRST Robotics Einstein Field Equations ", "Einstein"),
            ("FRC Martin Luther King Jr. Region Championship", "Martin Luther King Jr."),
            ("PNW Ada Lovelace Tournament of Software ", "Ada Lovelace"),
            ("\tPNW Ada Lovelace Tournament of Software ", "Ada Lovelace"),
            (" MAR FIRST Robotics Rosa Parks FRC Tournament of Roses ", "Rosa Parks"),
            ("Washington D.C. FIRST Robotics Region", "Washington D.C."),
            ("Washington D.C. FIRST Robotics Region.", "Washington D.C."),
            # Does not match "Region\b"
            ("Washington D.C. FIRST Robotics Regiontonian", "Washington D.C. FIRST Robotics Regiontonian"),
            # Tests from various years
            ("FIRST Robotics Competition", "FIRST Robotics Competition"),
            ("National Championship", "National Championship"),
            ("New England Tournament", "New England"),
            ("FIRST National Championship", "FIRST National Championship"),
            ("Motorola Midwest Regional", "Motorola Midwest"),
            ("DEKA New England Regional", "DEKA New England"),
            ("Johnson & Johnson Mid-Atlantic Regional", "Johnson & Johnson Mid-Atlantic"),
            ("Great Lakes Regional", "Great Lakes"),
            ("New England Regional", "New England"),
            ("Southwest Regional", "Southwest"),
            ("NASA Ames Regional", "NASA Ames"),
            ("Kennedy Space Center Regional", "Kennedy Space Center"),
            ("UTC New England Regional", "UTC New England"),
            ("Philadelphia Alliance Regional", "Philadelphia Alliance"),
            ("Kennedy Space Center Southeast Regional", "Kennedy Space Center Southeast"),
            ("Long Island Regional", "Long Island"),
            ("Lone Star Regional", "Lone Star"),
            ("NASA Langley/VCU Regional", "NASA Langley/VCU"),
            ("Archimedes Field", "Archimedes"),
            ("Southern California Regional", "Southern California"),
            ("Silicon Valley Regional", "Silicon Valley"),
            ("UTC/New England Regional", "UTC/New England"),
            ("Curie Field", "Curie"),
            ("NASA KSC Southeast Regional", "NASA KSC Southeast"),
            ("Galileo Field", "Galileo"),
            ("West Michigan Regional", "West Michigan"),
            ("Newton Field", "Newton"),
            ("J&J Mid-Atlantic Regional", "J&J Mid-Atlantic"),
            ("New York City Regional", "New York City"),
            ("NASA Langley Regional", "NASA Langley"),
            ("SBPLI Long Island Regional", "SBPLI Long Island"),
            ("Western Michigan Regional", "Western Michigan"),
            ("St. Louis Regional", "St. Louis"),
            ("J&J Mid Atlantic Regional", "J&J Mid Atlantic"),
            ("Buckeye Regional", "Buckeye"),
            ("Canadian Regional", "Canadian"),
            ("NASA Langley / VCU Regional", "NASA Langley / VCU"),
            ("Pacific Northwest Regional", "Pacific Northwest"),
            ("Arizona Regional", "Arizona"),
            ("Einstein Field", "Einstein"),
            ("Central Florida Regional", "Central Florida"),
            ("Peachtree Regional", "Peachtree"),
            ("Midwest Regional", "Midwest"),
            ("Chesapeake Regional", "Chesapeake"),
            ("BAE SYSTEMS Granite State Regional", "BAE SYSTEMS Granite State"),
            ("Philadelphia Regional", "Philadelphia"),
            ("Pittsburgh Regional", "Pittsburgh"),
            ("Sacramento Regional", "Sacramento"),
            ("NASA / VCU Regional", "NASA / VCU"),
            ("Colorado Regional", "Colorado"),
            ("Detroit Regional", "Detroit"),
            ("Florida Regional", "Florida"),
            ("New Jersey Regional", "New Jersey"),
            ("Greater Toronto Regional", "Greater Toronto"),
            ("Palmetto Regional", "Palmetto"),
            ("Boilermaker Regional", "Boilermaker"),
            ("GM/Technion University Israel Pilot Regional", "GM/Technion University Israel Pilot"),
            ("Las Vegas Regional", "Las Vegas"),
            ("Finger Lakes Regional", "Finger Lakes"),
            ("Waterloo Regional", "Waterloo"),
            ("GM/Technion Israel Regional", "GM/Technion Israel"),
            ("Boston Regional", "Boston"),
            ("Davis Sacramento Regional", "Davis Sacramento"),
            ("Wisconsin Regional", "Wisconsin"),
            ("Brazil Pilot", "Brazil Pilot"),
            ("Los Angeles Regional", "Los Angeles"),
            ("UTC Connecticut Regional", "UTC Connecticut"),
            ("Greater Kansas City Regional", "Greater Kansas City"),
            ("Bayou Regional", "Bayou"),
            ("San Diego Regional", "San Diego"),
            ("Brazil Regional", "Brazil"),
            ("Connecticut Regional", "Connecticut"),
            ("Hawaii Regional", "Hawaii"),
            ("Israel Regional", "Israel"),
            ("Minnesota Regional", "Minnesota"),
            ("BAE Systems Granite State Regional", "BAE Systems Granite State"),
            ("Oklahoma City Regional", "Oklahoma City"),
            ("Oregon Regional", "Oregon"),
            ("UC Davis Sacramento Regional", "UC Davis Sacramento"),
            ("Microsoft Seattle Regional", "Microsoft Seattle"),
            ("Dallas Regional, Sponsored by JCPenney and the JCPenney Afterschool Fund", "Dallas"),
            ("Washington DC Regional", "Washington DC"),
            ("Detroit FIRST Robotics District Competition", "Detroit"),
            ("Cass Tech FIRST Robotics District Competition", "Cass Tech"),
            ("Kettering University FIRST Robotics District Competition", "Kettering University"),
            ("Michigan FIRST Robotics Competition State Championship", "Michigan"),
            ("Lansing FIRST Robotics District Competition", "Lansing"),
            ("Traverse City FIRST Robotics District Competition", "Traverse City"),
            ("West Michigan FIRST Robotics District Competition", "West Michigan"),
            ("Minnesota 10000 Lakes Regional", "Minnesota 10000 Lakes"),
            ("Minnesota North Star Regional", "Minnesota North Star"),
            ("BAE Granite State Regional", "BAE Granite State"),
            ("Troy FIRST Robotics District Competition", "Troy"),
            ("NASA VCU Regional", "NASA VCU"),
            ("Northeast Utilities FIRST Connecticut Regional", "Northeast Utilities FIRST Connecticut"),
            ("Dallas Regional sponsored by JCPenney and the JCPenney Afterschool Fund", "Dallas"),
            ("Hawaii Regional sponsored by BAE Systems", "Hawaii"),
            ("North Carolina Regional", "North Carolina"),
            ("Oklahoma Regional", "Oklahoma"),
            ("Autodesk Oregon Regional", "Autodesk Oregon"),
            ("Silicon Valley Regional sponsored by Google and BAE Systems", "Silicon Valley"),
            ("Utah Regional sponsored by NASA & Platt", "Utah"),
            ("Virginia Regional", "Virginia"),
            ("Ann Arbor FIRST Robotics District Competition", "Ann Arbor"),
            ("WPI Regional", "WPI"),
            ("Dallas Regional sponsored by jcpenney", "Dallas"),
            ("Lake Superior Regional", "Lake Superior"),
            ("Michigan FIRST Robotics District Competition State Championship", "Michigan"),
            ("BAE Systems/Granite State Regional", "BAE Systems/Granite State"),
            ("Waterford FIRST Robotics District Competition", "Waterford"),
            ("Greater Toronto East Regional", "Greater Toronto East"),
            ("Greater Toronto West Regional", "Greater Toronto West"),
            ("Alamo Regional", "Alamo"),
            ("Niles FIRST Robotics District Competition", "Niles"),
            ("Smoky Mountain Regional", "Smoky Mountain"),
            ("Utah Regional co-sponsored by NASA and Platt", "Utah"),
            ("Seattle Olympic Regional", "Seattle Olympic"),
            ("Seattle Cascade Regional", "Seattle Cascade"),
            ("Livonia FIRST Robotics District Competition", "Livonia"),
            ("Central Valley Regional", "Central Valley"),
            ("Dallas East Regional sponsored by jcpenney", "Dallas East"),
            ("Dallas West Regional sponsored by jcpenney", "Dallas West"),
            ("Orlando Regional", "Orlando"),
            ("Michigan FRC State Championship", "Michigan"),
            ("Gull Lake FIRST Robotics District Competition", "Gull Lake"),
            ("Rutgers University FIRST Robotics District Competition", "Rutgers University"),
            ("Mount Olive FIRST Robotics District Competition", "Mount Olive"),
            ("Lenape FIRST Robotics District Competition", "Lenape"),
            ("Queen City Regional", "Queen City"),
            ("Mid-Atlantic Robotics FRC Region Championship", "Mid-Atlantic Robotics"),
            ("Hatboro-Horsham FIRST Robotics District Competition", "Hatboro-Horsham"),
            ("Chestnut Hill FIRST Robotics District Competition", "Chestnut Hill"),
            ("Festival de Robotique FRC a Montreal Regional", "Festival de Robotique"),
            ("South Florida Regional", "South Florida"),
            ("Smoky Mountains Regional", "Smoky Mountains"),
            ("Spokane Regional", "Spokane"),
            ("Northville FIRST Robotics District Competition", "Northville"),
            ("Western Canadian FRC Regional", "Western Canadian"),
            ("Razorback Regional", "Razorback"),
            ("Phoenix Regional", "Phoenix"),
            ("Los Angeles Regional sponsored by The Roddenberry Foundation", "Los Angeles"),
            ("Inland Empire Regional", "Inland Empire"),
            ("Connecticut Regional sponsored by UTC", "Connecticut"),
            ("Crossroads Regional", "Crossroads"),
            ("Pine Tree Regional", "Pine Tree"),
            ("Bedford FIRST Robotics District Competition", "Bedford"),
            ("Grand Blanc FIRST Robotics District Competition", "Grand Blanc"),
            ("St Joseph FIRST Robotics District Competition", "St Joseph"),
            ("Northern Lights Regional", "Northern Lights"),
            ("Bridgewater-Raritan FIRST Robotics District Competition", "Bridgewater-Raritan"),
            ("TCNJ FIRST Robotics District Competition", "TCNJ"),
            ("Lenape Seneca FIRST Robotics District Competition", "Lenape Seneca"),
            ("Springside - Chestnut Hill FIRST Robotics District Competition", "Springside - Chestnut Hill"),
            ("Festival de Robotique FRC de Montreal Regional", "Festival de Robotique"),
            ("Dallas Regional", "Dallas"),
            ("Hub City Regional", "Hub City"),
            ("Alamo Regional sponsored by Rackspace Hosting", "Alamo"),
            ("Utah Regional co-sponsored by the L<NAME> Group & Platt", "Utah"),
            ("Seattle Regional", "Seattle"),
            ("Central Washington Regional", "Central Washington"),
            ("Western Canada Regional", "Western Canada"),
            ("Arkansas Regional", "Arkansas"),
            ("Groton District Event", "Groton"),
            ("Hartford District Event", "Hartford"),
            ("Southington District Event", "Southington"),
            ("Greater DC Regional", "Greater DC"),
            ("Central Illinois Regional", "Central Illinois"),
            ("Northeastern University District Event", "Northeastern University"),
            ("WPI District Event", "WPI"),
            ("Pine Tree District Event", "Pine Tree"),
            ("Center Line FIRST Robotics District Competition", "Center Line"),
            ("Escanaba FIRST Robotics District Competition", "Escanaba"),
            ("Howell FIRST Robotics District Competition", "Howell"),
            ("St. Joseph FIRST Robotics District Competition", "St. Joseph"),
            ("Southfield FIRST Robotics District Competition", "Southfield"),
            ("Mexico City Regional", "Mexico City"),
            ("New England FRC Region Championship", "New England"),
            ("UNH District Event", "UNH"),
            ("Granite State District Event", "Granite State"),
            ("MAR FIRST Robotics Bridgewater-Raritan District Competition", "Bridgewater-Raritan"),
            ("MAR FIRST Robotics Clifton District Competition", "Clifton"),
            ("MAR FIRST Robotics Mt. Olive District Competition", "Mt. Olive"),
            ("MAR FIRST Robotics Lenape-Seneca District Competition", "Lenape-Seneca"),
            ("New York Tech Valley Regional", "New York Tech Valley"),
            ("North Bay Regional", "North Bay"),
            ("Windsor Essex Great Lakes Regional", "Windsor Essex Great Lakes"),
            ("PNW FIRST Robotics Oregon City District Event", "Oregon City"),
            ("PNW FIRST Robotics Oregon State University District Event", "Oregon State University"),
            ("PNW FIRST Robotics Wilsonville District Event", "Wilsonville"),
            ("MAR FIRST Robotics Hatboro-Horsham District Competition", "Hatboro-Horsham"),
            ("MAR FIRST Robotics Springside Chestnut Hill District Competition", "Springside Chestnut Hill"),
            ("Greater Pittsburgh Regional", "Greater Pittsburgh"),
            ("Autodesk PNW FRC Championship", "Autodesk PNW"),
            ("Rhode Island District Event", "Rhode Island"),
            ("Utah Regional", "Utah"),
            ("PNW FIRST Robotics Auburn District Event", "Auburn"),
            ("PNW FIRST Robotics Auburn Mountainview District Event", "Auburn Mountainview"),
            ("PNW FIRST Robotics Eastern Washington University District Event", "Eastern Washington University"),
            ("PNW FIRST Robotics Central Washington University District Event", "Central Washington University"),
            ("PNW FIRST Robotics Mt. Vernon District Event", "Mt. Vernon"),
            ("PNW FIRST Robotics Shorewood District Event", "Shorewood"),
            ("PNW FIRST Robotics Glacier Peak District Event", "Glacier Peak"),
            # 2015 edge cases
            ("FIM District - Howell Event", "Howell"),
            ("NE District - Granite State Event", "Granite State"),
            ("PNW District - Oregon City Event", "Oregon City"),
            ("IN District -Indianapolis", "Indianapolis"),
            ("MAR District - Mt. Olive Event", "Mt. Olive"),
            ("Israel Regional - see Site Info for additional information", "Israel"),
            ("IN District - Kokomo City of Firsts Event sponsored by AndyMark", "Kokomo City of Firsts"),
            # 2017 edge cases
            ("ONT District - McMaster University Event", "McMaster University"),
            ("FIRST Ontario Provincial Championship", "Ontario"),
            ("FIM District - Kettering University Event #1", "Kettering University #1"),
            ("ISR District Event #1", "ISR #1"),
            # 2018 edge cases
            ("PNW District Clackamas Academy Event", "Clackamas Academy"),
            # 2019 edge cases
            ("FMA District Hatboro-Horsham Event", "Hatboro-Horsham"),
            ("FIT District Austin Event", "Austin"),
            # 2020 edge cases
            ("***SUSPENDED*** Silicon Valley Regional", "Silicon Valley"),
        ]
        for event_name, expected in cases:
            self.assertEqual(EventHelper.getShortName(event_name), expected)
import os
import sys
import subprocess
def supports_color():
    """Check if system supports ANSI colour.

    True only when both hold: the platform can render ANSI escapes
    ('Pocket PC' cannot; win32 only with ANSICON set) and stdout is a tty.
    """
    platform_ok = sys.platform != 'Pocket PC' and (
        sys.platform != 'win32' or 'ANSICON' in os.environ)
    stdout_is_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
    return platform_ok and stdout_is_tty
def install(color=False):
    """Install Web95: clone the repo, install dependencies, run migrations.

    Exits the process with status 1 if any step fails.

    Args:
        color: When True, wrap status messages in ANSI colour escape codes.
    """
    if color:
        green = "\u001b[32m\u001b[1m"
        yellow = "\u001b[33m\u001b[1m"
        red = "\u001b[31m\u001b[1m"
        reset = "\u001b[0m"
    else:
        green, yellow, red, reset = "", "", "", ""

    print(yellow + "Cloning Repository..." + reset)
    result = subprocess.call(["git", "clone", "https://github.com/Juicy-Jaguars/summer-code-jam-2020"])
    if result != 0:  # Checks the returned error code. 0=OK !0=Not OK
        # Fix: report the failure *before* chdir (the target directory may not
        # exist if the clone failed), close the colour with `reset`, and exit
        # with a non-zero status instead of the previous sys.exit() (status 0).
        print(red + "Error cloning Repository. Exiting..." + reset)
        sys.exit(1)
    os.chdir("summer-code-jam-2020/juicy-jaguars")
    print(green + "Repository successfully cloned." + reset)
    print()

    print(yellow + "Installing dependencies..." + reset)
    result = subprocess.call(["pip3", "install", "-r", "requirements.txt"])
    if result != 0:  # Checks the returned error code. 0=OK !0=Not OK
        print(red + "Error installing Dependencies. Exiting..." + reset)
        sys.exit(1)
    print(green + "Dependencies successfully installed." + reset)
    print()

    print(yellow + "Making migrations..." + reset)
    result = subprocess.call(["python", os.path.join("Web95", "manage.py"), "migrate"])
    if result != 0:  # Checks the returned error code. 0=OK !0=Not OK
        print(red + "Error making migrations. Exiting..." + reset)
        sys.exit(1)
    print(green + "Made migrations successfully." + reset)
    print()

    print(green + "Successfully installed. Run with 'python Web95/manage.py runserver' "
          "from summer-code-jam-2020/juicy-jaguars/" + reset)
if __name__ == "__main__":
    color = supports_color()
    # Fix: guard the argv length — bare `sys.argv[1]` raised IndexError when
    # the script was run with no sub-command.
    if len(sys.argv) > 1 and sys.argv[1] == "install":
        install(color)
import sys
import subprocess
def supports_color():
    """Check if system supports ANSI colour."""
    # stdout must be a real terminal for colour to make sense.
    stdout_is_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
    if sys.platform == 'Pocket PC':
        return False
    if sys.platform == 'win32' and 'ANSICON' not in os.environ:
        return False
    return stdout_is_tty
def install(color=False):
    """Install Web95.

    Steps: clone the repository, pip-install its requirements, and apply the
    Django migrations. Terminates the process with exit status 1 on any
    failed step.

    Args:
        color: When True, status messages are wrapped in ANSI colour codes.
    """
    if color:
        green = "\u001b[32m\u001b[1m"
        yellow = "\u001b[33m\u001b[1m"
        red = "\u001b[31m\u001b[1m"
        reset = "\u001b[0m"
    else:
        green, yellow, red, reset = "", "", "", ""

    print(yellow + "Cloning Repository..." + reset)
    result = subprocess.call(["git", "clone", "https://github.com/Juicy-Jaguars/summer-code-jam-2020"])
    if result != 0:  # Checks the returned error code. 0=OK !0=Not OK
        # Fix: check the clone result before chdir (the directory may be
        # missing on failure), append `reset` so the terminal is not left
        # red/bold, and exit non-zero instead of sys.exit()'s default 0.
        print(red + "Error cloning Repository. Exiting..." + reset)
        sys.exit(1)
    os.chdir("summer-code-jam-2020/juicy-jaguars")
    print(green + "Repository successfully cloned." + reset)
    print()

    print(yellow + "Installing dependencies..." + reset)
    result = subprocess.call(["pip3", "install", "-r", "requirements.txt"])
    if result != 0:  # Checks the returned error code. 0=OK !0=Not OK
        print(red + "Error installing Dependencies. Exiting..." + reset)
        sys.exit(1)
    print(green + "Dependencies successfully installed." + reset)
    print()

    print(yellow + "Making migrations..." + reset)
    result = subprocess.call(["python", os.path.join("Web95", "manage.py"), "migrate"])
    if result != 0:  # Checks the returned error code. 0=OK !0=Not OK
        print(red + "Error making migrations. Exiting..." + reset)
        sys.exit(1)
    print(green + "Made migrations successfully." + reset)
    print()

    print(green + "Successfully installed. Run with 'python Web95/manage.py runserver' "
          "from summer-code-jam-2020/juicy-jaguars/" + reset)
if __name__ == "__main__":
    color = supports_color()
    # Fix: without the length check, running the script with no arguments
    # raised IndexError on sys.argv[1].
    if len(sys.argv) > 1 and sys.argv[1] == "install":
        install(color)
"""Tests for the Disentangled Sequential Variational Autoencoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow_probability.examples import disentangled_vae
tfe = tf.contrib.eager
@tfe.run_all_tests_in_graph_and_eager_modes
class DisentangledVAETest(tf.test.TestCase):
  """Base test class: shared shape constants and a distribution-shape assert."""

  def setUp(self):
    # Shape constants reused by every component test.
    self.samples = 2
    self.batch_size = 3
    self.length = 4
    self.latent_size = 5
    self.dimensions = self.latent_size
    self.hidden_size = 6
    self.channels = 3

  def assertDistShape(self, dist, event_shape, batch_shape):
    """Asserts `dist` has the given event and batch shapes."""
    self.assertEqual((dist.event_shape, dist.batch_shape),
                     (event_shape, batch_shape))
@tfe.run_all_tests_in_graph_and_eager_modes
class DisentangledVAEComponentsTest(DisentangledVAETest):
"""Test class for the individual model components."""
def testLearnableMultivariateNormalDiagClass(self):
  """Calling the learnable diag-normal yields an unbatched distribution."""
  model = disentangled_vae.LearnableMultivariateNormalDiag(self.dimensions)
  self.assertDistShape(model(), (self.dimensions,), ())
def testLearnableMultivariateNormalDiagCellClassNoBatch(self):
  """Rolls the prior cell forward two timesteps without a batch dimension."""
  prior = disentangled_vae.LearnableMultivariateNormalDiagCell(
      self.dimensions, self.hidden_size)

  # Zero state.
  dynamic_previous_output, state = prior.zero_state()
  self.assertEqual(dynamic_previous_output.shape, (self.dimensions,))
  for tensor in state:
    self.assertEqual(tensor.shape, (1, self.hidden_size))
  h0, c0 = state

  # First timestep.
  dist_z1, state_z1 = prior(dynamic_previous_output, state)
  self.assertDistShape(dist_z1, (self.dimensions,), ())
  for tensor in state_z1:
    self.assertEqual(tensor.shape, (1, self.hidden_size))
  if not tfe.in_eager_mode():
    # Graph mode needs explicit variable initialization before evaluate().
    self.evaluate(tf.global_variables_initializer())
  h1, c1 = state_z1
  # The state after feeding the zero state's output must match the zero state.
  self.assertTrue(np.allclose(self.evaluate(h1), self.evaluate(h0)))
  self.assertTrue(np.allclose(self.evaluate(c1), self.evaluate(c0)))

  # Second timestep.
  dist_z2, state = prior(dist_z1.sample(), state_z1)
  self.assertDistShape(dist_z2, (self.dimensions,), ())
  for tensor in state:
    self.assertEqual(tensor.shape, (1, self.hidden_size))
  h2, c2 = state
  # A sampled (generally non-zero) input must change the cell state.
  self.assertFalse(np.allclose(self.evaluate(h2), self.evaluate(h1)))
  self.assertFalse(np.allclose(self.evaluate(c2), self.evaluate(c1)))

  # Second timestep with sample shape.
  dist_z2, state = prior(dist_z1.sample(2), state_z1)
  # Fix: `(2,)` is a real 1-tuple; the previous `(2)` was just the int 2 and
  # only passed because TensorShape coerces a bare int.
  self.assertDistShape(dist_z2, (self.dimensions,), (2,))
  for tensor in state:
    self.assertEqual(tensor.shape, (2, self.hidden_size))
def testLearnableMultivariateNormalDiagCellClassBatch(self):
  """Rolls the prior cell forward with a (samples, batch_size) batch shape."""
  prior = disentangled_vae.LearnableMultivariateNormalDiagCell(
      self.dimensions, self.hidden_size)

  # Zero state with complex batch shape.
  dynamic_previous_output, state = prior.zero_state(
      (self.samples, self.batch_size))
  self.assertEqual(dynamic_previous_output.shape,
                   (self.samples, self.batch_size, self.dimensions))
  # NOTE(review): the zero LSTM state itself is asserted to stay unbatched
  # (leading dim 1) even for a batched rollout; presumably the first call
  # broadcasts it — confirm against the cell implementation.
  for tensor in state:
    self.assertEqual(tensor.shape, (1, self.hidden_size))
  h0, c0 = state

  # First timestep.
  dist_z1, state_z1 = prior(dynamic_previous_output, state)
  self.assertDistShape(dist_z1, (self.dimensions,),
                       (self.samples, self.batch_size))
  # After the first call the state carries the full batch shape.
  for tensor in state_z1:
    self.assertEqual(
        tensor.shape, (self.samples, self.batch_size, self.hidden_size))
  if not tfe.in_eager_mode():
    # Graph mode needs explicit variable initialization before evaluate().
    self.evaluate(tf.global_variables_initializer())
  h1, c1 = state_z1
  # Feeding the zero state's output leaves the state values unchanged
  # (np.allclose broadcasts the (1, hidden) zero state against the batch).
  self.assertTrue(np.allclose(self.evaluate(h1), self.evaluate(h0)))
  self.assertTrue(np.allclose(self.evaluate(c1), self.evaluate(c0)))

  # Second timestep.
  dist_z2, state = prior(dist_z1.sample(), state_z1)
  self.assertDistShape(dist_z2, (self.dimensions,),
                       (self.samples, self.batch_size))
  for tensor in state:
    self.assertEqual(
        tensor.shape, (self.samples, self.batch_size, self.hidden_size))
  h2, c2 = state
  # A sampled (generally non-zero) input must change the cell state.
  self.assertFalse(np.allclose(self.evaluate(h2), self.evaluate(h1)))
  self.assertFalse(np.allclose(self.evaluate(c2), self.evaluate(c1)))

  # Second timestep with sample shape: an extra leading sample dimension
  # propagates into both the distribution batch shape and the state.
  dist_z2, state = prior(dist_z1.sample(2), state_z1)
  self.assertDistShape(dist_z2, (self.dimensions,),
                       (2, self.samples, self.batch_size))
  for tensor in state:
    self.assertEqual(
        tensor.shape, (2, self.samples, self.batch_size, self.hidden_size))
def testDecoderClassNoSampleShape(self):
  """Decoder maps a (z, f) latent pair to per-timestep image distributions."""
  decoder = disentangled_vae.Decoder(20, self.channels)
  dynamic_latents = tf.random_normal([self.batch_size, self.length, 10])
  static_latents = tf.random_normal([self.batch_size, 10])
  image_dist = decoder((dynamic_latents, static_latents))
  self.assertDistShape(image_dist, (64, 64, self.channels),
                       (self.batch_size, self.length))
def testDecoderClassWithSampleShape(self):
  """Decoder keeps a leading sample dimension in the batch shape."""
  decoder = disentangled_vae.Decoder(20, self.channels)
  # Latents carrying an extra leading sample dimension.
  dynamic_latents = tf.random_normal(
      [self.samples, self.batch_size, self.length, 10])
  static_latents = tf.random_normal([self.samples, self.batch_size, 10])
  image_dist = decoder((dynamic_latents, static_latents))
  self.assertDistShape(image_dist, (64, 64, self.channels),
                       (self.samples, self.batch_size, self.length))
def testCompressorClassNoSampleShape(self):
  """Compressor reduces each 64x64x3 frame to a hidden feature vector."""
  compressor = disentangled_vae.Compressor(self.hidden_size)
  frames = tf.random_normal([self.batch_size, self.length, 64, 64, 3])
  features = compressor(frames)
  self.assertEqual(features.shape,
                   (self.batch_size, self.length, self.hidden_size))
def testCompressorClassWithSampleShape(self):
  """Compressor preserves an extra leading sample dimension."""
  compressor = disentangled_vae.Compressor(self.hidden_size)
  frames = tf.random_normal(
      [self.samples, self.batch_size, self.length, 64, 64, 3])
  features = compressor(frames)
  self.assertEqual(features.shape,
                   (self.samples, self.batch_size, self.length,
                    self.hidden_size))
def testEncoderStaticClassNoSamples(self):
encoder = disentangled_vae.EncoderStatic(self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.batch_size, self.length, self.hidden_size])
dist = encoder(input_features)
self.assertDistShape(dist, (self.latent_size,), (self.batch_size,))
def testEncoderStaticClassSamples(self):
encoder = disentangled_vae.EncoderStatic(self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.samples, self.batch_size, self.length, self.hidden_size])
dist = encoder(input_features)
self.assertDistShape(dist, (self.latent_size,),
(self.samples, self.batch_size))
def testEncoderDynamicFactorizedClassNoSamples(self):
encoder = disentangled_vae.EncoderDynamicFactorized(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.batch_size, self.length, self.hidden_size])
dist = encoder(input_features)
self.assertDistShape(dist, (self.latent_size,),
(self.batch_size, self.length))
def testEncoderDynamicFactorizedClassSamples(self):
encoder = disentangled_vae.EncoderDynamicFactorized(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.samples, self.batch_size, self.length, self.hidden_size])
dist = encoder(input_features)
self.assertDistShape(dist, (self.latent_size,),
(self.samples, self.batch_size, self.length))
def testEncoderDynamicFullClassNoSamples(self):
encoder = disentangled_vae.EncoderDynamicFull(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.batch_size, self.length, self.hidden_size])
f = tf.random_normal([self.batch_size, self.latent_size])
dist = encoder((input_features, f))
self.assertDistShape(dist, (self.latent_size,),
(self.batch_size, self.length))
def testEncoderDynamicFullClassStaticSamples(self):
encoder = disentangled_vae.EncoderDynamicFull(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.batch_size, self.length, self.hidden_size])
f = tf.random_normal([self.samples, self.batch_size, self.latent_size])
dist = encoder((input_features, f))
self.assertDistShape(dist, (self.latent_size,),
(self.samples, self.batch_size, self.length))
def testEncoderDynamicFullClassInputSamples(self):
encoder = disentangled_vae.EncoderDynamicFull(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.samples, self.batch_size, self.length, self.hidden_size])
f = tf.random_normal([self.batch_size, self.latent_size])
dist = encoder((input_features, f))
self.assertDistShape(dist, (self.latent_size,),
(self.samples, self.batch_size, self.length))
def testEncoderDynamicFullClassBothSamples(self):
encoder = disentangled_vae.EncoderDynamicFull(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.samples, self.batch_size, self.length, self.hidden_size])
f = tf.random_normal([self.samples, self.batch_size, self.latent_size])
dist = encoder((input_features, f))
self.assertDistShape(dist, (self.latent_size,),
(self.samples, self.batch_size, self.length))
def testEncoderDynamicFullClassComplexStaticSampleShape(self):
encoder = disentangled_vae.EncoderDynamicFull(
self.latent_size, self.hidden_size)
input_features = tf.random_normal(
[self.batch_size, self.length, self.hidden_size])
f = tf.random_normal(
[self.samples*2, self.samples, self.batch_size, self.latent_size])
dist = encoder((input_features, f))
self.assertDistShape(
dist, (self.latent_size,),
(self.samples*2, self.samples, self.batch_size, self.length))
@tfe.run_all_tests_in_graph_and_eager_modes
class DisentangledSequentialVAETest(DisentangledVAETest):
  """Test class for the DisentangledSequentialVAE model.

  Exercises the model's public API in both factorization modes
  ("factorized" and "full"): generate(), reconstruct(), and the four
  sample_{static,dynamic}_{prior,posterior} methods, asserting only the
  event/batch shapes of the returned distributions and samples.
  """

  def setUp(self):
    super(DisentangledSequentialVAETest, self).setUp()
    self.latent_size_static = 10
    self.latent_size_dynamic = 11
    # One model per factorization mode; both share the base-class sizes.
    self.model_factorized = disentangled_vae.DisentangledSequentialVAE(
        self.latent_size_static, self.latent_size_dynamic, self.hidden_size,
        self.channels, "factorized")
    self.model_full = disentangled_vae.DisentangledSequentialVAE(
        self.latent_size_static, self.latent_size_dynamic, self.hidden_size,
        self.channels, "full")
    # Fake video batch: (batch, length, height, width, channels).
    self.inputs = tf.random_normal(
        [self.batch_size, self.length, 64, 64, self.channels])

  # --- generate() -----------------------------------------------------------

  def testGenerateFactorizedFixStatic(self):
    dist = self.model_factorized.generate(self.batch_size, self.length,
                                          samples=self.samples, fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFullFixStatic(self):
    dist = self.model_full.generate(self.batch_size, self.length,
                                    samples=self.samples, fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFactorizedFixDynamic(self):
    dist = self.model_factorized.generate(self.batch_size, self.length,
                                          samples=self.samples,
                                          fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFullFixDynamic(self):
    dist = self.model_full.generate(self.batch_size, self.length,
                                    samples=self.samples, fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFactorizedFixBoth(self):
    dist = self.model_factorized.generate(self.batch_size, self.length,
                                          samples=self.samples, fix_static=True,
                                          fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFullFixBoth(self):
    dist = self.model_full.generate(self.batch_size, self.length,
                                    samples=self.samples, fix_static=True,
                                    fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  # --- reconstruct() --------------------------------------------------------

  def testReconstructFactorized(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFull(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleStatic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleStatic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleStaticFixed(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_static=True,
                                             fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleStaticFixed(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_static=True, fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleDynamic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleDynamic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleDynamicFixed(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_dynamic=True,
                                             fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleDynamicFixed(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_dynamic=True, fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleBoth(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_static=True,
                                             sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleBoth(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_static=True, sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSwapStatic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             swap_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSwapStatic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       swap_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSwapDynamic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             swap_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSwapDynamic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       swap_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  # --- static latent sampling -----------------------------------------------

  def testSampleStaticPriorFactorized(self):
    sample, dist = self.model_factorized.sample_static_prior(self.samples,
                                                             self.batch_size)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPriorFull(self):
    sample, dist = self.model_full.sample_static_prior(self.samples,
                                                       self.batch_size)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPriorFactorizedFixed(self):
    sample, dist = self.model_factorized.sample_static_prior(
        self.samples, self.batch_size, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPriorFullFixed(self):
    sample, dist = self.model_full.sample_static_prior(
        self.samples, self.batch_size, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPosteriorFactorized(self):
    features = self.model_factorized.compressor(self.inputs)
    sample, dist = self.model_factorized.sample_static_posterior(features,
                                                                 self.samples)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), (self.batch_size,))

  def testSampleStaticPosteriorFull(self):
    features = self.model_full.compressor(self.inputs)
    sample, dist = self.model_full.sample_static_posterior(features,
                                                           self.samples)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), (self.batch_size,))

  # --- dynamic latent sampling ----------------------------------------------

  def testSampleDynamicPriorFactorized(self):
    sample, dist = self.model_factorized.sample_dynamic_prior(
        self.samples, self.batch_size, self.length)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, self.batch_size, self.length))

  def testSampleDynamicPriorFull(self):
    sample, dist = self.model_full.sample_dynamic_prior(
        self.samples, self.batch_size, self.length)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, self.batch_size, self.length))

  def testSampleDynamicPriorFactorizedFixed(self):
    sample, dist = self.model_factorized.sample_dynamic_prior(
        self.samples, self.batch_size, self.length, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    # With fixed=True the distribution's batch dim is 1 (not batch_size) --
    # presumably one trajectory shared across the batch; the drawn sample is
    # still tiled to the full batch above.
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, 1, self.length))

  def testSampleDynamicPriorFullFixed(self):
    sample, dist = self.model_full.sample_dynamic_prior(
        self.samples, self.batch_size, self.length, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, 1, self.length))

  def testSampleDynamicPosteriorFactorized(self):
    features = self.model_factorized.compressor(self.inputs)
    sample, dist = self.model_factorized.sample_dynamic_posterior(
        features, self.samples)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.batch_size, self.length))

  def testSampleDynamicPosteriorFull(self):
    features = self.model_full.compressor(self.inputs)
    # The full posterior needs a static sample; its dist is then shadowed.
    static_sample, dist = self.model_full.sample_static_posterior(features,
                                                                  self.samples)
    sample, dist = self.model_full.sample_dynamic_posterior(
        features, self.samples, static_sample)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, self.batch_size, self.length))
if __name__ == "__main__":
tf.test.main() | tensorflow_probability/examples/disentangled_vae_test.py | """Tests for the Disentangled Sequential Variational Autoencoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow_probability.examples import disentangled_vae
tfe = tf.contrib.eager
@tfe.run_all_tests_in_graph_and_eager_modes
class DisentangledVAETest(tf.test.TestCase):
  """Shared fixture sizes and a distribution-shape assertion for VAE tests."""

  def setUp(self):
    # Small, mutually distinct sizes so a transposed axis shows up in asserts.
    self.samples = 2
    self.batch_size = 3
    self.length = 4
    self.latent_size = 5
    self.hidden_size = 6
    self.channels = 3
    self.dimensions = self.latent_size

  def assertDistShape(self, dist, event_shape, batch_shape):
    """Assert that `dist` has the given event and batch shapes."""
    for actual, expected in ((dist.event_shape, event_shape),
                             (dist.batch_shape, batch_shape)):
      self.assertEqual(actual, expected)
@tfe.run_all_tests_in_graph_and_eager_modes
class DisentangledVAEComponentsTest(DisentangledVAETest):
  """Test class for the individual model components.

  Each test builds one component and asserts only the event/batch shapes
  of its outputs; the LSTM-cell tests additionally check that state
  evolves across timesteps.
  """

  def testLearnableMultivariateNormalDiagClass(self):
    dist_model = disentangled_vae.LearnableMultivariateNormalDiag(
        self.dimensions)
    dist = dist_model()
    self.assertDistShape(dist, (self.dimensions,), ())

  def testLearnableMultivariateNormalDiagCellClassNoBatch(self):
    prior = disentangled_vae.LearnableMultivariateNormalDiagCell(
        self.dimensions, self.hidden_size)

    # Zero state.
    dynamic_previous_output, state = prior.zero_state()
    self.assertEqual(dynamic_previous_output.shape, (self.dimensions,))
    for tensor in state:
      self.assertEqual(tensor.shape, (1, self.hidden_size))
    h0, c0 = state

    # First timestep.
    dist_z1, state_z1 = prior(dynamic_previous_output, state)
    self.assertDistShape(dist_z1, (self.dimensions,), ())
    for tensor in state_z1:
      self.assertEqual(tensor.shape, (1, self.hidden_size))

    # In graph mode variables must be initialized before evaluate(); the
    # statement order below (init, then compare h1/h0) is load-bearing.
    if not tfe.in_eager_mode():
      self.evaluate(tf.global_variables_initializer())
    h1, c1 = state_z1
    self.assertTrue(np.allclose(self.evaluate(h1), self.evaluate(h0)))
    self.assertTrue(np.allclose(self.evaluate(c1), self.evaluate(c0)))

    # Second timestep.
    dist_z2, state = prior(dist_z1.sample(), state_z1)
    self.assertDistShape(dist_z2, (self.dimensions,), ())
    for tensor in state:
      self.assertEqual(tensor.shape, (1, self.hidden_size))
    h2, c2 = state
    self.assertFalse(np.allclose(self.evaluate(h2), self.evaluate(h1)))
    self.assertFalse(np.allclose(self.evaluate(c2), self.evaluate(c1)))

    # Second timestep with sample shape.
    # NOTE(review): `(2)` is the scalar 2, not the 1-tuple `(2,)` -- this
    # relies on TensorShape equality accepting a scalar; verify intent.
    dist_z2, state = prior(dist_z1.sample(2), state_z1)
    self.assertDistShape(dist_z2, (self.dimensions,), (2))
    for tensor in state:
      self.assertEqual(tensor.shape, (2, self.hidden_size))

  def testLearnableMultivariateNormalDiagCellClassBatch(self):
    prior = disentangled_vae.LearnableMultivariateNormalDiagCell(
        self.dimensions, self.hidden_size)

    # Zero state with complex batch shape.
    dynamic_previous_output, state = prior.zero_state(
        (self.samples, self.batch_size))
    self.assertEqual(dynamic_previous_output.shape,
                     (self.samples, self.batch_size, self.dimensions))
    # The zero state itself is unbatched; it broadcasts on first use.
    for tensor in state:
      self.assertEqual(tensor.shape, (1, self.hidden_size))
    h0, c0 = state

    # First timestep.
    dist_z1, state_z1 = prior(dynamic_previous_output, state)
    self.assertDistShape(dist_z1, (self.dimensions,),
                         (self.samples, self.batch_size))
    for tensor in state_z1:
      self.assertEqual(
          tensor.shape, (self.samples, self.batch_size, self.hidden_size))

    # Graph mode: initialize variables before any evaluate() call.
    if not tfe.in_eager_mode():
      self.evaluate(tf.global_variables_initializer())
    h1, c1 = state_z1
    self.assertTrue(np.allclose(self.evaluate(h1), self.evaluate(h0)))
    self.assertTrue(np.allclose(self.evaluate(c1), self.evaluate(c0)))

    # Second timestep.
    dist_z2, state = prior(dist_z1.sample(), state_z1)
    self.assertDistShape(dist_z2, (self.dimensions,),
                         (self.samples, self.batch_size))
    for tensor in state:
      self.assertEqual(
          tensor.shape, (self.samples, self.batch_size, self.hidden_size))
    h2, c2 = state
    self.assertFalse(np.allclose(self.evaluate(h2), self.evaluate(h1)))
    self.assertFalse(np.allclose(self.evaluate(c2), self.evaluate(c1)))

    # Second timestep with sample shape.
    dist_z2, state = prior(dist_z1.sample(2), state_z1)
    self.assertDistShape(dist_z2, (self.dimensions,),
                         (2, self.samples, self.batch_size))
    for tensor in state:
      self.assertEqual(
          tensor.shape, (2, self.samples, self.batch_size, self.hidden_size))

  def testDecoderClassNoSampleShape(self):
    decoder = disentangled_vae.Decoder(20, self.channels)
    z = tf.random_normal([self.batch_size, self.length, 10])
    f = tf.random_normal([self.batch_size, 10])
    dist = decoder((z, f))
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.batch_size, self.length))

  def testDecoderClassWithSampleShape(self):
    decoder = disentangled_vae.Decoder(20, self.channels)
    # Using sample shape.
    z = tf.random_normal([self.samples, self.batch_size, self.length, 10])
    f = tf.random_normal([self.samples, self.batch_size, 10])
    dist = decoder((z, f))
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testCompressorClassNoSampleShape(self):
    encoder = disentangled_vae.Compressor(self.hidden_size)
    xt = tf.random_normal([self.batch_size, self.length, 64, 64, 3])
    out = encoder(xt)
    self.assertEqual(out.shape,
                     (self.batch_size, self.length, self.hidden_size))

  def testCompressorClassWithSampleShape(self):
    encoder = disentangled_vae.Compressor(self.hidden_size)
    xt = tf.random_normal(
        [self.samples, self.batch_size, self.length, 64, 64, 3])
    out = encoder(xt)
    self.assertEqual(out.shape,
                     (self.samples, self.batch_size, self.length,
                      self.hidden_size))

  def testEncoderStaticClassNoSamples(self):
    encoder = disentangled_vae.EncoderStatic(self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.batch_size, self.length, self.hidden_size])
    dist = encoder(input_features)
    self.assertDistShape(dist, (self.latent_size,), (self.batch_size,))

  def testEncoderStaticClassSamples(self):
    encoder = disentangled_vae.EncoderStatic(self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.samples, self.batch_size, self.length, self.hidden_size])
    dist = encoder(input_features)
    self.assertDistShape(dist, (self.latent_size,),
                         (self.samples, self.batch_size))

  def testEncoderDynamicFactorizedClassNoSamples(self):
    encoder = disentangled_vae.EncoderDynamicFactorized(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.batch_size, self.length, self.hidden_size])
    dist = encoder(input_features)
    self.assertDistShape(dist, (self.latent_size,),
                         (self.batch_size, self.length))

  def testEncoderDynamicFactorizedClassSamples(self):
    encoder = disentangled_vae.EncoderDynamicFactorized(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.samples, self.batch_size, self.length, self.hidden_size])
    dist = encoder(input_features)
    self.assertDistShape(dist, (self.latent_size,),
                         (self.samples, self.batch_size, self.length))

  def testEncoderDynamicFullClassNoSamples(self):
    encoder = disentangled_vae.EncoderDynamicFull(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.batch_size, self.length, self.hidden_size])
    f = tf.random_normal([self.batch_size, self.latent_size])
    dist = encoder((input_features, f))
    self.assertDistShape(dist, (self.latent_size,),
                         (self.batch_size, self.length))

  def testEncoderDynamicFullClassStaticSamples(self):
    encoder = disentangled_vae.EncoderDynamicFull(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.batch_size, self.length, self.hidden_size])
    f = tf.random_normal([self.samples, self.batch_size, self.latent_size])
    dist = encoder((input_features, f))
    self.assertDistShape(dist, (self.latent_size,),
                         (self.samples, self.batch_size, self.length))

  def testEncoderDynamicFullClassInputSamples(self):
    encoder = disentangled_vae.EncoderDynamicFull(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.samples, self.batch_size, self.length, self.hidden_size])
    f = tf.random_normal([self.batch_size, self.latent_size])
    dist = encoder((input_features, f))
    self.assertDistShape(dist, (self.latent_size,),
                         (self.samples, self.batch_size, self.length))

  def testEncoderDynamicFullClassBothSamples(self):
    encoder = disentangled_vae.EncoderDynamicFull(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.samples, self.batch_size, self.length, self.hidden_size])
    f = tf.random_normal([self.samples, self.batch_size, self.latent_size])
    dist = encoder((input_features, f))
    self.assertDistShape(dist, (self.latent_size,),
                         (self.samples, self.batch_size, self.length))

  def testEncoderDynamicFullClassComplexStaticSampleShape(self):
    encoder = disentangled_vae.EncoderDynamicFull(
        self.latent_size, self.hidden_size)
    input_features = tf.random_normal(
        [self.batch_size, self.length, self.hidden_size])
    f = tf.random_normal(
        [self.samples*2, self.samples, self.batch_size, self.latent_size])
    dist = encoder((input_features, f))
    self.assertDistShape(
        dist, (self.latent_size,),
        (self.samples*2, self.samples, self.batch_size, self.length))
@tfe.run_all_tests_in_graph_and_eager_modes
class DisentangledSequentialVAETest(DisentangledVAETest):
  """Test class for the DisentangledSequentialVAE model.

  Exercises the model's public API in both factorization modes
  ("factorized" and "full"): generate(), reconstruct(), and the four
  sample_{static,dynamic}_{prior,posterior} methods, asserting only the
  event/batch shapes of the returned distributions and samples.
  """

  def setUp(self):
    super(DisentangledSequentialVAETest, self).setUp()
    self.latent_size_static = 10
    self.latent_size_dynamic = 11
    # One model per factorization mode; both share the base-class sizes.
    self.model_factorized = disentangled_vae.DisentangledSequentialVAE(
        self.latent_size_static, self.latent_size_dynamic, self.hidden_size,
        self.channels, "factorized")
    self.model_full = disentangled_vae.DisentangledSequentialVAE(
        self.latent_size_static, self.latent_size_dynamic, self.hidden_size,
        self.channels, "full")
    # Fake video batch: (batch, length, height, width, channels).
    self.inputs = tf.random_normal(
        [self.batch_size, self.length, 64, 64, self.channels])

  # --- generate() -----------------------------------------------------------

  def testGenerateFactorizedFixStatic(self):
    dist = self.model_factorized.generate(self.batch_size, self.length,
                                          samples=self.samples, fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFullFixStatic(self):
    dist = self.model_full.generate(self.batch_size, self.length,
                                    samples=self.samples, fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFactorizedFixDynamic(self):
    dist = self.model_factorized.generate(self.batch_size, self.length,
                                          samples=self.samples,
                                          fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFullFixDynamic(self):
    dist = self.model_full.generate(self.batch_size, self.length,
                                    samples=self.samples, fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFactorizedFixBoth(self):
    dist = self.model_factorized.generate(self.batch_size, self.length,
                                          samples=self.samples, fix_static=True,
                                          fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testGenerateFullFixBoth(self):
    dist = self.model_full.generate(self.batch_size, self.length,
                                    samples=self.samples, fix_static=True,
                                    fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  # --- reconstruct() --------------------------------------------------------

  def testReconstructFactorized(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFull(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleStatic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleStatic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleStaticFixed(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_static=True,
                                             fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleStaticFixed(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_static=True, fix_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleDynamic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleDynamic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleDynamicFixed(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_dynamic=True,
                                             fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleDynamicFixed(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_dynamic=True, fix_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSampleBoth(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             sample_static=True,
                                             sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSampleBoth(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       sample_static=True, sample_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSwapStatic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             swap_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSwapStatic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       swap_static=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFactorizedSwapDynamic(self):
    dist = self.model_factorized.reconstruct(self.inputs, self.samples,
                                             swap_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  def testReconstructFullSwapDynamic(self):
    dist = self.model_full.reconstruct(self.inputs, self.samples,
                                       swap_dynamic=True)
    self.assertDistShape(dist, (64, 64, self.channels),
                         (self.samples, self.batch_size, self.length))

  # --- static latent sampling -----------------------------------------------

  def testSampleStaticPriorFactorized(self):
    sample, dist = self.model_factorized.sample_static_prior(self.samples,
                                                             self.batch_size)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPriorFull(self):
    sample, dist = self.model_full.sample_static_prior(self.samples,
                                                       self.batch_size)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPriorFactorizedFixed(self):
    sample, dist = self.model_factorized.sample_static_prior(
        self.samples, self.batch_size, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPriorFullFixed(self):
    sample, dist = self.model_full.sample_static_prior(
        self.samples, self.batch_size, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), ())

  def testSampleStaticPosteriorFactorized(self):
    features = self.model_factorized.compressor(self.inputs)
    sample, dist = self.model_factorized.sample_static_posterior(features,
                                                                 self.samples)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), (self.batch_size,))

  def testSampleStaticPosteriorFull(self):
    features = self.model_full.compressor(self.inputs)
    sample, dist = self.model_full.sample_static_posterior(features,
                                                           self.samples)
    self.assertEqual(sample.shape, (self.samples, self.batch_size,
                                    self.latent_size_static))
    self.assertDistShape(dist, (self.latent_size_static,), (self.batch_size,))

  # --- dynamic latent sampling ----------------------------------------------

  def testSampleDynamicPriorFactorized(self):
    sample, dist = self.model_factorized.sample_dynamic_prior(
        self.samples, self.batch_size, self.length)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, self.batch_size, self.length))

  def testSampleDynamicPriorFull(self):
    sample, dist = self.model_full.sample_dynamic_prior(
        self.samples, self.batch_size, self.length)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, self.batch_size, self.length))

  def testSampleDynamicPriorFactorizedFixed(self):
    sample, dist = self.model_factorized.sample_dynamic_prior(
        self.samples, self.batch_size, self.length, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    # With fixed=True the distribution's batch dim is 1 (not batch_size) --
    # presumably one trajectory shared across the batch; the drawn sample is
    # still tiled to the full batch above.
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, 1, self.length))

  def testSampleDynamicPriorFullFixed(self):
    sample, dist = self.model_full.sample_dynamic_prior(
        self.samples, self.batch_size, self.length, fixed=True)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, 1, self.length))

  def testSampleDynamicPosteriorFactorized(self):
    features = self.model_factorized.compressor(self.inputs)
    sample, dist = self.model_factorized.sample_dynamic_posterior(
        features, self.samples)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.batch_size, self.length))

  def testSampleDynamicPosteriorFull(self):
    features = self.model_full.compressor(self.inputs)
    # The full posterior needs a static sample; its dist is then shadowed.
    static_sample, dist = self.model_full.sample_static_posterior(features,
                                                                  self.samples)
    sample, dist = self.model_full.sample_dynamic_posterior(
        features, self.samples, static_sample)
    self.assertEqual(sample.shape, (self.samples, self.batch_size, self.length,
                                    self.latent_size_dynamic))
    self.assertDistShape(dist, (self.latent_size_dynamic,),
                         (self.samples, self.batch_size, self.length))
if __name__ == "__main__":
tf.test.main() | 0.928522 | 0.608449 |
import logging
import sys
import typing
from typing import Union
from dbnd._core.errors.friendly_error.executor_k8s import (
local_engine_not_accept_remote_jobs,
)
from dbnd._core.parameter.parameter_builder import parameter
from dbnd._core.task import config
from dbnd._core.task_build.task_registry import build_task_from_config
from targets import DirTarget
if typing.TYPE_CHECKING:
from dbnd._core.run.databand_run import DatabandRun
logger = logging.getLogger(__name__)
class EngineConfig(config.Config):
    """Databand's engine configuration (where tasks are executed)."""

    # Force submit-wrapping even for engines that could run tasks in place.
    require_submit = parameter(
        description="Should the task engine be forced to submit tasks"
    ).value(False)
    # Local dbnd home directory on the engine side.
    dbnd_local_root = parameter(
        default=None, description="Local dbnd home directory at the engine environment"
    )[DirTarget]
    # Command line used to invoke dbnd inside the engine environment.
    dbnd_executable = parameter(
        default=[sys.executable, "-m", "dbnd"],
        description="'dbnd' executable path at engine environment",
    )[typing.List[str]]

    def cleanup_after_run(self):
        # Base engine holds no per-run resources to release.
        pass

    def submit_to_engine_task(self, env, task_name, args, interactive=True):
        # Base/local engines cannot accept remote job submission.
        # NOTE(review): `self.env` is not defined in the visible code —
        # presumably supplied by the config machinery; verify.
        raise local_engine_not_accept_remote_jobs(self.env, self)

    def prepare_for_run(self, run):
        # type: (DatabandRun) -> None
        # No engine-side preparation needed by default.
        return

    def _should_wrap_with_submit_task(self, task_run):
        # For the base engine, submit-wrapping is driven purely by config.
        return self.require_submit
class LocalMachineEngineConfig(EngineConfig):
    """
    Engine configuration for executing on the local machine.
    """

    _conf__task_family = "local_machine"

    def submit_to_engine_task(self, env, task_name, args, interactive=True):
        # On the local machine "submission" is just running the command as a
        # system-level bash_cmd task.
        from dbnd.tasks.basics.shell import bash_cmd

        return bash_cmd.task(
            args=args,
            task_version="now",
            task_env=env,
            task_name=task_name,
            task_is_system=True,
        )

    def _should_wrap_with_submit_task(self, task_run):
        """
        We don't want to resubmit if it's bash cmd already
        """
        from dbnd.tasks.basics.shell import bash_cmd

        # Avoid infinite wrapping: a bash_cmd task is itself the submit shim.
        if isinstance(task_run.task, bash_cmd.task):
            return False
        return super(LocalMachineEngineConfig, self)._should_wrap_with_submit_task(
            task_run
        )
def build_engine_config(name):
    # type: (Union[str, EngineConfig]) -> EngineConfig
    """Build an EngineConfig object for `name`.

    `name` may be a config section name (str) or an already-built
    EngineConfig instance; `build_task_from_config` handles both.
    """
    # Fix: the original return line had extraction artifacts (dataset
    # columns) fused onto it, breaking the statement.
    return build_task_from_config(name, EngineConfig)
import sys
import typing
from typing import Union
from dbnd._core.errors.friendly_error.executor_k8s import (
local_engine_not_accept_remote_jobs,
)
from dbnd._core.parameter.parameter_builder import parameter
from dbnd._core.task import config
from dbnd._core.task_build.task_registry import build_task_from_config
from targets import DirTarget
if typing.TYPE_CHECKING:
from dbnd._core.run.databand_run import DatabandRun
logger = logging.getLogger(__name__)
class EngineConfig(config.Config):
"""Databand's engine configuration (where tasks are executed)"""
require_submit = parameter(
description="Should the task engine be forced to submit tasks"
).value(False)
dbnd_local_root = parameter(
default=None, description="Local dbnd home directory at the engine environment"
)[DirTarget]
dbnd_executable = parameter(
default=[sys.executable, "-m", "dbnd"],
description="'dbnd' executable path at engine environment",
)[typing.List[str]]
def cleanup_after_run(self):
pass
def submit_to_engine_task(self, env, task_name, args, interactive=True):
raise local_engine_not_accept_remote_jobs(self.env, self)
def prepare_for_run(self, run):
# type: (DatabandRun) -> None
return
def _should_wrap_with_submit_task(self, task_run):
return self.require_submit
class LocalMachineEngineConfig(EngineConfig):
"""
Engine configuration for executing on the local machine.
"""
_conf__task_family = "local_machine"
def submit_to_engine_task(self, env, task_name, args, interactive=True):
from dbnd.tasks.basics.shell import bash_cmd
return bash_cmd.task(
args=args,
task_version="now",
task_env=env,
task_name=task_name,
task_is_system=True,
)
def _should_wrap_with_submit_task(self, task_run):
"""
We don't want to resubmit if it's bash cmd already
"""
from dbnd.tasks.basics.shell import bash_cmd
if isinstance(task_run.task, bash_cmd.task):
return False
return super(LocalMachineEngineConfig, self)._should_wrap_with_submit_task(
task_run
)
def build_engine_config(name):
# type: ( Union[str, EngineConfig]) -> EngineConfig
"""
Builds EngineConfig object for `name`
"""
return build_task_from_config(name, EngineConfig) | 0.529993 | 0.167832 |
import numpy as np
import cirq
import cirq.google as cg
import cirq.google.common_serializers as cgc
def test_foxtail_qubits():
    """Foxtail exposes exactly the 2x11 grid of GridQubits."""
    expected_qubits = {
        cirq.GridQubit(row, col) for row in range(2) for col in range(11)
    }
    assert expected_qubits == cirq.google.Foxtail.qubits
def test_foxtail_device_proto():
assert str(cirq.google.devices.known_devices.FOXTAIL_PROTO) == """\
valid_gate_sets {
name: "xmon"
valid_gates {
id: "xy"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 20000
}
valid_gates {
id: "z"
number_of_qubits: 1
valid_args {
name: "half_turns"
type: FLOAT
}
valid_args {
name: "type"
type: STRING
}
}
valid_gates {
id: "cz"
number_of_qubits: 2
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 50000
valid_targets: "2_qubit_targets"
}
valid_gates {
id: "meas"
valid_args {
name: "key"
type: STRING
}
valid_args {
name: "invert_mask"
type: REPEATED_BOOLEAN
}
gate_duration_picos: 4000000
valid_targets: "meas_targets"
}
}
valid_qubits: "0_0"
valid_qubits: "0_1"
valid_qubits: "0_2"
valid_qubits: "0_3"
valid_qubits: "0_4"
valid_qubits: "0_5"
valid_qubits: "0_6"
valid_qubits: "0_7"
valid_qubits: "0_8"
valid_qubits: "0_9"
valid_qubits: "0_10"
valid_qubits: "1_0"
valid_qubits: "1_1"
valid_qubits: "1_2"
valid_qubits: "1_3"
valid_qubits: "1_4"
valid_qubits: "1_5"
valid_qubits: "1_6"
valid_qubits: "1_7"
valid_qubits: "1_8"
valid_qubits: "1_9"
valid_qubits: "1_10"
valid_targets {
name: "meas_targets"
target_ordering: SUBSET_PERMUTATION
}
valid_targets {
name: "2_qubit_targets"
target_ordering: SYMMETRIC
targets {
ids: "0_0"
ids: "0_1"
}
targets {
ids: "0_0"
ids: "1_0"
}
targets {
ids: "0_1"
ids: "0_2"
}
targets {
ids: "0_1"
ids: "1_1"
}
targets {
ids: "0_2"
ids: "0_3"
}
targets {
ids: "0_2"
ids: "1_2"
}
targets {
ids: "0_3"
ids: "0_4"
}
targets {
ids: "0_3"
ids: "1_3"
}
targets {
ids: "0_4"
ids: "0_5"
}
targets {
ids: "0_4"
ids: "1_4"
}
targets {
ids: "0_5"
ids: "0_6"
}
targets {
ids: "0_5"
ids: "1_5"
}
targets {
ids: "0_6"
ids: "0_7"
}
targets {
ids: "0_6"
ids: "1_6"
}
targets {
ids: "0_7"
ids: "0_8"
}
targets {
ids: "0_7"
ids: "1_7"
}
targets {
ids: "0_8"
ids: "0_9"
}
targets {
ids: "0_8"
ids: "1_8"
}
targets {
ids: "0_9"
ids: "0_10"
}
targets {
ids: "0_9"
ids: "1_9"
}
targets {
ids: "0_10"
ids: "1_10"
}
targets {
ids: "1_0"
ids: "1_1"
}
targets {
ids: "1_1"
ids: "1_2"
}
targets {
ids: "1_2"
ids: "1_3"
}
targets {
ids: "1_3"
ids: "1_4"
}
targets {
ids: "1_4"
ids: "1_5"
}
targets {
ids: "1_5"
ids: "1_6"
}
targets {
ids: "1_6"
ids: "1_7"
}
targets {
ids: "1_7"
ids: "1_8"
}
targets {
ids: "1_8"
ids: "1_9"
}
targets {
ids: "1_9"
ids: "1_10"
}
}
"""
def test_multiple_gate_sets():
halfPiGateSet = cg.serializable_gate_set.SerializableGateSet(
gate_set_name='half_pi_gateset',
serializers=[
*cgc.SINGLE_QUBIT_HALF_PI_SERIALIZERS, cgc.MEASUREMENT_SERIALIZER
],
deserializers=[
*cgc.SINGLE_QUBIT_HALF_PI_DESERIALIZERS,
cgc.MEASUREMENT_DESERIALIZER
],
)
durations_dict = {
'xy_pi': 20_000,
'xy_half_pi': 10_000,
'xy': 53_000,
'cz': 11_000,
'meas': 14_141
}
test_proto = cg.devices.known_devices.create_device_proto_from_diagram(
"aa\naa", [cg.gate_sets.XMON, halfPiGateSet], durations_dict)
assert str(test_proto) == """\
valid_gate_sets {
name: "xmon"
valid_gates {
id: "xy"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 53000
}
valid_gates {
id: "z"
number_of_qubits: 1
valid_args {
name: "half_turns"
type: FLOAT
}
valid_args {
name: "type"
type: STRING
}
}
valid_gates {
id: "cz"
number_of_qubits: 2
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 11000
valid_targets: "2_qubit_targets"
}
valid_gates {
id: "meas"
valid_args {
name: "key"
type: STRING
}
valid_args {
name: "invert_mask"
type: REPEATED_BOOLEAN
}
gate_duration_picos: 14141
valid_targets: "meas_targets"
}
}
valid_gate_sets {
name: "half_pi_gateset"
valid_gates {
id: "xy_pi"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
gate_duration_picos: 20000
}
valid_gates {
id: "xy_half_pi"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
gate_duration_picos: 10000
}
valid_gates {
id: "meas"
valid_args {
name: "key"
type: STRING
}
valid_args {
name: "invert_mask"
type: REPEATED_BOOLEAN
}
gate_duration_picos: 14141
valid_targets: "meas_targets"
}
}
valid_qubits: "0_0"
valid_qubits: "0_1"
valid_qubits: "1_0"
valid_qubits: "1_1"
valid_targets {
name: "meas_targets"
target_ordering: SUBSET_PERMUTATION
}
valid_targets {
name: "2_qubit_targets"
target_ordering: SYMMETRIC
targets {
ids: "0_0"
ids: "0_1"
}
targets {
ids: "0_0"
ids: "1_0"
}
targets {
ids: "0_1"
ids: "1_1"
}
targets {
ids: "1_0"
ids: "1_1"
}
}
"""
def test_json_dict():
    """Both named-constant Xmon devices serialize to the expected JSON dict."""
    for device, constant_name in (
        (cg.Foxtail, 'cirq.google.Foxtail'),
        (cirq.google.Bristlecone, 'cirq.google.Bristlecone'),
    ):
        assert device._json_dict_() == {
            'cirq_type': '_NamedConstantXmonDevice',
            'constant': constant_name,
            'measurement_duration': cirq.Duration(nanos=4000),
            'exp_w_duration': cirq.Duration(nanos=20),
            'exp_11_duration': cirq.Duration(nanos=50),
            'qubits': sorted(device.qubits),
        }
def test_sycamore_device():
    """Sycamore accepts SYC and sqrt-iSWAP FSim gates with known durations."""
    # Fix: the final assert had extraction artifacts (dataset columns)
    # fused onto it, breaking the statement.
    q0 = cirq.GridQubit(5, 4)
    q1 = cirq.GridQubit(5, 5)
    syc = cirq.FSimGate(theta=np.pi / 2, phi=np.pi / 6)(q0, q1)
    sqrt_iswap = cirq.FSimGate(theta=np.pi / 4, phi=0)(q0, q1)
    cg.Sycamore.validate_operation(syc)
    cg.Sycamore.validate_operation(sqrt_iswap)
    assert cg.Sycamore.duration_of(syc) == cirq.Duration(nanos=12)
    assert cg.Sycamore.duration_of(sqrt_iswap) == cirq.Duration(nanos=32)
import cirq
import cirq.google as cg
import cirq.google.common_serializers as cgc
def test_foxtail_qubits():
expected_qubits = []
for i in range(0, 2):
for j in range(0, 11):
expected_qubits.append(cirq.GridQubit(i, j))
assert set(expected_qubits) == cirq.google.Foxtail.qubits
def test_foxtail_device_proto():
assert str(cirq.google.devices.known_devices.FOXTAIL_PROTO) == """\
valid_gate_sets {
name: "xmon"
valid_gates {
id: "xy"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 20000
}
valid_gates {
id: "z"
number_of_qubits: 1
valid_args {
name: "half_turns"
type: FLOAT
}
valid_args {
name: "type"
type: STRING
}
}
valid_gates {
id: "cz"
number_of_qubits: 2
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 50000
valid_targets: "2_qubit_targets"
}
valid_gates {
id: "meas"
valid_args {
name: "key"
type: STRING
}
valid_args {
name: "invert_mask"
type: REPEATED_BOOLEAN
}
gate_duration_picos: 4000000
valid_targets: "meas_targets"
}
}
valid_qubits: "0_0"
valid_qubits: "0_1"
valid_qubits: "0_2"
valid_qubits: "0_3"
valid_qubits: "0_4"
valid_qubits: "0_5"
valid_qubits: "0_6"
valid_qubits: "0_7"
valid_qubits: "0_8"
valid_qubits: "0_9"
valid_qubits: "0_10"
valid_qubits: "1_0"
valid_qubits: "1_1"
valid_qubits: "1_2"
valid_qubits: "1_3"
valid_qubits: "1_4"
valid_qubits: "1_5"
valid_qubits: "1_6"
valid_qubits: "1_7"
valid_qubits: "1_8"
valid_qubits: "1_9"
valid_qubits: "1_10"
valid_targets {
name: "meas_targets"
target_ordering: SUBSET_PERMUTATION
}
valid_targets {
name: "2_qubit_targets"
target_ordering: SYMMETRIC
targets {
ids: "0_0"
ids: "0_1"
}
targets {
ids: "0_0"
ids: "1_0"
}
targets {
ids: "0_1"
ids: "0_2"
}
targets {
ids: "0_1"
ids: "1_1"
}
targets {
ids: "0_2"
ids: "0_3"
}
targets {
ids: "0_2"
ids: "1_2"
}
targets {
ids: "0_3"
ids: "0_4"
}
targets {
ids: "0_3"
ids: "1_3"
}
targets {
ids: "0_4"
ids: "0_5"
}
targets {
ids: "0_4"
ids: "1_4"
}
targets {
ids: "0_5"
ids: "0_6"
}
targets {
ids: "0_5"
ids: "1_5"
}
targets {
ids: "0_6"
ids: "0_7"
}
targets {
ids: "0_6"
ids: "1_6"
}
targets {
ids: "0_7"
ids: "0_8"
}
targets {
ids: "0_7"
ids: "1_7"
}
targets {
ids: "0_8"
ids: "0_9"
}
targets {
ids: "0_8"
ids: "1_8"
}
targets {
ids: "0_9"
ids: "0_10"
}
targets {
ids: "0_9"
ids: "1_9"
}
targets {
ids: "0_10"
ids: "1_10"
}
targets {
ids: "1_0"
ids: "1_1"
}
targets {
ids: "1_1"
ids: "1_2"
}
targets {
ids: "1_2"
ids: "1_3"
}
targets {
ids: "1_3"
ids: "1_4"
}
targets {
ids: "1_4"
ids: "1_5"
}
targets {
ids: "1_5"
ids: "1_6"
}
targets {
ids: "1_6"
ids: "1_7"
}
targets {
ids: "1_7"
ids: "1_8"
}
targets {
ids: "1_8"
ids: "1_9"
}
targets {
ids: "1_9"
ids: "1_10"
}
}
"""
def test_multiple_gate_sets():
halfPiGateSet = cg.serializable_gate_set.SerializableGateSet(
gate_set_name='half_pi_gateset',
serializers=[
*cgc.SINGLE_QUBIT_HALF_PI_SERIALIZERS, cgc.MEASUREMENT_SERIALIZER
],
deserializers=[
*cgc.SINGLE_QUBIT_HALF_PI_DESERIALIZERS,
cgc.MEASUREMENT_DESERIALIZER
],
)
durations_dict = {
'xy_pi': 20_000,
'xy_half_pi': 10_000,
'xy': 53_000,
'cz': 11_000,
'meas': 14_141
}
test_proto = cg.devices.known_devices.create_device_proto_from_diagram(
"aa\naa", [cg.gate_sets.XMON, halfPiGateSet], durations_dict)
assert str(test_proto) == """\
valid_gate_sets {
name: "xmon"
valid_gates {
id: "xy"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 53000
}
valid_gates {
id: "z"
number_of_qubits: 1
valid_args {
name: "half_turns"
type: FLOAT
}
valid_args {
name: "type"
type: STRING
}
}
valid_gates {
id: "cz"
number_of_qubits: 2
valid_args {
name: "half_turns"
type: FLOAT
}
gate_duration_picos: 11000
valid_targets: "2_qubit_targets"
}
valid_gates {
id: "meas"
valid_args {
name: "key"
type: STRING
}
valid_args {
name: "invert_mask"
type: REPEATED_BOOLEAN
}
gate_duration_picos: 14141
valid_targets: "meas_targets"
}
}
valid_gate_sets {
name: "half_pi_gateset"
valid_gates {
id: "xy_pi"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
gate_duration_picos: 20000
}
valid_gates {
id: "xy_half_pi"
number_of_qubits: 1
valid_args {
name: "axis_half_turns"
type: FLOAT
}
gate_duration_picos: 10000
}
valid_gates {
id: "meas"
valid_args {
name: "key"
type: STRING
}
valid_args {
name: "invert_mask"
type: REPEATED_BOOLEAN
}
gate_duration_picos: 14141
valid_targets: "meas_targets"
}
}
valid_qubits: "0_0"
valid_qubits: "0_1"
valid_qubits: "1_0"
valid_qubits: "1_1"
valid_targets {
name: "meas_targets"
target_ordering: SUBSET_PERMUTATION
}
valid_targets {
name: "2_qubit_targets"
target_ordering: SYMMETRIC
targets {
ids: "0_0"
ids: "0_1"
}
targets {
ids: "0_0"
ids: "1_0"
}
targets {
ids: "0_1"
ids: "1_1"
}
targets {
ids: "1_0"
ids: "1_1"
}
}
"""
def test_json_dict():
assert cg.Foxtail._json_dict_() == {
'cirq_type': '_NamedConstantXmonDevice',
'constant': 'cirq.google.Foxtail',
'measurement_duration': cirq.Duration(nanos=4000),
'exp_w_duration': cirq.Duration(nanos=20),
'exp_11_duration': cirq.Duration(nanos=50),
'qubits': sorted(cirq.google.Foxtail.qubits)
}
assert cirq.google.Bristlecone._json_dict_() == {
'cirq_type': '_NamedConstantXmonDevice',
'constant': 'cirq.google.Bristlecone',
'measurement_duration': cirq.Duration(nanos=4000),
'exp_w_duration': cirq.Duration(nanos=20),
'exp_11_duration': cirq.Duration(nanos=50),
'qubits': sorted(cirq.google.Bristlecone.qubits)
}
def test_sycamore_device():
q0 = cirq.GridQubit(5, 4)
q1 = cirq.GridQubit(5, 5)
syc = cirq.FSimGate(theta=np.pi / 2, phi=np.pi / 6)(q0, q1)
sqrt_iswap = cirq.FSimGate(theta=np.pi / 4, phi=0)(q0, q1)
cg.Sycamore.validate_operation(syc)
cg.Sycamore.validate_operation(sqrt_iswap)
assert cg.Sycamore.duration_of(syc) == cirq.Duration(nanos=12)
assert cg.Sycamore.duration_of(sqrt_iswap) == cirq.Duration(nanos=32) | 0.509764 | 0.509825 |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils.tools import get_mask_from_lengths
class SpeakerEncoder(nn.Module):
    """Utterance-level speaker embedding.

    Pipeline: per-frame NormalEncoder -> 16x temporal DownsampleEncoder ->
    masked mean pooling over the valid downsampled frames.
    """

    # Total temporal downsampling of DownsampleEncoder (four 2x poolings)
    # and the embedding width shared by both sub-encoders.
    DOWNSAMPLE_FACTOR = 16
    EMBED_DIM = 256

    def __init__(self):
        super(SpeakerEncoder, self).__init__()
        self.frameencoder = NormalEncoder()
        self.dsencoder = DownsampleEncoder()

    def forward(self, inputs, input_lens):
        """Return a [B, EMBED_DIM] speaker embedding.

        Args:
            inputs: [B, T, 80] mel frames (assumed — TODO confirm with caller).
            input_lens: [B] valid lengths in frames.
        """
        # Bug fix: use the device the data actually lives on. The previous
        # torch.cuda.is_available() check put the mask on CUDA even when the
        # model/inputs were on CPU, causing a device-mismatch error.
        device = inputs.device
        max_len = inputs.size(1) // self.DOWNSAMPLE_FACTOR
        out_lens = input_lens // self.DOWNSAMPLE_FACTOR
        # 1 for valid downsampled frames, 0 for padding; broadcast to EMBED_DIM.
        out_masks = (
            (1 - get_mask_from_lengths(out_lens, max_len).float())
            .unsqueeze(-1)
            .expand(-1, -1, self.EMBED_DIM)
            .to(device)
        )  # [B, T_out, EMBED_DIM]
        outs = self.frameencoder(inputs)
        outs = self.dsencoder(outs)
        # Masked mean over valid downsampled frames.
        spkemb = torch.sum(outs * out_masks, dim=1) / out_lens.unsqueeze(-1).expand(
            -1, self.EMBED_DIM
        )
        return spkemb
class NormalEncoder(nn.Module):
    """Per-frame encoder: stacked Conv1d -> ReLU -> BatchNorm -> Dropout
    layers followed by a linear projection to `out_dim`."""

    def __init__(self, in_dim=80, conv_channels=[512, 512], kernel_size=5,
                 stride=1, padding=2, dropout=0.2, out_dim=256):
        super(NormalEncoder, self).__init__()
        n_layers = len(conv_channels)
        channel_sizes = [in_dim] + conv_channels
        # One 1-D convolution per entry in conv_channels.
        self.conv1ds = nn.ModuleList(
            [nn.Conv1d(in_channels=channel_sizes[idx],
                       out_channels=channel_sizes[idx + 1],
                       kernel_size=kernel_size,
                       stride=stride,
                       padding=padding)
             for idx in range(n_layers)])
        # Batch normalization after each convolution.
        self.bns = nn.ModuleList(
            [nn.BatchNorm1d(num_features=ch) for ch in conv_channels])
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)
        # Final projection from the last conv width to the output width.
        self.outlayer = nn.Linear(in_features=conv_channels[-1], out_features=out_dim)

    def forward(self, x):
        """Encode x of shape [B, T, in_dim] into [B, T, out_dim]."""
        hidden = x.transpose(1, 2)  # -> [B, in_dim, T] for Conv1d
        for conv, bn in zip(self.conv1ds, self.bns):
            # Same op order as before: conv -> relu -> bn -> dropout.
            hidden = self.dropout(bn(self.relu(conv(hidden))))
        hidden = hidden.transpose(1, 2)  # back to [B, T, C]
        batch, steps = hidden.size(0), hidden.size(1)
        hidden = hidden.contiguous().view(batch, steps, -1)
        return self.outlayer(hidden)
class DownsampleEncoder(nn.Module):
    """Temporal downsampling encoder.

    Each layer is Conv1d -> ReLU -> BatchNorm -> Dropout -> AvgPool1d; with
    the default pooling_sizes the time axis shrinks by 2**4 = 16. A final
    Linear+Tanh projects to `out_dim`.
    """

    def __init__(self, in_dim=256, conv_channels=[128, 256, 512, 512], kernel_size=3,
                 stride=1, padding=1, dropout=0.2, pooling_sizes=[2, 2, 2, 2],
                 out_dim=256):
        super(DownsampleEncoder, self).__init__()
        # NOTE: list defaults are read-only here, so the shared-mutable-default
        # pitfall does not bite; kept as lists for backward compatibility.
        K = len(conv_channels)
        filters = [in_dim] + conv_channels
        # 1-D convolution layers.
        self.conv1ds = nn.ModuleList(
            [nn.Conv1d(in_channels=filters[i],
                       out_channels=filters[i + 1],
                       kernel_size=kernel_size,
                       stride=stride,
                       padding=padding)
             for i in range(K)])
        # Batch normalization after each convolution.
        self.bns = nn.ModuleList(
            [nn.BatchNorm1d(num_features=conv_channels[i])
             for i in range(K)])
        # Average pooling performs the per-layer temporal downsampling.
        self.pools = nn.ModuleList(
            [nn.AvgPool1d(kernel_size=pooling_sizes[i]) for i in range(K)]
        )
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout)
        # Bounded (-1, 1) output projection.
        self.local_outlayer = nn.Sequential(
            nn.Linear(in_features=conv_channels[-1],
                      out_features=out_dim),
            nn.Tanh()
        )

    def forward(self, inputs):
        """Encode [B, T, in_dim] into [B, T // prod(pooling_sizes), out_dim]."""
        out = inputs.transpose(1, 2)  # -> [B, in_dim, T] for Conv1d
        for conv, bn, pool in zip(self.conv1ds, self.bns, self.pools):
            out = conv(out)
            out = self.relu(out)
            out = bn(out)
            out = self.dropout(out)
            out = pool(out)
        out = out.transpose(1, 2)  # back to [B, T', C]
        B, T = out.size(0), out.size(1)
        out = out.contiguous().view(B, T, -1)
        local_output = self.local_outlayer(out)
        # Fix: the original return line had extraction artifacts (dataset
        # columns) fused onto it, breaking the statement.
        return local_output
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils.tools import get_mask_from_lengths
class SpeakerEncoder(nn.Module):
def __init__(self):
super(SpeakerEncoder, self).__init__()
self.frameencoder = NormalEncoder()
self.dsencoder = DownsampleEncoder()
def forward(self, inputs, input_lens):
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
max_len = inputs.size(1) // 16
out_lens = input_lens // 16
out_masks = (1 - get_mask_from_lengths(out_lens, max_len).float()).unsqueeze(-1).expand(-1, -1, 256).to(device) # [B, T_y]
outs = self.frameencoder(inputs)
outs = self.dsencoder(outs)
spkemb = torch.sum(outs * out_masks, axis=1) / out_lens.unsqueeze(-1).expand(-1, 256)
return spkemb
class NormalEncoder(nn.Module):
def __init__(self, in_dim=80, conv_channels=[512, 512], kernel_size=5, stride=1, padding=2, dropout=0.2, out_dim=256):
super(NormalEncoder, self).__init__()
# convolution layers followed by batch normalization and ReLU activation
K = len(conv_channels)
# 1-D convolution layers
filters = [in_dim] + conv_channels
self.conv1ds = nn.ModuleList(
[nn.Conv1d(in_channels=filters[i],
out_channels=filters[i+1],
kernel_size=kernel_size,
stride=stride,
padding=padding)
for i in range(K)])
# 1-D batch normalization (BN) layers
self.bns = nn.ModuleList(
[nn.BatchNorm1d(num_features=conv_channels[i])
for i in range(K)])
# ReLU
self.relu = nn.ReLU()
# dropout
self.dropout = nn.Dropout(dropout)
self.outlayer = nn.Linear(in_features=conv_channels[-1], out_features=out_dim)
def forward(self, x):
# transpose to (B, embed_dim, T) for convolution, and then back
out = x.transpose(1, 2)
for conv, bn in zip(self.conv1ds, self.bns):
out = conv(out)
out = self.relu(out)
out = bn(out) # [B, 128, T//2^K, mel_dim//2^K], where 128 = conv_channels[-1]
out = self.dropout(out)
out = out.transpose(1, 2) # [B, T//2^K, 128, mel_dim//2^K]
B, T = out.size(0), out.size(1)
out = out.contiguous().view(B, T, -1) # [B, T//2^K, 128*mel_dim//2^K]
out = self.outlayer(out)
return out
class DownsampleEncoder(nn.Module):
def __init__(self, in_dim=256, conv_channels=[128, 256, 512, 512], kernel_size=3, stride=1, padding=1, dropout=0.2, pooling_sizes=[2, 2, 2, 2], out_dim=256):
super(DownsampleEncoder, self).__init__()
K = len(conv_channels)
# 1-D convolution layers
filters = [in_dim] + conv_channels
self.conv1ds = nn.ModuleList(
[nn.Conv1d(in_channels=filters[i],
out_channels=filters[i+1],
kernel_size=kernel_size,
stride=stride,
padding=padding)
for i in range(K)])
# 1-D batch normalization (BN) layers
self.bns = nn.ModuleList(
[nn.BatchNorm1d(num_features=conv_channels[i])
for i in range(K)])
self.pools = nn.ModuleList(
[nn.AvgPool1d(kernel_size=pooling_sizes[i]) for i in range(K)]
)
# ReLU
self.relu = nn.ReLU()
# dropout
self.dropout = nn.Dropout(dropout)
self.local_outlayer = nn.Sequential(
nn.Linear(in_features=conv_channels[-1],
out_features=out_dim),
nn.Tanh()
)
def forward(self, inputs):
out = inputs.transpose(1, 2)
for conv, bn, pool in zip(self.conv1ds, self.bns, self.pools):
out = conv(out)
out = self.relu(out)
out = bn(out) # [B, 128, T//2^K, mel_dim//2^K], where 128 = conv_channels[-1]
out = self.dropout(out)
out = pool(out)
out = out.transpose(1, 2) # [B, T//2^K, 128, mel_dim//2^K]
B, T = out.size(0), out.size(1)
out = out.contiguous().view(B, T, -1) # [B, T//2^K, 128*mel_dim//2^K]
local_output = self.local_outlayer(out)
return local_output | 0.94248 | 0.437703 |
import hashlib
import json
import re
from dateutil import parser
from dojo.models import Finding
class DawnScannerParser(object):
def get_scan_types(self):
return ["DawnScanner Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type # no custom label for now
def get_description_for_scan_types(self, scan_type):
return "Dawnscanner (-j) output file can be imported in JSON format."
def get_findings(self, filename, test):
data = json.load(filename)
dupes = dict()
find_date = parser.parse(data['scan_started'])
for item in data['vulnerabilities']:
categories = ''
language = ''
mitigation = ''
impact = ''
references = ''
findingdetail = ''
title = ''
group = ''
status = ''
title = item['name'].upper()
if "CVE" in title:
# FIXME switch to a function
cve = re.findall(r'CVE-\d{4}-\d{4,7}', title)[0]
else:
cve = None
# Finding details information
findingdetail = item['message'] if item['message'][0:2] != 'b,' else item['message'][0:-1]
sev = item['severity'].capitalize()
mitigation = item['remediation']
references = item['cve_link']
dupe_key = hashlib.sha256(str(sev + '|' + title).encode("utf-8")).hexdigest()
if dupe_key in dupes:
find = dupes[dupe_key]
else:
dupes[dupe_key] = True
find = Finding(
title=title,
test=test,
active=False,
cve=cve,
verified=False,
description=findingdetail,
severity=sev,
numerical_severity=Finding.get_numerical_severity(sev),
mitigation=mitigation,
impact=impact,
references=references,
url='N/A',
date=find_date,
static_finding=True)
dupes[dupe_key] = find
return list(dupes.values()) | dojo/tools/dawnscanner/parser.py | import hashlib
import json
import re
from dateutil import parser
from dojo.models import Finding
class DawnScannerParser(object):
def get_scan_types(self):
return ["DawnScanner Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type # no custom label for now
def get_description_for_scan_types(self, scan_type):
return "Dawnscanner (-j) output file can be imported in JSON format."
def get_findings(self, filename, test):
data = json.load(filename)
dupes = dict()
find_date = parser.parse(data['scan_started'])
for item in data['vulnerabilities']:
categories = ''
language = ''
mitigation = ''
impact = ''
references = ''
findingdetail = ''
title = ''
group = ''
status = ''
title = item['name'].upper()
if "CVE" in title:
# FIXME switch to a function
cve = re.findall(r'CVE-\d{4}-\d{4,7}', title)[0]
else:
cve = None
# Finding details information
findingdetail = item['message'] if item['message'][0:2] != 'b,' else item['message'][0:-1]
sev = item['severity'].capitalize()
mitigation = item['remediation']
references = item['cve_link']
dupe_key = hashlib.sha256(str(sev + '|' + title).encode("utf-8")).hexdigest()
if dupe_key in dupes:
find = dupes[dupe_key]
else:
dupes[dupe_key] = True
find = Finding(
title=title,
test=test,
active=False,
cve=cve,
verified=False,
description=findingdetail,
severity=sev,
numerical_severity=Finding.get_numerical_severity(sev),
mitigation=mitigation,
impact=impact,
references=references,
url='N/A',
date=find_date,
static_finding=True)
dupes[dupe_key] = find
return list(dupes.values()) | 0.232746 | 0.140013 |
import functools
from json import loads as _loads
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.polling.base_polling import LROBasePolling
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .._vendor import _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
# fmt: off
def build_paging_get_no_item_name_pages_request(
**kwargs # type: Any
):
# type: (...) -> HttpRequest
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/paging/noitemname')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
headers=header_parameters,
**kwargs
)
def build_paging_get_null_next_link_name_pages_request(
**kwargs # type: Any
):
# type: (...) -> HttpRequest
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/paging/nullnextlink')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
headers=header_parameters,
**kwargs
)
def build_paging_get_single_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for the /paging/single operation."""
    accept = "application/json"
    # URL (overridable via template_url for testing/routing).
    url = kwargs.pop("template_url", '/paging/single')
    # Headers: advertise JSON as the accepted content type.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_first_response_empty_request(
**kwargs # type: Any
):
# type: (...) -> HttpRequest
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/paging/firstResponseEmpty/1')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
headers=header_parameters,
**kwargs
)
def build_paging_get_multiple_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for the /paging/multiple operation.

    Optional keyword arguments: client_request_id (str), maxresults (int),
    timeout (int seconds, default 30). Each is sent as a header only when
    not None.
    """
    client_request_id = kwargs.pop('client_request_id', None)  # type: Optional[str]
    maxresults = kwargs.pop('maxresults', None)  # type: Optional[int]
    timeout = kwargs.pop('timeout', 30)  # type: Optional[int]
    accept = "application/json"
    # Construct URL (overridable via template_url).
    url = kwargs.pop("template_url", '/paging/multiple')
    # Construct headers; serialize only the options that were provided.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if client_request_id is not None:
        header_parameters['client-request-id'] = _SERIALIZER.header("client_request_id", client_request_id, 'str')
    if maxresults is not None:
        header_parameters['maxresults'] = _SERIALIZER.header("maxresults", maxresults, 'int')
    if timeout is not None:
        header_parameters['timeout'] = _SERIALIZER.header("timeout", timeout, 'int')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        headers=header_parameters,
        **kwargs
    )
def build_paging_get_with_query_params_request(
**kwargs # type: Any
):
# type: (...) -> HttpRequest
required_query_parameter = kwargs.pop('required_query_parameter') # type: int
query_constant = True
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/paging/multiple/getWithQueryParams')
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['requiredQueryParameter'] = _SERIALIZER.query("required_query_parameter", required_query_parameter, 'int')
query_parameters['queryConstant'] = _SERIALIZER.query("query_constant", query_constant, 'bool')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_paging_next_operation_with_query_params_request(
**kwargs # type: Any
):
# type: (...) -> HttpRequest
query_constant = True
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/paging/multiple/nextOperationWithQueryParams')
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['queryConstant'] = _SERIALIZER.query("query_constant", query_constant, 'bool')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_paging_get_odata_multiple_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/odata."""
    client_request_id = kwargs.pop('client_request_id', None)  # type: Optional[str]
    maxresults = kwargs.pop('maxresults', None)  # type: Optional[int]
    timeout = kwargs.pop('timeout', 30)  # type: Optional[int]
    url = kwargs.pop("template_url", '/paging/multiple/odata')
    # Merge the optional paging controls into caller-supplied headers;
    # a None value means the header is not sent at all.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    for wire_name, param_name, value, data_type in (
        ('client-request-id', "client_request_id", client_request_id, 'str'),
        ('maxresults', "maxresults", maxresults, 'int'),
        ('timeout', "timeout", timeout, 'int'),
    ):
        if value is not None:
            header_parameters[wire_name] = _SERIALIZER.header(param_name, value, data_type)
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_with_offset_request(
    offset,  # type: int
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/withpath/{offset}."""
    client_request_id = kwargs.pop('client_request_id', None)  # type: Optional[str]
    maxresults = kwargs.pop('maxresults', None)  # type: Optional[int]
    timeout = kwargs.pop('timeout', 30)  # type: Optional[int]
    # Fill the offset into the URL template.
    url = _format_url_section(
        kwargs.pop("template_url", '/paging/multiple/withpath/{offset}'),
        offset=_SERIALIZER.url("offset", offset, 'int'),
    )
    # Optional paging controls become headers only when a value is provided.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    for wire_name, param_name, value, data_type in (
        ('client-request-id', "client_request_id", client_request_id, 'str'),
        ('maxresults', "maxresults", maxresults, 'int'),
        ('timeout', "timeout", timeout, 'int'),
    ):
        if value is not None:
            header_parameters[wire_name] = _SERIALIZER.header(param_name, value, data_type)
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_retry_first_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/retryfirst."""
    url = kwargs.pop("template_url", '/paging/multiple/retryfirst')
    # Merge the Accept header into any caller-supplied headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_retry_second_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/retrysecond."""
    url = kwargs.pop("template_url", '/paging/multiple/retrysecond')
    # Merge the Accept header into any caller-supplied headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_single_pages_failure_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/single/failure."""
    url = kwargs.pop("template_url", '/paging/single/failure')
    # Merge the Accept header into any caller-supplied headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_failure_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/failure."""
    url = kwargs.pop("template_url", '/paging/multiple/failure')
    # Merge the Accept header into any caller-supplied headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_failure_uri_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/failureuri."""
    url = kwargs.pop("template_url", '/paging/multiple/failureuri')
    # Merge the Accept header into any caller-supplied headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_fragment_next_link_request(
    tenant,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/fragment/{tenant}.

    Expects the keyword argument ``api_version`` (str).
    """
    api_version = kwargs.pop('api_version')  # type: str
    # Fill the tenant into the URL template.
    url = _format_url_section(
        kwargs.pop("template_url", '/paging/multiple/fragment/{tenant}'),
        tenant=_SERIALIZER.url("tenant", tenant, 'str'),
    )
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api_version'] = _SERIALIZER.query("api_version", api_version, 'str')
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_fragment_with_grouping_next_link_request(
    tenant,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/fragmentwithgrouping/{tenant}.

    Expects the keyword argument ``api_version`` (str).
    """
    api_version = kwargs.pop('api_version')  # type: str
    # Fill the tenant into the URL template.
    url = _format_url_section(
        kwargs.pop("template_url", '/paging/multiple/fragmentwithgrouping/{tenant}'),
        tenant=_SERIALIZER.url("tenant", tenant, 'str'),
    )
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api_version'] = _SERIALIZER.query("api_version", api_version, 'str')
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_paging_get_multiple_pages_lro_request_initial(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the initial POST request for the /paging/multiple/lro operation."""
    client_request_id = kwargs.pop('client_request_id', None)  # type: Optional[str]
    maxresults = kwargs.pop('maxresults', None)  # type: Optional[int]
    timeout = kwargs.pop('timeout', 30)  # type: Optional[int]
    url = kwargs.pop("template_url", '/paging/multiple/lro')
    # Optional paging controls become headers only when a value is provided.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    for wire_name, param_name, value, data_type in (
        ('client-request-id', "client_request_id", client_request_id, 'str'),
        ('maxresults', "maxresults", maxresults, 'int'),
        ('timeout', "timeout", timeout, 'int'),
    ):
        if value is not None:
            header_parameters[wire_name] = _SERIALIZER.header(param_name, value, data_type)
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    # NOTE: unlike the other paging builders, the LRO start is a POST.
    return HttpRequest(method="POST", url=url, headers=header_parameters, **kwargs)
def build_paging_next_fragment_request(
    tenant,  # type: str
    next_link,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/fragment/{tenant}/{nextLink}.

    Expects the keyword argument ``api_version`` (str). The next link is
    substituted without quoting so the fragment is used verbatim.
    """
    api_version = kwargs.pop('api_version')  # type: str
    url = _format_url_section(
        kwargs.pop("template_url", '/paging/multiple/fragment/{tenant}/{nextLink}'),
        tenant=_SERIALIZER.url("tenant", tenant, 'str'),
        nextLink=_SERIALIZER.url("next_link", next_link, 'str', skip_quote=True),
    )
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api_version'] = _SERIALIZER.query("api_version", api_version, 'str')
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_paging_next_fragment_with_grouping_request(
    tenant,  # type: str
    next_link,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}.

    Expects the keyword argument ``api_version`` (str). The next link is
    substituted without quoting so the fragment is used verbatim.
    """
    api_version = kwargs.pop('api_version')  # type: str
    url = _format_url_section(
        kwargs.pop("template_url", '/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}'),
        tenant=_SERIALIZER.url("tenant", tenant, 'str'),
        nextLink=_SERIALIZER.url("next_link", next_link, 'str', skip_quote=True),
    )
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api_version'] = _SERIALIZER.query("api_version", api_version, 'str')
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, params=query_parameters, headers=header_parameters, **kwargs)
def build_paging_get_paging_model_with_item_name_with_xms_client_name_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for /paging/itemNameWithXMSClientName."""
    url = kwargs.pop("template_url", '/paging/itemNameWithXMSClientName')
    # Merge the Accept header into any caller-supplied headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    return HttpRequest(method="GET", url=url, headers=header_parameters, **kwargs)
# fmt: on
class PagingOperations(object):
"""PagingOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    @distributed_trace
    def get_no_item_name_pages(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that must return result of the default 'value' node.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "value": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            # First page uses the operation's own URL; subsequent pages follow
            # the service-provided next link verbatim.
            if not next_link:
                request = build_paging_get_no_item_name_pages_request(
                    template_url=self.get_no_item_name_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_no_item_name_pages_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                # Next links are always followed with GET.
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            # Items for this operation live under the default 'value' node.
            list_of_elem = deserialized["value"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            # Remaining kwargs (after the pops above) flow into the pipeline run.
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_no_item_name_pages.metadata = {"url": "/paging/noitemname"}  # type: ignore
    @distributed_trace
    def get_null_next_link_name_pages(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that must ignore any kind of nextLink, and stop after page 1.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_null_next_link_name_pages_request(
                    template_url=self.get_null_next_link_name_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_null_next_link_name_pages_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            # Always report no continuation token: any nextLink in the
            # payload is deliberately ignored, so paging stops after page 1.
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_null_next_link_name_pages.metadata = {"url": "/paging/nullnextlink"}  # type: ignore
    @distributed_trace
    def get_single_pages(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that finishes on the first call without a nextlink.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            # First page uses the operation URL; later pages (if any) follow
            # the service-provided next link.
            if not next_link:
                request = build_paging_get_single_pages_request(
                    template_url=self.get_single_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_single_pages_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_single_pages.metadata = {"url": "/paging/single"}  # type: ignore
    @distributed_trace
    def first_response_empty(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation whose first response's items list is empty, but still returns a next link.
        Second (and final) call, will give you an items list of 1.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "value": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_first_response_empty_request(
                    template_url=self.first_response_empty.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_first_response_empty_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            # This operation's items live under 'value' (not 'values').
            list_of_elem = deserialized["value"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    first_response_empty.metadata = {"url": "/paging/firstResponseEmpty/1"}  # type: ignore
    @distributed_trace
    def get_multiple_pages(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink that has 10 pages.

        :keyword client_request_id:
        :paramtype client_request_id: str
        :keyword maxresults: Sets the maximum number of items to return in the response.
        :paramtype maxresults: int
        :keyword timeout: Sets the maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds.
        :paramtype timeout: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        # Pop operation options here so the remaining kwargs can be passed
        # straight to the pipeline run below.
        client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
        maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
        timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            # The same optional headers are re-sent on every page request.
            if not next_link:
                request = build_paging_get_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=self.get_multiple_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_multiple_pages.metadata = {"url": "/paging/multiple"}  # type: ignore
    @distributed_trace
    def get_with_query_params(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a next operation. It has a different query parameter from it's
        next operation nextOperationWithQueryParams. Returns a ProductResult.

        :keyword required_query_parameter: A required integer query parameter. Put in value '100' to
         pass test.
        :paramtype required_query_parameter: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        required_query_parameter = kwargs.pop("required_query_parameter")  # type: int
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_with_query_params_request(
                    required_query_parameter=required_query_parameter,
                    template_url=self.get_with_query_params.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                # Subsequent pages go to a *different* operation with its own
                # fixed URL and query parameters, not to the returned link.
                request = build_paging_next_operation_with_query_params_request(
                    template_url="/paging/multiple/nextOperationWithQueryParams",
                )
                request.url = self._client.format_url(request.url)
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_with_query_params.metadata = {"url": "/paging/multiple/getWithQueryParams"}  # type: ignore
    @distributed_trace
    def get_odata_multiple_pages(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink in odata format that has 10 pages.

        :keyword client_request_id:
        :paramtype client_request_id: str
        :keyword maxresults: Sets the maximum number of items to return in the response.
        :paramtype maxresults: int
        :keyword timeout: Sets the maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds.
        :paramtype timeout: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "odata.nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
        maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
        timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_odata_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=self.get_odata_multiple_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_odata_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            # The continuation token uses the odata-style key.
            return deserialized.get("odata.nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_odata_multiple_pages.metadata = {"url": "/paging/multiple/odata"}  # type: ignore
    @distributed_trace
    def get_multiple_pages_with_offset(
        self,
        offset,  # type: int
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink that has 10 pages.

        :param offset: Offset of return value.
        :type offset: int
        :keyword client_request_id:
        :paramtype client_request_id: str
        :keyword maxresults: Sets the maximum number of items to return in the response.
        :paramtype maxresults: int
        :keyword timeout: Sets the maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds.
        :paramtype timeout: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
        maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
        timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            # The offset path parameter and the optional headers are re-sent
            # on every page request.
            if not next_link:
                request = build_paging_get_multiple_pages_with_offset_request(
                    offset=offset,
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=self.get_multiple_pages_with_offset.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_with_offset_request(
                    offset=offset,
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_multiple_pages_with_offset.metadata = {"url": "/paging/multiple/withpath/{offset}"}  # type: ignore
    @distributed_trace
    def get_multiple_pages_retry_first(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that fails on the first call with 500 and then retries and then get a
        response including a nextLink that has 10 pages.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            # Retries on the initial 500 are handled by the pipeline's retry
            # policy, not by this method.
            if not next_link:
                request = build_paging_get_multiple_pages_retry_first_request(
                    template_url=self.get_multiple_pages_retry_first.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_retry_first_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_multiple_pages_retry_first.metadata = {"url": "/paging/multiple/retryfirst"}  # type: ignore
    @distributed_trace
    def get_multiple_pages_retry_second(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink that has 10 pages, of which the 2nd call fails
        first with 500. The client should retry and finish all 10 pages eventually.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            # Retries on the failing 2nd page are handled by the pipeline's
            # retry policy, not by this method.
            if not next_link:
                request = build_paging_get_multiple_pages_retry_second_request(
                    template_url=self.get_multiple_pages_retry_second.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_retry_second_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_multiple_pages_retry_second.metadata = {"url": "/paging/multiple/retrysecond"}  # type: ignore
    @distributed_trace
    def get_single_pages_failure(
        self, **kwargs  # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that receives a 400 on the first call.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {
                            "properties": {
                                "id": 0,  # Optional.
                                "name": "str"  # Optional.
                            }
                        }
                    ]
                }
        """
        cls = kwargs.pop("cls", None)  # type: ClsType[Any]
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_single_pages_failure_request(
                    template_url=self.get_single_pages_failure.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_single_pages_failure_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            # The expected 400 is surfaced to the caller as HttpResponseError.
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    get_single_pages_failure.metadata = {"url": "/paging/single/failure"}  # type: ignore
@distributed_trace
def get_multiple_pages_failure(
    self, **kwargs  # type: Any
):
    # type: (...) -> Iterable[Any]
    """A paging operation that receives a 400 on the second call.

    :return: An iterator like instance of JSON object
    :rtype: ~azure.core.paging.ItemPaged[Any]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # response body for status code(s): 200
            response.json() == {
                "nextLink": "str",  # Optional.
                "values": [
                    {
                        "properties": {
                            "id": 0,  # Optional.
                            "name": "str"  # Optional.
                        }
                    }
                ]
            }
    """
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    # Map well-known statuses to typed exceptions; extendable via "error_map".
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        # First page uses the operation's templated URL; later pages follow nextLink.
        if not next_link:
            request = build_paging_get_multiple_pages_failure_request(
                template_url=self.get_multiple_pages_failure.metadata["url"],
            )
            request.url = self._client.format_url(request.url)
        else:
            request = build_paging_get_multiple_pages_failure_request(
                template_url=next_link,
            )
            request.url = self._client.format_url(request.url)
            # nextLink pages are always fetched with GET.
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Pull the page's items and continuation token out of the JSON body.
        deserialized = _loads(pipeline_response.http_response.body())
        list_of_elem = deserialized["values"]
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.get("nextLink", None), iter(list_of_elem)

    def get_next(next_link=None):
        # Issue one page request; anything but 200 is an error.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

get_multiple_pages_failure.metadata = {"url": "/paging/multiple/failure"}  # type: ignore
@distributed_trace
def get_multiple_pages_failure_uri(
    self, **kwargs  # type: Any
):
    # type: (...) -> Iterable[Any]
    """A paging operation that receives an invalid nextLink.

    :return: An iterator like instance of JSON object
    :rtype: ~azure.core.paging.ItemPaged[Any]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # response body for status code(s): 200
            response.json() == {
                "nextLink": "str",  # Optional.
                "values": [
                    {
                        "properties": {
                            "id": 0,  # Optional.
                            "name": "str"  # Optional.
                        }
                    }
                ]
            }
    """
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    # Map well-known statuses to typed exceptions; extendable via "error_map".
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        # First page uses the operation's templated URL; later pages follow nextLink.
        if not next_link:
            request = build_paging_get_multiple_pages_failure_uri_request(
                template_url=self.get_multiple_pages_failure_uri.metadata["url"],
            )
            request.url = self._client.format_url(request.url)
        else:
            request = build_paging_get_multiple_pages_failure_uri_request(
                template_url=next_link,
            )
            request.url = self._client.format_url(request.url)
            # nextLink pages are always fetched with GET.
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Pull the page's items and continuation token out of the JSON body.
        deserialized = _loads(pipeline_response.http_response.body())
        list_of_elem = deserialized["values"]
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.get("nextLink", None), iter(list_of_elem)

    def get_next(next_link=None):
        # Issue one page request; anything but 200 is an error.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

get_multiple_pages_failure_uri.metadata = {"url": "/paging/multiple/failureuri"}  # type: ignore
@distributed_trace
def get_multiple_pages_fragment_next_link(
    self,
    tenant,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable[Any]
    """A paging operation that doesn't return a full URL, just a fragment.

    :param tenant: Sets the tenant to use.
    :type tenant: str
    :keyword api_version: Sets the api version to use.
    :paramtype api_version: str
    :return: An iterator like instance of JSON object
    :rtype: ~azure.core.paging.ItemPaged[Any]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # response body for status code(s): 200
            response.json() == {
                "odata.nextLink": "str",  # Optional.
                "values": [
                    {
                        "properties": {
                            "id": 0,  # Optional.
                            "name": "str"  # Optional.
                        }
                    }
                ]
            }
    """
    # Required keyword argument: raises KeyError if the caller omits it.
    api_version = kwargs.pop("api_version")  # type: str
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        # First page hits the operation URL; later pages pass the fragment to the
        # dedicated "next" builder, which splices it into the {nextLink} template.
        if not next_link:
            request = build_paging_get_multiple_pages_fragment_next_link_request(
                tenant=tenant,
                api_version=api_version,
                template_url=self.get_multiple_pages_fragment_next_link.metadata["url"],
            )
            request.url = self._client.format_url(request.url)
        else:
            request = build_paging_next_fragment_request(
                tenant=tenant,
                next_link=next_link,
                api_version=api_version,
                template_url="/paging/multiple/fragment/{tenant}/{nextLink}",
            )
            request.url = self._client.format_url(request.url)
        return request

    def extract_data(pipeline_response):
        # Continuation token for this operation lives under "odata.nextLink".
        deserialized = _loads(pipeline_response.http_response.body())
        list_of_elem = deserialized["values"]
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.get("odata.nextLink", None), iter(list_of_elem)

    def get_next(next_link=None):
        # Issue one page request; anything but 200 is an error.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

get_multiple_pages_fragment_next_link.metadata = {"url": "/paging/multiple/fragment/{tenant}"}  # type: ignore
@distributed_trace
def get_multiple_pages_fragment_with_grouping_next_link(
    self,
    tenant,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable[Any]
    """A paging operation that doesn't return a full URL, just a fragment with parameters grouped.

    :param tenant: Sets the tenant to use.
    :type tenant: str
    :keyword api_version: Sets the api version to use.
    :paramtype api_version: str
    :return: An iterator like instance of JSON object
    :rtype: ~azure.core.paging.ItemPaged[Any]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # response body for status code(s): 200
            response.json() == {
                "odata.nextLink": "str",  # Optional.
                "values": [
                    {
                        "properties": {
                            "id": 0,  # Optional.
                            "name": "str"  # Optional.
                        }
                    }
                ]
            }
    """
    # Required keyword argument: raises KeyError if the caller omits it.
    api_version = kwargs.pop("api_version")  # type: str
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        # First page hits the operation URL; later pages pass the fragment to the
        # dedicated "next" builder, which splices it into the {nextLink} template.
        if not next_link:
            request = build_paging_get_multiple_pages_fragment_with_grouping_next_link_request(
                tenant=tenant,
                api_version=api_version,
                template_url=self.get_multiple_pages_fragment_with_grouping_next_link.metadata["url"],
            )
            request.url = self._client.format_url(request.url)
        else:
            request = build_paging_next_fragment_with_grouping_request(
                tenant=tenant,
                next_link=next_link,
                api_version=api_version,
                template_url="/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}",
            )
            request.url = self._client.format_url(request.url)
        return request

    def extract_data(pipeline_response):
        # Continuation token for this operation lives under "odata.nextLink".
        deserialized = _loads(pipeline_response.http_response.body())
        list_of_elem = deserialized["values"]
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.get("odata.nextLink", None), iter(list_of_elem)

    def get_next(next_link=None):
        # Issue one page request; anything but 200 is an error.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

get_multiple_pages_fragment_with_grouping_next_link.metadata = {"url": "/paging/multiple/fragmentwithgrouping/{tenant}"}  # type: ignore
def _get_multiple_pages_lro_initial(
    self, **kwargs  # type: Any
):
    # type: (...) -> Any
    """Send the initial POST of the LRO paging operation and return its deserialized JSON body.

    Expects the service to answer 202; any other status is mapped to a typed error.
    """
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]

    request = build_paging_get_multiple_pages_lro_request_initial(
        client_request_id=client_request_id,
        maxresults=maxresults,
        timeout=timeout,
        template_url=self._get_multiple_pages_lro_initial.metadata["url"],
    )
    request.url = self._client.format_url(request.url)

    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)

    # An empty body is valid for the initial 202 response.
    if response.content:
        deserialized = response.json()
    else:
        deserialized = None

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

_get_multiple_pages_lro_initial.metadata = {"url": "/paging/multiple/lro"}  # type: ignore
@distributed_trace
def begin_get_multiple_pages_lro(
    self, **kwargs  # type: Any
):
    # type: (...) -> LROPoller[ItemPaged[Any]]
    """A long-running paging operation that includes a nextLink that has 10 pages.

    :keyword client_request_id:
    :paramtype client_request_id: str
    :keyword maxresults: Sets the maximum number of items to return in the response.
    :paramtype maxresults: int
    :keyword timeout: Sets the maximum time that the server can spend processing the request, in
     seconds. The default is 30 seconds.
    :paramtype timeout: int
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns an iterator like instance of JSON object
    :rtype: ~azure.core.polling.LROPoller[~azure.core.paging.ItemPaged[Any]]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        # First page uses the operation's templated URL; later pages follow nextLink.
        if not next_link:
            request = build_paging_get_multiple_pages_lro_request_initial(
                client_request_id=client_request_id,
                maxresults=maxresults,
                timeout=timeout,
                template_url=self.begin_get_multiple_pages_lro.metadata["url"],
            )
            request.url = self._client.format_url(request.url)
        else:
            request = build_paging_get_multiple_pages_lro_request_initial(
                client_request_id=client_request_id,
                maxresults=maxresults,
                timeout=timeout,
                template_url=next_link,
            )
            request.url = self._client.format_url(request.url)
            # nextLink pages are always fetched with GET (the initial call is a POST).
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Pull the page's items and continuation token out of the JSON body.
        deserialized = _loads(pipeline_response.http_response.body())
        list_of_elem = deserialized["values"]
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.get("nextLink", None), iter(list_of_elem)

    def get_next(next_link=None):
        # Issue one page request; anything but 200 is an error.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    polling = kwargs.pop("polling", True)  # type: Union[bool, azure.core.polling.PollingMethod]
    # BUG FIX: the generated code popped "cls" a second time here. Since the key was
    # already removed above, that pop always returned None and rebound the closure
    # variable, silently discarding a caller-supplied custom "cls" from extract_data.
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        # cls=lambda ... returns the raw pipeline response so the poller can inspect it.
        raw_result = self._get_multiple_pages_lro_initial(
            client_request_id=client_request_id,
            maxresults=maxresults,
            timeout=timeout,
            cls=lambda x, y, z: x,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # The final LRO response doubles as the first page of the ItemPaged result.
        def internal_get_next(next_link=None):
            if next_link is None:
                return pipeline_response
            else:
                return get_next(next_link)

        return ItemPaged(internal_get_next, extract_data)

    if polling is True:
        polling_method = LROBasePolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_get_multiple_pages_lro.metadata = {"url": "/paging/multiple/lro"}  # type: ignore
@distributed_trace
def get_paging_model_with_item_name_with_xms_client_name(
    self, **kwargs  # type: Any
):
    # type: (...) -> Iterable[Any]
    """A paging operation that returns a paging model whose item name is is overriden by
    x-ms-client-name 'indexes'.

    :return: An iterator like instance of JSON object
    :rtype: ~azure.core.paging.ItemPaged[Any]
    :raises: ~azure.core.exceptions.HttpResponseError

    Example:
        .. code-block:: python

            # response body for status code(s): 200
            response.json() == {
                "nextLink": "str",  # Optional.
                "values": [
                    {
                        "properties": {
                            "id": 0,  # Optional.
                            "name": "str"  # Optional.
                        }
                    }
                ]
            }
    """
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    # Map well-known statuses to typed exceptions; extendable via "error_map".
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        # First page uses the operation's templated URL; later pages follow nextLink.
        if not next_link:
            request = build_paging_get_paging_model_with_item_name_with_xms_client_name_request(
                template_url=self.get_paging_model_with_item_name_with_xms_client_name.metadata["url"],
            )
            request.url = self._client.format_url(request.url)
        else:
            request = build_paging_get_paging_model_with_item_name_with_xms_client_name_request(
                template_url=next_link,
            )
            request.url = self._client.format_url(request.url)
            # nextLink pages are always fetched with GET.
            request.method = "GET"
        return request

    def extract_data(pipeline_response):
        # Pull the page's items and continuation token out of the JSON body.
        deserialized = _loads(pipeline_response.http_response.body())
        list_of_elem = deserialized["values"]
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.get("nextLink", None), iter(list_of_elem)

    def get_next(next_link=None):
        # Issue one page request; anything but 200 is an error.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

get_paging_model_with_item_name_with_xms_client_name.metadata = {"url": "/paging/itemNameWithXMSClientName"}  # type: ignore
from json import loads as _loads
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.polling.base_polling import LROBasePolling
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from msrest import Serializer
from .._vendor import _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
# fmt: off
def build_paging_get_no_item_name_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/noitemname accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/noitemname"),
        headers=headers,
        **kwargs
    )
def build_paging_get_null_next_link_name_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/nullnextlink accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/nullnextlink"),
        headers=headers,
        **kwargs
    )
def build_paging_get_single_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/single accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/single"),
        headers=headers,
        **kwargs
    )
def build_paging_first_response_empty_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/firstResponseEmpty/1 accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/firstResponseEmpty/1"),
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple with optional paging-control headers."""
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    # Only emit the optional control headers when a value was supplied.
    if client_request_id is not None:
        headers["client-request-id"] = _SERIALIZER.header("client_request_id", client_request_id, "str")
    if maxresults is not None:
        headers["maxresults"] = _SERIALIZER.header("maxresults", maxresults, "int")
    if timeout is not None:
        headers["timeout"] = _SERIALIZER.header("timeout", timeout, "int")
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple"),
        headers=headers,
        **kwargs
    )
def build_paging_get_with_query_params_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/getWithQueryParams with its query parameters."""
    required_query_parameter = kwargs.pop("required_query_parameter")  # type: int
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params["requiredQueryParameter"] = _SERIALIZER.query(
        "required_query_parameter", required_query_parameter, "int"
    )
    # queryConstant is fixed to True by the service definition.
    params["queryConstant"] = _SERIALIZER.query("query_constant", True, "bool")
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/getWithQueryParams"),
        params=params,
        headers=headers,
        **kwargs
    )
def build_paging_next_operation_with_query_params_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/nextOperationWithQueryParams."""
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    # queryConstant is fixed to True by the service definition.
    params["queryConstant"] = _SERIALIZER.query("query_constant", True, "bool")
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/nextOperationWithQueryParams"),
        params=params,
        headers=headers,
        **kwargs
    )
def build_paging_get_odata_multiple_pages_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/odata with optional paging-control headers."""
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    # Only emit the optional control headers when a value was supplied.
    if client_request_id is not None:
        headers["client-request-id"] = _SERIALIZER.header("client_request_id", client_request_id, "str")
    if maxresults is not None:
        headers["maxresults"] = _SERIALIZER.header("maxresults", maxresults, "int")
    if timeout is not None:
        headers["timeout"] = _SERIALIZER.header("timeout", timeout, "int")
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/odata"),
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_with_offset_request(
    offset,  # type: int
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/withpath/{offset}."""
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
    # Substitute the serialized offset into the path template.
    url = _format_url_section(
        kwargs.pop("template_url", "/paging/multiple/withpath/{offset}"),
        offset=_SERIALIZER.url("offset", offset, "int"),
    )
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    # Only emit the optional control headers when a value was supplied.
    if client_request_id is not None:
        headers["client-request-id"] = _SERIALIZER.header("client_request_id", client_request_id, "str")
    if maxresults is not None:
        headers["maxresults"] = _SERIALIZER.header("maxresults", maxresults, "int")
    if timeout is not None:
        headers["timeout"] = _SERIALIZER.header("timeout", timeout, "int")
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=url,
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_retry_first_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/retryfirst accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/retryfirst"),
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_retry_second_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/retrysecond accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/retrysecond"),
        headers=headers,
        **kwargs
    )
def build_paging_get_single_pages_failure_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/single/failure accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/single/failure"),
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_failure_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/failure accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/failure"),
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_failure_uri_request(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/failureuri accepting JSON."""
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=kwargs.pop("template_url", "/paging/multiple/failureuri"),
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_fragment_next_link_request(
    tenant,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/fragment/{tenant}."""
    api_version = kwargs.pop("api_version")  # type: str
    # Substitute the serialized tenant into the path template.
    url = _format_url_section(
        kwargs.pop("template_url", "/paging/multiple/fragment/{tenant}"),
        tenant=_SERIALIZER.url("tenant", tenant, "str"),
    )
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params["api_version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_fragment_with_grouping_next_link_request(
    tenant,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build a GET request for /paging/multiple/fragmentwithgrouping/{tenant}."""
    api_version = kwargs.pop("api_version")  # type: str
    # Substitute the serialized tenant into the path template.
    url = _format_url_section(
        kwargs.pop("template_url", "/paging/multiple/fragmentwithgrouping/{tenant}"),
        tenant=_SERIALIZER.url("tenant", tenant, "str"),
    )
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params["api_version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_paging_get_multiple_pages_lro_request_initial(
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the initial POST request for the /paging/multiple/lro long-running operation."""
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    # Only emit the optional control headers when a value was supplied.
    if client_request_id is not None:
        headers["client-request-id"] = _SERIALIZER.header("client_request_id", client_request_id, "str")
    if maxresults is not None:
        headers["maxresults"] = _SERIALIZER.header("maxresults", maxresults, "int")
    if timeout is not None:
        headers["timeout"] = _SERIALIZER.header("timeout", timeout, "int")
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="POST",
        url=kwargs.pop("template_url", "/paging/multiple/lro"),
        headers=headers,
        **kwargs
    )
def build_paging_next_fragment_request(
    tenant,  # type: str
    next_link,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that follows a fragment nextLink for /paging/multiple/fragment."""
    api_version = kwargs.pop("api_version")  # type: str
    # nextLink is inserted verbatim (skip_quote) since it is already a URL fragment.
    url = _format_url_section(
        kwargs.pop("template_url", "/paging/multiple/fragment/{tenant}/{nextLink}"),
        tenant=_SERIALIZER.url("tenant", tenant, "str"),
        nextLink=_SERIALIZER.url("next_link", next_link, "str", skip_quote=True),
    )
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params["api_version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_paging_next_fragment_with_grouping_request(
    tenant,  # type: str
    next_link,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request that follows a fragment nextLink for /paging/multiple/fragmentwithgrouping."""
    api_version = kwargs.pop("api_version")  # type: str
    # nextLink is inserted verbatim (skip_quote) since it is already a URL fragment.
    url = _format_url_section(
        kwargs.pop("template_url", "/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}"),
        tenant=_SERIALIZER.url("tenant", tenant, "str"),
        nextLink=_SERIALIZER.url("next_link", next_link, "str", skip_quote=True),
    )
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params["api_version"] = _SERIALIZER.query("api_version", api_version, "str")
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers["Accept"] = _SERIALIZER.header("accept", "application/json", "str")
    return HttpRequest(
        method="GET",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_paging_get_paging_model_with_item_name_with_xms_client_name_request(
    **kwargs # type: Any
):
    # type: (...) -> HttpRequest
    """Build the GET request for the itemNameWithXMSClientName paging operation.

    :keyword template_url: Optional override for the URL template.
    :return: An :class:`HttpRequest` for ``/paging/itemNameWithXMSClientName``.
    """
    # No path or query parameters for this operation — just the Accept header.
    headers = kwargs.pop("headers", {}) # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", "application/json", 'str')
    url = kwargs.pop("template_url", '/paging/itemNameWithXMSClientName')
    return HttpRequest(method="GET", url=url, headers=headers, **kwargs)
# fmt: on
class PagingOperations(object):
"""PagingOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client used to format URLs and run requests.
        self._client = client
        # Request-body serializer and response-body deserializer.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service client configuration.
        self._config = config
    @distributed_trace
    def get_no_item_name_pages(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that must return result of the default 'value' node.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "value": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_no_item_name_pages_request(
                    template_url=self.get_no_item_name_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_no_item_name_pages_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                # Continuation links are always followed with GET.
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        # Items live under "value" for this operation (raises KeyError if absent —
        # assumed guaranteed by the service contract).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["value"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
        return ItemPaged(get_next, extract_data)
    get_no_item_name_pages.metadata = {"url": "/paging/noitemname"} # type: ignore
    @distributed_trace
    def get_null_next_link_name_pages(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that must ignore any kind of nextLink, and stop after page 1.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_null_next_link_name_pages_request(
                    template_url=self.get_null_next_link_name_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_null_next_link_name_pages_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            # Deliberately return None as the continuation token: this operation
            # must ignore any nextLink in the body and stop after one page.
            return None, iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_null_next_link_name_pages.metadata = {"url": "/paging/nullnextlink"} # type: ignore
    @distributed_trace
    def get_single_pages(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that finishes on the first call without a nextlink.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_single_pages_request(
                    template_url=self.get_single_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_single_pages_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_single_pages.metadata = {"url": "/paging/single"} # type: ignore
    @distributed_trace
    def first_response_empty(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation whose first response's items list is empty, but still returns a next link.
        Second (and final) call, will give you an items list of 1.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "value": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_first_response_empty_request(
                    template_url=self.first_response_empty.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_first_response_empty_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        # Items live under "value" for this operation.
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["value"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    first_response_empty.metadata = {"url": "/paging/firstResponseEmpty/1"} # type: ignore
    @distributed_trace
    def get_multiple_pages(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink that has 10 pages.

        :keyword client_request_id:
        :paramtype client_request_id: str
        :keyword maxresults: Sets the maximum number of items to return in the response.
        :paramtype maxresults: int
        :keyword timeout: Sets the maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds.
        :paramtype timeout: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Pop operation options out of kwargs so the remainder flows to the pipeline run.
        client_request_id = kwargs.pop("client_request_id", None) # type: Optional[str]
        maxresults = kwargs.pop("maxresults", None) # type: Optional[int]
        timeout = kwargs.pop("timeout", 30) # type: Optional[int]
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link);
        # the same operation options are re-sent on every page.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=self.get_multiple_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages.metadata = {"url": "/paging/multiple"} # type: ignore
    @distributed_trace
    def get_with_query_params(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a next operation. It has a different query parameter from it's
        next operation nextOperationWithQueryParams. Returns a ProductResult.

        :keyword required_query_parameter: A required integer query parameter. Put in value '100' to
         pass test.
        :paramtype required_query_parameter: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Required keyword argument; raises KeyError if the caller omitted it.
        required_query_parameter = kwargs.pop("required_query_parameter") # type: int
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_with_query_params_request(
                    required_query_parameter=required_query_parameter,
                    template_url=self.get_with_query_params.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                # Subsequent pages go through a DIFFERENT operation with its own
                # query parameter, at a fixed URL (the next_link value itself is unused).
                request = build_paging_next_operation_with_query_params_request(
                    template_url="/paging/multiple/nextOperationWithQueryParams",
                )
                request.url = self._client.format_url(request.url)
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_with_query_params.metadata = {"url": "/paging/multiple/getWithQueryParams"} # type: ignore
    @distributed_trace
    def get_odata_multiple_pages(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink in odata format that has 10 pages.

        :keyword client_request_id:
        :paramtype client_request_id: str
        :keyword maxresults: Sets the maximum number of items to return in the response.
        :paramtype maxresults: int
        :keyword timeout: Sets the maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds.
        :paramtype timeout: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "odata.nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Pop operation options out of kwargs so the remainder flows to the pipeline run.
        client_request_id = kwargs.pop("client_request_id", None) # type: Optional[str]
        maxresults = kwargs.pop("maxresults", None) # type: Optional[int]
        timeout = kwargs.pop("timeout", 30) # type: Optional[int]
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_odata_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=self.get_odata_multiple_pages.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_odata_multiple_pages_request(
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        # The continuation token lives under the OData key "odata.nextLink".
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("odata.nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_odata_multiple_pages.metadata = {"url": "/paging/multiple/odata"} # type: ignore
    @distributed_trace
    def get_multiple_pages_with_offset(
        self,
        offset, # type: int
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink that has 10 pages.

        :param offset: Offset of return value.
        :type offset: int
        :keyword client_request_id:
        :paramtype client_request_id: str
        :keyword maxresults: Sets the maximum number of items to return in the response.
        :paramtype maxresults: int
        :keyword timeout: Sets the maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds.
        :paramtype timeout: int
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Pop operation options out of kwargs so the remainder flows to the pipeline run.
        client_request_id = kwargs.pop("client_request_id", None) # type: Optional[str]
        maxresults = kwargs.pop("maxresults", None) # type: Optional[int]
        timeout = kwargs.pop("timeout", 30) # type: Optional[int]
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link);
        # the offset path parameter and operation options are re-sent on every page.
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_with_offset_request(
                    offset=offset,
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=self.get_multiple_pages_with_offset.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_with_offset_request(
                    offset=offset,
                    client_request_id=client_request_id,
                    maxresults=maxresults,
                    timeout=timeout,
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_with_offset.metadata = {"url": "/paging/multiple/withpath/{offset}"} # type: ignore
    @distributed_trace
    def get_multiple_pages_retry_first(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that fails on the first call with 500 and then retries and then get a
        response including a nextLink that has 10 pages.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        # The initial 500 is expected to be absorbed by the pipeline's retry policy.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_retry_first_request(
                    template_url=self.get_multiple_pages_retry_first.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_retry_first_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_retry_first.metadata = {"url": "/paging/multiple/retryfirst"} # type: ignore
    @distributed_trace
    def get_multiple_pages_retry_second(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that includes a nextLink that has 10 pages, of which the 2nd call fails
        first with 500. The client should retry and finish all 10 pages eventually.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        # The transient 500 on page 2 is expected to be absorbed by the retry policy.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_retry_second_request(
                    template_url=self.get_multiple_pages_retry_second.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_retry_second_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_retry_second.metadata = {"url": "/paging/multiple/retrysecond"} # type: ignore
    @distributed_trace
    def get_single_pages_failure(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that receives a 400 on the first call.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_single_pages_failure_request(
                    template_url=self.get_single_pages_failure.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_single_pages_failure_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised
        # (the expected 400 surfaces as HttpResponseError to the iterating caller).
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_single_pages_failure.metadata = {"url": "/paging/single/failure"} # type: ignore
    @distributed_trace
    def get_multiple_pages_failure(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that receives a 400 on the second call.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_failure_request(
                    template_url=self.get_multiple_pages_failure.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_failure_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised
        # (the expected 400 on page 2 surfaces mid-iteration as HttpResponseError).
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_failure.metadata = {"url": "/paging/multiple/failure"} # type: ignore
    @distributed_trace
    def get_multiple_pages_failure_uri(
        self, **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that receives an invalid nextLink.

        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        # Build the request for the first page (template URL) or a continuation page (next_link).
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_failure_uri_request(
                    template_url=self.get_multiple_pages_failure_uri.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                request = build_paging_get_multiple_pages_failure_uri_request(
                    template_url=next_link,
                )
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        # Split one deserialized page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised
        # (following the invalid nextLink is expected to fail here).
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_failure_uri.metadata = {"url": "/paging/multiple/failureuri"} # type: ignore
    @distributed_trace
    def get_multiple_pages_fragment_next_link(
        self,
        tenant, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that doesn't return a full URL, just a fragment.

        :param tenant: Sets the tenant to use.
        :type tenant: str
        :keyword api_version: Sets the api version to use.
        :paramtype api_version: str
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "odata.nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Required keyword argument; raises KeyError if the caller omitted it.
        api_version = kwargs.pop("api_version") # type: str
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_fragment_next_link_request(
                    tenant=tenant,
                    api_version=api_version,
                    template_url=self.get_multiple_pages_fragment_next_link.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                # The continuation token is only a URL fragment, so subsequent pages
                # go through the dedicated "next fragment" request builder, which
                # splices the fragment into a full template with tenant/api_version.
                request = build_paging_next_fragment_request(
                    tenant=tenant,
                    next_link=next_link,
                    api_version=api_version,
                    template_url="/paging/multiple/fragment/{tenant}/{nextLink}",
                )
                request.url = self._client.format_url(request.url)
            return request
        # Split one deserialized page into (continuation token, item iterator).
        # The fragment continuation lives under the OData key "odata.nextLink".
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("odata.nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_fragment_next_link.metadata = {"url": "/paging/multiple/fragment/{tenant}"} # type: ignore
    @distributed_trace
    def get_multiple_pages_fragment_with_grouping_next_link(
        self,
        tenant, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable[Any]
        """A paging operation that doesn't return a full URL, just a fragment with parameters grouped.

        :param tenant: Sets the tenant to use.
        :type tenant: str
        :keyword api_version: Sets the api version to use.
        :paramtype api_version: str
        :return: An iterator like instance of JSON object
        :rtype: ~azure.core.paging.ItemPaged[Any]
        :raises: ~azure.core.exceptions.HttpResponseError

        Example:
            .. code-block:: python

                # response body for status code(s): 200
                response.json() == {
                    "odata.nextLink": "str",  # Optional.
                    "values": [
                        {"properties": {"id": 0, "name": "str"}}  # Both optional.
                    ]
                }
        """
        # Required keyword argument; raises KeyError if the caller omitted it.
        api_version = kwargs.pop("api_version") # type: str
        # Optional callback applied to each page's item list before it is yielded.
        cls = kwargs.pop("cls", None) # type: ClsType[Any]
        # Map well-known status codes to typed exceptions; callers may extend via error_map.
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))
        def prepare_request(next_link=None):
            if not next_link:
                request = build_paging_get_multiple_pages_fragment_with_grouping_next_link_request(
                    tenant=tenant,
                    api_version=api_version,
                    template_url=self.get_multiple_pages_fragment_with_grouping_next_link.metadata["url"],
                )
                request.url = self._client.format_url(request.url)
            else:
                # The continuation token is only a URL fragment, so subsequent pages
                # go through the dedicated "next fragment with grouping" builder,
                # which splices the fragment into a full template with tenant/api_version.
                request = build_paging_next_fragment_with_grouping_request(
                    tenant=tenant,
                    next_link=next_link,
                    api_version=api_version,
                    template_url="/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}",
                )
                request.url = self._client.format_url(request.url)
            return request
        # Split one deserialized page into (continuation token, item iterator).
        # The fragment continuation lives under the OData key "odata.nextLink".
        def extract_data(pipeline_response):
            deserialized = _loads(pipeline_response.http_response.body())
            list_of_elem = deserialized["values"]
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.get("odata.nextLink", None), iter(list_of_elem)
        # Fetch one page through the pipeline; anything but 200 is raised.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response)
            return pipeline_response
        return ItemPaged(get_next, extract_data)
    get_multiple_pages_fragment_with_grouping_next_link.metadata = {"url": "/paging/multiple/fragmentwithgrouping/{tenant}"} # type: ignore
def _get_multiple_pages_lro_initial(
    self, **kwargs  # type: Any
):
    # type: (...) -> Any
    """Send the initial request of the paging LRO and return its parsed body.

    :raises: ~azure.core.exceptions.HttpResponseError on any non-202 answer.
    """
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]

    req = build_paging_get_multiple_pages_lro_request_initial(
        client_request_id=client_request_id,
        maxresults=maxresults,
        timeout=timeout,
        template_url=self._get_multiple_pages_lro_initial.metadata["url"],
    )
    req.url = self._client.format_url(req.url)

    pipeline_response = self._client._pipeline.run(req, stream=False, **kwargs)
    response = pipeline_response.http_response
    # 202 Accepted is the only valid first answer for this long-running operation.
    if response.status_code not in [202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)

    deserialized = response.json() if response.content else None
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized

_get_multiple_pages_lro_initial.metadata = {"url": "/paging/multiple/lro"}  # type: ignore
@distributed_trace
def begin_get_multiple_pages_lro(
    self, **kwargs  # type: Any
):
    # type: (...) -> LROPoller[ItemPaged[Any]]
    """A long-running paging operation that includes a nextLink that has 10 pages.

    :keyword client_request_id:
    :paramtype client_request_id: str
    :keyword maxresults: Sets the maximum number of items to return in the response.
    :paramtype maxresults: int
    :keyword timeout: Sets the maximum time that the server can spend processing the request, in
     seconds. The default is 30 seconds.
    :paramtype timeout: int
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be LROBasePolling. Pass in False for
     this operation to not poll, or pass in your own initialized polling object for a personal
     polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns an iterator like instance of JSON object
    :rtype: ~azure.core.polling.LROPoller[~azure.core.paging.ItemPaged[Any]]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    client_request_id = kwargs.pop("client_request_id", None)  # type: Optional[str]
    maxresults = kwargs.pop("maxresults", None)  # type: Optional[int]
    timeout = kwargs.pop("timeout", 30)  # type: Optional[int]
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        if next_link:
            req = build_paging_get_multiple_pages_lro_request_initial(
                client_request_id=client_request_id,
                maxresults=maxresults,
                timeout=timeout,
                template_url=next_link,
            )
            req.url = self._client.format_url(req.url)
            # Follow-up pages are always plain GETs against the next link.
            req.method = "GET"
        else:
            req = build_paging_get_multiple_pages_lro_request_initial(
                client_request_id=client_request_id,
                maxresults=maxresults,
                timeout=timeout,
                template_url=self.begin_get_multiple_pages_lro.metadata["url"],
            )
            req.url = self._client.format_url(req.url)
        return req

    def extract_data(pipeline_response):
        payload = _loads(pipeline_response.http_response.body())
        elements = payload["values"]
        if cls:
            elements = cls(elements)
        return payload.get("nextLink", None), iter(elements)

    def get_next(next_link=None):
        pipeline_response = self._client._pipeline.run(
            prepare_request(next_link), stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    polling = kwargs.pop("polling", True)  # type: Union[bool, azure.core.polling.PollingMethod]
    # NOTE(review): "cls" was already popped above, so this rebinding always
    # yields None (generated-code quirk). Kept as-is to preserve behavior.
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
    if cont_token is None:
        raw_result = self._get_multiple_pages_lro_initial(
            client_request_id=client_request_id,
            maxresults=maxresults,
            timeout=timeout,
            cls=lambda x, y, z: x,
            **kwargs
        )
    kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        def internal_get_next(next_link=None):
            # The LRO's final response doubles as the first "page".
            if next_link is None:
                return pipeline_response
            return get_next(next_link)
        return ItemPaged(internal_get_next, extract_data)

    if polling is True:
        polling_method = LROBasePolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_get_multiple_pages_lro.metadata = {"url": "/paging/multiple/lro"}  # type: ignore
@distributed_trace
def get_paging_model_with_item_name_with_xms_client_name(
    self, **kwargs  # type: Any
):
    # type: (...) -> Iterable[Any]
    """A paging operation that returns a paging model whose item name is overridden by
    x-ms-client-name 'indexes'.

    :return: An iterator like instance of JSON object
    :rtype: ~azure.core.paging.ItemPaged[Any]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop("cls", None)  # type: ClsType[Any]
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    def prepare_request(next_link=None):
        if next_link:
            req = build_paging_get_paging_model_with_item_name_with_xms_client_name_request(
                template_url=next_link,
            )
            req.url = self._client.format_url(req.url)
            # Continuation links are always followed with a plain GET.
            req.method = "GET"
        else:
            req = build_paging_get_paging_model_with_item_name_with_xms_client_name_request(
                template_url=self.get_paging_model_with_item_name_with_xms_client_name.metadata["url"],
            )
            req.url = self._client.format_url(req.url)
        return req

    def extract_data(pipeline_response):
        payload = _loads(pipeline_response.http_response.body())
        elements = payload["values"]
        if cls:
            elements = cls(elements)
        return payload.get("nextLink", None), iter(elements)

    def get_next(next_link=None):
        pipeline_response = self._client._pipeline.run(
            prepare_request(next_link), stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        return pipeline_response

    return ItemPaged(get_next, extract_data)

get_paging_model_with_item_name_with_xms_client_name.metadata = {"url": "/paging/itemNameWithXMSClientName"}  # type: ignore
import tensorflow as tf
import tensorflow_probability as tfp
import tree
import numpy as np
from numbers import Number
from .base_policy import ContinuousPolicy, BasePolicy
class UniformPolicyMixin:
    """Mixin providing uniform sampling and (log-)probabilities.

    Expects the host class to define ``self.distribution`` and
    ``self._input_shapes``.
    """

    @tf.function(experimental_relax_shapes=True)
    def actions(self, observations):
        """Sample uniform actions, one per leading batch element of *observations*."""
        # Batch shape = everything before the (flattened) input rank of the
        # first observation tensor.
        first_observation = tree.flatten(observations)[0]
        first_input_rank = tf.size(tree.flatten(self._input_shapes)[0])
        batch_shape = tf.shape(first_observation)[:-first_input_rank]
        actions = self.distribution.sample(batch_shape)
        return actions

    @tf.function(experimental_relax_shapes=True)
    def log_probs(self, observations, actions):
        """Return per-action log-probabilities with a trailing singleton axis."""
        log_probs = self.distribution.log_prob(actions)[..., tf.newaxis]
        return log_probs

    @tf.function(experimental_relax_shapes=True)
    def probs(self, observations, actions):
        """Return per-action probabilities with a trailing singleton axis."""
        probs = self.distribution.prob(actions)[..., tf.newaxis]
        return probs


class ContinuousUniformPolicy(UniformPolicyMixin, ContinuousPolicy):
    """Uniform policy over a continuous box-shaped action space."""

    def __init__(self, *args, **kwargs):
        super(ContinuousUniformPolicy, self).__init__(*args, **kwargs)
        low, high = self._action_range
        self.distribution = tfp.distributions.Independent(
            tfp.distributions.Uniform(low=low, high=high),
            reinterpreted_batch_ndims=1)


class DiscreteContinuousUniformPolicy(ContinuousPolicy):
    """Uniform policy over a one-hot discrete part plus a continuous part."""

    def __init__(self, num_discrete, num_continuous, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._num_discrete = num_discrete
        self._num_continuous = num_continuous
        low, high = self._action_range
        # Scalar bounds only: the same range applies to every continuous dim.
        assert isinstance(low, Number) and isinstance(high, Number)
        # Zero logits -> uniform over the one-hot discrete choices.
        self.onehot_distribution = tfp.distributions.OneHotCategorical(
            logits=np.zeros(self._num_discrete),
            dtype=tf.float32)
        self.continuous_distribution = tfp.distributions.Sample(
            tfp.distributions.Uniform(low=low, high=high),
            sample_shape=self._num_continuous)

    @tf.function(experimental_relax_shapes=True)
    def actions(self, observations):
        """Sample concatenated [one-hot | continuous] actions per batch element."""
        first_observation = tree.flatten(observations)[0]
        first_input_rank = tf.size(tree.flatten(self._input_shapes)[0])
        batch_shape = tf.shape(first_observation)[:-first_input_rank]
        onehots = self.onehot_distribution.sample(batch_shape)
        continuous = self.continuous_distribution.sample(batch_shape)
        # NOTE(review): axis=1 assumes a single leading batch dimension — confirm.
        actions = tf.concat([onehots, continuous], axis=1)
        return actions

    @tf.function(experimental_relax_shapes=True)
    def log_probs(self, observations, actions):
        """Sum the discrete and continuous log-probabilities of the split action."""
        onehot_log_probs = self.onehot_distribution.log_prob(actions[:, :self._num_discrete])[..., tf.newaxis]
        continuous_log_probs = self.continuous_distribution.log_prob(actions[:, self._num_discrete:])[..., tf.newaxis]
        log_probs = onehot_log_probs + continuous_log_probs
        return log_probs


class DiscreteUniformPolicy(BasePolicy):
    """Uniform random policy over ``num_discrete`` categorical action indices."""

    def __init__(self, num_discrete, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._num_discrete = num_discrete
        # Zero logits -> uniform categorical over the discrete actions.
        self.action_distribution = tfp.distributions.Categorical(logits=np.zeros(self._num_discrete))

    @tf.function(experimental_relax_shapes=True)
    def actions(self, observations):
        """Sample one action index per leading batch element of *observations*."""
        first_observation = tree.flatten(observations)[0]
        first_input_rank = tf.size(tree.flatten(self._input_shapes)[0])
        batch_shape = tf.shape(first_observation)[:-first_input_rank]
        actions = self.action_distribution.sample(batch_shape)
        return actions

    @tf.function(experimental_relax_shapes=True)
    def log_probs(self, observations, actions):
        """Return log-probabilities of the given action indices.

        BUG FIX: this previously referenced the nonexistent
        ``self.onehot_distribution`` (copy-paste from the mixed policy) and
        indexed ``actions[:, 0]`` even though ``Categorical.sample(batch_shape)``
        yields rank-1 index samples.
        """
        log_probs = self.action_distribution.log_prob(actions)[..., tf.newaxis]
        return log_probs

import tensorflow as tf
import tensorflow_probability as tfp
import tree
import numpy as np
from numbers import Number
from .base_policy import ContinuousPolicy, BasePolicy
class UniformPolicyMixin:
    """Mixin adding uniform action sampling and probability queries.

    The host class must provide ``self.distribution`` and ``self._input_shapes``.
    """

    @tf.function(experimental_relax_shapes=True)
    def actions(self, observations):
        """Sample one uniform action per leading batch element."""
        first_observation = tree.flatten(observations)[0]
        first_input_rank = tf.size(tree.flatten(self._input_shapes)[0])
        # Leading dims of the observation beyond the input rank form the batch.
        batch_shape = tf.shape(first_observation)[:-first_input_rank]
        return self.distribution.sample(batch_shape)

    @tf.function(experimental_relax_shapes=True)
    def log_probs(self, observations, actions):
        """Log-probabilities of *actions* with a trailing singleton axis."""
        return self.distribution.log_prob(actions)[..., tf.newaxis]

    @tf.function(experimental_relax_shapes=True)
    def probs(self, observations, actions):
        """Probabilities of *actions* with a trailing singleton axis."""
        return self.distribution.prob(actions)[..., tf.newaxis]


class ContinuousUniformPolicy(UniformPolicyMixin, ContinuousPolicy):
    """Uniformly random policy over a continuous box action space."""

    def __init__(self, *args, **kwargs):
        super(ContinuousUniformPolicy, self).__init__(*args, **kwargs)
        low, high = self._action_range
        self.distribution = tfp.distributions.Independent(
            tfp.distributions.Uniform(low=low, high=high),
            reinterpreted_batch_ndims=1)


class DiscreteContinuousUniformPolicy(ContinuousPolicy):
    """Uniform policy over a one-hot discrete block followed by continuous dims."""

    def __init__(self, num_discrete, num_continuous, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._num_discrete = num_discrete
        self._num_continuous = num_continuous
        low, high = self._action_range
        # Scalar bounds are required; they are shared by all continuous dims.
        assert isinstance(low, Number) and isinstance(high, Number)
        self.onehot_distribution = tfp.distributions.OneHotCategorical(
            logits=np.zeros(self._num_discrete),
            dtype=tf.float32)
        self.continuous_distribution = tfp.distributions.Sample(
            tfp.distributions.Uniform(low=low, high=high),
            sample_shape=self._num_continuous)

    @tf.function(experimental_relax_shapes=True)
    def actions(self, observations):
        """Sample concatenated [one-hot | continuous] actions."""
        first_observation = tree.flatten(observations)[0]
        first_input_rank = tf.size(tree.flatten(self._input_shapes)[0])
        batch_shape = tf.shape(first_observation)[:-first_input_rank]
        onehots = self.onehot_distribution.sample(batch_shape)
        continuous = self.continuous_distribution.sample(batch_shape)
        # NOTE(review): axis=1 assumes exactly one batch dimension — confirm.
        return tf.concat([onehots, continuous], axis=1)

    @tf.function(experimental_relax_shapes=True)
    def log_probs(self, observations, actions):
        """Joint log-probability: discrete block plus continuous block."""
        onehot_log_probs = self.onehot_distribution.log_prob(actions[:, :self._num_discrete])[..., tf.newaxis]
        continuous_log_probs = self.continuous_distribution.log_prob(actions[:, self._num_discrete:])[..., tf.newaxis]
        return onehot_log_probs + continuous_log_probs


class DiscreteUniformPolicy(BasePolicy):
    """Uniformly random policy over ``num_discrete`` categorical actions."""

    def __init__(self, num_discrete, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._num_discrete = num_discrete
        self.action_distribution = tfp.distributions.Categorical(logits=np.zeros(self._num_discrete))

    @tf.function(experimental_relax_shapes=True)
    def actions(self, observations):
        """Sample one action index per leading batch element."""
        first_observation = tree.flatten(observations)[0]
        first_input_rank = tf.size(tree.flatten(self._input_shapes)[0])
        batch_shape = tf.shape(first_observation)[:-first_input_rank]
        return self.action_distribution.sample(batch_shape)

    @tf.function(experimental_relax_shapes=True)
    def log_probs(self, observations, actions):
        """Log-probabilities of the given action indices.

        BUG FIX: previously used the nonexistent ``self.onehot_distribution``
        and ``actions[:, 0]``, although ``Categorical`` samples are rank-1.
        """
        log_probs = self.action_distribution.log_prob(actions)[..., tf.newaxis]
        return log_probs
# Copyright 2019 <NAME> (Nagoya University)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
# NETWORK & TRAINING PARAMETERS INITIALIZATION
class network_parameter(object):
    """Bundle of network-topology and training-schedule hyperparameters."""

    def __init__(self,
                 upsampling_flag=True, spk_code_flag=False,
                 quantize=256, aux=39, resch=512, skipch=256,
                 learningrate=1e-4, weight_decay=0.0,
                 iters=200000, update_iters=50000,
                 checkpoint_interval=10000,
                 update_interval=5000):
        """Store every hyperparameter as an instance attribute."""
        # Topology flags and channel widths.
        self.upsampling_flag = upsampling_flag
        self.spk_code_flag = spk_code_flag
        self.quantize = quantize
        self.aux = aux
        self.resch = resch
        self.skipch = skipch
        # Optimization schedule.
        self.lr = learningrate
        self.weight_decay = weight_decay
        self.iters = iters
        self.update_iters = update_iters
        self.checkpoint_interval = checkpoint_interval
        self.update_interval = update_interval

    def set_batch_param(self, batch_length=20000, batch_size=1):
        """Configure the mini-batch geometry used for training."""
        self.batch_length = batch_length
        self.batch_size = batch_size

    def set_network_ch(self, quantize=256, aux=39, resch=512, skipch=256):
        """Reconfigure the network channel widths."""
        self.quantize = quantize
        self.aux = aux
        self.resch = resch
        self.skipch = skipch


class qpwn_parameter(network_parameter):
    """network_parameter specialized with per-architecture QPWN settings."""

    def __init__(self, network,
                 upsampling_flag=True, spk_code_flag=False,
                 quantize=256, aux=39, resch=512, skipch=256,
                 learningrate=1e-4, weight_decay=0.0,
                 iters=200000, update_iters=3000,
                 checkpoint_interval=10000,
                 update_interval=100,
                 decode_batch_size=12):
        super().__init__(upsampling_flag, spk_code_flag,
                         quantize, aux, resch, skipch,
                         learningrate, weight_decay,
                         iters, update_iters,
                         checkpoint_interval,
                         update_interval)
        self._update_network(network, decode_batch_size)

    def _update_network(self, network, decode_batch_size):
        """Apply the dilation/batch settings associated with *network*."""
        self.network = network
        # One settings bundle per supported architecture name.
        configs = {
            'default': dict(
                dilationF_depth=4, dilationF_repeat=3,
                dilationA_depth=4, dilationA_repeat=1,
                kernel_size=2, max_length=30000,
                batch_length=20000, batch_size=1,
                f0_threshold=0, decode_batch_size=decode_batch_size),
            'Rd10Rr3Ed4Er1': dict(
                dilationF_depth=10, dilationF_repeat=3,
                dilationA_depth=4, dilationA_repeat=1,
                kernel_size=2, max_length=22500,
                batch_length=20000, batch_size=1,
                f0_threshold=0, decode_batch_size=7),
        }
        if network not in configs:
            raise ValueError("%s is not supported!" % network)
        self._update_network_param(**configs[network])

    def _update_network_param(self,
                              dilationF_depth, dilationF_repeat,
                              dilationA_depth, dilationA_repeat,
                              kernel_size, max_length,
                              batch_length, batch_size,
                              f0_threshold, decode_batch_size):
        """Store the per-architecture settings as attributes."""
        self.dilationF_depth = dilationF_depth
        self.dilationF_repeat = dilationF_repeat
        self.dilationA_depth = dilationA_depth
        self.dilationA_repeat = dilationA_repeat
        self.kernel_size = kernel_size
        self.max_length = max_length
        self.batch_length = batch_length
        self.batch_size = batch_size
        self.f0_threshold = f0_threshold
        self.decode_batch_size = decode_batch_size
# Copyright 2019 <NAME> (Nagoya University)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
# NETWORK & TRAINING PARAMETERS INITIALIZATION
class network_parameter(object):
    """Hyperparameter container: network channels plus training schedule."""

    def __init__(self,
                 upsampling_flag=True, spk_code_flag=False,
                 quantize=256, aux=39, resch=512, skipch=256,
                 learningrate=1e-4, weight_decay=0.0,
                 iters=200000, update_iters=50000,
                 checkpoint_interval=10000,
                 update_interval=5000):
        """Record all hyperparameters on the instance."""
        self.upsampling_flag = upsampling_flag
        self.spk_code_flag = spk_code_flag
        self.quantize = quantize
        self.aux = aux
        self.resch = resch
        self.skipch = skipch
        # Note: the learning rate is exposed under the shorter name "lr".
        self.lr = learningrate
        self.weight_decay = weight_decay
        self.iters = iters
        self.update_iters = update_iters
        self.checkpoint_interval = checkpoint_interval
        self.update_interval = update_interval

    def set_batch_param(self, batch_length=20000, batch_size=1):
        """Set training batch length and size."""
        self.batch_length = batch_length
        self.batch_size = batch_size

    def set_network_ch(self, quantize=256, aux=39, resch=512, skipch=256):
        """Override the quantization/auxiliary/residual/skip channel counts."""
        self.quantize = quantize
        self.aux = aux
        self.resch = resch
        self.skipch = skipch


class qpwn_parameter(network_parameter):
    """QPWN-specific parameters layered on top of network_parameter."""

    def __init__(self, network,
                 upsampling_flag=True, spk_code_flag=False,
                 quantize=256, aux=39, resch=512, skipch=256,
                 learningrate=1e-4, weight_decay=0.0,
                 iters=200000, update_iters=3000,
                 checkpoint_interval=10000,
                 update_interval=100,
                 decode_batch_size=12):
        super().__init__(upsampling_flag=upsampling_flag,
                         spk_code_flag=spk_code_flag,
                         quantize=quantize, aux=aux,
                         resch=resch, skipch=skipch,
                         learningrate=learningrate,
                         weight_decay=weight_decay,
                         iters=iters, update_iters=update_iters,
                         checkpoint_interval=checkpoint_interval,
                         update_interval=update_interval)
        self._update_network(network, decode_batch_size)

    def _update_network(self, network, decode_batch_size):
        """Select the settings bundle for the named architecture."""
        self.network = network
        if network == 'default':
            self._update_network_param(
                dilationF_depth=4, dilationF_repeat=3,
                dilationA_depth=4, dilationA_repeat=1,
                kernel_size=2, max_length=30000,
                batch_length=20000, batch_size=1,
                f0_threshold=0, decode_batch_size=decode_batch_size)
        elif network == 'Rd10Rr3Ed4Er1':
            # This architecture pins its own decode batch size.
            self._update_network_param(
                dilationF_depth=10, dilationF_repeat=3,
                dilationA_depth=4, dilationA_repeat=1,
                kernel_size=2, max_length=22500,
                batch_length=20000, batch_size=1,
                f0_threshold=0, decode_batch_size=7)
        else:
            raise ValueError("%s is not supported!" % network)

    def _update_network_param(self,
                              dilationF_depth, dilationF_repeat,
                              dilationA_depth, dilationA_repeat,
                              kernel_size, max_length,
                              batch_length, batch_size,
                              f0_threshold, decode_batch_size):
        """Record the architecture-specific settings on the instance."""
        self.dilationF_depth = dilationF_depth
        self.dilationF_repeat = dilationF_repeat
        self.dilationA_depth = dilationA_depth
        self.dilationA_repeat = dilationA_repeat
        self.kernel_size = kernel_size
        self.max_length = max_length
        self.batch_length = batch_length
        self.batch_size = batch_size
        self.f0_threshold = f0_threshold
        self.decode_batch_size = decode_batch_size
import re
import sys
import time
import yaml
from kubernetes import client, config, watch
def get_expected_labels_regexs(path="./expected-output.txt"):
    """Compile one regex per line of *path*.

    Generalized: the file path is now a parameter; the default preserves the
    previous hard-coded location, so existing callers are unaffected.

    :param path: File containing one expected-label regex per line.
    :return: List of compiled regex objects, in file order.
    """
    with open(path) as f:
        # strip() drops the trailing newline (and surrounding whitespace).
        return [re.compile(line.strip()) for line in f]
def deploy_yaml_file(core_api, apps_api, rbac_api, daemonset_yaml_file):
    """Create every Kubernetes object described in *daemonset_yaml_file*.

    Dispatches each YAML document to the API that owns its kind; exits the
    process with status 1 on an unrecognized kind.
    """
    with open(daemonset_yaml_file) as f:
        for body in yaml.safe_load_all(f):
            namespace = body["metadata"].get("namespace", "default")
            kind = body["kind"]
            if kind == "Namespace":
                core_api.create_namespace(body)
            elif kind == "DaemonSet":
                apps_api.create_namespaced_daemon_set(namespace, body)
            elif kind == "ServiceAccount":
                core_api.create_namespaced_service_account(namespace, body)
            elif kind == "ClusterRole":
                rbac_api.create_cluster_role(body)
            elif kind == "ClusterRoleBinding":
                rbac_api.create_cluster_role_binding(body)
            else:
                print("Unknown kind {}".format(kind), file=sys.stderr)
                sys.exit(1)
def check_labels(expected_labels_regexs, labels):
    """Match node *labels* against the expected regexes.

    Both lists are consumed in place: matched entries (and NFD-owned
    ``feature.node.kubernetes.io/`` labels) are removed. Whatever remains is
    reported on stderr.

    :return: True when every expected regex matched and no unexpected label
        remains, False otherwise.
    """
    for label in list(labels):
        # Labels managed by NFD itself are expected; drop them silently.
        if label.startswith("feature.node.kubernetes.io/"):
            labels.remove(label)
            continue
        for pattern in list(expected_labels_regexs):
            if pattern.match(label):
                # Consume both the satisfied expectation and the label.
                expected_labels_regexs.remove(pattern)
                labels.remove(label)
                break
    for label in labels:
        print("Unexpected label on node: {}".format(label), file=sys.stderr)
    for regex in expected_labels_regexs:
        print("Missing label matching regex: {}".format(regex.pattern), file=sys.stderr)
    return not expected_labels_regexs and not labels
if __name__ == '__main__':
    # Usage: e2e-tests.py GFD_YAML_PATH NFD_YAML_PATH
    if len(sys.argv) != 3:
        print("Usage: {} GFD_YAML_PATH NFD_YAML_PATH".format(sys.argv[0]))
        sys.exit(1)
    print("Running E2E tests for GFD")
    config.load_kube_config()
    core_api = client.CoreV1Api()
    apps_api = client.AppsV1Api()
    rbac_api = client.RbacAuthorizationV1Api()
    nodes = core_api.list_node().items
    # Should we limit to only one node ?
    if len(nodes) < 1:
        print("No nodes found", file=sys.stderr)
        sys.exit(1)
    # Expectations = patterns from file + every label already on the node.
    regexs = get_expected_labels_regexs()
    for key, value in nodes[0].metadata.labels.items():
        regexs.append(re.compile(key + "=" + value))
    print("Deploy NFD and GFD")
    deploy_yaml_file(core_api, apps_api, rbac_api, sys.argv[1])  # GFD
    deploy_yaml_file(core_api, apps_api, rbac_api, sys.argv[2])  # NFD
    timestamp_label_name = "nvidia.com/gfd.timestamp"
    print("Watching node updates")
    # Wait (up to 180s) for GFD to stamp its timestamp label on a node.
    w = watch.Watch()
    for event in w.stream(core_api.list_node, _request_timeout=180):
        if event['type'] != 'MODIFIED':
            continue
        print("Node modified")
        if timestamp_label_name in event['object'].metadata.labels:
            print("Timestamp label found. Stop watching node")
            break
    print("Checking labels")
    nodes = core_api.list_node().items
    labels = [key + "=" + value for key, value in nodes[0].metadata.labels.items()]
    if not check_labels(regexs, labels):
        print("E2E tests failed", file=sys.stderr)
        sys.exit(1)
    print("E2E tests done")
    sys.exit(0)
import re
import sys
import time
import yaml
from kubernetes import client, config, watch
def get_expected_labels_regexs():
with open("./expected-output.txt") as f:
expected_labels = f.readlines()
return [re.compile(label.strip()) for label in expected_labels]
def deploy_yaml_file(core_api, apps_api, rbac_api, daemonset_yaml_file):
with open(daemonset_yaml_file) as f:
bodies = yaml.safe_load_all(f)
for body in bodies:
namespace = body["metadata"].get("namespace", "default")
if body["kind"] == "Namespace":
core_api.create_namespace(body)
elif body["kind"] == "DaemonSet":
apps_api.create_namespaced_daemon_set(namespace, body)
elif body["kind"] == "ServiceAccount":
core_api.create_namespaced_service_account(namespace, body)
elif body["kind"] == "ClusterRole":
rbac_api.create_cluster_role(body)
elif body["kind"] == "ClusterRoleBinding":
rbac_api.create_cluster_role_binding(body)
else:
print("Unknown kind {}".format(body["kind"]), file=sys.stderr)
sys.exit(1)
def check_labels(expected_labels_regexs, labels):
for label in labels[:]:
if label.startswith("feature.node.kubernetes.io/"):
labels.remove(label)
continue
for label_regex in expected_labels_regexs[:]:
if label_regex.match(label):
expected_labels_regexs.remove(label_regex)
labels.remove(label)
break
for label in labels:
print("Unexpected label on node: {}".format(label), file=sys.stderr)
for regex in expected_labels_regexs:
print("Missing label matching regex: {}".format(regex.pattern), file=sys.stderr)
return len(expected_labels_regexs) == 0 and len(labels) == 0
if __name__ == '__main__':
if len(sys.argv) != 3:
print("Usage: {} GFD_YAML_PATH NFD_YAML_PATH".format(sys.argv[0]))
sys.exit(1)
print("Running E2E tests for GFD")
config.load_kube_config()
core_api = client.CoreV1Api()
apps_api = client.AppsV1Api()
rbac_api = client.RbacAuthorizationV1Api()
nodes = core_api.list_node().items
# Should we limit to only one node ?
if len(nodes) < 1:
print("No nodes found", file=sys.stderr)
sys.exit(1)
regexs = get_expected_labels_regexs()
for k, v in nodes[0].metadata.labels.items():
regexs.append(re.compile(k + "=" + v))
print("Deploy NFD and GFD")
deploy_yaml_file(core_api, apps_api, rbac_api, sys.argv[1]) # GFD
deploy_yaml_file(core_api, apps_api, rbac_api, sys.argv[2]) # NFD
timestamp_label_name = "nvidia.com/gfd.timestamp"
print("Watching node updates")
stop = False
w = watch.Watch()
for event in w.stream(core_api.list_node, _request_timeout=180):
if event['type'] == 'MODIFIED':
print("Node modified")
for label_name in event['object'].metadata.labels:
if label_name == timestamp_label_name:
stop = True
print("Timestamp label found. Stop watching node")
break
if stop:
break
print("Checking labels")
nodes = core_api.list_node().items
labels = [k + "=" + v for k, v in nodes[0].metadata.labels.items()]
if not check_labels(regexs, labels):
print("E2E tests failed", file=sys.stderr)
sys.exit(1)
print("E2E tests done")
sys.exit(0) | 0.289975 | 0.295243 |
import os
import re
import subprocess
import sys
import threading
import time
import c_misc
import c_path
from c_logging import logger
def run_command(cmd, expected_retcode=0, large_output=False, log=True):
    """Run *cmd* (a list) and return its captured output.

    :param cmd: Command and arguments as a list.
    :param expected_retcode: Return code treated as success.
    :param large_output: Use subprocess.check_output (single large read)
        instead of the CoreSubprocess streaming helper.
    :param log: When True, log the command before running it.
    :raises RuntimeError: When the return code differs from expected_retcode.
    """
    if log:
        logger.debug('Running command: ' + str(cmd))
    if large_output:
        retcode = 0
        stderr_data = ""
        try:
            # stderr is folded into the captured output.
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            output = e.output
            retcode = e.returncode
            stderr_data = output
    else:
        retcode, output, stderr_data = CoreSubprocess.simpleExecuteCommand(cmd)
    if retcode != expected_retcode:
        raise RuntimeError(cmd[0] + ' retcode: ' + str(retcode) + '\nOutput: ' + stderr_data)
    return output
class CoreSubprocess(object):
""" Class to launch a process. """
# Communication type enums
COM_ARGS = 1
COM_STDIN = 2
# Stderr routing options
STDERR_TMP = 1
STDERR_STDOUT = 2
class CBDefault():
pass
def __init__(self, launchCommand, timeout=None, callback=None,
workingDir=None, comm=1, commands=[], debug=False,
stderr=None):
"""
Initializes internal variables.
Parameters:
1. launchCommand (str): Command that should be launched.
2. timeout (int): The timeout within which the tool should finish.
3. callback (cb): Callback to send the tool ouput on.
Call should have the following prototype:
def callback(buffer):
"""
self.launchCommand = launchCommand
self.timeout = timeout if (timeout and not debug) else None
self.callback = callback
self.workingDir = workingDir
self.comm = comm
self.commands = commands
self.debug = debug
self.stderr = stderr
self._toolOutput = ''
self._toolReturnCode = 0
self._toolTimedout = False
@staticmethod
def _readEnd(process, outputCallback):
"""
Internal method used to read the last few bits.
Parameters:
1. process (obj): Object returned by subprocess.
2. outputCallback (cb): Callback to send the tool output on.
"""
charRead = process.stdout.read()
outputCallback(charRead)
@staticmethod
def _readOutput(process, outputCallback):
"""
Internal method used to read the last few bits.
Parameters:
1. process (obj): Object returned by subprocess.
2. outputCallback (cb): Callback to send the tool output on.
"""
while True:
if not process.poll() is None:
break
charRead = process.stdout.read(1)
outputCallback(charRead)
@staticmethod
def waitForPrompt(process, outputCallback):
currentLine = ''
while True:
charRead = process.stdout.read(1)
outputCallback(charRead)
currentLine += charRead
if charRead == '\n':
currentLine = ''
if currentLine.lstrip() == '>':
break
@classmethod
def _communicate(cls, process, communication, commands, debug, outputCallback,
returnCodeCallback):
"""
Internal method used to launch sub process as a seperate thread.
Parameters:
1. process (obj): Object returned by subprocess.
2. outputCallback (cb): Callback to send the tool output on.
3. returnCodeCallback (cb): Callback to set the return code
"""
readEndBytes = True
if communication == cls.COM_STDIN:
if not debug:
commands += ['quit']
for cmd in commands:
cls.waitForPrompt(process, outputCallback)
outputCallback(cmd + '\r\n')
process.stdin.write(cmd + '\r\n')
elif communication == cls.COM_ARGS:
readOutput = threading.Thread(target=CoreSubprocess._readOutput,
args=([process, outputCallback]))
readOutput.daemon = True
readOutput.start()
while True:
if not process.poll() is None:
break
time.sleep(1)
readOutput.join(5)
readEndBytes = not readOutput.is_alive()
if readEndBytes:
readEnd = threading.Thread(target=CoreSubprocess._readEnd,
args=([process, outputCallback]))
readEnd.daemon = True
readEnd.start()
readEnd.join(5)
returnCodeCallback(process.wait())
def compressToolOutput(self, output=None, lineLimit=5000):
    """Return compressed output limited to the trailing *lineLimit* lines.

    Defaults to the output accumulated by the last launch.
    """
    if output is None:
        output = self._toolOutput
    tail = c_misc.compressBufferContents([output])[-1 * lineLimit:]
    return '\n'.join(tail)
def killProcess(self, process):
    """Forcefully terminate *process* (and, on Windows, its child tree)."""
    if sys.platform.startswith('linux'):
        process.kill()
        return
    # Windows: taskkill /T also removes the whole child-process tree.
    # Using stdout=logger.outputStream caused error
    killer = subprocess.Popen('taskkill /F /T /PID {0}'.format(str(process.pid)),
                              # stdout=logger.outputStream,
                              stderr=subprocess.STDOUT,
                              bufsize=-1)
    killer.wait()
def launchTool(self):
"""
Launches the tool specified by the launchCommand
Returns:
1. toolTimedout (bool): True if tool timedout
2. toolReturnCode (int): Returncode from the tool
3. toolOutput (str): Output from the tool
"""
self._toolTimedout = False
self._toolReturnCode = 0
self._toolOutput = ''
if self.stderr == self.STDERR_TMP:
tmpfile = c_path.create_tmp_file()
stderr = open(tmpfile, 'wb')
elif self.stderr == self.STDERR_STDOUT:
stderr = subprocess.STDOUT
else:
stderr = self.stderr
try:
process = subprocess.Popen(self.launchCommand, cwd=self.workingDir,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=stderr)
try:
communication = threading.Thread(target=self._communicate,
args=([process, self.comm, self.commands, self.debug,
self._setOutput, self._setReturnCode]))
communication.daemon = True
communication.start()
start_time = end_time = time.time()
while True:
timeout = 600 if (self.timeout is None) else min(600, self.timeout - (end_time - start_time))
communication.join(timeout)
end_time = time.time()
if ((not communication.isAlive()) or ((self.timeout is not None) and ((end_time - start_time) > self.timeout))):
break
if communication.isAlive():
self._toolTimedout = True
self.killProcess(process)
communication.join(2)
assert communication.isAlive() == False
except KeyboardInterrupt:
self.killProcess(process)
raise
finally:
if self.stderr == self.STDERR_TMP:
stderr.close()
c_path.remove_tmp_file(tmpfile)
return self._toolTimedout, self._toolReturnCode, self._toolOutput
def _setReturnCode(self, returnCode):
""" Internal method used to set the tool returnCode """
self._toolReturnCode = returnCode
def _setOutput(self, output):
""" Internal method used to set the tool output and callback """
self._toolOutput += output
if self.callback: self.callback(output)
# This is a hook for caller to format the command
# line string for printing
def formatLaunchCommandForPrinting(self, cmd):
return cmd
    def printCommand(self):
        """Log the tool launch: the command's basename, its directory, and
        the argument list wrapped to roughly 60 characters per line."""
        cmd = self.launchCommand
        cmd = self.formatLaunchCommandForPrinting(cmd)
        location = os.path.dirname(cmd[0])
        args = ''
        curLength = 0
        # Greedy line-wrap of the arguments for readable log output.
        for eachArg in cmd[1:]:
            if curLength > 60:
                args += '\n'
                curLength = 0
            args += eachArg + ' '
            curLength += len(eachArg) + 1
        cmd = os.path.basename(cmd[0])
        logger.info(
            """Launching tool: {0}
    From: {1}
    Args: {2}""".format(cmd, location, '\n '.join(args.split('\n'))))
def printFinish(self, cmd):
logger.info('Finished tool: {0}'.format(cmd))
def validateOutput(self, retcode, f_retcode, f_output,
successRegex, failureRegex,
successString, failureString,
cmd):
returnValue = True
returnError = 'Tool "{0}"\n'.format(cmd)
# Perform validations
if retcode is not None and retcode != f_retcode:
returnError += ' Return code does not match: Expected = "{0}", Got = "{1}"'.format(retcode, f_retcode)
returnValue = False
if successRegex is not None and not re.search(r'{0}'.format(successRegex), f_output):
returnError += ' Output does not have expected success regex.\n Regex: "{0}"'.format(successRegex)
returnValue = False
elif failureRegex is not None and re.search(r'{0}'.format(failureRegex), f_output):
returnError += ' Output has expected failure regex.\n Regex: "{0}"'.format(failureRegex)
returnValue = False
elif successString is not None and f_output.find(successString) == -1:
returnError += ' Output does not have expected success string.\n Expected String: "{0}"'.format(successString)
returnValue = False
elif failureString is not None and not f_output.find(failureString) == -1:
returnError += ' Output has expected failure string.\n Expected String: "{0}"'.format(failureString)
returnValue = False
return returnValue, returnError
@classmethod
def executeCommand(cls, launchCommand, retcode=None, callback=None,
timeout=3600, successRegex=None, failureRegex=None,
successString=None, failureString=None, workingDir=None,
comm=1, commands=[], debug=False, stderr=STDERR_TMP,
print_command=False):
"""
Launches the tool based on the params
Only one of 5, 6, 7, 8 must be given
Parameters:
1. launchCommand (str): command to be executed.
2. retcode (int): Expected return code from tool.
3. callback (cb): Callback to take tool output
Callback should have the following prototype:
def callback(buffer):
4. timeout (int): Time in which tool should finish
5. successRegex (str): Regex in output stream signifying success.
6. failureRegex (str): Regex in output stream signifying failure.
7. successString (str): String in output stream signifying success.
8. failureString (str): String in output stream signifying failure.
Returns:
1. returnValue (bool): True if all validations were successful
2. returnError (str): Failure if any
"""
if len([value for value in [successRegex, failureRegex, successString, failureString] if value is not None]) > 1:
raise AttributeError('Only one of successRegex, failureRegex, successString, failureString must be given')
# Log the running process nicely
if type(launchCommand) is not list:
cmd = launchCommand.split()
else:
cmd = launchCommand
# Setup default callback
cb = callback
if callback is cls.CBDefault:
cb = lambda msg: logger.info(msg, raw=True)
cb('\n---------------------------------------------------------------\n\n')
# Launch the tool
tool = cls(launchCommand, timeout=timeout,
workingDir=workingDir, comm=comm, commands=commands, debug=debug,
callback=cb, stderr=stderr)
if print_command:
tool.printCommand()
f_timeout, f_retcode, f_output = tool.launchTool()
if callback is cls.CBDefault:
cb('\n---------------------------------------------------------------\n\n')
if print_command:
tool.printFinish(cmd)
returnValue, returnError = tool.validateOutput(
retcode, f_retcode, f_output,
successRegex, failureRegex,
successString, failureString,
cmd)
return returnValue, returnError, f_timeout, f_retcode, f_output
@classmethod
def simpleExecuteCommand(cls, launchCommand):
"""
Launches the tool based on the params
Parameters:
1. launchCommand (str): command to be executed.
Returns:
1. returnValue (bool): True if all validations were successful
2. returnError (str): Failure if any
"""
# Log the running process nicely
if type(launchCommand) is not list:
launchCommand = launchCommand.split()
# Launch the tool
retcode, output, error = 0, '', ''
try:
process = subprocess.Popen(launchCommand, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.wait()
retcode = process.returncode
output = process.stdout.read()
error = process.stderr.read()
except Exception as e:
retcode = -1
error = str(e)
return retcode, output, error | QCA4020_SDK/target/sectools/qdn/sectools/common/utils/c_process.py | import os
import re
import subprocess
import sys
import threading
import time
import c_misc
import c_path
from c_logging import logger
def run_command(cmd, expected_retcode=0, large_output=False, log=True):
    """Run *cmd* and return its captured stdout.

    Parameters:
        cmd (str or list): command to execute.
        expected_retcode (int): expected exit status.
        large_output (bool): use subprocess.check_output (collects the whole
            stream in one go) instead of CoreSubprocess.
        log (bool): emit a debug log line before running.

    Raises:
        RuntimeError: when the exit status differs from expected_retcode.
    """
    if log:
        logger.debug('Running command: ' + str(cmd))
    if large_output:
        try:
            retcode = 0
            stderr_data = ""
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            output = e.output
            retcode = e.returncode
            stderr_data = output
    else:
        retcode, output, stderr_data = CoreSubprocess.simpleExecuteCommand(cmd)
    if retcode != expected_retcode:
        # BUG FIX: cmd[0] is just the first *character* when cmd is a plain
        # string, and stderr_data may be bytes (check_output); format via
        # str() so the error message is always well-formed.
        name = cmd[0] if isinstance(cmd, list) else cmd.split()[0]
        raise RuntimeError('{0} retcode: {1}\nOutput: {2}'.format(name, retcode, str(stderr_data)))
    return output
class CoreSubprocess(object):
    """ Class to launch a process, capture its output and enforce a timeout. """
    # Communication type enums
    COM_ARGS = 1   # drive the tool purely via command-line arguments
    COM_STDIN = 2  # interactive: feed 'commands' over stdin at each '>' prompt
    # Stderr routing options
    STDERR_TMP = 1     # redirect stderr into a temp file (removed afterwards)
    STDERR_STDOUT = 2  # merge stderr into the captured stdout stream
    class CBDefault():
        """Sentinel value: pass callback=CBDefault to executeCommand to have
        the tool output logged through logger.info (see executeCommand)."""
        pass
def __init__(self, launchCommand, timeout=None, callback=None,
workingDir=None, comm=1, commands=[], debug=False,
stderr=None):
"""
Initializes internal variables.
Parameters:
1. launchCommand (str): Command that should be launched.
2. timeout (int): The timeout within which the tool should finish.
3. callback (cb): Callback to send the tool ouput on.
Call should have the following prototype:
def callback(buffer):
"""
self.launchCommand = launchCommand
self.timeout = timeout if (timeout and not debug) else None
self.callback = callback
self.workingDir = workingDir
self.comm = comm
self.commands = commands
self.debug = debug
self.stderr = stderr
self._toolOutput = ''
self._toolReturnCode = 0
self._toolTimedout = False
@staticmethod
def _readEnd(process, outputCallback):
"""
Internal method used to read the last few bits.
Parameters:
1. process (obj): Object returned by subprocess.
2. outputCallback (cb): Callback to send the tool output on.
"""
charRead = process.stdout.read()
outputCallback(charRead)
@staticmethod
def _readOutput(process, outputCallback):
"""
Internal method used to read the last few bits.
Parameters:
1. process (obj): Object returned by subprocess.
2. outputCallback (cb): Callback to send the tool output on.
"""
while True:
if not process.poll() is None:
break
charRead = process.stdout.read(1)
outputCallback(charRead)
@staticmethod
def waitForPrompt(process, outputCallback):
currentLine = ''
while True:
charRead = process.stdout.read(1)
outputCallback(charRead)
currentLine += charRead
if charRead == '\n':
currentLine = ''
if currentLine.lstrip() == '>':
break
    @classmethod
    def _communicate(cls, process, communication, commands, debug, outputCallback,
                     returnCodeCallback):
        """
        Internal method used to drive the sub process from a separate thread.
        Parameters:
            1. process (obj): Object returned by subprocess.
            2. communication (int): COM_STDIN or COM_ARGS interaction mode.
            3. commands (list): stdin commands for COM_STDIN mode.
            4. debug (bool): when True, 'quit' is not auto-appended.
            5. outputCallback (cb): Callback to send the tool output on.
            6. returnCodeCallback (cb): Callback to set the return code
        """
        readEndBytes = True
        if communication == cls.COM_STDIN:
            # NOTE(review): 'commands += [...]' mutates the caller's list
            # in-place (including a shared default []) -- side effect to be
            # aware of.
            if not debug:
                commands += ['quit']
            for cmd in commands:
                # Wait for the tool's '>' prompt before sending each command;
                # echo the command into the captured output as well.
                cls.waitForPrompt(process, outputCallback)
                outputCallback(cmd + '\r\n')
                process.stdin.write(cmd + '\r\n')
        elif communication == cls.COM_ARGS:
            # Pump stdout on a helper thread while polling for process exit.
            readOutput = threading.Thread(target=CoreSubprocess._readOutput,
                                          args=([process, outputCallback]))
            readOutput.daemon = True
            readOutput.start()
            while True:
                if not process.poll() is None:
                    break
                time.sleep(1)
            readOutput.join(5)
            # Only drain the leftover bytes if the reader thread is done;
            # otherwise two readers would race on the same pipe.
            readEndBytes = not readOutput.is_alive()
        if readEndBytes:
            readEnd = threading.Thread(target=CoreSubprocess._readEnd,
                                       args=([process, outputCallback]))
            readEnd.daemon = True
            readEnd.start()
            readEnd.join(5)
        # Block until the child exits and publish its return code.
        returnCodeCallback(process.wait())
    def compressToolOutput(self, output=None, lineLimit=5000):
        """ Return compressed output limiting the number of lines """
        # Default to the output captured by the most recent launchTool() run.
        if output is None: output = self._toolOutput
        # c_misc.compressBufferContents presumably yields a sequence of lines
        # (it is sliced and '\n'-joined below) -- TODO confirm; keep only the
        # trailing lineLimit entries.
        compressedOutput = c_misc.compressBufferContents([output])[-1 * lineLimit:]
        return '\n'.join(compressedOutput)
def killProcess(self, process):
if sys.platform.startswith('linux'):
process.kill()
else:
#Using stdout=logger.outputStream caused error
processToKill = subprocess.Popen('taskkill /F /T /PID {0}'.format(str(process.pid)),
#stdout=logger.outputStream,
stderr=subprocess.STDOUT,
bufsize= -1)
processToKill.wait()
def launchTool(self):
"""
Launches the tool specified by the launchCommand
Returns:
1. toolTimedout (bool): True if tool timedout
2. toolReturnCode (int): Returncode from the tool
3. toolOutput (str): Output from the tool
"""
self._toolTimedout = False
self._toolReturnCode = 0
self._toolOutput = ''
if self.stderr == self.STDERR_TMP:
tmpfile = c_path.create_tmp_file()
stderr = open(tmpfile, 'wb')
elif self.stderr == self.STDERR_STDOUT:
stderr = subprocess.STDOUT
else:
stderr = self.stderr
try:
process = subprocess.Popen(self.launchCommand, cwd=self.workingDir,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=stderr)
try:
communication = threading.Thread(target=self._communicate,
args=([process, self.comm, self.commands, self.debug,
self._setOutput, self._setReturnCode]))
communication.daemon = True
communication.start()
start_time = end_time = time.time()
while True:
timeout = 600 if (self.timeout is None) else min(600, self.timeout - (end_time - start_time))
communication.join(timeout)
end_time = time.time()
if ((not communication.isAlive()) or ((self.timeout is not None) and ((end_time - start_time) > self.timeout))):
break
if communication.isAlive():
self._toolTimedout = True
self.killProcess(process)
communication.join(2)
assert communication.isAlive() == False
except KeyboardInterrupt:
self.killProcess(process)
raise
finally:
if self.stderr == self.STDERR_TMP:
stderr.close()
c_path.remove_tmp_file(tmpfile)
return self._toolTimedout, self._toolReturnCode, self._toolOutput
def _setReturnCode(self, returnCode):
""" Internal method used to set the tool returnCode """
self._toolReturnCode = returnCode
def _setOutput(self, output):
""" Internal method used to set the tool output and callback """
self._toolOutput += output
if self.callback: self.callback(output)
# This is a hook for caller to format the command
# line string for printing
def formatLaunchCommandForPrinting(self, cmd):
return cmd
def printCommand(self):
cmd = self.launchCommand
cmd = self.formatLaunchCommandForPrinting(cmd)
location = os.path.dirname(cmd[0])
args = ''
curLength = 0
for eachArg in cmd[1:]:
if curLength > 60:
args += '\n'
curLength = 0
args += eachArg + ' '
curLength += len(eachArg) + 1
cmd = os.path.basename(cmd[0])
logger.info(
"""Launching tool: {0}
From: {1}
Args: {2}""".format(cmd, location, '\n '.join(args.split('\n'))))
def printFinish(self, cmd):
logger.info('Finished tool: {0}'.format(cmd))
def validateOutput(self, retcode, f_retcode, f_output,
successRegex, failureRegex,
successString, failureString,
cmd):
returnValue = True
returnError = 'Tool "{0}"\n'.format(cmd)
# Perform validations
if retcode is not None and retcode != f_retcode:
returnError += ' Return code does not match: Expected = "{0}", Got = "{1}"'.format(retcode, f_retcode)
returnValue = False
if successRegex is not None and not re.search(r'{0}'.format(successRegex), f_output):
returnError += ' Output does not have expected success regex.\n Regex: "{0}"'.format(successRegex)
returnValue = False
elif failureRegex is not None and re.search(r'{0}'.format(failureRegex), f_output):
returnError += ' Output has expected failure regex.\n Regex: "{0}"'.format(failureRegex)
returnValue = False
elif successString is not None and f_output.find(successString) == -1:
returnError += ' Output does not have expected success string.\n Expected String: "{0}"'.format(successString)
returnValue = False
elif failureString is not None and not f_output.find(failureString) == -1:
returnError += ' Output has expected failure string.\n Expected String: "{0}"'.format(failureString)
returnValue = False
return returnValue, returnError
@classmethod
def executeCommand(cls, launchCommand, retcode=None, callback=None,
timeout=3600, successRegex=None, failureRegex=None,
successString=None, failureString=None, workingDir=None,
comm=1, commands=[], debug=False, stderr=STDERR_TMP,
print_command=False):
"""
Launches the tool based on the params
Only one of 5, 6, 7, 8 must be given
Parameters:
1. launchCommand (str): command to be executed.
2. retcode (int): Expected return code from tool.
3. callback (cb): Callback to take tool output
Callback should have the following prototype:
def callback(buffer):
4. timeout (int): Time in which tool should finish
5. successRegex (str): Regex in output stream signifying success.
6. failureRegex (str): Regex in output stream signifying failure.
7. successString (str): String in output stream signifying success.
8. failureString (str): String in output stream signifying failure.
Returns:
1. returnValue (bool): True if all validations were successful
2. returnError (str): Failure if any
"""
if len([value for value in [successRegex, failureRegex, successString, failureString] if value is not None]) > 1:
raise AttributeError('Only one of successRegex, failureRegex, successString, failureString must be given')
# Log the running process nicely
if type(launchCommand) is not list:
cmd = launchCommand.split()
else:
cmd = launchCommand
# Setup default callback
cb = callback
if callback is cls.CBDefault:
cb = lambda msg: logger.info(msg, raw=True)
cb('\n---------------------------------------------------------------\n\n')
# Launch the tool
tool = cls(launchCommand, timeout=timeout,
workingDir=workingDir, comm=comm, commands=commands, debug=debug,
callback=cb, stderr=stderr)
if print_command:
tool.printCommand()
f_timeout, f_retcode, f_output = tool.launchTool()
if callback is cls.CBDefault:
cb('\n---------------------------------------------------------------\n\n')
if print_command:
tool.printFinish(cmd)
returnValue, returnError = tool.validateOutput(
retcode, f_retcode, f_output,
successRegex, failureRegex,
successString, failureString,
cmd)
return returnValue, returnError, f_timeout, f_retcode, f_output
@classmethod
def simpleExecuteCommand(cls, launchCommand):
"""
Launches the tool based on the params
Parameters:
1. launchCommand (str): command to be executed.
Returns:
1. returnValue (bool): True if all validations were successful
2. returnError (str): Failure if any
"""
# Log the running process nicely
if type(launchCommand) is not list:
launchCommand = launchCommand.split()
# Launch the tool
retcode, output, error = 0, '', ''
try:
process = subprocess.Popen(launchCommand, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
process.wait()
retcode = process.returncode
output = process.stdout.read()
error = process.stderr.read()
except Exception as e:
retcode = -1
error = str(e)
return retcode, output, error | 0.452294 | 0.096877 |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.sankey import Sankey
fig = plt.figure(figsize=(8, 12))
ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[],
                     title="Rankine Power Cycle: Example 8.6 from Moran and Shapiro\n"
                     + "\x22Fundamentals of Engineering Thermodynamics\x22, 6th ed., 2008")
# Enthalpy flow rates for the cycle's streams, taken from the example
# cited in the title; indices 0-18 are referenced by the add() calls below.
Hdot = [260.431, 35.078, 180.794, 221.115, 22.700,
        142.361, 10.193, 10.210, 43.670, 44.312,
        68.631, 10.758, 10.758, 0.017, 0.642,
        232.121, 44.559, 100.613, 132.168]  # MW
# scale=1/Hdot[0] normalizes all flow widths against the largest flow.
sankey = Sankey(ax=ax, format='%.3G', unit=' MW', gap=0.5, scale=1.0/Hdot[0])
# Each add() draws one component of the cycle; prior=<diagram index> and
# connect=(i, j) glue flow i of the new component onto flow j of that
# earlier diagram.  Positive flows enter, negative flows leave.
sankey.add(patchlabel='\n\nPump 1', rotation=90, facecolor='#37c959',
           flows=[Hdot[13], Hdot[6], -Hdot[7]],
           labels=['Shaft power', '', None],
           pathlengths=[0.4, 0.883, 0.25],
           orientations=[1, -1, 0])
sankey.add(patchlabel='\n\nOpen\nheater', facecolor='#37c959',
           flows=[Hdot[11], Hdot[7], Hdot[4], -Hdot[8]],
           labels=[None, '', None, None],
           pathlengths=[0.25, 0.25, 1.93, 0.25],
           orientations=[1, 0, -1, 0], prior=0, connect=(2, 1))
sankey.add(patchlabel='\n\nPump 2', facecolor='#37c959',
           flows=[Hdot[14], Hdot[8], -Hdot[9]],
           labels=['Shaft power', '', None],
           pathlengths=[0.4, 0.25, 0.25],
           orientations=[1, 0, 0], prior=1, connect=(3, 1))
sankey.add(patchlabel='Closed\nheater', trunklength=2.914, fc='#37c959',
           flows=[Hdot[9], Hdot[1], -Hdot[11], -Hdot[10]],
           pathlengths=[0.25, 1.543, 0.25, 0.25],
           labels=['', '', None, None],
           orientations=[0, -1, 1, -1], prior=2, connect=(2, 0))
sankey.add(patchlabel='Trap', facecolor='#37c959', trunklength=5.102,
           flows=[Hdot[11], -Hdot[12]],
           labels=['\n', None],
           pathlengths=[1.0, 1.01],
           orientations=[1, 1], prior=3, connect=(2, 0))
sankey.add(patchlabel='Steam\ngenerator', facecolor='#ff5555',
           flows=[Hdot[15], Hdot[10], Hdot[2], -Hdot[3], -Hdot[0]],
           labels=['Heat rate', '', '', None, None],
           pathlengths=0.25,
           orientations=[1, 0, -1, -1, -1], prior=3, connect=(3, 1))
sankey.add(patchlabel='\n\n\nTurbine 1', facecolor='#37c959',
           flows=[Hdot[0], -Hdot[16], -Hdot[1], -Hdot[2]],
           labels=['', None, None, None],
           pathlengths=[0.25, 0.153, 1.543, 0.25],
           orientations=[0, 1, -1, -1], prior=5, connect=(4, 0))
sankey.add(patchlabel='\n\n\nReheat', facecolor='#37c959',
           flows=[Hdot[2], -Hdot[2]],
           labels=[None, None],
           pathlengths=[0.725, 0.25],
           orientations=[-1, 0], prior=6, connect=(3, 0))
sankey.add(patchlabel='Turbine 2', trunklength=3.212, facecolor='#37c959',
           flows=[Hdot[3], Hdot[16], -Hdot[5], -Hdot[4], -Hdot[17]],
           labels=[None, 'Shaft power', None, '', 'Shaft power'],
           pathlengths=[0.751, 0.15, 0.25, 1.93, 0.25],
           orientations=[0, -1, 0, -1, 1], prior=6, connect=(1, 1))
sankey.add(patchlabel='Condenser', facecolor='#58b1fa', trunklength=1.764,
           flows=[Hdot[5], -Hdot[18], -Hdot[6]],
           labels=['', 'Heat rate', None],
           pathlengths=[0.45, 0.25, 0.883],
           orientations=[-1, 1, 0], prior=8, connect=(2, 0))
diagrams = sankey.finish()
# Embolden and size the patch labels and flow labels on every sub-diagram.
for diagram in diagrams:
    diagram.text.set_fontweight('bold')
    diagram.text.set_fontsize('10')
    for text in diagram.texts:
        text.set_fontsize('10')
# Notice that the explicit connections are handled automatically, but the
# implicit ones currently are not. The lengths of the paths and the trunks
# must be adjusted manually, and that is a bit tricky.
plt.show() | examples/api/sankey_demo_rankine.py | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.sankey import Sankey
fig = plt.figure(figsize=(8, 12))
ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[],
title="Rankine Power Cycle: Example 8.6 from Moran and Shapiro\n"
+ "\x22Fundamentals of Engineering Thermodynamics\x22, 6th ed., 2008")
Hdot = [260.431, 35.078, 180.794, 221.115, 22.700,
142.361, 10.193, 10.210, 43.670, 44.312,
68.631, 10.758, 10.758, 0.017, 0.642,
232.121, 44.559, 100.613, 132.168] # MW
sankey = Sankey(ax=ax, format='%.3G', unit=' MW', gap=0.5, scale=1.0/Hdot[0])
sankey.add(patchlabel='\n\nPump 1', rotation=90, facecolor='#37c959',
flows=[Hdot[13], Hdot[6], -Hdot[7]],
labels=['Shaft power', '', None],
pathlengths=[0.4, 0.883, 0.25],
orientations=[1, -1, 0])
sankey.add(patchlabel='\n\nOpen\nheater', facecolor='#37c959',
flows=[Hdot[11], Hdot[7], Hdot[4], -Hdot[8]],
labels=[None, '', None, None],
pathlengths=[0.25, 0.25, 1.93, 0.25],
orientations=[1, 0, -1, 0], prior=0, connect=(2, 1))
sankey.add(patchlabel='\n\nPump 2', facecolor='#37c959',
flows=[Hdot[14], Hdot[8], -Hdot[9]],
labels=['Shaft power', '', None],
pathlengths=[0.4, 0.25, 0.25],
orientations=[1, 0, 0], prior=1, connect=(3, 1))
sankey.add(patchlabel='Closed\nheater', trunklength=2.914, fc='#37c959',
flows=[Hdot[9], Hdot[1], -Hdot[11], -Hdot[10]],
pathlengths=[0.25, 1.543, 0.25, 0.25],
labels=['', '', None, None],
orientations=[0, -1, 1, -1], prior=2, connect=(2, 0))
sankey.add(patchlabel='Trap', facecolor='#37c959', trunklength=5.102,
flows=[Hdot[11], -Hdot[12]],
labels=['\n', None],
pathlengths=[1.0, 1.01],
orientations=[1, 1], prior=3, connect=(2, 0))
sankey.add(patchlabel='Steam\ngenerator', facecolor='#ff5555',
flows=[Hdot[15], Hdot[10], Hdot[2], -Hdot[3], -Hdot[0]],
labels=['Heat rate', '', '', None, None],
pathlengths=0.25,
orientations=[1, 0, -1, -1, -1], prior=3, connect=(3, 1))
sankey.add(patchlabel='\n\n\nTurbine 1', facecolor='#37c959',
flows=[Hdot[0], -Hdot[16], -Hdot[1], -Hdot[2]],
labels=['', None, None, None],
pathlengths=[0.25, 0.153, 1.543, 0.25],
orientations=[0, 1, -1, -1], prior=5, connect=(4, 0))
sankey.add(patchlabel='\n\n\nReheat', facecolor='#37c959',
flows=[Hdot[2], -Hdot[2]],
labels=[None, None],
pathlengths=[0.725, 0.25],
orientations=[-1, 0], prior=6, connect=(3, 0))
sankey.add(patchlabel='Turbine 2', trunklength=3.212, facecolor='#37c959',
flows=[Hdot[3], Hdot[16], -Hdot[5], -Hdot[4], -Hdot[17]],
labels=[None, 'Shaft power', None, '', 'Shaft power'],
pathlengths=[0.751, 0.15, 0.25, 1.93, 0.25],
orientations=[0, -1, 0, -1, 1], prior=6, connect=(1, 1))
sankey.add(patchlabel='Condenser', facecolor='#58b1fa', trunklength=1.764,
flows=[Hdot[5], -Hdot[18], -Hdot[6]],
labels=['', 'Heat rate', None],
pathlengths=[0.45, 0.25, 0.883],
orientations=[-1, 1, 0], prior=8, connect=(2, 0))
diagrams = sankey.finish()
for diagram in diagrams:
diagram.text.set_fontweight('bold')
diagram.text.set_fontsize('10')
for text in diagram.texts:
text.set_fontsize('10')
# Notice that the explicit connections are handled automatically, but the
# implicit ones currently are not. The lengths of the paths and the trunks
# must be adjusted manually, and that is a bit tricky.
plt.show() | 0.582372 | 0.619443 |
import requests
import json
import sys
import random
class ReddeApi:
    """Minimal client for the Redde Online mobile-money API (receive and
    cashout endpoints)."""

    receive_url = "https://api.reddeonline.com/v1/receive"
    cashout_url = "https://api.reddeonline.com/v1/cashout"

    def __init__(self, api_key, app_id):
        self.apikey = api_key
        self.appid = app_id

    def clientReferenceNumber(self, stringLength=6):
        """Generate a number of fixed length."""
        numbers = "0123456789"
        return ''.join(random.choice(numbers) for i in range(stringLength))

    def randomClientID(self, stringLength=6):
        """Generate a random alpha numeric of fixed length."""
        alphaNumeric = "0123456789ABCDEFGHIJKLMNPQRSTUVWXYZ"
        return ''.join(random.choice(alphaNumeric) for i in range(stringLength))

    def api_request(self, headers, url, params, http_call='post'):
        """POST *params* as JSON to *url* and return the decoded response.

        Returns None when the request failed, the status is not 200, or
        the body carries an 'error' key.  A generic RequestException still
        terminates the process (sys.exit), matching the original behavior.
        """
        if http_call != 'post':
            raise ValueError('Invalid http_call parameter')
        # BUG FIX: 'response' was referenced after except-blocks that never
        # assigned it (connection/timeout errors fell through), raising an
        # unrelated NameError.  Initialize it and bail out when no response
        # was obtained.
        response = None
        try:
            response = requests.post(
                url, json=params, headers=headers, timeout=3)
            response.raise_for_status()
        except requests.exceptions.HTTPError as httpErr:
            print("Http Error:", httpErr)
        except requests.exceptions.ConnectionError as connErr:
            print("Error Connecting:", connErr)
        except requests.exceptions.Timeout as timeOutErr:
            print("Timeout Error:", timeOutErr)
        except requests.exceptions.RequestException as reqErr:
            print("Something Else:", reqErr)
            sys.exit(1)
        if response is None:
            print("Check this error", "no response")
            return None
        try:
            result = response.json()
        except ValueError:
            result = {'error': 'No JSON content returned'}
        if response.status_code != 200 or 'error' in result:
            print("Check this error", response.status_code)
        else:
            return result

    def _transaction_payload(self, amount, walletnumber, client_ref, client_id, network):
        """Build the payload shared by sendMoney and receiveMoney."""
        return {
            'amount': amount,
            'appid': self.appid,
            'clientreference': client_ref,
            'clienttransid': client_id,
            'description': 'Registered Member',
            'nickname': 'wigal',
            'paymentoption': network,
            'vouchercode': '',
            'walletnumber': walletnumber
        }

    def _headers(self):
        """Build the JSON + API-key request headers."""
        return {
            'Content-Type': "application/json;charset=UTF-8",
            'ApiKey': self.apikey
        }

    def sendMoney(self, amount, walletnumber, client_ref, client_id, network):
        """Cash out *amount* to *walletnumber* over *network*."""
        payload = self._transaction_payload(amount, walletnumber, client_ref, client_id, network)
        data = self.api_request(self._headers(), self.cashout_url, payload, 'post')
        return data

    def receiveMoney(self, amount, walletnumber, client_ref, client_id, network):
        """Request *amount* from *walletnumber* over *network*."""
        payload = self._transaction_payload(amount, walletnumber, client_ref, client_id, network)
        data = self.api_request(self._headers(), self.receive_url, payload, 'post')
        return data
import json
import sys
import random
class ReddeApi:
receive_url = "https://api.reddeonline.com/v1/receive"
cashout_url = "https://api.reddeonline.com/v1/cashout"
def __init__(self, api_key, app_id):
self.apikey = api_key
self.appid = app_id
"""Generate a number of fixed length """
def clientReferenceNumber(self, stringLength=6):
numbers = "0123456789"
return ''.join(random.choice(numbers) for i in range(stringLength))
"""Generate a random alpha numeric of fixed length """
def randomClientID(self, stringLength=6):
alphaNumeric = "0123456789ABCDEFGHIJKLMNPQRSTUVWXYZ"
return ''.join(random.choice(alphaNumeric) for i in range(stringLength))
def api_request(self, headers, url, params, http_call='post'):
if http_call == 'post':
try:
response = requests.post(
url, json=params, headers=headers, timeout=3)
response.raise_for_status()
except requests.exceptions.HTTPError as httpErr:
print("Http Error:", httpErr)
except requests.exceptions.ConnectionError as connErr:
print("Error Connecting:", connErr)
except requests.exceptions.Timeout as timeOutErr:
print("Timeout Error:", timeOutErr)
except requests.exceptions.RequestException as reqErr:
print("Something Else:", reqErr)
sys.exit(1)
else:
raise ValueError('Invalid http_call parameter')
try:
result = response.json()
except ValueError:
result = {'error': 'No JSON content returned'}
if response.status_code != 200 or 'error' in result:
print("Check this error", response.status_code)
else:
return result
def sendMoney(self, amount, walletnumber, client_ref, client_id, network):
headers = {
'Content-Type': "application/json;charset=UTF-8",
'ApiKey': self.apikey
}
payload = {
'amount': amount,
'appid': self.appid,
'clientreference': client_ref,
'clienttransid': client_id,
'description': 'Registered Member',
'nickname': 'wigal',
'paymentoption': network,
'vouchercode': '',
'walletnumber': walletnumber
}
data = self.api_request(headers, self.cashout_url, payload, 'post')
return data
def receiveMoney(self, amount, walletnumber, client_ref, client_id, network):
headers = {
'Content-Type': "application/json;charset=UTF-8",
'ApiKey': self.apikey
}
payload = {
'amount': amount,
'appid': self.appid,
'clientreference': client_ref,
'clienttransid': client_id,
'description': 'Registered Member',
'nickname': 'wigal',
'paymentoption': network,
'vouchercode': '',
'walletnumber': walletnumber
}
data = self.api_request(headers, self.receive_url, payload, 'post')
return data | 0.150809 | 0.097048 |
from web.controllers.api import route_api
from flask import request,jsonify,g
from common.models.food.FoodCat import FoodCat
from common.models.food.Food import Food
from common.models.member.MemberCart import MemberCart
from common.models.member.MemberComments import MemberComments
from common.models.member.Member import Member
from common.libs.UrlManager import UrlManager
from common.libs.Helper import get_current_date,get_dict_filter_field,select_filter_obj
from application import app,db
from sqlalchemy import or_
@route_api.route("/food/index" )
def foodIndex():
    """Home feed: every active food category plus a banner built from the
    three best-selling foods."""
    resp = { 'code':200 ,'msg':'操作成功~','data':{} }
    # Categories, heaviest weight first, with a synthetic "all" entry.
    cat_list = FoodCat.query.filter_by(status=1).order_by(FoodCat.weight.desc()).all()
    data_cat_list = [{'id': 0, 'name': "全部"}]
    data_cat_list.extend({'id': cat.id, 'name': cat.name} for cat in (cat_list or []))
    resp['data']['cat_list'] = data_cat_list
    # Banner: top-3 foods by total sales count.
    food_list = Food.query.filter_by(status=1)\
        .order_by(Food.total_count.desc(), Food.id.desc()).limit(3).all()
    resp['data']['banner_list'] = [
        {'id': food.id, 'pic_url': UrlManager.build_image_url(food.main_image)}
        for food in (food_list or [])
    ]
    return jsonify( resp )
@route_api.route("/food/search" )
def foodSearch():
    """Paged food search, filtered by an optional category id and keyword."""
    resp = {'code': 200, 'msg': '操作成功~', 'data': {}}
    req = request.values
    # BUG FIX: int(...) on a malformed query parameter raised ValueError
    # (an unhandled HTTP 500); fall back to the defaults instead.
    try:
        cat_id = int(req['cat_id']) if 'cat_id' in req else 0
    except ValueError:
        cat_id = 0
    mix_kw = str(req['mix_kw']) if 'mix_kw' in req else ''
    try:
        p = int(req['p']) if 'p' in req else 1
    except ValueError:
        p = 1
    if p < 1:
        p = 1
    page_size = 10
    offset = (p - 1) * page_size
    query = Food.query.filter_by(status=1)
    if cat_id > 0:
        query = query.filter_by(cat_id=cat_id)
    if mix_kw:
        # Match the keyword against either the food name or its tags.
        rule = or_(Food.name.ilike("%{0}%".format(mix_kw)), Food.tags.ilike("%{0}%".format(mix_kw)))
        query = query.filter(rule)
    food_list = query.order_by(Food.total_count.desc(), Food.id.desc())\
        .offset(offset).limit(page_size).all()
    data_food_list = []
    if food_list:
        for item in food_list:
            data_food_list.append({
                'id': item.id,
                'name': "%s" % (item.name),
                'price': str(item.price),
                'min_price': str(item.price),
                'pic_url': UrlManager.build_image_url(item.main_image)
            })
    resp['data']['list'] = data_food_list
    # has_more drives the client's infinite-scroll: 0 once a short page is hit.
    resp['data']['has_more'] = 0 if len(data_food_list) < page_size else 1
    return jsonify(resp)
@route_api.route("/food/info" )
def foodInfo():
    """Detail page for one food item, plus the current member's cart size."""
    resp = {'code': 200, 'msg': '操作成功~', 'data': {}}
    req = request.values
    # BUG FIX: guard the int() conversion so a malformed id returns the
    # normal "off the shelf" response instead of an HTTP 500.
    try:
        id = int(req['id']) if 'id' in req else 0
    except ValueError:
        id = 0
    food_info = Food.query.filter_by(id=id).first()
    if not food_info or not food_info.status:
        resp['code'] = -1
        resp['msg'] = "美食已下架"
        return jsonify(resp)
    # g.member_info is populated by the auth layer; anonymous users get an
    # empty cart count.
    member_info = g.member_info
    cart_number = 0
    if member_info:
        cart_number = MemberCart.query.filter_by(member_id=member_info.id).count()
    resp['data']['info'] = {
        "id": food_info.id,
        "name": food_info.name,
        "summary": food_info.summary,
        "total_count": food_info.total_count,
        "comment_count": food_info.comment_count,
        'main_image': UrlManager.build_image_url(food_info.main_image),
        "price": str(food_info.price),
        "stock": food_info.stock,
        "pics": [UrlManager.build_image_url(food_info.main_image)]
    }
    resp['data']['cart_number'] = cart_number
    return jsonify(resp)
@route_api.route("/food/comments")
def foodComments():
    """Latest 5 comments mentioning food *id*, plus the total match count."""
    resp = {'code': 200, 'msg': '操作成功~', 'data': {}}
    req = request.values
    # BUG FIX: guard the int() conversion against malformed ids.
    try:
        id = int(req['id']) if 'id' in req else 0
    except ValueError:
        id = 0
    # NOTE(review): this assumes food_ids embeds ids wrapped in underscores
    # (e.g. "_12_"); beware that '_' is a single-character wildcard in SQL
    # LIKE/ILIKE, so it is not escaped here -- TODO confirm intended matching.
    query = MemberComments.query.filter(MemberComments.food_ids.ilike("%_{0}_%".format(id)))
    # FIX: renamed the local from 'list', which shadowed the builtin.
    comment_list = query.order_by(MemberComments.id.desc()).limit(5).all()
    data_list = []
    if comment_list:
        # Resolve all commenting members in one query.
        member_map = get_dict_filter_field(Member, Member.id, "id", select_filter_obj(comment_list, "member_id"))
        for item in comment_list:
            # Skip comments whose author no longer resolves.
            if item.member_id not in member_map:
                continue
            tmp_member_info = member_map[item.member_id]
            data_list.append({
                'score': item.score_desc,
                'date': item.created_time.strftime("%Y-%m-%d %H:%M:%S"),
                "content": item.content,
                "user": {
                    'nickname': tmp_member_info.nickname,
                    'avatar_url': tmp_member_info.avatar,
                }
            })
    resp['data']['list'] = data_list
    resp['data']['count'] = query.count()
    return jsonify(resp)
from flask import request,jsonify,g
from common.models.food.FoodCat import FoodCat
from common.models.food.Food import Food
from common.models.member.MemberCart import MemberCart
from common.models.member.MemberComments import MemberComments
from common.models.member.Member import Member
from common.libs.UrlManager import UrlManager
from common.libs.Helper import get_current_date,get_dict_filter_field,select_filter_obj
from application import app,db
from sqlalchemy import or_
@route_api.route("/food/index" )
def foodIndex():
resp = { 'code':200 ,'msg':'操作成功~','data':{} }
cat_list = FoodCat.query.filter_by( status = 1 ).order_by( FoodCat.weight.desc() ).all()
data_cat_list = []
data_cat_list.append({
'id': 0,
'name': "全部"
})
if cat_list:
for item in cat_list:
tmp_data = {
'id':item.id,
'name':item.name
}
data_cat_list.append( tmp_data )
resp['data']['cat_list'] = data_cat_list
food_list = Food.query.filter_by( status = 1 )\
.order_by( Food.total_count.desc(),Food.id.desc() ).limit(3).all()
data_food_list = []
if food_list:
for item in food_list:
tmp_data = {
'id':item.id,
'pic_url':UrlManager.build_image_url(item.main_image)
}
data_food_list.append( tmp_data )
resp['data']['banner_list'] = data_food_list
return jsonify( resp )
@route_api.route("/food/search" )
def foodSearch():
resp = {'code': 200, 'msg': '操作成功~', 'data': {}}
req = request.values
cat_id = int( req['cat_id'] ) if 'cat_id' in req else 0
mix_kw = str(req['mix_kw']) if 'mix_kw' in req else ''
p = int( req['p'] ) if 'p' in req else 1
if p < 1:
p = 1
page_size = 10
offset = ( p - 1 ) * page_size
query = Food.query.filter_by(status=1 )
if cat_id > 0:
query = query.filter_by(cat_id = cat_id)
if mix_kw:
rule = or_(Food.name.ilike("%{0}%".format(mix_kw)), Food.tags.ilike("%{0}%".format(mix_kw)))
query = query.filter(rule)
food_list = query.order_by(Food.total_count.desc(), Food.id.desc())\
.offset( offset ).limit( page_size ).all()
data_food_list = []
if food_list:
for item in food_list:
tmp_data = {
'id': item.id,
'name': "%s"%( item.name ),
'price': str( item.price ),
'min_price':str( item.price ),
'pic_url': UrlManager.build_image_url(item.main_image)
}
data_food_list.append(tmp_data)
resp['data']['list'] = data_food_list
resp['data']['has_more'] = 0 if len( data_food_list ) < page_size else 1
return jsonify(resp)
@route_api.route("/food/info" )
def foodInfo():
resp = {'code': 200, 'msg': '操作成功~', 'data': {}}
req = request.values
id = int(req['id']) if 'id' in req else 0
food_info = Food.query.filter_by( id = id ).first()
if not food_info or not food_info.status :
resp['code'] = -1
resp['msg'] = "美食已下架"
return jsonify(resp)
member_info = g.member_info
cart_number = 0
if member_info:
cart_number = MemberCart.query.filter_by( member_id = member_info.id ).count()
resp['data']['info'] = {
"id":food_info.id,
"name":food_info.name,
"summary":food_info.summary,
"total_count":food_info.total_count,
"comment_count":food_info.comment_count,
'main_image':UrlManager.build_image_url(food_info.main_image),
"price":str( food_info.price ),
"stock":food_info.stock,
"pics":[UrlManager.build_image_url(food_info.main_image)]
}
resp['data']['cart_number'] = cart_number
return jsonify(resp)
@route_api.route("/food/comments")
def foodComments():
resp = {'code': 200, 'msg': '操作成功~', 'data': {}}
req = request.values
id = int(req['id']) if 'id' in req else 0
query = MemberComments.query.filter( MemberComments.food_ids.ilike("%_{0}_%".format(id)) )
list = query.order_by( MemberComments.id.desc() ).limit(5).all()
data_list = []
if list:
member_map = get_dict_filter_field(Member, Member.id, "id", select_filter_obj(list, "member_id"))
for item in list:
if item.member_id not in member_map:
continue
tmp_member_info = member_map[ item.member_id ]
tmp_data = {
'score':item.score_desc,
'date': item.created_time.strftime("%Y-%m-%d %H:%M:%S"),
"content":item.content,
"user":{
'nickname':tmp_member_info.nickname,
'avatar_url':tmp_member_info.avatar,
}
}
data_list.append( tmp_data )
resp['data']['list'] = data_list
resp['data']['count'] = query.count()
return jsonify(resp) | 0.220846 | 0.113309 |
import asyncio
import datetime
import os
import sys
from typing import Any
from pydantic.fields import Field
from pydantic.main import BaseModel
from blacksmith import (
AsyncClientFactory,
AsyncHTTPAuthorizationMiddleware,
AsyncStaticDiscovery,
PathInfoField,
QueryStringField,
Request,
Response,
register,
)
class Dates(BaseModel):
# We ignore fields we don't want to consume
# created_at: datetime.datetime
# registry_created_at: datetime.datetime
registry_ends_at: datetime.datetime
# updated_at: datetime.datetime
class ListDomainResponse(Response):
# id: uuid.UUID
# fqdn: str
# In this example we rename a field in the response using `alias`
name: str = Field(str, alias="fqdn_unicode")
owner: str
dates: Dates
class DomainParam(Request):
name: str = PathInfoField(str)
class CollectionDomainParam(Request):
per_page: int = QueryStringField(2)
register(
"gandi",
"domain",
"gandi",
"v5",
path="/domain/domains/{name}",
contract={
# In this example we don't provide the response model,
# so we receive a dict for the json response
"GET": (DomainParam, None),
},
collection_path="/domain/domains",
collection_contract={
"GET": (CollectionDomainParam, ListDomainResponse),
},
)
async def main():
if "GANDIV5_API_KEY" not in os.environ:
print("Missing environment var GANDIV5_API_KEY", file=sys.stderr)
sys.exit(-1)
apikey = os.environ["GANDIV5_API_KEY"]
sd = AsyncStaticDiscovery({("gandi", "v5"): "https://api.gandi.net/v5"})
auth = AsyncHTTPAuthorizationMiddleware("Apikey", apikey)
cli: AsyncClientFactory[ListDomainResponse, Any] = AsyncClientFactory(
sd, timeout=(10.0)
).add_middleware(auth)
api = await cli("gandi")
if len(sys.argv) == 2:
domain = sys.argv[1]
domain = await api.domain.get(DomainParam(name=domain))
print(domain.json)
else:
domains = await api.domain.collection_get()
print(domains.meta)
print()
for domain in domains:
print(domain)
print(domain.name)
asyncio.run(main()) | examples/gandi_domain.py | import asyncio
import datetime
import os
import sys
from typing import Any
from pydantic.fields import Field
from pydantic.main import BaseModel
from blacksmith import (
AsyncClientFactory,
AsyncHTTPAuthorizationMiddleware,
AsyncStaticDiscovery,
PathInfoField,
QueryStringField,
Request,
Response,
register,
)
class Dates(BaseModel):
# We ignore fields we don't want to consume
# created_at: datetime.datetime
# registry_created_at: datetime.datetime
registry_ends_at: datetime.datetime
# updated_at: datetime.datetime
class ListDomainResponse(Response):
# id: uuid.UUID
# fqdn: str
# In this example we rename a field in the response using `alias`
name: str = Field(str, alias="fqdn_unicode")
owner: str
dates: Dates
class DomainParam(Request):
name: str = PathInfoField(str)
class CollectionDomainParam(Request):
per_page: int = QueryStringField(2)
register(
"gandi",
"domain",
"gandi",
"v5",
path="/domain/domains/{name}",
contract={
# In this example we don't provide the response model,
# so we receive a dict for the json response
"GET": (DomainParam, None),
},
collection_path="/domain/domains",
collection_contract={
"GET": (CollectionDomainParam, ListDomainResponse),
},
)
async def main():
if "GANDIV5_API_KEY" not in os.environ:
print("Missing environment var GANDIV5_API_KEY", file=sys.stderr)
sys.exit(-1)
apikey = os.environ["GANDIV5_API_KEY"]
sd = AsyncStaticDiscovery({("gandi", "v5"): "https://api.gandi.net/v5"})
auth = AsyncHTTPAuthorizationMiddleware("Apikey", apikey)
cli: AsyncClientFactory[ListDomainResponse, Any] = AsyncClientFactory(
sd, timeout=(10.0)
).add_middleware(auth)
api = await cli("gandi")
if len(sys.argv) == 2:
domain = sys.argv[1]
domain = await api.domain.get(DomainParam(name=domain))
print(domain.json)
else:
domains = await api.domain.collection_get()
print(domains.meta)
print()
for domain in domains:
print(domain)
print(domain.name)
asyncio.run(main()) | 0.452052 | 0.128963 |
from django.conf import settings
from django.db import models, transaction
from django.db.models import Sum
from django.utils import timezone
from rest_framework.exceptions import ParseError, PermissionDenied
from lego.apps.content.models import Content
from lego.apps.polls.permissions import PollPermissionHandler
from lego.apps.users.models import User
from lego.utils.models import BasisModel
def get_time_delta():
return timezone.now() + timezone.timedelta(weeks=52)
class Poll(Content, BasisModel):
description = models.TextField(blank=True)
results_hidden = models.BooleanField(default=False)
valid_until = models.DateTimeField(default=get_time_delta)
answered_users = models.ManyToManyField(User, related_name="answered_polls")
def __str__(self):
return self.title
@property
def total_votes(self):
return Option.objects.filter(poll=self).aggregate(total_votes=Sum("votes"))[
"total_votes"
]
def get_has_answered(self, user):
return self.answered_users.all().filter(pk=user.id).exists()
def get_absolute_url(self):
return f"{settings.FRONTEND_URL}/polls/{self.id}/"
@transaction.atomic
def vote(self, user, option_id):
option = self.options.get(pk=option_id)
if not option:
raise ParseError(detail="Option not found.")
if self.answered_users.filter(pk=user.id).exists():
raise PermissionDenied(detail="Cannot answer a poll twice.")
if self.valid_until < timezone.now():
raise PermissionDenied(detail="Poll is not valid at this time.")
option.votes += 1
option.save()
self.answered_users.add(user)
self.save()
class Meta:
permission_handler = PollPermissionHandler()
get_latest_by = "created_at"
class Option(BasisModel):
name = models.CharField(max_length=30)
votes = models.IntegerField(default=0)
poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name="options")
def __str__(self):
return self.name
class Meta:
ordering = ["-votes", "pk"] | lego/apps/polls/models.py | from django.conf import settings
from django.db import models, transaction
from django.db.models import Sum
from django.utils import timezone
from rest_framework.exceptions import ParseError, PermissionDenied
from lego.apps.content.models import Content
from lego.apps.polls.permissions import PollPermissionHandler
from lego.apps.users.models import User
from lego.utils.models import BasisModel
def get_time_delta():
return timezone.now() + timezone.timedelta(weeks=52)
class Poll(Content, BasisModel):
description = models.TextField(blank=True)
results_hidden = models.BooleanField(default=False)
valid_until = models.DateTimeField(default=get_time_delta)
answered_users = models.ManyToManyField(User, related_name="answered_polls")
def __str__(self):
return self.title
@property
def total_votes(self):
return Option.objects.filter(poll=self).aggregate(total_votes=Sum("votes"))[
"total_votes"
]
def get_has_answered(self, user):
return self.answered_users.all().filter(pk=user.id).exists()
def get_absolute_url(self):
return f"{settings.FRONTEND_URL}/polls/{self.id}/"
@transaction.atomic
def vote(self, user, option_id):
option = self.options.get(pk=option_id)
if not option:
raise ParseError(detail="Option not found.")
if self.answered_users.filter(pk=user.id).exists():
raise PermissionDenied(detail="Cannot answer a poll twice.")
if self.valid_until < timezone.now():
raise PermissionDenied(detail="Poll is not valid at this time.")
option.votes += 1
option.save()
self.answered_users.add(user)
self.save()
class Meta:
permission_handler = PollPermissionHandler()
get_latest_by = "created_at"
class Option(BasisModel):
name = models.CharField(max_length=30)
votes = models.IntegerField(default=0)
poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name="options")
def __str__(self):
return self.name
class Meta:
ordering = ["-votes", "pk"] | 0.701815 | 0.083591 |
import nltk
nltk.download('punkt')
import pickle
from collections import Counter
import configparser
import pandas as pd
import os
import json
config = configparser.ConfigParser()
config.read('config.ini')
class Vocabulary(object):
"""Simple vocabulary wrapper."""
def __init__(self):
self.word2idx = {}
self.idx2word = {}
self.idx = 0
def add_word(self, word):
if not word in self.word2idx:
self.word2idx[word] = self.idx
self.idx2word[self.idx] = word
self.idx += 1
def __call__(self, word):
if not word in self.word2idx:
return self.word2idx['<unk>']
return self.word2idx[word]
def __len__(self):
return len(self.word2idx)
def build_vocab(df, threshold):
"""Build a simple vocabulary wrapper."""
counter = Counter()
ids = df.index
for i, id in enumerate(ids):
caption = str(df.loc[id]['caption'])
tokens = nltk.tokenize.word_tokenize(caption.lower())
counter.update(tokens)
if (i+1) % 1000 == 0:
print("[{}/{}] Tokenized the captions.".format(i+1, len(ids)))
# If the word frequency is less than 'threshold', then the word is discarded.
words = [word for word, cnt in counter.items() if cnt >= threshold]
# Create a vocab wrapper and add some special tokens.
vocab = Vocabulary()
vocab.add_word('<pad>')
vocab.add_word('<start>')
vocab.add_word('<end>')
vocab.add_word('<unk>')
# Add the words to the vocabulary.
for i, word in enumerate(words):
vocab.add_word(word)
return vocab
def main(caption_path, vocab_path, threshold):
with open(caption_path + 'captions_train2014.json') as file:
file = json.load(file)
captions = pd.DataFrame(file['annotations']).set_index('id')
vocab = build_vocab(df=captions, threshold=threshold)
with open(vocab_path, 'wb') as f:
pickle.dump(vocab, f)
print("Total vocabulary size: {}".format(len(vocab)))
print("Saved the vocabulary wrapper to '{}'".format(vocab_path))
if __name__ == '__main__':
coco_data_dir = config['MSCOCO']['data-path']
coco_annotations = coco_data_dir+'annotations/'
out_dir = config['ALL']['output_dir']
main(
caption_path=coco_annotations,
vocab_path=out_dir+'vocab.pkl',
threshold=4
) | tutorials/03-advanced/image_captioning/build_vocab.py | import nltk
nltk.download('punkt')
import pickle
from collections import Counter
import configparser
import pandas as pd
import os
import json
config = configparser.ConfigParser()
config.read('config.ini')
class Vocabulary(object):
"""Simple vocabulary wrapper."""
def __init__(self):
self.word2idx = {}
self.idx2word = {}
self.idx = 0
def add_word(self, word):
if not word in self.word2idx:
self.word2idx[word] = self.idx
self.idx2word[self.idx] = word
self.idx += 1
def __call__(self, word):
if not word in self.word2idx:
return self.word2idx['<unk>']
return self.word2idx[word]
def __len__(self):
return len(self.word2idx)
def build_vocab(df, threshold):
"""Build a simple vocabulary wrapper."""
counter = Counter()
ids = df.index
for i, id in enumerate(ids):
caption = str(df.loc[id]['caption'])
tokens = nltk.tokenize.word_tokenize(caption.lower())
counter.update(tokens)
if (i+1) % 1000 == 0:
print("[{}/{}] Tokenized the captions.".format(i+1, len(ids)))
# If the word frequency is less than 'threshold', then the word is discarded.
words = [word for word, cnt in counter.items() if cnt >= threshold]
# Create a vocab wrapper and add some special tokens.
vocab = Vocabulary()
vocab.add_word('<pad>')
vocab.add_word('<start>')
vocab.add_word('<end>')
vocab.add_word('<unk>')
# Add the words to the vocabulary.
for i, word in enumerate(words):
vocab.add_word(word)
return vocab
def main(caption_path, vocab_path, threshold):
with open(caption_path + 'captions_train2014.json') as file:
file = json.load(file)
captions = pd.DataFrame(file['annotations']).set_index('id')
vocab = build_vocab(df=captions, threshold=threshold)
with open(vocab_path, 'wb') as f:
pickle.dump(vocab, f)
print("Total vocabulary size: {}".format(len(vocab)))
print("Saved the vocabulary wrapper to '{}'".format(vocab_path))
if __name__ == '__main__':
coco_data_dir = config['MSCOCO']['data-path']
coco_annotations = coco_data_dir+'annotations/'
out_dir = config['ALL']['output_dir']
main(
caption_path=coco_annotations,
vocab_path=out_dir+'vocab.pkl',
threshold=4
) | 0.623606 | 0.110904 |
from typing import Optional
import pytest
from pants.build_graph.address import Address, AddressInput, InvalidSpecPath, InvalidTargetName
def assert_address_input_parsed(
spec: str,
*,
path_component: str,
target_component: Optional[str],
relative_to: Optional[str] = None
) -> None:
ai = AddressInput.parse(spec, relative_to=relative_to)
assert ai.path_component == path_component
if target_component is None:
assert ai.target_component is None
else:
assert ai.target_component == target_component
def test_address_input_parse_spec() -> None:
assert_address_input_parsed("a/b/c", path_component="a/b/c", target_component=None)
assert_address_input_parsed("a/b/c:c", path_component="a/b/c", target_component="c")
# The relative_to has no effect because we have a path.
assert_address_input_parsed(
"a/b/c", relative_to="here", path_component="a/b/c", target_component=None
)
# Relative address spec
assert_address_input_parsed(":c", path_component="", target_component="c")
assert_address_input_parsed(
":c", relative_to="here", path_component="here", target_component="c"
)
assert_address_input_parsed("//:c", relative_to="here", path_component="", target_component="c")
# Absolute spec
assert_address_input_parsed("//a/b/c", path_component="a/b/c", target_component=None)
assert_address_input_parsed("//a/b/c:c", path_component="a/b/c", target_component="c")
assert_address_input_parsed("//:c", path_component="", target_component="c")
assert_address_input_parsed("//:c", relative_to="here", path_component="", target_component="c")
# Files
assert_address_input_parsed("f.txt", path_component="f.txt", target_component=None)
assert_address_input_parsed("//f.txt", path_component="f.txt", target_component=None)
assert_address_input_parsed("a/b/c.txt", path_component="a/b/c.txt", target_component=None)
assert_address_input_parsed("a/b/c.txt:tgt", path_component="a/b/c.txt", target_component="tgt")
assert_address_input_parsed(
"a/b/c.txt:../tgt", path_component="a/b/c.txt", target_component="../tgt"
)
assert_address_input_parsed(
"//a/b/c.txt:tgt", path_component="a/b/c.txt", target_component="tgt"
)
assert_address_input_parsed(
"./f.txt", relative_to="here", path_component="here/f.txt", target_component=None
)
assert_address_input_parsed(
"./subdir/f.txt:tgt",
relative_to="here",
path_component="here/subdir/f.txt",
target_component="tgt",
)
assert_address_input_parsed(
"subdir/f.txt", relative_to="here", path_component="subdir/f.txt", target_component=None
)
def test_address_input_parse_bad_path_component() -> None:
def assert_bad_path_component(spec: str) -> None:
with pytest.raises(InvalidSpecPath):
AddressInput.parse(spec)
assert_bad_path_component("..")
assert_bad_path_component(".")
assert_bad_path_component("//..")
assert_bad_path_component("//.")
assert_bad_path_component("a/.")
assert_bad_path_component("a/..")
assert_bad_path_component("../a")
assert_bad_path_component("a/../a")
assert_bad_path_component("a/:a")
assert_bad_path_component("a/b/:b")
# Absolute paths are banned.
assert_bad_path_component("/a")
assert_bad_path_component("///a")
def test_address_bad_target_component() -> None:
def assert_bad_target_component(spec: str) -> None:
with pytest.raises(InvalidTargetName):
repr(AddressInput.parse(spec).dir_to_address())
# Missing target_component
assert_bad_target_component("")
assert_bad_target_component("a:")
assert_bad_target_component("a::")
assert_bad_target_component("//")
assert_bad_target_component("//:")
# Banned chars
assert_bad_target_component("//:@t")
assert_bad_target_component("//:!t")
assert_bad_target_component("//:?t")
assert_bad_target_component("//:=t")
assert_bad_target_component(r"a:b\c")
def test_subproject_spec() -> None:
# Ensure that a spec referring to a subproject gets assigned to that subproject properly.
def parse(spec, relative_to):
return AddressInput.parse(
spec,
relative_to=relative_to,
subproject_roots=["subprojectA", "path/to/subprojectB"],
)
# Ensure that a spec in subprojectA is determined correctly.
ai = parse("src/python/alib", "subprojectA/src/python")
assert "subprojectA/src/python/alib" == ai.path_component
assert ai.target_component is None
ai = parse("src/python/alib:jake", "subprojectA/src/python/alib")
assert "subprojectA/src/python/alib" == ai.path_component
assert "jake" == ai.target_component
ai = parse(":rel", "subprojectA/src/python/alib")
assert "subprojectA/src/python/alib" == ai.path_component
assert "rel" == ai.target_component
# Ensure that a spec in subprojectB, which is more complex, is correct.
ai = parse("src/python/blib", "path/to/subprojectB/src/python")
assert "path/to/subprojectB/src/python/blib" == ai.path_component
assert ai.target_component is None
ai = parse("src/python/blib:jane", "path/to/subprojectB/src/python/blib")
assert "path/to/subprojectB/src/python/blib" == ai.path_component
assert "jane" == ai.target_component
ai = parse(":rel", "path/to/subprojectB/src/python/blib")
assert "path/to/subprojectB/src/python/blib" == ai.path_component
assert "rel" == ai.target_component
# Ensure that a spec in the parent project is not mapped.
ai = parse("src/python/parent", "src/python")
assert "src/python/parent" == ai.path_component
assert ai.target_component is None
ai = parse("src/python/parent:george", "src/python")
assert "src/python/parent" == ai.path_component
assert "george" == ai.target_component
ai = parse(":rel", "src/python/parent")
assert "src/python/parent" == ai.path_component
assert "rel" == ai.target_component
def test_address_input_from_file() -> None:
assert AddressInput("a/b/c.txt", target_component=None).file_to_address() == Address(
"a/b", relative_file_path="c.txt"
)
assert AddressInput("a/b/c.txt", target_component="original").file_to_address() == Address(
"a/b", target_name="original", relative_file_path="c.txt"
)
assert AddressInput("a/b/c.txt", target_component="../original").file_to_address() == Address(
"a", target_name="original", relative_file_path="b/c.txt"
)
assert AddressInput(
"a/b/c.txt", target_component="../../original"
).file_to_address() == Address("", target_name="original", relative_file_path="a/b/c.txt")
# These refer to targets "below" the file, which is illegal.
with pytest.raises(InvalidTargetName):
AddressInput("f.txt", target_component="subdir/tgt").file_to_address()
with pytest.raises(InvalidTargetName):
AddressInput("f.txt", target_component="subdir../tgt").file_to_address()
with pytest.raises(InvalidTargetName):
AddressInput("a/f.txt", target_component="../a/original").file_to_address()
# Top-level files must include a target_name.
with pytest.raises(InvalidTargetName):
AddressInput("f.txt").file_to_address()
assert AddressInput("f.txt", target_component="tgt").file_to_address() == Address(
"", relative_file_path="f.txt", target_name="tgt"
)
def test_address_input_from_dir() -> None:
assert AddressInput("a").dir_to_address() == Address("a")
assert AddressInput("a", target_component="b").dir_to_address() == Address("a", target_name="b")
def test_address_normalize_target_name() -> None:
assert Address("a/b/c", target_name="c") == Address("a/b/c", target_name=None)
assert Address("a/b/c", target_name="c", relative_file_path="f.txt") == Address(
"a/b/c", target_name=None, relative_file_path="f.txt"
)
def test_address_validate_build_in_spec_path() -> None:
with pytest.raises(InvalidSpecPath):
Address("a/b/BUILD")
with pytest.raises(InvalidSpecPath):
Address("a/b/BUILD.ext")
with pytest.raises(InvalidSpecPath):
Address("a/b/BUILD", target_name="foo")
# It's fine to use BUILD in the relative_file_path or target_name, though.
assert Address("a/b", relative_file_path="BUILD").spec == "a/b/BUILD"
assert Address("a/b", target_name="BUILD").spec == "a/b:BUILD"
def test_address_equality() -> None:
assert "Not really an address" != Address("a/b", target_name="c")
assert Address("a/b", target_name="c") == Address("a/b", target_name="c")
assert Address("a/b", target_name="c") != Address("a/b", target_name="d")
assert Address("a/b", target_name="c") != Address("a/z", target_name="c")
assert Address("a/b", target_name="c") != Address(
"a/b", relative_file_path="c", target_name="original"
)
assert Address("a/b", relative_file_path="c", target_name="original") == Address(
"a/b", relative_file_path="c", target_name="original"
)
def test_address_spec() -> None:
def assert_spec(address: Address, *, expected: str, expected_path_spec: str) -> None:
assert address.spec == expected
assert str(address) == expected
assert address.path_safe_spec == expected_path_spec
assert_spec(Address("a/b"), expected="a/b", expected_path_spec="a.b")
assert_spec(Address("a/b", target_name="c"), expected="a/b:c", expected_path_spec="a.b.c")
assert_spec(Address("", target_name="root"), expected="//:root", expected_path_spec=".root")
assert_spec(
Address("a/b", relative_file_path="c.txt", target_name="c"),
expected="a/b/c.txt:c",
expected_path_spec="a.b.c.txt.c",
)
assert_spec(
Address("", relative_file_path="root.txt", target_name="root"),
expected="//root.txt:root",
expected_path_spec=".root.txt.root",
)
assert_spec(
Address("a/b", relative_file_path="subdir/c.txt", target_name="original"),
expected="a/b/subdir/c.txt:../original",
expected_path_spec="a.b.subdir.c.txt@original",
)
assert_spec(
Address("a/b", relative_file_path="c.txt"),
expected="a/b/c.txt",
expected_path_spec="a.b.c.txt",
)
assert_spec(
Address("a/b", relative_file_path="subdir/f.txt"),
expected="a/b/subdir/f.txt:../b",
expected_path_spec="a.b.subdir.f.txt@b",
)
assert_spec(
Address("a/b", relative_file_path="subdir/dir2/f.txt"),
expected="a/b/subdir/dir2/f.txt:../../b",
expected_path_spec="a.b.subdir.dir2.f.txt@@b",
)
def test_address_maybe_convert_to_build_target() -> None:
def assert_converts_to_base_target(generated_addr: Address, *, expected: Address) -> None:
assert generated_addr.maybe_convert_to_build_target() == expected
assert_converts_to_base_target(
Address("a/b", relative_file_path="c.txt", target_name="c"),
expected=Address("a/b", target_name="c"),
)
assert_converts_to_base_target(
Address("a/b", relative_file_path="c.txt"), expected=Address("a/b")
)
assert_converts_to_base_target(
Address("a/b", relative_file_path="subdir/f.txt"), expected=Address("a/b")
)
assert_converts_to_base_target(
Address("a/b", relative_file_path="subdir/f.txt", target_name="original"),
expected=Address("a/b", target_name="original"),
)
def assert_base_target_noops(addr: Address) -> None:
assert addr.maybe_convert_to_build_target() is addr
assert_base_target_noops(Address("a/b", target_name="c"))
assert_base_target_noops(Address("a/b"))
def test_address_spec_to_address_input() -> None:
"""This smoke tests that Address.spec <-> AddressInput.parse() is idempotent."""
def assert_conversion(address: Address, *, expected: AddressInput) -> None:
assert AddressInput.parse(address.spec) == expected
assert_conversion(Address("a/b/c"), expected=AddressInput("a/b/c"))
assert_conversion(Address("a/b/c", target_name="tgt"), expected=AddressInput("a/b/c", "tgt"))
assert_conversion(
Address("a/b/c", relative_file_path="f.txt"), expected=AddressInput("a/b/c/f.txt")
)
assert_conversion(
Address("a/b/c", relative_file_path="f.txt", target_name="tgt"),
expected=AddressInput("a/b/c/f.txt", "tgt"),
)
assert_conversion(Address("", target_name="tgt"), expected=AddressInput("", "tgt"))
assert_conversion(
Address("", relative_file_path="f.txt", target_name="tgt"),
expected=AddressInput("f.txt", "tgt"),
)
assert_conversion(
Address("a/b/c", relative_file_path="subdir/f.txt"),
expected=AddressInput("a/b/c/subdir/f.txt", "../c"),
)
assert_conversion(
Address("a/b/c", relative_file_path="subdir/f.txt", target_name="tgt"),
expected=AddressInput("a/b/c/subdir/f.txt", "../tgt"),
) | src/python/pants/build_graph/address_test.py |
from typing import Optional
import pytest
from pants.build_graph.address import Address, AddressInput, InvalidSpecPath, InvalidTargetName
def assert_address_input_parsed(
spec: str,
*,
path_component: str,
target_component: Optional[str],
relative_to: Optional[str] = None
) -> None:
ai = AddressInput.parse(spec, relative_to=relative_to)
assert ai.path_component == path_component
if target_component is None:
assert ai.target_component is None
else:
assert ai.target_component == target_component
def test_address_input_parse_spec() -> None:
assert_address_input_parsed("a/b/c", path_component="a/b/c", target_component=None)
assert_address_input_parsed("a/b/c:c", path_component="a/b/c", target_component="c")
# The relative_to has no effect because we have a path.
assert_address_input_parsed(
"a/b/c", relative_to="here", path_component="a/b/c", target_component=None
)
# Relative address spec
assert_address_input_parsed(":c", path_component="", target_component="c")
assert_address_input_parsed(
":c", relative_to="here", path_component="here", target_component="c"
)
assert_address_input_parsed("//:c", relative_to="here", path_component="", target_component="c")
# Absolute spec
assert_address_input_parsed("//a/b/c", path_component="a/b/c", target_component=None)
assert_address_input_parsed("//a/b/c:c", path_component="a/b/c", target_component="c")
assert_address_input_parsed("//:c", path_component="", target_component="c")
assert_address_input_parsed("//:c", relative_to="here", path_component="", target_component="c")
# Files
assert_address_input_parsed("f.txt", path_component="f.txt", target_component=None)
assert_address_input_parsed("//f.txt", path_component="f.txt", target_component=None)
assert_address_input_parsed("a/b/c.txt", path_component="a/b/c.txt", target_component=None)
assert_address_input_parsed("a/b/c.txt:tgt", path_component="a/b/c.txt", target_component="tgt")
assert_address_input_parsed(
"a/b/c.txt:../tgt", path_component="a/b/c.txt", target_component="../tgt"
)
assert_address_input_parsed(
"//a/b/c.txt:tgt", path_component="a/b/c.txt", target_component="tgt"
)
assert_address_input_parsed(
"./f.txt", relative_to="here", path_component="here/f.txt", target_component=None
)
assert_address_input_parsed(
"./subdir/f.txt:tgt",
relative_to="here",
path_component="here/subdir/f.txt",
target_component="tgt",
)
assert_address_input_parsed(
"subdir/f.txt", relative_to="here", path_component="subdir/f.txt", target_component=None
)
def test_address_input_parse_bad_path_component() -> None:
def assert_bad_path_component(spec: str) -> None:
with pytest.raises(InvalidSpecPath):
AddressInput.parse(spec)
assert_bad_path_component("..")
assert_bad_path_component(".")
assert_bad_path_component("//..")
assert_bad_path_component("//.")
assert_bad_path_component("a/.")
assert_bad_path_component("a/..")
assert_bad_path_component("../a")
assert_bad_path_component("a/../a")
assert_bad_path_component("a/:a")
assert_bad_path_component("a/b/:b")
# Absolute paths are banned.
assert_bad_path_component("/a")
assert_bad_path_component("///a")
def test_address_bad_target_component() -> None:
def assert_bad_target_component(spec: str) -> None:
with pytest.raises(InvalidTargetName):
repr(AddressInput.parse(spec).dir_to_address())
# Missing target_component
assert_bad_target_component("")
assert_bad_target_component("a:")
assert_bad_target_component("a::")
assert_bad_target_component("//")
assert_bad_target_component("//:")
# Banned chars
assert_bad_target_component("//:@t")
assert_bad_target_component("//:!t")
assert_bad_target_component("//:?t")
assert_bad_target_component("//:=t")
assert_bad_target_component(r"a:b\c")
def test_subproject_spec() -> None:
# Ensure that a spec referring to a subproject gets assigned to that subproject properly.
def parse(spec, relative_to):
return AddressInput.parse(
spec,
relative_to=relative_to,
subproject_roots=["subprojectA", "path/to/subprojectB"],
)
# Ensure that a spec in subprojectA is determined correctly.
ai = parse("src/python/alib", "subprojectA/src/python")
assert "subprojectA/src/python/alib" == ai.path_component
assert ai.target_component is None
ai = parse("src/python/alib:jake", "subprojectA/src/python/alib")
assert "subprojectA/src/python/alib" == ai.path_component
assert "jake" == ai.target_component
ai = parse(":rel", "subprojectA/src/python/alib")
assert "subprojectA/src/python/alib" == ai.path_component
assert "rel" == ai.target_component
# Ensure that a spec in subprojectB, which is more complex, is correct.
ai = parse("src/python/blib", "path/to/subprojectB/src/python")
assert "path/to/subprojectB/src/python/blib" == ai.path_component
assert ai.target_component is None
ai = parse("src/python/blib:jane", "path/to/subprojectB/src/python/blib")
assert "path/to/subprojectB/src/python/blib" == ai.path_component
assert "jane" == ai.target_component
ai = parse(":rel", "path/to/subprojectB/src/python/blib")
assert "path/to/subprojectB/src/python/blib" == ai.path_component
assert "rel" == ai.target_component
# Ensure that a spec in the parent project is not mapped.
ai = parse("src/python/parent", "src/python")
assert "src/python/parent" == ai.path_component
assert ai.target_component is None
ai = parse("src/python/parent:george", "src/python")
assert "src/python/parent" == ai.path_component
assert "george" == ai.target_component
ai = parse(":rel", "src/python/parent")
assert "src/python/parent" == ai.path_component
assert "rel" == ai.target_component
def test_address_input_from_file() -> None:
assert AddressInput("a/b/c.txt", target_component=None).file_to_address() == Address(
"a/b", relative_file_path="c.txt"
)
assert AddressInput("a/b/c.txt", target_component="original").file_to_address() == Address(
"a/b", target_name="original", relative_file_path="c.txt"
)
assert AddressInput("a/b/c.txt", target_component="../original").file_to_address() == Address(
"a", target_name="original", relative_file_path="b/c.txt"
)
assert AddressInput(
"a/b/c.txt", target_component="../../original"
).file_to_address() == Address("", target_name="original", relative_file_path="a/b/c.txt")
# These refer to targets "below" the file, which is illegal.
with pytest.raises(InvalidTargetName):
AddressInput("f.txt", target_component="subdir/tgt").file_to_address()
with pytest.raises(InvalidTargetName):
AddressInput("f.txt", target_component="subdir../tgt").file_to_address()
with pytest.raises(InvalidTargetName):
AddressInput("a/f.txt", target_component="../a/original").file_to_address()
# Top-level files must include a target_name.
with pytest.raises(InvalidTargetName):
AddressInput("f.txt").file_to_address()
assert AddressInput("f.txt", target_component="tgt").file_to_address() == Address(
"", relative_file_path="f.txt", target_name="tgt"
)
def test_address_input_from_dir() -> None:
assert AddressInput("a").dir_to_address() == Address("a")
assert AddressInput("a", target_component="b").dir_to_address() == Address("a", target_name="b")
def test_address_normalize_target_name() -> None:
assert Address("a/b/c", target_name="c") == Address("a/b/c", target_name=None)
assert Address("a/b/c", target_name="c", relative_file_path="f.txt") == Address(
"a/b/c", target_name=None, relative_file_path="f.txt"
)
def test_address_validate_build_in_spec_path() -> None:
with pytest.raises(InvalidSpecPath):
Address("a/b/BUILD")
with pytest.raises(InvalidSpecPath):
Address("a/b/BUILD.ext")
with pytest.raises(InvalidSpecPath):
Address("a/b/BUILD", target_name="foo")
# It's fine to use BUILD in the relative_file_path or target_name, though.
assert Address("a/b", relative_file_path="BUILD").spec == "a/b/BUILD"
assert Address("a/b", target_name="BUILD").spec == "a/b:BUILD"
def test_address_equality() -> None:
assert "Not really an address" != Address("a/b", target_name="c")
assert Address("a/b", target_name="c") == Address("a/b", target_name="c")
assert Address("a/b", target_name="c") != Address("a/b", target_name="d")
assert Address("a/b", target_name="c") != Address("a/z", target_name="c")
assert Address("a/b", target_name="c") != Address(
"a/b", relative_file_path="c", target_name="original"
)
assert Address("a/b", relative_file_path="c", target_name="original") == Address(
"a/b", relative_file_path="c", target_name="original"
)
def test_address_spec() -> None:
def assert_spec(address: Address, *, expected: str, expected_path_spec: str) -> None:
assert address.spec == expected
assert str(address) == expected
assert address.path_safe_spec == expected_path_spec
assert_spec(Address("a/b"), expected="a/b", expected_path_spec="a.b")
assert_spec(Address("a/b", target_name="c"), expected="a/b:c", expected_path_spec="a.b.c")
assert_spec(Address("", target_name="root"), expected="//:root", expected_path_spec=".root")
assert_spec(
Address("a/b", relative_file_path="c.txt", target_name="c"),
expected="a/b/c.txt:c",
expected_path_spec="a.b.c.txt.c",
)
assert_spec(
Address("", relative_file_path="root.txt", target_name="root"),
expected="//root.txt:root",
expected_path_spec=".root.txt.root",
)
assert_spec(
Address("a/b", relative_file_path="subdir/c.txt", target_name="original"),
expected="a/b/subdir/c.txt:../original",
expected_path_spec="a.b.subdir.c.txt@original",
)
assert_spec(
Address("a/b", relative_file_path="c.txt"),
expected="a/b/c.txt",
expected_path_spec="a.b.c.txt",
)
assert_spec(
Address("a/b", relative_file_path="subdir/f.txt"),
expected="a/b/subdir/f.txt:../b",
expected_path_spec="a.b.subdir.f.txt@b",
)
assert_spec(
Address("a/b", relative_file_path="subdir/dir2/f.txt"),
expected="a/b/subdir/dir2/f.txt:../../b",
expected_path_spec="a.b.subdir.dir2.f.txt@@b",
)
def test_address_maybe_convert_to_build_target() -> None:
def assert_converts_to_base_target(generated_addr: Address, *, expected: Address) -> None:
assert generated_addr.maybe_convert_to_build_target() == expected
assert_converts_to_base_target(
Address("a/b", relative_file_path="c.txt", target_name="c"),
expected=Address("a/b", target_name="c"),
)
assert_converts_to_base_target(
Address("a/b", relative_file_path="c.txt"), expected=Address("a/b")
)
assert_converts_to_base_target(
Address("a/b", relative_file_path="subdir/f.txt"), expected=Address("a/b")
)
assert_converts_to_base_target(
Address("a/b", relative_file_path="subdir/f.txt", target_name="original"),
expected=Address("a/b", target_name="original"),
)
def assert_base_target_noops(addr: Address) -> None:
assert addr.maybe_convert_to_build_target() is addr
assert_base_target_noops(Address("a/b", target_name="c"))
assert_base_target_noops(Address("a/b"))
def test_address_spec_to_address_input() -> None:
"""This smoke tests that Address.spec <-> AddressInput.parse() is idempotent."""
def assert_conversion(address: Address, *, expected: AddressInput) -> None:
assert AddressInput.parse(address.spec) == expected
assert_conversion(Address("a/b/c"), expected=AddressInput("a/b/c"))
assert_conversion(Address("a/b/c", target_name="tgt"), expected=AddressInput("a/b/c", "tgt"))
assert_conversion(
Address("a/b/c", relative_file_path="f.txt"), expected=AddressInput("a/b/c/f.txt")
)
assert_conversion(
Address("a/b/c", relative_file_path="f.txt", target_name="tgt"),
expected=AddressInput("a/b/c/f.txt", "tgt"),
)
assert_conversion(Address("", target_name="tgt"), expected=AddressInput("", "tgt"))
assert_conversion(
Address("", relative_file_path="f.txt", target_name="tgt"),
expected=AddressInput("f.txt", "tgt"),
)
assert_conversion(
Address("a/b/c", relative_file_path="subdir/f.txt"),
expected=AddressInput("a/b/c/subdir/f.txt", "../c"),
)
assert_conversion(
Address("a/b/c", relative_file_path="subdir/f.txt", target_name="tgt"),
expected=AddressInput("a/b/c/subdir/f.txt", "../tgt"),
) | 0.886562 | 0.594993 |
import signal
import sys
import consul
from multiprocessing.managers import SyncManager
import pickle
sys.path.append('./gen-py')
from GenericServerPi import GenericPiThriftService
from GenericStruct.ttypes import ActionEnum
from ThriftException.ttypes import ThriftServiceException
from ThriftException.ttypes import ExternalEndpointUnavailable
from PhonePi.ttypes import Action
from IPhoneConnect import IPhoneConnect
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
sys.path.append('../../')
import config
import logging
import random
import statsd
stat = statsd.StatsClient(config.statsd_ip, config.statsd_port)
port = random.randint(58800, 58810)
log = logging.getLogger()
log.setLevel(logging.DEBUG)
class PhonePiThriftHandler:
# GET_LOCATION = 0
# GET_STATUS = 1
# PLAY_SOUND = 2
# LOST_MODE = 3
@stat.timer("PhonePi.handleRequest")
def handleRequest(self, input):
print("phone pi!")
try:
input_object = pickle.loads(input, fix_imports=False, encoding="ASCII", errors="strict")
output = ""
if input_object.action is Action.GET_LOCATION:
output = IPhoneConnect().getLocation(input_object)
elif input_object.action is Action.GET_STATUS:
output = IPhoneConnect().getStatus(input_object)
elif input_object.action is Action.PLAY_SOUND:
output = IPhoneConnect().playSound(input_object)
elif input_object.action is Action.LOST_MODE:
if input_object.phonenumber is None:
exception = ThriftServiceException()
exception.serviceName = 'PhonePi'
exception.message = 'Phone number required for lost phone'
raise ThriftServiceException()
output = IPhoneConnect().lostMode(input_object)
else:
output = "NOT IMPLEMENTED YET"
pickle_output = pickle.dumps(obj=output, protocol=None, fix_imports=False)
return pickle_output
except ExternalEndpointUnavailable as endPoint:
raise endPoint
except Exception as ex:
print('invalid request %s' % ex)
raise ThriftServiceException('PhonePi', 'invalid request %s' % ex)
@stat.timer("PhonePi.getDefaultModuleConfig")
def getDefaultModuleConfig(self):
default_config = "email string"
return pickle.dumps(obj=default_config, protocol=None, fix_imports=False)
@stat.timer("PhonePi.ping")
def ping(self, input):
print(input)
def get_ip():
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(('255.255.255.255', 1)) # isn't reachable intentionally
IP = s.getsockname()[0]
except:
IP = '127.0.0.1'
finally:
s.close()
return IP
def create_server():
handler = PhonePiThriftHandler()
return TServer.TSimpleServer(
GenericPiThriftService.Processor(handler),
TSocket.TServerSocket(port=port),
TTransport.TBufferedTransportFactory(),
TBinaryProtocol.TBinaryProtocolFactory()
)
def register():
log.info("register started")
c = consul.Consul(host=config.consul_ip, port=config.consul_port)
key = '%d' % ActionEnum.PHONE
c.kv.put(key, 'phone')
check = consul.Check.tcp(host=get_ip(), port=port, interval=config.consul_interval,
timeout=config.consul_timeout, deregister=unregister())
c.agent.service.register(name="phone-pi", service_id="phone-pi-%d" % port, port=port, check=check)
log.info("services: " + str(c.agent.services()))
def unregister():
log.info("unregister started")
c = consul.Consul(host=config.consul_ip, port=config.consul_port)
c.agent.service.deregister("phone-pi-%d" % port)
c.agent.service.deregister("phone-pi")
log.info("services: " + str(c.agent.services()))
def interupt_manager():
signal.signal(signal.SIGINT, signal.SIG_IGN)
def main(args=None):
manager = SyncManager()
manager.start(interupt_manager)
try:
server = create_server()
register()
server.serve()
finally:
unregister()
print('finally PhonePi shutting down')
manager.shutdown()
if __name__ == '__main__':
main() | PhonePi/src/PythonPhonePiServer.py | import signal
import sys
import consul
from multiprocessing.managers import SyncManager
import pickle
sys.path.append('./gen-py')
from GenericServerPi import GenericPiThriftService
from GenericStruct.ttypes import ActionEnum
from ThriftException.ttypes import ThriftServiceException
from ThriftException.ttypes import ExternalEndpointUnavailable
from PhonePi.ttypes import Action
from IPhoneConnect import IPhoneConnect
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
sys.path.append('../../')
import config
import logging
import random
import statsd
stat = statsd.StatsClient(config.statsd_ip, config.statsd_port)
port = random.randint(58800, 58810)
log = logging.getLogger()
log.setLevel(logging.DEBUG)
class PhonePiThriftHandler:
# GET_LOCATION = 0
# GET_STATUS = 1
# PLAY_SOUND = 2
# LOST_MODE = 3
@stat.timer("PhonePi.handleRequest")
def handleRequest(self, input):
print("phone pi!")
try:
input_object = pickle.loads(input, fix_imports=False, encoding="ASCII", errors="strict")
output = ""
if input_object.action is Action.GET_LOCATION:
output = IPhoneConnect().getLocation(input_object)
elif input_object.action is Action.GET_STATUS:
output = IPhoneConnect().getStatus(input_object)
elif input_object.action is Action.PLAY_SOUND:
output = IPhoneConnect().playSound(input_object)
elif input_object.action is Action.LOST_MODE:
if input_object.phonenumber is None:
exception = ThriftServiceException()
exception.serviceName = 'PhonePi'
exception.message = 'Phone number required for lost phone'
raise ThriftServiceException()
output = IPhoneConnect().lostMode(input_object)
else:
output = "NOT IMPLEMENTED YET"
pickle_output = pickle.dumps(obj=output, protocol=None, fix_imports=False)
return pickle_output
except ExternalEndpointUnavailable as endPoint:
raise endPoint
except Exception as ex:
print('invalid request %s' % ex)
raise ThriftServiceException('PhonePi', 'invalid request %s' % ex)
@stat.timer("PhonePi.getDefaultModuleConfig")
def getDefaultModuleConfig(self):
default_config = "email string"
return pickle.dumps(obj=default_config, protocol=None, fix_imports=False)
@stat.timer("PhonePi.ping")
def ping(self, input):
print(input)
def get_ip():
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(('255.255.255.255', 1)) # isn't reachable intentionally
IP = s.getsockname()[0]
except:
IP = '127.0.0.1'
finally:
s.close()
return IP
def create_server():
handler = PhonePiThriftHandler()
return TServer.TSimpleServer(
GenericPiThriftService.Processor(handler),
TSocket.TServerSocket(port=port),
TTransport.TBufferedTransportFactory(),
TBinaryProtocol.TBinaryProtocolFactory()
)
def register():
log.info("register started")
c = consul.Consul(host=config.consul_ip, port=config.consul_port)
key = '%d' % ActionEnum.PHONE
c.kv.put(key, 'phone')
check = consul.Check.tcp(host=get_ip(), port=port, interval=config.consul_interval,
timeout=config.consul_timeout, deregister=unregister())
c.agent.service.register(name="phone-pi", service_id="phone-pi-%d" % port, port=port, check=check)
log.info("services: " + str(c.agent.services()))
def unregister():
log.info("unregister started")
c = consul.Consul(host=config.consul_ip, port=config.consul_port)
c.agent.service.deregister("phone-pi-%d" % port)
c.agent.service.deregister("phone-pi")
log.info("services: " + str(c.agent.services()))
def interupt_manager():
signal.signal(signal.SIGINT, signal.SIG_IGN)
def main(args=None):
manager = SyncManager()
manager.start(interupt_manager)
try:
server = create_server()
register()
server.serve()
finally:
unregister()
print('finally PhonePi shutting down')
manager.shutdown()
if __name__ == '__main__':
main() | 0.200714 | 0.038939 |
import ckan.lib.helpers as h
import ckan.logic as l
import ckan.model as model
import ckan.lib.create_test_data as create_test_data
import ckan.tests.functional.base as base
import ckan.plugins as plugins
import ckan.tests.mock_plugin as mock
import ckan.lib.dictization.model_dictize as model_dictize
class MockResourcePreviewExtension(mock.MockSingletonPlugin):
plugins.implements(plugins.IResourcePreview)
def __init__(self):
from collections import defaultdict
self.calls = defaultdict(int)
def can_preview(self, data_dict):
assert(isinstance(data_dict['resource'], dict))
assert(isinstance(data_dict['package'], dict))
assert('on_same_domain' in data_dict['resource'])
self.calls['can_preview'] += 1
return data_dict['resource']['format'].lower() == 'mock'
def setup_template_variables(self, context, data_dict):
self.calls['setup_template_variables'] += 1
def preview_template(self, context, data_dict):
assert(isinstance(data_dict['resource'], dict))
assert(isinstance(data_dict['package'], dict))
self.calls['preview_templates'] += 1
return 'tests/mock_resource_preview_template.html'
class JsonMockResourcePreviewExtension(MockResourcePreviewExtension):
def can_preview(self, data_dict):
super(JsonMockResourcePreviewExtension, self).can_preview(data_dict)
return data_dict['resource']['format'].lower() == 'json'
def preview_template(self, context, data_dict):
super(JsonMockResourcePreviewExtension, self).preview_template(context, data_dict)
self.calls['preview_templates'] += 1
return 'tests/mock_json_resource_preview_template.html'
class TestPluggablePreviews(base.FunctionalTestCase):
@classmethod
def setup_class(cls):
cls.plugin = MockResourcePreviewExtension()
plugins.load(cls.plugin)
json_plugin = JsonMockResourcePreviewExtension()
plugins.load(json_plugin)
create_test_data.CreateTestData.create()
cls.package = model.Package.get('annakarenina')
cls.resource = cls.package.resources[0]
cls.url = h.url_for(controller='package',
action='resource_read',
id=cls.package.name,
resource_id=cls.resource.id)
cls.preview_url = h.url_for(controller='package',
action='resource_datapreview',
id=cls.package.id,
resource_id=cls.resource.id)
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
plugins.unload(cls.plugin)
def test_hook(self):
testpackage = self.package
resource_dict = model_dictize.resource_dictize(self.resource, {'model': model})
context = {
'model': model,
'session': model.Session,
'user': model.User.get('testsysadmin').name
}
# no preview for type "plain text"
preview_url = self.preview_url
result = self.app.get(preview_url, status=409)
assert 'No preview' in result.body, result.body
# no preview for type "ümlaut", should not fail
resource_dict['format'] = u'ümlaut'
l.action.update.resource_update(context, resource_dict)
result = self.app.get(preview_url, status=409)
assert 'No preview' in result.body, result.body
resource_dict['format'] = 'mock'
l.action.update.resource_update(context, resource_dict)
#there should be a preview for type "json"
preview_url = self.preview_url
result = self.app.get(preview_url, status=200)
assert 'mock-preview' in result.body
assert 'mock-preview.js' in result.body
assert self.plugin.calls['can_preview'] == 3, self.plugin.calls
assert self.plugin.calls['setup_template_variables'] == 1, self.plugin.calls
assert self.plugin.calls['preview_templates'] == 1, self.plugin.calls
# test whether the json preview is used
preview_url = h.url_for(controller='package',
action='resource_datapreview',
id=testpackage.id,
resource_id=testpackage.resources[1].id)
result = self.app.get(preview_url, status=200)
assert 'mock-json-preview' in result.body
assert 'mock-json-preview.js' in result.body
assert self.plugin.calls['can_preview'] == 4, self.plugin.calls
assert self.plugin.calls['setup_template_variables'] == 1, self.plugin.calls
assert self.plugin.calls['preview_templates'] == 1, self.plugin.calls
def test_iframe_is_shown(self):
result = self.app.get(self.url)
assert 'data-module="data-viewer"' in result.body, result.body
assert '<iframe' in result.body, result.body
def test_iframe_url_is_correct(self):
result = self.app.get(self.url)
assert self.preview_url in result.body, (self.preview_url, result.body) | ckan/tests/functional/test_preview_interface.py |
import ckan.lib.helpers as h
import ckan.logic as l
import ckan.model as model
import ckan.lib.create_test_data as create_test_data
import ckan.tests.functional.base as base
import ckan.plugins as plugins
import ckan.tests.mock_plugin as mock
import ckan.lib.dictization.model_dictize as model_dictize
class MockResourcePreviewExtension(mock.MockSingletonPlugin):
plugins.implements(plugins.IResourcePreview)
def __init__(self):
from collections import defaultdict
self.calls = defaultdict(int)
def can_preview(self, data_dict):
assert(isinstance(data_dict['resource'], dict))
assert(isinstance(data_dict['package'], dict))
assert('on_same_domain' in data_dict['resource'])
self.calls['can_preview'] += 1
return data_dict['resource']['format'].lower() == 'mock'
def setup_template_variables(self, context, data_dict):
self.calls['setup_template_variables'] += 1
def preview_template(self, context, data_dict):
assert(isinstance(data_dict['resource'], dict))
assert(isinstance(data_dict['package'], dict))
self.calls['preview_templates'] += 1
return 'tests/mock_resource_preview_template.html'
class JsonMockResourcePreviewExtension(MockResourcePreviewExtension):
def can_preview(self, data_dict):
super(JsonMockResourcePreviewExtension, self).can_preview(data_dict)
return data_dict['resource']['format'].lower() == 'json'
def preview_template(self, context, data_dict):
super(JsonMockResourcePreviewExtension, self).preview_template(context, data_dict)
self.calls['preview_templates'] += 1
return 'tests/mock_json_resource_preview_template.html'
class TestPluggablePreviews(base.FunctionalTestCase):
@classmethod
def setup_class(cls):
cls.plugin = MockResourcePreviewExtension()
plugins.load(cls.plugin)
json_plugin = JsonMockResourcePreviewExtension()
plugins.load(json_plugin)
create_test_data.CreateTestData.create()
cls.package = model.Package.get('annakarenina')
cls.resource = cls.package.resources[0]
cls.url = h.url_for(controller='package',
action='resource_read',
id=cls.package.name,
resource_id=cls.resource.id)
cls.preview_url = h.url_for(controller='package',
action='resource_datapreview',
id=cls.package.id,
resource_id=cls.resource.id)
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
plugins.unload(cls.plugin)
def test_hook(self):
testpackage = self.package
resource_dict = model_dictize.resource_dictize(self.resource, {'model': model})
context = {
'model': model,
'session': model.Session,
'user': model.User.get('testsysadmin').name
}
# no preview for type "plain text"
preview_url = self.preview_url
result = self.app.get(preview_url, status=409)
assert 'No preview' in result.body, result.body
# no preview for type "ümlaut", should not fail
resource_dict['format'] = u'ümlaut'
l.action.update.resource_update(context, resource_dict)
result = self.app.get(preview_url, status=409)
assert 'No preview' in result.body, result.body
resource_dict['format'] = 'mock'
l.action.update.resource_update(context, resource_dict)
#there should be a preview for type "json"
preview_url = self.preview_url
result = self.app.get(preview_url, status=200)
assert 'mock-preview' in result.body
assert 'mock-preview.js' in result.body
assert self.plugin.calls['can_preview'] == 3, self.plugin.calls
assert self.plugin.calls['setup_template_variables'] == 1, self.plugin.calls
assert self.plugin.calls['preview_templates'] == 1, self.plugin.calls
# test whether the json preview is used
preview_url = h.url_for(controller='package',
action='resource_datapreview',
id=testpackage.id,
resource_id=testpackage.resources[1].id)
result = self.app.get(preview_url, status=200)
assert 'mock-json-preview' in result.body
assert 'mock-json-preview.js' in result.body
assert self.plugin.calls['can_preview'] == 4, self.plugin.calls
assert self.plugin.calls['setup_template_variables'] == 1, self.plugin.calls
assert self.plugin.calls['preview_templates'] == 1, self.plugin.calls
def test_iframe_is_shown(self):
result = self.app.get(self.url)
assert 'data-module="data-viewer"' in result.body, result.body
assert '<iframe' in result.body, result.body
def test_iframe_url_is_correct(self):
result = self.app.get(self.url)
assert self.preview_url in result.body, (self.preview_url, result.body) | 0.622115 | 0.284741 |
from neutron_lib import exceptions as n_exc
from oslo_log import log as logging
from oslo_utils import excutils
from vmware_nsx._i18n import _
from vmware_nsx.db import db as nsx_db
from vmware_nsx.services.lbaas import base_mgr
from vmware_nsx.services.lbaas import lb_const
from vmware_nsx.services.lbaas.nsx_v3.implementation import lb_utils
from vmware_nsxlib.v3 import exceptions as nsxlib_exc
from vmware_nsxlib.v3 import utils
LOG = logging.getLogger(__name__)
class EdgeHealthMonitorManagerFromDict(base_mgr.Nsxv3LoadbalancerBaseManager):
def _build_monitor_args(self, hm):
if hm['type'] in lb_const.NSXV3_MONITOR_MAP:
monitor_type = lb_const.NSXV3_MONITOR_MAP.get(hm['type'])
else:
msg = (_('Cannot create health monitor %(monitor)s with '
'type %(type)s') % {'monitor': hm['id'],
'type': hm['type']})
raise n_exc.InvalidInput(error_message=msg)
body = {'resource_type': monitor_type,
'interval': hm['delay'],
'fall_count': hm['max_retries'],
'timeout': hm['timeout']}
if hm['type'] in [lb_const.LB_HEALTH_MONITOR_HTTP,
lb_const.LB_HEALTH_MONITOR_HTTPS]:
if hm['http_method']:
body['request_method'] = hm['http_method']
if hm['url_path']:
body['request_url'] = hm['url_path']
if hm['expected_codes']:
codes = hm['expected_codes'].split(",")
body['response_status_codes'] = [
int(code) for code in codes]
return body
def create(self, context, hm, completor):
lb_id = hm['pool']['loadbalancer_id']
pool_id = hm['pool']['id']
pool_client = self.core_plugin.nsxlib.load_balancer.pool
monitor_client = self.core_plugin.nsxlib.load_balancer.monitor
monitor_name = utils.get_name_and_uuid(hm['name'] or 'monitor',
hm['id'])
tags = lb_utils.get_tags(self.core_plugin, hm['id'],
lb_const.LB_HM_TYPE,
hm['tenant_id'], context.project_name)
monitor_body = self._build_monitor_args(hm)
try:
lb_monitor = monitor_client.create(
display_name=monitor_name, tags=tags, **monitor_body)
except nsxlib_exc.ManagerError:
with excutils.save_and_reraise_exception():
completor(success=False)
binding = nsx_db.get_nsx_lbaas_pool_binding(
context.session, lb_id, pool_id)
if binding:
lb_pool_id = binding['lb_pool_id']
try:
pool_client.add_monitor_to_pool(lb_pool_id,
lb_monitor['id'])
except nsxlib_exc.ManagerError:
completor(success=False)
msg = _('Failed to attach monitor %(monitor)s to pool '
'%(pool)s') % {'monitor': lb_monitor['id'],
'pool': lb_pool_id}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
nsx_db.add_nsx_lbaas_monitor_binding(
context.session, lb_id, pool_id, hm['id'], lb_monitor['id'],
lb_pool_id)
else:
completor(success=False)
msg = _('Failed to attach monitor %(monitor)s to pool '
'%(pool)s: NSX pool was not found on the DB') % {
'monitor': hm['id'],
'pool': pool_id}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
completor(success=True)
def update(self, context, old_hm, new_hm, completor):
lb_id = new_hm['pool']['loadbalancer_id']
pool_id = new_hm['pool']['id']
monitor_client = self.core_plugin.nsxlib.load_balancer.monitor
binding = nsx_db.get_nsx_lbaas_monitor_binding(
context.session, lb_id, pool_id, new_hm['id'])
if binding:
lb_monitor_id = binding['lb_monitor_id']
monitor_body = self._build_monitor_args(new_hm)
monitor_name = utils.get_name_and_uuid(new_hm['name'] or 'monitor',
new_hm['id'])
monitor_client.update(lb_monitor_id, display_name=monitor_name,
**monitor_body)
else:
completor(success=False)
msg = _('Failed to update monitor %(monitor)s: NSX monitor was '
'not found in DB') % {'monitor': new_hm['id'],
'pool': pool_id}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
completor(success=True)
def delete(self, context, hm, completor):
lb_id = hm['pool']['loadbalancer_id']
pool_id = hm['pool']['id']
pool_client = self.core_plugin.nsxlib.load_balancer.pool
monitor_client = self.core_plugin.nsxlib.load_balancer.monitor
binding = nsx_db.get_nsx_lbaas_monitor_binding(
context.session, lb_id, pool_id, hm['id'])
if binding:
lb_monitor_id = binding['lb_monitor_id']
lb_pool_id = binding['lb_pool_id']
try:
pool_client.remove_monitor_from_pool(lb_pool_id,
lb_monitor_id)
except nsxlib_exc.ResourceNotFound:
pass
except nsxlib_exc.ManagerError as exc:
completor(success=False)
msg = _('Failed to remove monitor %(monitor)s from pool '
'%(pool)s with exception from nsx %(exc)s)') % {
'monitor': lb_monitor_id,
'pool': lb_pool_id,
'exc': exc}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
try:
monitor_client.delete(lb_monitor_id)
except nsxlib_exc.ResourceNotFound:
pass
except nsxlib_exc.ManagerError as exc:
completor(success=False)
msg = _('Failed to delete monitor %(monitor)s from '
'backend with exception %(exc)s') % {
'monitor': lb_monitor_id,
'exc': exc}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
nsx_db.delete_nsx_lbaas_monitor_binding(context.session, lb_id,
pool_id, hm['id'])
else:
# Do not fail a delete action
pass
completor(success=True)
def delete_cascade(self, context, hm, completor):
self.delete(context, hm, completor) | vmware_nsx/services/lbaas/nsx_v3/implementation/healthmonitor_mgr.py |
from neutron_lib import exceptions as n_exc
from oslo_log import log as logging
from oslo_utils import excutils
from vmware_nsx._i18n import _
from vmware_nsx.db import db as nsx_db
from vmware_nsx.services.lbaas import base_mgr
from vmware_nsx.services.lbaas import lb_const
from vmware_nsx.services.lbaas.nsx_v3.implementation import lb_utils
from vmware_nsxlib.v3 import exceptions as nsxlib_exc
from vmware_nsxlib.v3 import utils
LOG = logging.getLogger(__name__)
class EdgeHealthMonitorManagerFromDict(base_mgr.Nsxv3LoadbalancerBaseManager):
def _build_monitor_args(self, hm):
if hm['type'] in lb_const.NSXV3_MONITOR_MAP:
monitor_type = lb_const.NSXV3_MONITOR_MAP.get(hm['type'])
else:
msg = (_('Cannot create health monitor %(monitor)s with '
'type %(type)s') % {'monitor': hm['id'],
'type': hm['type']})
raise n_exc.InvalidInput(error_message=msg)
body = {'resource_type': monitor_type,
'interval': hm['delay'],
'fall_count': hm['max_retries'],
'timeout': hm['timeout']}
if hm['type'] in [lb_const.LB_HEALTH_MONITOR_HTTP,
lb_const.LB_HEALTH_MONITOR_HTTPS]:
if hm['http_method']:
body['request_method'] = hm['http_method']
if hm['url_path']:
body['request_url'] = hm['url_path']
if hm['expected_codes']:
codes = hm['expected_codes'].split(",")
body['response_status_codes'] = [
int(code) for code in codes]
return body
def create(self, context, hm, completor):
lb_id = hm['pool']['loadbalancer_id']
pool_id = hm['pool']['id']
pool_client = self.core_plugin.nsxlib.load_balancer.pool
monitor_client = self.core_plugin.nsxlib.load_balancer.monitor
monitor_name = utils.get_name_and_uuid(hm['name'] or 'monitor',
hm['id'])
tags = lb_utils.get_tags(self.core_plugin, hm['id'],
lb_const.LB_HM_TYPE,
hm['tenant_id'], context.project_name)
monitor_body = self._build_monitor_args(hm)
try:
lb_monitor = monitor_client.create(
display_name=monitor_name, tags=tags, **monitor_body)
except nsxlib_exc.ManagerError:
with excutils.save_and_reraise_exception():
completor(success=False)
binding = nsx_db.get_nsx_lbaas_pool_binding(
context.session, lb_id, pool_id)
if binding:
lb_pool_id = binding['lb_pool_id']
try:
pool_client.add_monitor_to_pool(lb_pool_id,
lb_monitor['id'])
except nsxlib_exc.ManagerError:
completor(success=False)
msg = _('Failed to attach monitor %(monitor)s to pool '
'%(pool)s') % {'monitor': lb_monitor['id'],
'pool': lb_pool_id}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
nsx_db.add_nsx_lbaas_monitor_binding(
context.session, lb_id, pool_id, hm['id'], lb_monitor['id'],
lb_pool_id)
else:
completor(success=False)
msg = _('Failed to attach monitor %(monitor)s to pool '
'%(pool)s: NSX pool was not found on the DB') % {
'monitor': hm['id'],
'pool': pool_id}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
completor(success=True)
def update(self, context, old_hm, new_hm, completor):
lb_id = new_hm['pool']['loadbalancer_id']
pool_id = new_hm['pool']['id']
monitor_client = self.core_plugin.nsxlib.load_balancer.monitor
binding = nsx_db.get_nsx_lbaas_monitor_binding(
context.session, lb_id, pool_id, new_hm['id'])
if binding:
lb_monitor_id = binding['lb_monitor_id']
monitor_body = self._build_monitor_args(new_hm)
monitor_name = utils.get_name_and_uuid(new_hm['name'] or 'monitor',
new_hm['id'])
monitor_client.update(lb_monitor_id, display_name=monitor_name,
**monitor_body)
else:
completor(success=False)
msg = _('Failed to update monitor %(monitor)s: NSX monitor was '
'not found in DB') % {'monitor': new_hm['id'],
'pool': pool_id}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
completor(success=True)
def delete(self, context, hm, completor):
lb_id = hm['pool']['loadbalancer_id']
pool_id = hm['pool']['id']
pool_client = self.core_plugin.nsxlib.load_balancer.pool
monitor_client = self.core_plugin.nsxlib.load_balancer.monitor
binding = nsx_db.get_nsx_lbaas_monitor_binding(
context.session, lb_id, pool_id, hm['id'])
if binding:
lb_monitor_id = binding['lb_monitor_id']
lb_pool_id = binding['lb_pool_id']
try:
pool_client.remove_monitor_from_pool(lb_pool_id,
lb_monitor_id)
except nsxlib_exc.ResourceNotFound:
pass
except nsxlib_exc.ManagerError as exc:
completor(success=False)
msg = _('Failed to remove monitor %(monitor)s from pool '
'%(pool)s with exception from nsx %(exc)s)') % {
'monitor': lb_monitor_id,
'pool': lb_pool_id,
'exc': exc}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
try:
monitor_client.delete(lb_monitor_id)
except nsxlib_exc.ResourceNotFound:
pass
except nsxlib_exc.ManagerError as exc:
completor(success=False)
msg = _('Failed to delete monitor %(monitor)s from '
'backend with exception %(exc)s') % {
'monitor': lb_monitor_id,
'exc': exc}
raise n_exc.BadRequest(resource='lbaas-hm', msg=msg)
nsx_db.delete_nsx_lbaas_monitor_binding(context.session, lb_id,
pool_id, hm['id'])
else:
# Do not fail a delete action
pass
completor(success=True)
def delete_cascade(self, context, hm, completor):
self.delete(context, hm, completor) | 0.311113 | 0.063802 |
from cmd import cmd
import difflib
import gzip
import zlib
import aiohttp
@cmd.new()
async def tio(snd, ctx, piped, args):
r'''
whatis: 'executes the given code using tio.run api'
man:
usage: '{name} [lang] [code] {{input}}'
props:
- PIPED [code]
description: >
Executes the given code using tio.run api.
tio.run supports up to 680 different langs along with
their various compilers.
The output and error of the program will be passed
to the stderr and stdout respectively.
examples:
- |
{name} python3 'print(69)'
- |
{name} rust *\`\`\`rs
fn main() {{
println!("hello!");
}}
\`\`\`
'''
global langs, url
if piped and piped._out:
try:
lang, *inputs = args
except ValueError:
snd.err('language name is not passed.')
snd.exitcode(1)
return
code = piped._out[0]
if not code:
snd.err('empty code.')
snd.exitcode(1)
return
else:
try:
lang, code, *inputs = args
except ValueError:
snd.senderr('needs at least two arguments.')
snd.exitcode(1)
return
code = code.encode()
if lang not in langs:
matches = difflib.get_close_matches(lang, langs)
if not matches:
snd.senderr('invalid language name.')
snd.exitcode(1)
return
lang = matches[0]
lang = lang.encode()
inputs = '\n'.join(inputs).encode()
reqstr = make_request_string(code, lang, inputs)
data = zlib.compress(reqstr)[2:-4]
print(data)
async with aiohttp.request('post', url=url, data=data) as response:
if response.status != 200:
snd.senderr('api error.')
snd.exitcode(1)
return
response = await response.read()
#response = gzip.decompress(response)
print(response)
response = response.split(response[:16])[1:-1]
response[1] = response[1].rsplit(b"\n\n", maxsplit=1)
if len(response[1]) == 1:
stats = response[1][0]
error = b""
else:
error, stats = response[1]
snd.sendout(response[0])
snd.senderr(error)
snd.sendstuff(stats)
if error:
snd.exitcode(1)
def make_request_string(code, lang, inputs=b''):
req_string = (
b"Vlang\x001\x00"
+ lang
+ b"\x00F.code.tio\x00"
+ str(len(code)).encode()
+ b"\x00"
+ code
+ b"\x00"
)
if inputs:
req_string += (
b"F.input.tio\x00"
+ str(len(inputs)).encode()
+ b"\x00"
+ inputs
+ b"\x00R"
)
else:
req_string += b"R"
return req_string
if __name__ == '__main__':
print(repr(make_request_string('fucc', 'python')))
url = "https://tio.run/cgi-bin/run/api/"
langs = {
"05ab1e",
"1l-a",
"1l-aoi",
"2dfuck",
"2l",
"2sable",
"33",
"3var",
"4",
"7",
"99",
"a-gram",
"a-pear-tree",
"abc",
"abc-assembler",
"accbb",
"aceto",
"actually",
"ada-gnat",
"adapt",
"addpp",
"adjust",
"agda",
"agony",
"ahead",
"aheui",
"alchemist",
"algol68g",
"alice",
"alice-bob",
"aliceml",
"alphabeta",
"alphabetti-spaghetti",
"alphuck",
"alumin",
"amnesiac-from-minsk",
"ante",
"anyfix",
"apl-dyalog",
"apl-dyalog-classic",
"apl-dyalog-extended",
"apl-dzaima",
"apl-ngn",
"appleseed",
"arble",
"archway",
"archway2",
"arcyou",
"arnoldc",
"asciidots",
"asperix",
"assembly-as",
"assembly-fasm",
"assembly-gcc",
"assembly-jwasm",
"assembly-nasm",
"ats2",
"attache",
"aubergine",
"awk",
"axo",
"backhand",
"bash",
"bc",
"bctbww",
"bctbww2",
"beam",
"bean",
"beanshell",
"beatnik",
"beeswax",
"befunge",
"befunge-93-fbbi",
"befunge-93-mtfi",
"befunge-93-pyfunge",
"befunge-96-mtfi",
"befunge-97-mtfi",
"befunge-98",
"befunge-98-pyfunge",
"bit",
"bitbitjump",
"bitch",
"bitch-bith",
"bitch-shifty",
"bitchanger",
"bitcycle",
"bitwise",
"blak",
"blc",
"boo",
"boolfuck",
"bosh",
"bot-engine",
"brachylog",
"brachylog2",
"bracmat",
"braille",
"brain-flak",
"brainbash",
"brainbool",
"brainflump",
"brainfuck",
"braingolf",
"brainhack",
"brainlove",
"brainspace",
"brat",
"brian-chuck",
"broccoli",
"bubblegum",
"burlesque",
"buzzfizz",
"bwfuckery",
"c-clang",
"c-gcc",
"c-tcc",
"caboose",
"cakeml",
"calc2",
"canvas",
"cardinal",
"carol-dave",
"carrot",
"cascade",
"catholicon",
"cauliflower",
"ceres",
"ceylon",
"chain",
"charm",
"chef",
"changeling",
"chapel",
"charcoal",
"check",
"checkedc",
"cheddar",
"chip",
"cil-mono",
"cinnamon-gum",
"cixl",
"cjam",
"clam",
"clean",
"clips",
"clisp",
"clojure",
"cobol-gnu",
"cobra",
"coconut",
"coffeescript",
"coffeescript2",
"commata",
"commentator",
"commercial",
"condit",
"convex",
"cood",
"corea",
"cow",
"cpp-clang",
"cpp-gcc",
"cpy",
"cquents",
"crayon",
"cryptol",
"crystal",
"cs-core",
"cs-csc",
"cs-csi",
"cs-mono",
"cs-mono-shell",
"csl",
"cubically",
"cubix",
"curlyfrick",
"curry-pakcs",
"curry-sloth",
"cy",
"cyclone",
"d",
"d2",
"dafny",
"dart",
"dash",
"dc",
"deadfish-",
"decimal",
"delimit",
"deorst",
"dirty",
"detour",
"dg",
"dobela",
"dobela-dobcon",
"dodos",
"dreaderef",
"drive-in-window",
"dscript",
"dstack",
"eacal",
"ec",
"ecndpcaalrlp",
"ecpp-c",
"ecpp-cpp",
"dyvil",
"ed",
"egel",
"element",
"elf",
"elixir",
"elvm-ir",
"emacs-lisp",
"emmental",
"emoji",
"emoji-gramming",
"emojicode",
"emojicode6",
"emojicoder",
"emotifuck",
"emotinomicon",
"empty-nest",
"enlist",
"erlang-escript",
"es",
"esopunk",
"eta",
"euphoria3",
"euphoria4",
"evil",
"explode",
"extended-brainfuck-type-i",
"extrac",
"face",
"factor",
"false",
"fantom",
"farnsworth",
"felix",
"fernando",
"feu",
"fimpp",
"fish",
"fish-shell",
"fission",
"fission2",
"flipbit",
"floater",
"flobnar",
"foam",
"focal",
"foo",
"forget",
"forked",
"forte",
"forth-gforth",
"fortran-gfortran",
"fourier",
"fractran",
"fs-core",
"fs-mono",
"fueue",
"funciton",
"functoid",
"funky",
"funky2",
"fynyl",
"gaia",
"gaotpp",
"gap",
"gema",
"geo",
"glypho",
"glypho-shorthand",
"gnuplot",
"go",
"golfish",
"golfscript",
"granule",
"grass",
"grime",
"groovy",
"gs2",
"gwion",
"hades",
"haskell",
"haskell-gofer",
"haskell-hugs",
"haskell-literate",
"hasm",
"haxe",
"haystack",
"hbcht",
"hdbf",
"hexagony",
"hobbes",
"hodor",
"homespring",
"hspal",
"huginn",
"husk",
"hy",
"i",
"iag",
"icon",
"idris",
"incident",
"ink",
"intercal",
"io",
"j",
"jael",
"jq",
"jx",
"j-uby",
"japt",
"java-jdk",
"java-openjdk",
"javascript-babel-node",
"javascript-node",
"javascript-spidermonkey",
"javascript-v8",
"jelly",
"jellyfish",
"joy",
"julia",
"julia1x",
"julia5",
"julia6",
"julia7",
"k-kona",
"k-ngn",
"k-ok",
"kavod",
"keg",
"kipple-cipple",
"klein",
"koberi-c",
"koka",
"kotlin",
"krrp",
"ksh",
"l33t",
"labyrinth",
"lean",
"lily",
"llvm",
"lmbm",
"lnusp",
"locksmith",
"logicode",
"lolcode",
"lost",
"lower",
"lua",
"lua-luajit",
"lua-openresty",
"ly",
"m",
"m4",
"machinecode",
"make",
"malbolge",
"malbolge-unshackled",
"mamba",
"mariolang",
"mascarpone",
"mathgolf",
"mathematica",
"mathics",
"matl",
"maverick",
"maxima",
"maybelater",
"memory-gap",
"milambda",
"milky-way",
"minefriff",
"minimal-2d",
"miniml",
"minkolang",
"mirror",
"momema",
"monkeys",
"moonscript",
"moorhens",
"mornington-crescent",
"mouse",
"mouse2002",
"mouse83",
"mu6",
"mumps",
"muriel",
"my",
"my-basic",
"nameless",
"nandy",
"nial",
"nikud",
"nim",
"neim",
"neutrino",
"nhohnhehr",
"no",
"noether",
"nqt",
"ntfjc",
"numberwang",
"oasis",
"obcode",
"oberon-07",
"object-pascal-fpc",
"objective-c-clang",
"objective-c-gcc",
"occam-pi",
"ocaml",
"octave",
"odin",
"ohm",
"ohm2",
"oml",
"ooocode",
"oration",
"ork",
"orst",
"osabie",
"osh",
"pain-flak",
"paradoc",
"parenthesis-hell",
"parenthetic",
"pari-gp",
"pascal-fpc",
"path",
"pbrain",
"perl4",
"perl5",
"perl5-cperl",
"perl6",
"perl6-niecza",
"phoenix",
"phooey",
"php",
"physica",
"picolisp",
"piet",
"pike",
"pilot-pspilot",
"pilot-rpilot",
"pingpong",
"pip",
"pixiedust",
"pl",
"pony",
"positron",
"postl",
"postscript-xpost",
"powershell",
"powershell-core",
"prelude",
"premier",
"preproc",
"prolog-ciao",
"prolog-swi",
"proton",
"proton2",
"ps-core",
"pure",
"purescript",
"purple",
"pushy",
"puzzlang",
"pyke",
"pylons",
"pyn-tree",
"pyon",
"pyramid-scheme",
"pyret",
"pyt",
"pyth",
"python1",
"python2",
"python2-cython",
"python2-iron",
"python2-jython",
"python2-pypy",
"python3",
"python38pr",
"python3-cython",
"python3-pypy",
"python3-stackless",
"qqq",
"qs-core",
"quadr",
"quadrefunge-97-mtfi",
"quads",
"quarterstaff",
"quintefunge-97-mtfi",
"r",
"racket",
"rad",
"rail",
"random-brainfuck",
"rapira",
"re-direction",
"reason",
"rebol",
"rebol3",
"recursiva",
"red",
"reng",
"reregex",
"res",
"resplicate",
"reticular",
"retina",
"retina1",
"return",
"rexx",
"ring",
"rk",
"rockstar",
"roda",
"roop",
"ropy",
"rotor",
"rprogn",
"rprogn-2",
"ruby",
"runic",
"rust",
"rutger",
"sad-flak",
"sakura",
"sbf",
"scala",
"scheme-chez",
"scheme-chicken",
"scheme-gambit",
"scheme-guile",
"sed",
"sed-gnu",
"seed",
"septefunge-97-mtfi",
"seriously",
"sesos",
"set",
"sexefunge-97-mtfi",
"sfk",
"shapescript",
"shnap",
"shortc",
"shove",
"shp",
"shtriped",
"silos",
"sidef",
"silberjoder",
"simplefunge",
"simplestack",
"simplex",
"simula",
"sisal",
"sisi",
"slashes",
"smbf",
"sml-mlton",
"smol",
"snails",
"snobol4",
"snowman",
"snusp",
"snusp-bloated",
"snuspi",
"somme",
"spaced",
"spim",
"spl",
"spoon",
"sqlite",
"squirrel",
"stackcats",
"stacked",
"starfish",
"starry",
"stax",
"stencil",
"stones",
"str",
"straw",
"subskin",
"sumerian",
"supermariolang",
"superstack",
"surface",
"swap",
"swift4",
"syms",
"symbolic-python",
"taco",
"tampio",
"tampioi",
"tamsin",
"tapebagel",
"taxi",
"tcl",
"tcsh",
"templat",
"templates",
"thing",
"threead",
"thue",
"thutu",
"tidy",
"tincan",
"tinybf",
"tinylisp",
"tir",
"tis",
"toi",
"tmbww",
"transcript",
"trefunge-97-mtfi",
"trefunge-98-pyfunge",
"triangular",
"triangularity",
"trigger",
"triple-threat",
"trumpscript",
"turtled",
"typescript",
"ubasic",
"underload",
"unefunge-97-mtfi",
"unefunge-98-pyfunge",
"unicat",
"unlambda",
"uno",
"unreadable",
"ursala",
"v",
"v-fmota",
"vala",
"var",
"vb-core",
"verbosity",
"verbosity2",
"versert",
"visual-basic-net-mono",
"visual-basic-net-vbc",
"vitsy",
"vlang",
"vsl",
"wasm",
"waterfall",
"whirl",
"whispers",
"whispers2",
"whitespace",
"width",
"wierd",
"wise",
"woefully",
"wren",
"wsf",
"wumpus",
"wyalhein",
"xeec",
"xeraph",
"yaball",
"yabasic",
"yash",
"ybc",
"yup",
"z3",
"z80golf",
"zephyr",
"zig",
"zkl",
"zoidberg",
"zsh",
} | cmds/tio.py | from cmd import cmd
import difflib
import gzip
import zlib
import aiohttp
@cmd.new()
async def tio(snd, ctx, piped, args):
r'''
whatis: 'executes the given code using tio.run api'
man:
usage: '{name} [lang] [code] {{input}}'
props:
- PIPED [code]
description: >
Executes the given code using tio.run api.
tio.run supports up to 680 different langs along with
their various compilers.
The output and error of the program will be passed
to the stderr and stdout respectively.
examples:
- |
{name} python3 'print(69)'
- |
{name} rust *\`\`\`rs
fn main() {{
println!("hello!");
}}
\`\`\`
'''
global langs, url
if piped and piped._out:
try:
lang, *inputs = args
except ValueError:
snd.err('language name is not passed.')
snd.exitcode(1)
return
code = piped._out[0]
if not code:
snd.err('empty code.')
snd.exitcode(1)
return
else:
try:
lang, code, *inputs = args
except ValueError:
snd.senderr('needs at least two arguments.')
snd.exitcode(1)
return
code = code.encode()
if lang not in langs:
matches = difflib.get_close_matches(lang, langs)
if not matches:
snd.senderr('invalid language name.')
snd.exitcode(1)
return
lang = matches[0]
lang = lang.encode()
inputs = '\n'.join(inputs).encode()
reqstr = make_request_string(code, lang, inputs)
data = zlib.compress(reqstr)[2:-4]
print(data)
async with aiohttp.request('post', url=url, data=data) as response:
if response.status != 200:
snd.senderr('api error.')
snd.exitcode(1)
return
response = await response.read()
#response = gzip.decompress(response)
print(response)
response = response.split(response[:16])[1:-1]
response[1] = response[1].rsplit(b"\n\n", maxsplit=1)
if len(response[1]) == 1:
stats = response[1][0]
error = b""
else:
error, stats = response[1]
snd.sendout(response[0])
snd.senderr(error)
snd.sendstuff(stats)
if error:
snd.exitcode(1)
def make_request_string(code, lang, inputs=b''):
req_string = (
b"Vlang\x001\x00"
+ lang
+ b"\x00F.code.tio\x00"
+ str(len(code)).encode()
+ b"\x00"
+ code
+ b"\x00"
)
if inputs:
req_string += (
b"F.input.tio\x00"
+ str(len(inputs)).encode()
+ b"\x00"
+ inputs
+ b"\x00R"
)
else:
req_string += b"R"
return req_string
if __name__ == '__main__':
print(repr(make_request_string('fucc', 'python')))
url = "https://tio.run/cgi-bin/run/api/"
langs = {
"05ab1e",
"1l-a",
"1l-aoi",
"2dfuck",
"2l",
"2sable",
"33",
"3var",
"4",
"7",
"99",
"a-gram",
"a-pear-tree",
"abc",
"abc-assembler",
"accbb",
"aceto",
"actually",
"ada-gnat",
"adapt",
"addpp",
"adjust",
"agda",
"agony",
"ahead",
"aheui",
"alchemist",
"algol68g",
"alice",
"alice-bob",
"aliceml",
"alphabeta",
"alphabetti-spaghetti",
"alphuck",
"alumin",
"amnesiac-from-minsk",
"ante",
"anyfix",
"apl-dyalog",
"apl-dyalog-classic",
"apl-dyalog-extended",
"apl-dzaima",
"apl-ngn",
"appleseed",
"arble",
"archway",
"archway2",
"arcyou",
"arnoldc",
"asciidots",
"asperix",
"assembly-as",
"assembly-fasm",
"assembly-gcc",
"assembly-jwasm",
"assembly-nasm",
"ats2",
"attache",
"aubergine",
"awk",
"axo",
"backhand",
"bash",
"bc",
"bctbww",
"bctbww2",
"beam",
"bean",
"beanshell",
"beatnik",
"beeswax",
"befunge",
"befunge-93-fbbi",
"befunge-93-mtfi",
"befunge-93-pyfunge",
"befunge-96-mtfi",
"befunge-97-mtfi",
"befunge-98",
"befunge-98-pyfunge",
"bit",
"bitbitjump",
"bitch",
"bitch-bith",
"bitch-shifty",
"bitchanger",
"bitcycle",
"bitwise",
"blak",
"blc",
"boo",
"boolfuck",
"bosh",
"bot-engine",
"brachylog",
"brachylog2",
"bracmat",
"braille",
"brain-flak",
"brainbash",
"brainbool",
"brainflump",
"brainfuck",
"braingolf",
"brainhack",
"brainlove",
"brainspace",
"brat",
"brian-chuck",
"broccoli",
"bubblegum",
"burlesque",
"buzzfizz",
"bwfuckery",
"c-clang",
"c-gcc",
"c-tcc",
"caboose",
"cakeml",
"calc2",
"canvas",
"cardinal",
"carol-dave",
"carrot",
"cascade",
"catholicon",
"cauliflower",
"ceres",
"ceylon",
"chain",
"charm",
"chef",
"changeling",
"chapel",
"charcoal",
"check",
"checkedc",
"cheddar",
"chip",
"cil-mono",
"cinnamon-gum",
"cixl",
"cjam",
"clam",
"clean",
"clips",
"clisp",
"clojure",
"cobol-gnu",
"cobra",
"coconut",
"coffeescript",
"coffeescript2",
"commata",
"commentator",
"commercial",
"condit",
"convex",
"cood",
"corea",
"cow",
"cpp-clang",
"cpp-gcc",
"cpy",
"cquents",
"crayon",
"cryptol",
"crystal",
"cs-core",
"cs-csc",
"cs-csi",
"cs-mono",
"cs-mono-shell",
"csl",
"cubically",
"cubix",
"curlyfrick",
"curry-pakcs",
"curry-sloth",
"cy",
"cyclone",
"d",
"d2",
"dafny",
"dart",
"dash",
"dc",
"deadfish-",
"decimal",
"delimit",
"deorst",
"dirty",
"detour",
"dg",
"dobela",
"dobela-dobcon",
"dodos",
"dreaderef",
"drive-in-window",
"dscript",
"dstack",
"eacal",
"ec",
"ecndpcaalrlp",
"ecpp-c",
"ecpp-cpp",
"dyvil",
"ed",
"egel",
"element",
"elf",
"elixir",
"elvm-ir",
"emacs-lisp",
"emmental",
"emoji",
"emoji-gramming",
"emojicode",
"emojicode6",
"emojicoder",
"emotifuck",
"emotinomicon",
"empty-nest",
"enlist",
"erlang-escript",
"es",
"esopunk",
"eta",
"euphoria3",
"euphoria4",
"evil",
"explode",
"extended-brainfuck-type-i",
"extrac",
"face",
"factor",
"false",
"fantom",
"farnsworth",
"felix",
"fernando",
"feu",
"fimpp",
"fish",
"fish-shell",
"fission",
"fission2",
"flipbit",
"floater",
"flobnar",
"foam",
"focal",
"foo",
"forget",
"forked",
"forte",
"forth-gforth",
"fortran-gfortran",
"fourier",
"fractran",
"fs-core",
"fs-mono",
"fueue",
"funciton",
"functoid",
"funky",
"funky2",
"fynyl",
"gaia",
"gaotpp",
"gap",
"gema",
"geo",
"glypho",
"glypho-shorthand",
"gnuplot",
"go",
"golfish",
"golfscript",
"granule",
"grass",
"grime",
"groovy",
"gs2",
"gwion",
"hades",
"haskell",
"haskell-gofer",
"haskell-hugs",
"haskell-literate",
"hasm",
"haxe",
"haystack",
"hbcht",
"hdbf",
"hexagony",
"hobbes",
"hodor",
"homespring",
"hspal",
"huginn",
"husk",
"hy",
"i",
"iag",
"icon",
"idris",
"incident",
"ink",
"intercal",
"io",
"j",
"jael",
"jq",
"jx",
"j-uby",
"japt",
"java-jdk",
"java-openjdk",
"javascript-babel-node",
"javascript-node",
"javascript-spidermonkey",
"javascript-v8",
"jelly",
"jellyfish",
"joy",
"julia",
"julia1x",
"julia5",
"julia6",
"julia7",
"k-kona",
"k-ngn",
"k-ok",
"kavod",
"keg",
"kipple-cipple",
"klein",
"koberi-c",
"koka",
"kotlin",
"krrp",
"ksh",
"l33t",
"labyrinth",
"lean",
"lily",
"llvm",
"lmbm",
"lnusp",
"locksmith",
"logicode",
"lolcode",
"lost",
"lower",
"lua",
"lua-luajit",
"lua-openresty",
"ly",
"m",
"m4",
"machinecode",
"make",
"malbolge",
"malbolge-unshackled",
"mamba",
"mariolang",
"mascarpone",
"mathgolf",
"mathematica",
"mathics",
"matl",
"maverick",
"maxima",
"maybelater",
"memory-gap",
"milambda",
"milky-way",
"minefriff",
"minimal-2d",
"miniml",
"minkolang",
"mirror",
"momema",
"monkeys",
"moonscript",
"moorhens",
"mornington-crescent",
"mouse",
"mouse2002",
"mouse83",
"mu6",
"mumps",
"muriel",
"my",
"my-basic",
"nameless",
"nandy",
"nial",
"nikud",
"nim",
"neim",
"neutrino",
"nhohnhehr",
"no",
"noether",
"nqt",
"ntfjc",
"numberwang",
"oasis",
"obcode",
"oberon-07",
"object-pascal-fpc",
"objective-c-clang",
"objective-c-gcc",
"occam-pi",
"ocaml",
"octave",
"odin",
"ohm",
"ohm2",
"oml",
"ooocode",
"oration",
"ork",
"orst",
"osabie",
"osh",
"pain-flak",
"paradoc",
"parenthesis-hell",
"parenthetic",
"pari-gp",
"pascal-fpc",
"path",
"pbrain",
"perl4",
"perl5",
"perl5-cperl",
"perl6",
"perl6-niecza",
"phoenix",
"phooey",
"php",
"physica",
"picolisp",
"piet",
"pike",
"pilot-pspilot",
"pilot-rpilot",
"pingpong",
"pip",
"pixiedust",
"pl",
"pony",
"positron",
"postl",
"postscript-xpost",
"powershell",
"powershell-core",
"prelude",
"premier",
"preproc",
"prolog-ciao",
"prolog-swi",
"proton",
"proton2",
"ps-core",
"pure",
"purescript",
"purple",
"pushy",
"puzzlang",
"pyke",
"pylons",
"pyn-tree",
"pyon",
"pyramid-scheme",
"pyret",
"pyt",
"pyth",
"python1",
"python2",
"python2-cython",
"python2-iron",
"python2-jython",
"python2-pypy",
"python3",
"python38pr",
"python3-cython",
"python3-pypy",
"python3-stackless",
"qqq",
"qs-core",
"quadr",
"quadrefunge-97-mtfi",
"quads",
"quarterstaff",
"quintefunge-97-mtfi",
"r",
"racket",
"rad",
"rail",
"random-brainfuck",
"rapira",
"re-direction",
"reason",
"rebol",
"rebol3",
"recursiva",
"red",
"reng",
"reregex",
"res",
"resplicate",
"reticular",
"retina",
"retina1",
"return",
"rexx",
"ring",
"rk",
"rockstar",
"roda",
"roop",
"ropy",
"rotor",
"rprogn",
"rprogn-2",
"ruby",
"runic",
"rust",
"rutger",
"sad-flak",
"sakura",
"sbf",
"scala",
"scheme-chez",
"scheme-chicken",
"scheme-gambit",
"scheme-guile",
"sed",
"sed-gnu",
"seed",
"septefunge-97-mtfi",
"seriously",
"sesos",
"set",
"sexefunge-97-mtfi",
"sfk",
"shapescript",
"shnap",
"shortc",
"shove",
"shp",
"shtriped",
"silos",
"sidef",
"silberjoder",
"simplefunge",
"simplestack",
"simplex",
"simula",
"sisal",
"sisi",
"slashes",
"smbf",
"sml-mlton",
"smol",
"snails",
"snobol4",
"snowman",
"snusp",
"snusp-bloated",
"snuspi",
"somme",
"spaced",
"spim",
"spl",
"spoon",
"sqlite",
"squirrel",
"stackcats",
"stacked",
"starfish",
"starry",
"stax",
"stencil",
"stones",
"str",
"straw",
"subskin",
"sumerian",
"supermariolang",
"superstack",
"surface",
"swap",
"swift4",
"syms",
"symbolic-python",
"taco",
"tampio",
"tampioi",
"tamsin",
"tapebagel",
"taxi",
"tcl",
"tcsh",
"templat",
"templates",
"thing",
"threead",
"thue",
"thutu",
"tidy",
"tincan",
"tinybf",
"tinylisp",
"tir",
"tis",
"toi",
"tmbww",
"transcript",
"trefunge-97-mtfi",
"trefunge-98-pyfunge",
"triangular",
"triangularity",
"trigger",
"triple-threat",
"trumpscript",
"turtled",
"typescript",
"ubasic",
"underload",
"unefunge-97-mtfi",
"unefunge-98-pyfunge",
"unicat",
"unlambda",
"uno",
"unreadable",
"ursala",
"v",
"v-fmota",
"vala",
"var",
"vb-core",
"verbosity",
"verbosity2",
"versert",
"visual-basic-net-mono",
"visual-basic-net-vbc",
"vitsy",
"vlang",
"vsl",
"wasm",
"waterfall",
"whirl",
"whispers",
"whispers2",
"whitespace",
"width",
"wierd",
"wise",
"woefully",
"wren",
"wsf",
"wumpus",
"wyalhein",
"xeec",
"xeraph",
"yaball",
"yabasic",
"yash",
"ybc",
"yup",
"z3",
"z80golf",
"zephyr",
"zig",
"zkl",
"zoidberg",
"zsh",
} | 0.383988 | 0.243575 |
from typing import AsyncGenerator
from unittest.mock import patch
import pytest
from pywizlight import SCENES, PilotBuilder, wizlight
from pywizlight.bulb import states_match
from pywizlight.bulblibrary import BulbClass, BulbType, Features, KelvinRange
from pywizlight.discovery import discover_lights
from pywizlight.exceptions import WizLightTimeOutError
from pywizlight.tests.fake_bulb import startup_bulb
@pytest.fixture()
async def correct_bulb() -> AsyncGenerator[wizlight, None]:
shutdown, port = await startup_bulb(
module_name="ESP01_SHRGB_03", firmware_version="1.25.0"
)
bulb = wizlight(ip="127.0.0.1", port=port)
yield bulb
await bulb.async_close()
shutdown()
@pytest.fixture()
async def bad_bulb() -> AsyncGenerator[wizlight, None]:
bulb = wizlight(ip="1.1.1.1")
yield bulb
await bulb.async_close()
# Non-Error states - PilotBuilder - Turn On
@pytest.mark.asyncio
async def test_Bulb_Discovery(correct_bulb: wizlight) -> None:
"""Test discovery function."""
# Use a random available port since otherwise the
# test may fail
with patch("pywizlight.discovery.PORT", 0):
bulbs = await discover_lights(broadcast_space="192.168.178.255", wait_time=0.02)
for bulb in bulbs:
with patch("pywizlight.bulb.FIRST_SEND_INTERVAL", 0.01), patch(
"pywizlight.bulb.TIMEOUT", 0.01
):
state = await bulb.updateState()
assert state and state.get_state() is False
@pytest.mark.asyncio
async def test_PilotBuilder_state(correct_bulb: wizlight) -> None:
"""Test State."""
state = await correct_bulb.updateState()
assert state and state.get_state() is False
@pytest.mark.asyncio
async def test_PilotBuilder_colortemp(correct_bulb: wizlight) -> None:
"""Test Color Temp."""
await correct_bulb.turn_on(PilotBuilder(colortemp=2800))
state = await correct_bulb.updateState()
assert state and state.get_colortemp() == 2800
@pytest.mark.asyncio
async def test_PilotBuilder_brightness(correct_bulb: wizlight) -> None:
"""Test Brightness."""
await correct_bulb.turn_on(PilotBuilder(brightness=10))
state = await correct_bulb.updateState()
# 10% == 26 in Hex
assert state and state.get_brightness() == 26
@pytest.mark.asyncio
async def test_PilotBuilder_warm_wite(correct_bulb: wizlight) -> None:
"""Test Warm White."""
await correct_bulb.turn_on(PilotBuilder(warm_white=255))
state = await correct_bulb.updateState()
assert state and state.get_warm_white() == 255
@pytest.mark.asyncio
async def test_PilotBuilder_cold_white(correct_bulb: wizlight) -> None:
"""Test Cold White."""
await correct_bulb.turn_on(PilotBuilder(cold_white=255))
state = await correct_bulb.updateState()
assert state and state.get_cold_white() == 255
@pytest.mark.asyncio
async def test_PilotBuilder_rgb(correct_bulb: wizlight) -> None:
"""Test RGB Value."""
await correct_bulb.turn_on(PilotBuilder(rgb=(0, 128, 255)))
state = await correct_bulb.updateState()
assert state and state.get_rgb() == (0, 127, 255)
@pytest.mark.asyncio
async def test_PilotBuilder_hucolor(correct_bulb: wizlight) -> None:
"""Test RGB Value via hucolor."""
await correct_bulb.turn_on(PilotBuilder(hucolor=(100, 50)))
state = await correct_bulb.updateState()
assert state and state.get_rgb() == (88.0, 255.0, 0.0)
@pytest.mark.asyncio
async def test_setting_rgbw(correct_bulb: wizlight) -> None:
"""Test setting rgbw."""
await correct_bulb.turn_on(PilotBuilder(rgbw=(1, 2, 3, 4)))
state = await correct_bulb.updateState()
assert state and state.get_rgbw() == (1, 2, 3, 4)
@pytest.mark.asyncio
async def test_PilotBuilder_scene(correct_bulb: wizlight) -> None:
"""Test scene."""
await correct_bulb.turn_on(PilotBuilder(scene=1))
state = await correct_bulb.updateState()
assert state and state.get_scene() == SCENES[1]
state.pilotResult["schdPsetId"] = True
assert state.get_scene() == SCENES[1000]
@pytest.mark.asyncio
async def test_PilotBuilder_scene_empty(correct_bulb: wizlight) -> None:
"""Test scene with no scene set."""
state = await correct_bulb.updateState()
assert state is not None
if "sceneId" in state.pilotResult:
del state.pilotResult["sceneId"]
assert state and state.get_scene() is None
@pytest.mark.asyncio
async def test_PilotBuilder_speed(correct_bulb: wizlight) -> None:
"""Test speed."""
await correct_bulb.turn_on(PilotBuilder(scene=1, speed=50))
state = await correct_bulb.updateState()
assert state and state.get_scene() == SCENES[1]
assert state and state.get_speed() == 50
@pytest.mark.asyncio
async def test_set_speed(correct_bulb: wizlight) -> None:
"""Set speed."""
await correct_bulb.set_speed(125)
state = await correct_bulb.updateState()
assert state and state.get_speed() == 125
@pytest.mark.asyncio
async def test_get_source(correct_bulb: wizlight) -> None:
"""Test getting the source."""
state = await correct_bulb.updateState()
assert state and state.get_source() == "udp"
# ------ Error states -------------------------------------
@pytest.mark.asyncio
async def test_error_PilotBuilder_brightness(correct_bulb: wizlight) -> None:
"""Error Brightness."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(brightness=500))
@pytest.mark.asyncio
async def test_error_PilotBuilder_warm_wite(correct_bulb: wizlight) -> None:
"""Error Warm White."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(warm_white=300))
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(warm_white=-1))
@pytest.mark.asyncio
async def test_error_PilotBuilder_cold_white_upper(correct_bulb: wizlight) -> None:
"""Error Cold White."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(cold_white=300))
@pytest.mark.asyncio
async def test_error_PilotBuilder_cold_white_lower(correct_bulb: wizlight) -> None:
"""Error Cold White."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(cold_white=-1))
@pytest.mark.asyncio
async def test_error_PilotBuilder_r(correct_bulb: wizlight) -> None:
"""Error Red Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(rgb=(300, 0, 0)))
@pytest.mark.asyncio
async def test_error_PilotBuilder_green(correct_bulb: wizlight) -> None:
"""Error Green Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(rgb=(0, 300, 0)))
@pytest.mark.asyncio
async def test_error_PilotBuilder_blue(correct_bulb: wizlight) -> None:
"""Error Blue Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(rgb=(0, 0, 300)))
@pytest.mark.asyncio
async def test_error_PilotBuilder_cold_white(correct_bulb: wizlight) -> None:
"""Error Cold White Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(cold_white=9999))
@pytest.mark.asyncio
async def test_error_PilotBuilder_scene(correct_bulb: wizlight) -> None:
"""Error scene."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(scene=532))
@pytest.mark.asyncio
async def test_error_PilotBuilder_speed(correct_bulb: wizlight) -> None:
"""Error speed."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(speed=532))
@pytest.mark.asyncio
async def test_error_set_speed(correct_bulb: wizlight) -> None:
"""Error speed."""
with pytest.raises(ValueError):
await correct_bulb.set_speed(532)
@pytest.mark.asyncio
async def test_fw_version(correct_bulb: wizlight) -> None:
"""Test fetching the firmware version."""
bulb_type = await correct_bulb.get_bulbtype()
assert bulb_type == BulbType(
features=Features(
color=True, color_tmp=True, effect=True, brightness=True, dual_head=False
),
name="ESP01_SHRGB_03",
kelvin_range=KelvinRange(max=6500, min=2200),
bulb_type=BulbClass.RGB,
fw_version="1.25.0",
white_channels=1,
white_to_color_ratio=30,
)
assert correct_bulb.mac == "a8bb5006033d"
@pytest.mark.asyncio
async def test_get_mac(correct_bulb: wizlight) -> None:
"""Test getting the mac address."""
mac = await correct_bulb.getMac()
assert mac == "a8bb5006033d"
mac = await correct_bulb.getMac()
assert mac == "a8bb5006033d"
# Error states / Timout
@pytest.mark.asyncio
async def test_timeout(bad_bulb: wizlight) -> None:
"""Test the timout exception after."""
with pytest.raises(WizLightTimeOutError), patch(
"pywizlight.bulb.FIRST_SEND_INTERVAL", 0.01
), patch("pywizlight.bulb.TIMEOUT", 0.01):
await bad_bulb.getBulbConfig()
@pytest.mark.asyncio
async def test_timeout_PilotBuilder(bad_bulb: wizlight) -> None:
"""Test Timout for Result."""
# check if the bulb state it given as bool - mock ?
with pytest.raises(WizLightTimeOutError), patch(
"pywizlight.bulb.FIRST_SEND_INTERVAL", 0.01
), patch("pywizlight.bulb.TIMEOUT", 0.01):
await bad_bulb.turn_on(PilotBuilder(brightness=255))
@pytest.mark.asyncio
async def test_states_match_with_occupancy() -> None:
"""Test states match always sends pir updates but we ignore mqttCd, rssi, and ts."""
state_off_ios = {
"mac": "a8bb50d46a1c",
"rssi": -70,
"src": "ios",
"mqttCd": 0,
"ts": 1644440635,
"state": False,
"sceneId": 0,
}
state_on_ios = {
"mac": "<KEY>",
"rssi": -45,
"src": "ios",
"mqttCd": 0,
"ts": 1644440662,
"state": False,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
state_on_hb = {
"mac": "<KEY>",
"rssi": -45,
"src": "hb",
"mqttCd": 0,
"ts": 1644440642,
"state": False,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
ios_scene27 = {
"mac": "<KEY>",
"rssi": -48,
"src": "ios",
"state": True,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
ios_off = {
"mac": "<KEY>",
"rssi": -69,
"src": "ios",
"state": False,
"sceneId": 0,
}
occupancy_detected_scene27 = {
"mac": "<KEY>",
"rssi": -48,
"src": "pir",
"state": True,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
occupancy_not_detected = {
"mac": "<KEY>",
"rssi": -69,
"src": "pir",
"state": False,
"sceneId": 0,
}
assert states_match(state_off_ios, state_off_ios)
assert not states_match(state_off_ios, state_on_ios)
assert states_match(
state_on_ios, state_on_hb
) # source change does not matter unless its a PIR
assert not states_match(
ios_scene27, occupancy_detected_scene27
) # source change matters since its a PIR
assert states_match(occupancy_detected_scene27, occupancy_detected_scene27)
assert not states_match(
ios_off, occupancy_not_detected
) # source change matters since its a PIR
@pytest.mark.asyncio
async def test_states_match_with_button() -> None:
"""Test states match always sends button updates but we ignore mqttCd, rssi, and ts."""
button_on_press = {
"mac": "<KEY>",
"rssi": -48,
"src": "wfa1",
"state": True,
}
button_off_press = {
"mac": "<KEY>",
"rssi": -69,
"src": "wfa2",
"state": False,
"sceneId": 0,
}
button_1_press_state_false = {
"mac": "<KEY>",
"rssi": -69,
"src": "wfa16",
"state": False,
"sceneId": 0,
}
button_1_press_state_true = {
"mac": "<KEY>",
"rssi": -69,
"src": "wfa16",
"state": True,
"sceneId": 0,
}
ios_off = {
"mac": "<KEY>",
"rssi": -69,
"src": "ios",
"state": False,
"sceneId": 0,
}
assert not states_match(button_on_press, button_off_press)
assert not states_match(button_1_press_state_false, button_1_press_state_true)
assert not states_match(
ios_off, button_1_press_state_false
) # source change matters since its a button | pywizlight/tests/test_bulb.py | from typing import AsyncGenerator
from unittest.mock import patch
import pytest
from pywizlight import SCENES, PilotBuilder, wizlight
from pywizlight.bulb import states_match
from pywizlight.bulblibrary import BulbClass, BulbType, Features, KelvinRange
from pywizlight.discovery import discover_lights
from pywizlight.exceptions import WizLightTimeOutError
from pywizlight.tests.fake_bulb import startup_bulb
@pytest.fixture()
async def correct_bulb() -> AsyncGenerator[wizlight, None]:
shutdown, port = await startup_bulb(
module_name="ESP01_SHRGB_03", firmware_version="1.25.0"
)
bulb = wizlight(ip="127.0.0.1", port=port)
yield bulb
await bulb.async_close()
shutdown()
@pytest.fixture()
async def bad_bulb() -> AsyncGenerator[wizlight, None]:
bulb = wizlight(ip="1.1.1.1")
yield bulb
await bulb.async_close()
# Non-Error states - PilotBuilder - Turn On
@pytest.mark.asyncio
async def test_Bulb_Discovery(correct_bulb: wizlight) -> None:
"""Test discovery function."""
# Use a random available port since otherwise the
# test may fail
with patch("pywizlight.discovery.PORT", 0):
bulbs = await discover_lights(broadcast_space="192.168.178.255", wait_time=0.02)
for bulb in bulbs:
with patch("pywizlight.bulb.FIRST_SEND_INTERVAL", 0.01), patch(
"pywizlight.bulb.TIMEOUT", 0.01
):
state = await bulb.updateState()
assert state and state.get_state() is False
@pytest.mark.asyncio
async def test_PilotBuilder_state(correct_bulb: wizlight) -> None:
"""Test State."""
state = await correct_bulb.updateState()
assert state and state.get_state() is False
@pytest.mark.asyncio
async def test_PilotBuilder_colortemp(correct_bulb: wizlight) -> None:
"""Test Color Temp."""
await correct_bulb.turn_on(PilotBuilder(colortemp=2800))
state = await correct_bulb.updateState()
assert state and state.get_colortemp() == 2800
@pytest.mark.asyncio
async def test_PilotBuilder_brightness(correct_bulb: wizlight) -> None:
"""Test Brightness."""
await correct_bulb.turn_on(PilotBuilder(brightness=10))
state = await correct_bulb.updateState()
# 10% == 26 in Hex
assert state and state.get_brightness() == 26
@pytest.mark.asyncio
async def test_PilotBuilder_warm_wite(correct_bulb: wizlight) -> None:
"""Test Warm White."""
await correct_bulb.turn_on(PilotBuilder(warm_white=255))
state = await correct_bulb.updateState()
assert state and state.get_warm_white() == 255
@pytest.mark.asyncio
async def test_PilotBuilder_cold_white(correct_bulb: wizlight) -> None:
"""Test Cold White."""
await correct_bulb.turn_on(PilotBuilder(cold_white=255))
state = await correct_bulb.updateState()
assert state and state.get_cold_white() == 255
@pytest.mark.asyncio
async def test_PilotBuilder_rgb(correct_bulb: wizlight) -> None:
"""Test RGB Value."""
await correct_bulb.turn_on(PilotBuilder(rgb=(0, 128, 255)))
state = await correct_bulb.updateState()
assert state and state.get_rgb() == (0, 127, 255)
@pytest.mark.asyncio
async def test_PilotBuilder_hucolor(correct_bulb: wizlight) -> None:
"""Test RGB Value via hucolor."""
await correct_bulb.turn_on(PilotBuilder(hucolor=(100, 50)))
state = await correct_bulb.updateState()
assert state and state.get_rgb() == (88.0, 255.0, 0.0)
@pytest.mark.asyncio
async def test_setting_rgbw(correct_bulb: wizlight) -> None:
"""Test setting rgbw."""
await correct_bulb.turn_on(PilotBuilder(rgbw=(1, 2, 3, 4)))
state = await correct_bulb.updateState()
assert state and state.get_rgbw() == (1, 2, 3, 4)
@pytest.mark.asyncio
async def test_PilotBuilder_scene(correct_bulb: wizlight) -> None:
"""Test scene."""
await correct_bulb.turn_on(PilotBuilder(scene=1))
state = await correct_bulb.updateState()
assert state and state.get_scene() == SCENES[1]
state.pilotResult["schdPsetId"] = True
assert state.get_scene() == SCENES[1000]
@pytest.mark.asyncio
async def test_PilotBuilder_scene_empty(correct_bulb: wizlight) -> None:
"""Test scene with no scene set."""
state = await correct_bulb.updateState()
assert state is not None
if "sceneId" in state.pilotResult:
del state.pilotResult["sceneId"]
assert state and state.get_scene() is None
@pytest.mark.asyncio
async def test_PilotBuilder_speed(correct_bulb: wizlight) -> None:
"""Test speed."""
await correct_bulb.turn_on(PilotBuilder(scene=1, speed=50))
state = await correct_bulb.updateState()
assert state and state.get_scene() == SCENES[1]
assert state and state.get_speed() == 50
@pytest.mark.asyncio
async def test_set_speed(correct_bulb: wizlight) -> None:
"""Set speed."""
await correct_bulb.set_speed(125)
state = await correct_bulb.updateState()
assert state and state.get_speed() == 125
@pytest.mark.asyncio
async def test_get_source(correct_bulb: wizlight) -> None:
"""Test getting the source."""
state = await correct_bulb.updateState()
assert state and state.get_source() == "udp"
# ------ Error states -------------------------------------
@pytest.mark.asyncio
async def test_error_PilotBuilder_brightness(correct_bulb: wizlight) -> None:
"""Error Brightness."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(brightness=500))
@pytest.mark.asyncio
async def test_error_PilotBuilder_warm_wite(correct_bulb: wizlight) -> None:
"""Error Warm White."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(warm_white=300))
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(warm_white=-1))
@pytest.mark.asyncio
async def test_error_PilotBuilder_cold_white_upper(correct_bulb: wizlight) -> None:
"""Error Cold White."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(cold_white=300))
@pytest.mark.asyncio
async def test_error_PilotBuilder_cold_white_lower(correct_bulb: wizlight) -> None:
"""Error Cold White."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(cold_white=-1))
@pytest.mark.asyncio
async def test_error_PilotBuilder_r(correct_bulb: wizlight) -> None:
"""Error Red Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(rgb=(300, 0, 0)))
@pytest.mark.asyncio
async def test_error_PilotBuilder_green(correct_bulb: wizlight) -> None:
"""Error Green Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(rgb=(0, 300, 0)))
@pytest.mark.asyncio
async def test_error_PilotBuilder_blue(correct_bulb: wizlight) -> None:
"""Error Blue Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(rgb=(0, 0, 300)))
@pytest.mark.asyncio
async def test_error_PilotBuilder_cold_white(correct_bulb: wizlight) -> None:
"""Error Cold White Value."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(cold_white=9999))
@pytest.mark.asyncio
async def test_error_PilotBuilder_scene(correct_bulb: wizlight) -> None:
"""Error scene."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(scene=532))
@pytest.mark.asyncio
async def test_error_PilotBuilder_speed(correct_bulb: wizlight) -> None:
"""Error speed."""
with pytest.raises(ValueError):
await correct_bulb.turn_on(PilotBuilder(speed=532))
@pytest.mark.asyncio
async def test_error_set_speed(correct_bulb: wizlight) -> None:
"""Error speed."""
with pytest.raises(ValueError):
await correct_bulb.set_speed(532)
@pytest.mark.asyncio
async def test_fw_version(correct_bulb: wizlight) -> None:
"""Test fetching the firmware version."""
bulb_type = await correct_bulb.get_bulbtype()
assert bulb_type == BulbType(
features=Features(
color=True, color_tmp=True, effect=True, brightness=True, dual_head=False
),
name="ESP01_SHRGB_03",
kelvin_range=KelvinRange(max=6500, min=2200),
bulb_type=BulbClass.RGB,
fw_version="1.25.0",
white_channels=1,
white_to_color_ratio=30,
)
assert correct_bulb.mac == "a8bb5006033d"
@pytest.mark.asyncio
async def test_get_mac(correct_bulb: wizlight) -> None:
"""Test getting the mac address."""
mac = await correct_bulb.getMac()
assert mac == "a8bb5006033d"
mac = await correct_bulb.getMac()
assert mac == "a8bb5006033d"
# Error states / Timout
@pytest.mark.asyncio
async def test_timeout(bad_bulb: wizlight) -> None:
"""Test the timout exception after."""
with pytest.raises(WizLightTimeOutError), patch(
"pywizlight.bulb.FIRST_SEND_INTERVAL", 0.01
), patch("pywizlight.bulb.TIMEOUT", 0.01):
await bad_bulb.getBulbConfig()
@pytest.mark.asyncio
async def test_timeout_PilotBuilder(bad_bulb: wizlight) -> None:
"""Test Timout for Result."""
# check if the bulb state it given as bool - mock ?
with pytest.raises(WizLightTimeOutError), patch(
"pywizlight.bulb.FIRST_SEND_INTERVAL", 0.01
), patch("pywizlight.bulb.TIMEOUT", 0.01):
await bad_bulb.turn_on(PilotBuilder(brightness=255))
@pytest.mark.asyncio
async def test_states_match_with_occupancy() -> None:
"""Test states match always sends pir updates but we ignore mqttCd, rssi, and ts."""
state_off_ios = {
"mac": "a8bb50d46a1c",
"rssi": -70,
"src": "ios",
"mqttCd": 0,
"ts": 1644440635,
"state": False,
"sceneId": 0,
}
state_on_ios = {
"mac": "<KEY>",
"rssi": -45,
"src": "ios",
"mqttCd": 0,
"ts": 1644440662,
"state": False,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
state_on_hb = {
"mac": "<KEY>",
"rssi": -45,
"src": "hb",
"mqttCd": 0,
"ts": 1644440642,
"state": False,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
ios_scene27 = {
"mac": "<KEY>",
"rssi": -48,
"src": "ios",
"state": True,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
ios_off = {
"mac": "<KEY>",
"rssi": -69,
"src": "ios",
"state": False,
"sceneId": 0,
}
occupancy_detected_scene27 = {
"mac": "<KEY>",
"rssi": -48,
"src": "pir",
"state": True,
"sceneId": 27,
"speed": 100,
"dimming": 100,
}
occupancy_not_detected = {
"mac": "<KEY>",
"rssi": -69,
"src": "pir",
"state": False,
"sceneId": 0,
}
assert states_match(state_off_ios, state_off_ios)
assert not states_match(state_off_ios, state_on_ios)
assert states_match(
state_on_ios, state_on_hb
) # source change does not matter unless its a PIR
assert not states_match(
ios_scene27, occupancy_detected_scene27
) # source change matters since its a PIR
assert states_match(occupancy_detected_scene27, occupancy_detected_scene27)
assert not states_match(
ios_off, occupancy_not_detected
) # source change matters since its a PIR
@pytest.mark.asyncio
async def test_states_match_with_button() -> None:
"""Test states match always sends button updates but we ignore mqttCd, rssi, and ts."""
button_on_press = {
"mac": "<KEY>",
"rssi": -48,
"src": "wfa1",
"state": True,
}
button_off_press = {
"mac": "<KEY>",
"rssi": -69,
"src": "wfa2",
"state": False,
"sceneId": 0,
}
button_1_press_state_false = {
"mac": "<KEY>",
"rssi": -69,
"src": "wfa16",
"state": False,
"sceneId": 0,
}
button_1_press_state_true = {
"mac": "<KEY>",
"rssi": -69,
"src": "wfa16",
"state": True,
"sceneId": 0,
}
ios_off = {
"mac": "<KEY>",
"rssi": -69,
"src": "ios",
"state": False,
"sceneId": 0,
}
assert not states_match(button_on_press, button_off_press)
assert not states_match(button_1_press_state_false, button_1_press_state_true)
assert not states_match(
ios_off, button_1_press_state_false
) # source change matters since its a button | 0.906691 | 0.639525 |
"""Shared resource arguments and flags."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.secrets import completers as secrets_completers
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import resources
# Args
def AddDataFile(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('data-file', positional),
metavar='PATH',
help=('File path from which to read secret data. Set this to "-" to read '
'the secret data from stdin.'),
**kwargs)
def AddProject(parser, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('project', positional),
resource_spec=GetProjectResourceSpec(),
group_help='The project ID.',
**kwargs).AddToParser(parser)
def AddLocation(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('location', positional),
resource_spec=GetLocationResourceSpec(),
group_help='The location {}.'.format(purpose),
**kwargs).AddToParser(parser)
def AddReplicationPolicyFile(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('replication-policy-file', positional),
metavar='REPLICATION-POLICY-FILE',
help=(
'JSON or YAML file to use to read the replication policy. The file '
'must conform to '
'https://cloud.google.com/secret-manager/docs/reference/rest/v1/projects.secrets#replication.'
'Set this to "-" to read from stdin.'),
**kwargs)
def AddKmsKeyName(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('kms-key-name', positional),
metavar='KMS-KEY-NAME',
help=('Global KMS key with which to encrypt and decrypt the secret. Only '
'valid for secrets with an automatic replication policy.'),
**kwargs)
def AddSetKmsKeyName(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('set-kms-key', positional),
metavar='SET-KMS-KEY',
help=(
'New KMS key with which to encrypt and decrypt future secret versions.'
),
**kwargs)
def AddRemoveCmek(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('remove-cmek', positional),
action='store_true',
help=(
'Remove customer managed encryption key so that future versions will '
'be encrypted by a Google managed encryption key.'),
**kwargs)
def AddReplicaLocation(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('location', positional),
metavar='REPLICA-LOCATION',
help=('Location of replica to update. For secrets with automatic '
'replication policies, this can be omitted.'),
**kwargs)
def AddSecret(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('secret', positional),
resource_spec=GetSecretResourceSpec(),
group_help='The secret {}.'.format(purpose),
**kwargs).AddToParser(parser)
def AddVersion(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('version', positional),
resource_spec=GetVersionResourceSpec(),
group_help=('Numeric secret version {}.').format(purpose),
**kwargs).AddToParser(parser)
def AddVersionOrLatest(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('version', positional),
resource_spec=GetVersionResourceSpec(),
group_help=('Numeric secret version {} or `latest` to use the latest '
'version.').format(purpose),
**kwargs).AddToParser(parser)
def AddTopics(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('topics', positional),
metavar='TOPICS',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help=('List of Pub/Sub topics to configure on the secret.'),
**kwargs)
def AddUpdateTopicsGroup(parser):
"""Add flags for specifying topics on secret updates."""
group = parser.add_group(mutex=True, help='Topics.')
group.add_argument(
_ArgOrFlag('add-topics', False),
metavar='ADD-TOPICS',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help=('List of Pub/Sub topics to add to the secret.'))
group.add_argument(
_ArgOrFlag('remove-topics', False),
metavar='REMOVE-TOPICS',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help=('List of Pub/Sub topics to remove from the secret.'))
group.add_argument(
_ArgOrFlag('clear-topics', False),
action='store_true',
help=('Clear all Pub/Sub topics from the secret.'))
def AddUpdateReplicationGroup(parser):
"""Add flags for specifying replication policy updates."""
group = parser.add_group(mutex=True, help='Replication update.')
group.add_argument(
_ArgOrFlag('remove-cmek', False),
action='store_true',
help=(
'Remove customer managed encryption key so that future versions will '
'be encrypted by a Google managed encryption key.'))
subgroup = group.add_group(help='CMEK Update.')
subgroup.add_argument(
_ArgOrFlag('set-kms-key', False),
metavar='SET-KMS-KEY',
help=(
'New KMS key with which to encrypt and decrypt future secret versions.'
))
subgroup.add_argument(
_ArgOrFlag('location', False),
metavar='REPLICA-LOCATION',
help=('Location of replica to update. For secrets with automatic '
'replication policies, this can be omitted.'))
def AddCreateReplicationPolicyGroup(parser):
"""Add flags for specifying replication policy on secret creation."""
group = parser.add_group(mutex=True, help='Replication policy.')
group.add_argument(
_ArgOrFlag('replication-policy-file', False),
metavar='REPLICATION-POLICY-FILE',
help=(
'JSON or YAML file to use to read the replication policy. The file '
'must conform to '
'https://cloud.google.com/secret-manager/docs/reference/rest/v1/projects.secrets#replication.'
'Set this to "-" to read from stdin.'))
subgroup = group.add_group(help='Inline replication arguments.')
subgroup.add_argument(
_ArgOrFlag('replication-policy', False),
metavar='POLICY',
help=('The type of replication policy to apply to this secret. Allowed '
'values are "automatic" and "user-managed". If user-managed then '
'--locations must also be provided.'))
subgroup.add_argument(
_ArgOrFlag('kms-key-name', False),
metavar='KMS-KEY-NAME',
help=('Global KMS key with which to encrypt and decrypt the secret. Only '
'valid for secrets with an automatic replication policy.'))
subgroup.add_argument(
_ArgOrFlag('locations', False),
action=arg_parsers.UpdateAction,
metavar='LOCATION',
type=arg_parsers.ArgList(),
help=('Comma-separated list of locations in which the secret should be '
'replicated.'))
def AddCreateExpirationGroup(parser):
"""Add flags for specifying expiration on secret creates."""
group = parser.add_group(mutex=True, help='Expiration.')
group.add_argument(
_ArgOrFlag('expire-time', False),
metavar='EXPIRE-TIME',
help=('Timestamp at which to automatically delete the secret.'))
group.add_argument(
_ArgOrFlag('ttl', False),
metavar='TTL',
help=(
'Duration of time (in seconds) from the running of the command until '
'the secret is automatically deleted.'))
def AddUpdateExpirationGroup(parser):
"""Add flags for specifying expiration on secret updates.."""
group = parser.add_group(mutex=True, help='Expiration.')
group.add_argument(
_ArgOrFlag('expire-time', False),
metavar='EXPIRE-TIME',
help=('Timestamp at which to automatically delete the secret.'))
group.add_argument(
_ArgOrFlag('ttl', False),
metavar='TTL',
help=(
'Duration of time (in seconds) from the running of the command until '
'the secret is automatically deleted.'))
group.add_argument(
_ArgOrFlag('remove-expiration', False),
action='store_true',
help=(
'If set, removes scheduled expiration from secret (if it had one).'))
def AddCreateRotationGroup(parser):
"""Add flags for specifying rotation on secret creates."""
group = parser.add_group(mutex=False, help='Rotation.')
group.add_argument(
_ArgOrFlag('next-rotation-time', False),
help=('Timestamp at which to send rotation notification.'))
group.add_argument(
_ArgOrFlag('rotation-period', False),
help=('Duration of time (in seconds) between rotation notifications.'))
def AddUpdateRotationGroup(parser):
"""Add flags for specifying rotation on secret updates.."""
group = parser.add_group(mutex=False, help='Rotation.')
group.add_argument(
_ArgOrFlag('next-rotation-time', False),
help=('Timestamp at which to send rotation notification.'))
group.add_argument(
_ArgOrFlag('remove-next-rotation-time', False),
action='store_true',
help=('Remove timestamp at which to send rotation notification.'))
group.add_argument(
_ArgOrFlag('rotation-period', False),\
help=('Duration of time (in seconds) between rotation notifications.'))
group.add_argument(
_ArgOrFlag('remove-rotation-period', False),
action='store_true',
help=(
'If set, removes the rotation period, cancelling all rotations except for the next one.'
))
group.add_argument(
_ArgOrFlag('remove-rotation-schedule', False),
action='store_true',
help=('If set, removes rotation policy from a secret.'))
def AddSecretEtag(parser):
"""Add flag for specifying the current secret etag."""
parser.add_argument(
_ArgOrFlag('etag', False),
metavar='ETAG',
help=(
'Current entity tag (ETag) of the secret. If this flag is defined, the secret is updated only if the ETag provided matched the current secret\'s ETag.'
))
def AddVersionEtag(parser):
"""Add flag for specifying the current secret version etag."""
parser.add_argument(
_ArgOrFlag('etag', False),
metavar='ETAG',
help=(
'Current entity tag (ETag) of the secret version. If this flag is defined, the version is updated only if the ETag provided matched the current version\'s ETag.'
))
def _ArgOrFlag(name, positional):
"""Returns the argument name in resource argument format or flag format.
Args:
name (str): name of the argument
positional (bool): whether the argument is positional
Returns:
arg (str): the argument or flag
"""
if positional:
return name.upper().replace('-', '_')
return '--{}'.format(name)
### Attribute configurations
def GetProjectAttributeConfig():
return concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG
def GetLocationAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='location',
help_text='The location of the {resource}.',
completion_request_params={'fieldMask': 'name'},
completion_id_field='name')
def GetSecretAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='secret',
help_text='The secret of the {resource}.',
completer=secrets_completers.SecretsCompleter)
def GetVersionAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='version',
help_text='The version of the {resource}.',
completion_request_params={'fieldMask': 'name'},
completion_id_field='name')
# Resource specs
def GetProjectResourceSpec():
return concepts.ResourceSpec(
resource_collection='secretmanager.projects',
resource_name='project',
plural_name='projects',
disable_auto_completers=False,
projectsId=GetProjectAttributeConfig())
def GetLocationResourceSpec():
return concepts.ResourceSpec(
resource_collection='secretmanager.projects.locations',
resource_name='location',
plural_name='locations',
disable_auto_completers=False,
locationsId=GetLocationAttributeConfig(),
projectsId=GetProjectAttributeConfig())
def GetSecretResourceSpec():
return concepts.ResourceSpec(
resource_collection='secretmanager.projects.secrets',
resource_name='secret',
plural_name='secrets',
disable_auto_completers=False,
secretsId=GetSecretAttributeConfig(),
projectsId=GetProjectAttributeConfig())
def GetVersionResourceSpec():
return concepts.ResourceSpec(
'secretmanager.projects.secrets.versions',
resource_name='version',
plural_name='version',
disable_auto_completers=False,
versionsId=GetVersionAttributeConfig(),
secretsId=GetSecretAttributeConfig(),
projectsId=GetProjectAttributeConfig())
# Resource parsers
def ParseProjectRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects'
return resources.REGISTRY.Parse(ref, **kwargs)
def ParseLocationRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects.locations'
return resources.REGISTRY.Parse(ref, **kwargs)
def ParseSecretRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects.secrets'
return resources.REGISTRY.Parse(ref, **kwargs)
def ParseVersionRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects.secrets.versions'
return resources.REGISTRY.Parse(ref, **kwargs) | lib/googlecloudsdk/command_lib/secrets/args.py | """Shared resource arguments and flags."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.secrets import completers as secrets_completers
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import resources
# Args
def AddDataFile(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('data-file', positional),
metavar='PATH',
help=('File path from which to read secret data. Set this to "-" to read '
'the secret data from stdin.'),
**kwargs)
def AddProject(parser, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('project', positional),
resource_spec=GetProjectResourceSpec(),
group_help='The project ID.',
**kwargs).AddToParser(parser)
def AddLocation(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('location', positional),
resource_spec=GetLocationResourceSpec(),
group_help='The location {}.'.format(purpose),
**kwargs).AddToParser(parser)
def AddReplicationPolicyFile(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('replication-policy-file', positional),
metavar='REPLICATION-POLICY-FILE',
help=(
'JSON or YAML file to use to read the replication policy. The file '
'must conform to '
'https://cloud.google.com/secret-manager/docs/reference/rest/v1/projects.secrets#replication.'
'Set this to "-" to read from stdin.'),
**kwargs)
def AddKmsKeyName(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('kms-key-name', positional),
metavar='KMS-KEY-NAME',
help=('Global KMS key with which to encrypt and decrypt the secret. Only '
'valid for secrets with an automatic replication policy.'),
**kwargs)
def AddSetKmsKeyName(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('set-kms-key', positional),
metavar='SET-KMS-KEY',
help=(
'New KMS key with which to encrypt and decrypt future secret versions.'
),
**kwargs)
def AddRemoveCmek(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('remove-cmek', positional),
action='store_true',
help=(
'Remove customer managed encryption key so that future versions will '
'be encrypted by a Google managed encryption key.'),
**kwargs)
def AddReplicaLocation(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('location', positional),
metavar='REPLICA-LOCATION',
help=('Location of replica to update. For secrets with automatic '
'replication policies, this can be omitted.'),
**kwargs)
def AddSecret(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('secret', positional),
resource_spec=GetSecretResourceSpec(),
group_help='The secret {}.'.format(purpose),
**kwargs).AddToParser(parser)
def AddVersion(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('version', positional),
resource_spec=GetVersionResourceSpec(),
group_help=('Numeric secret version {}.').format(purpose),
**kwargs).AddToParser(parser)
def AddVersionOrLatest(parser, purpose, positional=False, **kwargs):
concept_parsers.ConceptParser.ForResource(
name=_ArgOrFlag('version', positional),
resource_spec=GetVersionResourceSpec(),
group_help=('Numeric secret version {} or `latest` to use the latest '
'version.').format(purpose),
**kwargs).AddToParser(parser)
def AddTopics(parser, positional=False, **kwargs):
parser.add_argument(
_ArgOrFlag('topics', positional),
metavar='TOPICS',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help=('List of Pub/Sub topics to configure on the secret.'),
**kwargs)
def AddUpdateTopicsGroup(parser):
"""Add flags for specifying topics on secret updates."""
group = parser.add_group(mutex=True, help='Topics.')
group.add_argument(
_ArgOrFlag('add-topics', False),
metavar='ADD-TOPICS',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help=('List of Pub/Sub topics to add to the secret.'))
group.add_argument(
_ArgOrFlag('remove-topics', False),
metavar='REMOVE-TOPICS',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help=('List of Pub/Sub topics to remove from the secret.'))
group.add_argument(
_ArgOrFlag('clear-topics', False),
action='store_true',
help=('Clear all Pub/Sub topics from the secret.'))
def AddUpdateReplicationGroup(parser):
"""Add flags for specifying replication policy updates."""
group = parser.add_group(mutex=True, help='Replication update.')
group.add_argument(
_ArgOrFlag('remove-cmek', False),
action='store_true',
help=(
'Remove customer managed encryption key so that future versions will '
'be encrypted by a Google managed encryption key.'))
subgroup = group.add_group(help='CMEK Update.')
subgroup.add_argument(
_ArgOrFlag('set-kms-key', False),
metavar='SET-KMS-KEY',
help=(
'New KMS key with which to encrypt and decrypt future secret versions.'
))
subgroup.add_argument(
_ArgOrFlag('location', False),
metavar='REPLICA-LOCATION',
help=('Location of replica to update. For secrets with automatic '
'replication policies, this can be omitted.'))
def AddCreateReplicationPolicyGroup(parser):
"""Add flags for specifying replication policy on secret creation."""
group = parser.add_group(mutex=True, help='Replication policy.')
group.add_argument(
_ArgOrFlag('replication-policy-file', False),
metavar='REPLICATION-POLICY-FILE',
help=(
'JSON or YAML file to use to read the replication policy. The file '
'must conform to '
'https://cloud.google.com/secret-manager/docs/reference/rest/v1/projects.secrets#replication.'
'Set this to "-" to read from stdin.'))
subgroup = group.add_group(help='Inline replication arguments.')
subgroup.add_argument(
_ArgOrFlag('replication-policy', False),
metavar='POLICY',
help=('The type of replication policy to apply to this secret. Allowed '
'values are "automatic" and "user-managed". If user-managed then '
'--locations must also be provided.'))
subgroup.add_argument(
_ArgOrFlag('kms-key-name', False),
metavar='KMS-KEY-NAME',
help=('Global KMS key with which to encrypt and decrypt the secret. Only '
'valid for secrets with an automatic replication policy.'))
subgroup.add_argument(
_ArgOrFlag('locations', False),
action=arg_parsers.UpdateAction,
metavar='LOCATION',
type=arg_parsers.ArgList(),
help=('Comma-separated list of locations in which the secret should be '
'replicated.'))
def AddCreateExpirationGroup(parser):
"""Add flags for specifying expiration on secret creates."""
group = parser.add_group(mutex=True, help='Expiration.')
group.add_argument(
_ArgOrFlag('expire-time', False),
metavar='EXPIRE-TIME',
help=('Timestamp at which to automatically delete the secret.'))
group.add_argument(
_ArgOrFlag('ttl', False),
metavar='TTL',
help=(
'Duration of time (in seconds) from the running of the command until '
'the secret is automatically deleted.'))
def AddUpdateExpirationGroup(parser):
"""Add flags for specifying expiration on secret updates.."""
group = parser.add_group(mutex=True, help='Expiration.')
group.add_argument(
_ArgOrFlag('expire-time', False),
metavar='EXPIRE-TIME',
help=('Timestamp at which to automatically delete the secret.'))
group.add_argument(
_ArgOrFlag('ttl', False),
metavar='TTL',
help=(
'Duration of time (in seconds) from the running of the command until '
'the secret is automatically deleted.'))
group.add_argument(
_ArgOrFlag('remove-expiration', False),
action='store_true',
help=(
'If set, removes scheduled expiration from secret (if it had one).'))
def AddCreateRotationGroup(parser):
"""Add flags for specifying rotation on secret creates."""
group = parser.add_group(mutex=False, help='Rotation.')
group.add_argument(
_ArgOrFlag('next-rotation-time', False),
help=('Timestamp at which to send rotation notification.'))
group.add_argument(
_ArgOrFlag('rotation-period', False),
help=('Duration of time (in seconds) between rotation notifications.'))
def AddUpdateRotationGroup(parser):
"""Add flags for specifying rotation on secret updates.."""
group = parser.add_group(mutex=False, help='Rotation.')
group.add_argument(
_ArgOrFlag('next-rotation-time', False),
help=('Timestamp at which to send rotation notification.'))
group.add_argument(
_ArgOrFlag('remove-next-rotation-time', False),
action='store_true',
help=('Remove timestamp at which to send rotation notification.'))
group.add_argument(
_ArgOrFlag('rotation-period', False),\
help=('Duration of time (in seconds) between rotation notifications.'))
group.add_argument(
_ArgOrFlag('remove-rotation-period', False),
action='store_true',
help=(
'If set, removes the rotation period, cancelling all rotations except for the next one.'
))
group.add_argument(
_ArgOrFlag('remove-rotation-schedule', False),
action='store_true',
help=('If set, removes rotation policy from a secret.'))
def AddSecretEtag(parser):
"""Add flag for specifying the current secret etag."""
parser.add_argument(
_ArgOrFlag('etag', False),
metavar='ETAG',
help=(
'Current entity tag (ETag) of the secret. If this flag is defined, the secret is updated only if the ETag provided matched the current secret\'s ETag.'
))
def AddVersionEtag(parser):
"""Add flag for specifying the current secret version etag."""
parser.add_argument(
_ArgOrFlag('etag', False),
metavar='ETAG',
help=(
'Current entity tag (ETag) of the secret version. If this flag is defined, the version is updated only if the ETag provided matched the current version\'s ETag.'
))
def _ArgOrFlag(name, positional):
"""Returns the argument name in resource argument format or flag format.
Args:
name (str): name of the argument
positional (bool): whether the argument is positional
Returns:
arg (str): the argument or flag
"""
if positional:
return name.upper().replace('-', '_')
return '--{}'.format(name)
### Attribute configurations
def GetProjectAttributeConfig():
return concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG
def GetLocationAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='location',
help_text='The location of the {resource}.',
completion_request_params={'fieldMask': 'name'},
completion_id_field='name')
def GetSecretAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='secret',
help_text='The secret of the {resource}.',
completer=secrets_completers.SecretsCompleter)
def GetVersionAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='version',
help_text='The version of the {resource}.',
completion_request_params={'fieldMask': 'name'},
completion_id_field='name')
# Resource specs
def GetProjectResourceSpec():
return concepts.ResourceSpec(
resource_collection='secretmanager.projects',
resource_name='project',
plural_name='projects',
disable_auto_completers=False,
projectsId=GetProjectAttributeConfig())
def GetLocationResourceSpec():
return concepts.ResourceSpec(
resource_collection='secretmanager.projects.locations',
resource_name='location',
plural_name='locations',
disable_auto_completers=False,
locationsId=GetLocationAttributeConfig(),
projectsId=GetProjectAttributeConfig())
def GetSecretResourceSpec():
return concepts.ResourceSpec(
resource_collection='secretmanager.projects.secrets',
resource_name='secret',
plural_name='secrets',
disable_auto_completers=False,
secretsId=GetSecretAttributeConfig(),
projectsId=GetProjectAttributeConfig())
def GetVersionResourceSpec():
return concepts.ResourceSpec(
'secretmanager.projects.secrets.versions',
resource_name='version',
plural_name='version',
disable_auto_completers=False,
versionsId=GetVersionAttributeConfig(),
secretsId=GetSecretAttributeConfig(),
projectsId=GetProjectAttributeConfig())
# Resource parsers
def ParseProjectRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects'
return resources.REGISTRY.Parse(ref, **kwargs)
def ParseLocationRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects.locations'
return resources.REGISTRY.Parse(ref, **kwargs)
def ParseSecretRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects.secrets'
return resources.REGISTRY.Parse(ref, **kwargs)
def ParseVersionRef(ref, **kwargs):
kwargs['collection'] = 'secretmanager.projects.secrets.versions'
return resources.REGISTRY.Parse(ref, **kwargs) | 0.78535 | 0.072341 |
import pytest
class TestReclosers13Bus:
@pytest.fixture(autouse=True)
def _request(self, solve_snap_13bus):
self.dss = solve_snap_13bus
self.dss.text(r"New 'Recloser.cb1' MonitoredObj=Line.650632 "
r"MonitoredTerm=1 "
r"NumFast=4 "
r"PhaseFast=Ext_Inv "
r"PhaseDelayed=Ext_Inv "
r"PhaseTrip=800 "
r"TDPhFast=1 "
r"TDPhDelayed=1 "
r"PhaseInst=2400 "
r"GroundFast=Ext_Inv "
r"GroundDelayed=Ext_Inv "
r"GroundTrip=800 "
r"TDGrFast=1 "
r"TDGrDelayed=1 "
r"GroundInst=1200 "
r"Shots=4 "
r"RecloseIntervals=(0.5, 2, 2, )")
self.dss.text(r"New 'Recloser.cb2' MonitoredObj=Line.684611 "
r"MonitoredTerm=2 "
r"NumFast=4 "
r"PhaseFast=Ext_Inv "
r"PhaseDelayed=Ext_Inv "
r"PhaseTrip=800 "
r"TDPhFast=1 "
r"TDPhDelayed=1 "
r"PhaseInst=2400 "
r"GroundFast=Ext_Inv "
r"GroundDelayed=Ext_Inv "
r"GroundTrip=800 "
r"TDGrFast=1 "
r"TDGrDelayed=1 "
r"GroundInst=1200 "
r"Shots=4 "
r"RecloseIntervals=(0.5, 2, 2, )")
self.dss.solution_solve()
self.dss.reclosers_write_name('cb1')
# ===================================================================
# Integer methods
# ===================================================================
def test_reclosers_count(self):
expected = 2
actual = self.dss.reclosers_count()
assert expected == actual
def test_reclosers_first(self):
expected = 1
actual = self.dss.reclosers_first()
assert expected == actual
def test_reclosers_next(self):
expected = 2
actual = self.dss.reclosers_next()
assert expected == actual
def test_reclosers_read_monitored_term(self):
expected = 1
actual = self.dss.reclosers_read_monitored_term()
assert expected == actual
def test_reclosers_write_monitored_term(self):
expected = 2
self.dss.reclosers_write_monitored_term(expected)
actual = self.dss.reclosers_read_monitored_term()
assert expected == actual
def test_reclosers_read_switched_term(self):
expected = 1
actual = self.dss.reclosers_read_switched_term()
assert expected == actual
def test_reclosers_write_switched_term(self):
expected = 2
self.dss.reclosers_write_switched_term(expected)
actual = self.dss.reclosers_read_switched_term()
assert expected == actual
def test_reclosers_read_num_fast(self):
expected = 4
actual = self.dss.reclosers_read_num_fast()
assert expected == actual
def test_reclosers_write_num_fast(self):
expected = 1
self.dss.reclosers_write_num_fast(expected)
actual = self.dss.reclosers_read_num_fast()
assert expected == actual
def test_reclosers_read_shots(self):
expected = 4
actual = self.dss.reclosers_read_shots()
assert expected == actual
def test_reclosers_write_shots(self):
expected = 3
self.dss.reclosers_write_shots(expected)
actual = self.dss.reclosers_read_shots()
assert expected == actual
def test_reclosers_open(self):
expected = 0
actual = self.dss.reclosers_open()
assert expected == actual
def test_reclosers_close(self):
expected = 0
actual = self.dss.reclosers_close()
assert expected == actual
def test_reclosers_read_idx(self):
expected = 1
actual = self.dss.reclosers_read_idx()
assert expected == actual
def test_reclosers_write_idx(self):
expected = 2
self.dss.reclosers_write_idx(expected)
actual = self.dss.reclosers_read_idx()
assert expected == actual
# ===================================================================
# Float methods
# ===================================================================
def test_reclosers_read_phase_trip(self):
expected = 800
actual = self.dss.reclosers_read_phase_trip()
assert expected == actual
def test_reclosers_write_phase_trip(self):
expected = 700
self.dss.reclosers_write_phase_trip(expected)
actual = self.dss.reclosers_read_phase_trip()
assert expected == actual
def test_reclosers_read_phase_inst(self):
expected = 2400
actual = self.dss.reclosers_read_phase_inst()
assert expected == actual
def test_reclosers_write_phase_inst(self):
expected = 1200
self.dss.reclosers_write_phase_inst(expected)
actual = self.dss.reclosers_read_phase_inst()
assert expected == actual
def test_reclosers_read_ground_trip(self):
expected = 800
actual = self.dss.reclosers_read_ground_trip()
assert expected == actual
def test_reclosers_write_ground_trip(self):
expected = 700
self.dss.reclosers_write_ground_trip(expected)
actual = self.dss.reclosers_read_ground_trip()
assert expected == actual
def test_reclosers_read_ground_inst(self):
expected = 1200
actual = self.dss.reclosers_read_ground_inst()
assert expected == actual
def test_reclosers_write_ground_inst(self):
expected = 1900
self.dss.reclosers_write_ground_inst(expected)
actual = self.dss.reclosers_read_ground_inst()
assert expected == actual
# ===================================================================
# String methods
# ===================================================================
def test_reclosers_read_name(self):
expected = 'cb1'
actual = self.dss.reclosers_read_name()
assert expected == actual
def test_reclosers_write_name(self):
expected = 'cb2'
self.dss.reclosers_write_name(expected)
actual = self.dss.reclosers_read_name()
assert expected == actual
def test_reclosers_read_monitored_obj(self):
expected = 'line.650632'
actual = self.dss.reclosers_read_monitored_obj()
assert expected == actual
def test_reclosers_write_monitored_obj(self):
expected = 'line.684652'
self.dss.reclosers_write_monitored_obj(expected)
actual = self.dss.reclosers_read_monitored_obj()
assert expected == actual
def test_reclosers_read_switched_obj(self):
expected = 'line.650632'
actual = self.dss.reclosers_read_switched_obj()
assert expected == actual
def test_reclosers_write_switched_obj(self):
expected = 'line.684652'
self.dss.reclosers_write_switched_obj(expected)
actual = self.dss.reclosers_read_switched_obj()
assert expected == actual
# ===================================================================
# Variant methods
# ===================================================================
def test_reclosers_all_names(self):
expected = ['cb1', 'cb2']
actual = self.dss.reclosers_all_names()
assert expected == actual
def test_reclosers_reclose_intervals(self):
expected = [0.5, 2, 2]
actual = self.dss.reclosers_reclose_intervals()
assert expected == actual | tests/py_dss_interface/test_reclosers.py |
import pytest
class TestReclosers13Bus:
@pytest.fixture(autouse=True)
def _request(self, solve_snap_13bus):
self.dss = solve_snap_13bus
self.dss.text(r"New 'Recloser.cb1' MonitoredObj=Line.650632 "
r"MonitoredTerm=1 "
r"NumFast=4 "
r"PhaseFast=Ext_Inv "
r"PhaseDelayed=Ext_Inv "
r"PhaseTrip=800 "
r"TDPhFast=1 "
r"TDPhDelayed=1 "
r"PhaseInst=2400 "
r"GroundFast=Ext_Inv "
r"GroundDelayed=Ext_Inv "
r"GroundTrip=800 "
r"TDGrFast=1 "
r"TDGrDelayed=1 "
r"GroundInst=1200 "
r"Shots=4 "
r"RecloseIntervals=(0.5, 2, 2, )")
self.dss.text(r"New 'Recloser.cb2' MonitoredObj=Line.684611 "
r"MonitoredTerm=2 "
r"NumFast=4 "
r"PhaseFast=Ext_Inv "
r"PhaseDelayed=Ext_Inv "
r"PhaseTrip=800 "
r"TDPhFast=1 "
r"TDPhDelayed=1 "
r"PhaseInst=2400 "
r"GroundFast=Ext_Inv "
r"GroundDelayed=Ext_Inv "
r"GroundTrip=800 "
r"TDGrFast=1 "
r"TDGrDelayed=1 "
r"GroundInst=1200 "
r"Shots=4 "
r"RecloseIntervals=(0.5, 2, 2, )")
self.dss.solution_solve()
self.dss.reclosers_write_name('cb1')
# ===================================================================
# Integer methods
# ===================================================================
def test_reclosers_count(self):
expected = 2
actual = self.dss.reclosers_count()
assert expected == actual
def test_reclosers_first(self):
expected = 1
actual = self.dss.reclosers_first()
assert expected == actual
def test_reclosers_next(self):
expected = 2
actual = self.dss.reclosers_next()
assert expected == actual
def test_reclosers_read_monitored_term(self):
expected = 1
actual = self.dss.reclosers_read_monitored_term()
assert expected == actual
def test_reclosers_write_monitored_term(self):
expected = 2
self.dss.reclosers_write_monitored_term(expected)
actual = self.dss.reclosers_read_monitored_term()
assert expected == actual
def test_reclosers_read_switched_term(self):
expected = 1
actual = self.dss.reclosers_read_switched_term()
assert expected == actual
def test_reclosers_write_switched_term(self):
expected = 2
self.dss.reclosers_write_switched_term(expected)
actual = self.dss.reclosers_read_switched_term()
assert expected == actual
def test_reclosers_read_num_fast(self):
expected = 4
actual = self.dss.reclosers_read_num_fast()
assert expected == actual
def test_reclosers_write_num_fast(self):
expected = 1
self.dss.reclosers_write_num_fast(expected)
actual = self.dss.reclosers_read_num_fast()
assert expected == actual
def test_reclosers_read_shots(self):
expected = 4
actual = self.dss.reclosers_read_shots()
assert expected == actual
def test_reclosers_write_shots(self):
expected = 3
self.dss.reclosers_write_shots(expected)
actual = self.dss.reclosers_read_shots()
assert expected == actual
def test_reclosers_open(self):
expected = 0
actual = self.dss.reclosers_open()
assert expected == actual
def test_reclosers_close(self):
expected = 0
actual = self.dss.reclosers_close()
assert expected == actual
def test_reclosers_read_idx(self):
expected = 1
actual = self.dss.reclosers_read_idx()
assert expected == actual
def test_reclosers_write_idx(self):
expected = 2
self.dss.reclosers_write_idx(expected)
actual = self.dss.reclosers_read_idx()
assert expected == actual
# ===================================================================
# Float methods
# ===================================================================
def test_reclosers_read_phase_trip(self):
expected = 800
actual = self.dss.reclosers_read_phase_trip()
assert expected == actual
def test_reclosers_write_phase_trip(self):
expected = 700
self.dss.reclosers_write_phase_trip(expected)
actual = self.dss.reclosers_read_phase_trip()
assert expected == actual
def test_reclosers_read_phase_inst(self):
expected = 2400
actual = self.dss.reclosers_read_phase_inst()
assert expected == actual
def test_reclosers_write_phase_inst(self):
expected = 1200
self.dss.reclosers_write_phase_inst(expected)
actual = self.dss.reclosers_read_phase_inst()
assert expected == actual
def test_reclosers_read_ground_trip(self):
expected = 800
actual = self.dss.reclosers_read_ground_trip()
assert expected == actual
def test_reclosers_write_ground_trip(self):
expected = 700
self.dss.reclosers_write_ground_trip(expected)
actual = self.dss.reclosers_read_ground_trip()
assert expected == actual
def test_reclosers_read_ground_inst(self):
expected = 1200
actual = self.dss.reclosers_read_ground_inst()
assert expected == actual
def test_reclosers_write_ground_inst(self):
expected = 1900
self.dss.reclosers_write_ground_inst(expected)
actual = self.dss.reclosers_read_ground_inst()
assert expected == actual
# ===================================================================
# String methods
# ===================================================================
def test_reclosers_read_name(self):
expected = 'cb1'
actual = self.dss.reclosers_read_name()
assert expected == actual
def test_reclosers_write_name(self):
expected = 'cb2'
self.dss.reclosers_write_name(expected)
actual = self.dss.reclosers_read_name()
assert expected == actual
def test_reclosers_read_monitored_obj(self):
expected = 'line.650632'
actual = self.dss.reclosers_read_monitored_obj()
assert expected == actual
def test_reclosers_write_monitored_obj(self):
expected = 'line.684652'
self.dss.reclosers_write_monitored_obj(expected)
actual = self.dss.reclosers_read_monitored_obj()
assert expected == actual
def test_reclosers_read_switched_obj(self):
expected = 'line.650632'
actual = self.dss.reclosers_read_switched_obj()
assert expected == actual
def test_reclosers_write_switched_obj(self):
expected = 'line.684652'
self.dss.reclosers_write_switched_obj(expected)
actual = self.dss.reclosers_read_switched_obj()
assert expected == actual
# ===================================================================
# Variant methods
# ===================================================================
def test_reclosers_all_names(self):
expected = ['cb1', 'cb2']
actual = self.dss.reclosers_all_names()
assert expected == actual
def test_reclosers_reclose_intervals(self):
expected = [0.5, 2, 2]
actual = self.dss.reclosers_reclose_intervals()
assert expected == actual | 0.639511 | 0.49231 |
from AutoHotPy import AutoHotPy
from InterceptionWrapper import *
import time
def exitAutoHotKey(autohotpy,event):
autohotpy.stop()
def recorded_macro(autohotpy, event):
start = time.time()
autohotpy.moveMouseToPosition(241,388)
autohotpy.sleep(0.0)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_BUTTON_1_DOWN
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x = 0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.6300001144409184e-06)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(4.9998760223388674e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(3.000020980834961e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(3.000259399414063e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.000093460083008e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9998550415039065e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0001659393310547e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.0005702972412112e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9993782043457032e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.999275207519532e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9998550415039065e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.0015239715576174e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.98973846435547e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9993782043457032e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.0003318786621094e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.0003318786621094e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.999275207519532e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9996166229248047e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.0005702972412112e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0013580322265626e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.998424530029297e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0066032409667969e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9929409027099612e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0004043579101564e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0113716125488283e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9888877868652346e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.999275207519532e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.999275207519532e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9998550415039065e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0004043579101564e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0001659393310547e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9991397857666017e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0004043579101564e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.000093460083008e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0015964508056642e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.992122650146485e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9989013671875003e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.0010471343994142e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(9.987354278564454e-09)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(2.000093460083008e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.0004043579101564e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
autohotpy.sleep(1.9996166229248047e-08)
stroke = InterceptionMouseStroke()
stroke.state = InterceptionMouseState.INTERCEPTION_MOUSE_MOVE
stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
stroke.rolling = 0
stroke.x =0
stroke.y=1
stroke.information = 0
autohotpy.sendToDefaultMouse(stroke)
end = time.time()
print(end - start)
if __name__=="__main__":
auto = AutoHotPy()
auto.registerExit(auto.ESC,exitAutoHotKey)
auto.registerForKeyDown(auto.F1,recorded_macro)
auto.start() | frontend/assets/backup/AutoHotPy-master/red.py | from AutoHotPy import AutoHotPy
from InterceptionWrapper import *
import time
def exitAutoHotKey(autohotpy,event):
autohotpy.stop()
# Inter-event delays (seconds) captured by the macro recorder.  The first
# value follows the button-1-down stroke; each subsequent value precedes one
# relative mouse move of (x=0, y=+1).  Order matters: it is the exact replay
# sequence of the original recording.
_MOVE_DELAYS = [
    2.6300001144409184e-06, 4.9998760223388674e-08, 3.000020980834961e-08,
    3.000259399414063e-08, 2.000093460083008e-08, 1.9998550415039065e-08,
    1.0001659393310547e-08, 2.0005702972412112e-08, 1.9993782043457032e-08,
    9.999275207519532e-09, 1.9998550415039065e-08, 2.0015239715576174e-08,
    9.98973846435547e-09, 1.9993782043457032e-08, 2.0003318786621094e-08,
    2.0003318786621094e-08, 9.999275207519532e-09, 1.9996166229248047e-08,
    2.0005702972412112e-08, 1.0013580322265626e-08, 1.998424530029297e-08,
    1.0066032409667969e-08, 1.9929409027099612e-08, 1.0004043579101564e-08,
    1.0113716125488283e-08, 1.9888877868652346e-08, 9.999275207519532e-09,
    9.999275207519532e-09, 1.9998550415039065e-08, 1.0004043579101564e-08,
    1.0001659393310547e-08, 1.9991397857666017e-08, 1.0004043579101564e-08,
    2.000093460083008e-08, 1.0015964508056642e-08, 9.992122650146485e-09,
    1.9989013671875003e-08, 2.0010471343994142e-08, 9.987354278564454e-09,
    2.000093460083008e-08, 1.0004043579101564e-08, 1.9996166229248047e-08,
]


def _new_stroke(state):
    """Build one relative mouse stroke of (x=0, y=+1) with the given state."""
    stroke = InterceptionMouseStroke()
    stroke.state = state
    stroke.flags = InterceptionMouseFlag.INTERCEPTION_MOUSE_MOVE_RELATIVE
    stroke.rolling = 0
    stroke.x = 0
    stroke.y = 1
    stroke.information = 0
    return stroke


def recorded_macro(autohotpy, event):
    """Replay a recorded mouse macro and print the elapsed replay time.

    Moves the cursor to (241, 388), presses mouse button 1, then replays the
    recorded stream of relative (0, +1) move strokes with the originally
    captured inter-event delays.  *event* is the triggering key event and is
    ignored.

    The original auto-generated body repeated the identical stroke stanza 42
    times; it is collapsed here into a loop over ``_MOVE_DELAYS`` — the call
    sequence (sleep/send order and values) is unchanged.
    """
    start = time.time()
    autohotpy.moveMouseToPosition(241, 388)
    autohotpy.sleep(0.0)
    # Initial button-1-down stroke, exactly as recorded.
    autohotpy.sendToDefaultMouse(
        _new_stroke(InterceptionMouseState.INTERCEPTION_MOUSE_BUTTON_1_DOWN))
    # Then one relative move stroke per recorded delay.
    for delay in _MOVE_DELAYS:
        autohotpy.sleep(delay)
        autohotpy.sendToDefaultMouse(
            _new_stroke(InterceptionMouseState.INTERCEPTION_MOUSE_MOVE))
    end = time.time()
    print(end - start)
if __name__=="__main__":
    # Entry point: ESC is the emergency-exit hotkey, F1 replays the macro.
    auto = AutoHotPy()
    auto.registerExit(auto.ESC,exitAutoHotKey)
    auto.registerForKeyDown(auto.F1,recorded_macro)
    auto.start() | 0.281406 | 0.125923 |
from mapa import Map
import math
import random
def find_corner(mapa):
    """Return the first unblocked tile (x, y), scanning column by column.

    Returns None implicitly when every tile is blocked.
    """
    for col in range(mapa.hor_tiles):
        for row in range(mapa.ver_tiles):
            pos = (col, row)
            if not mapa.is_blocked(pos):
                return pos
# For any position, return the list of possible moves.
def get_possible_ways2(mapa, position):
    """Return the moves among 'd', 's', 'a', 'w' whose target tile is free.

    Candidate order (right, down, left, up) matches the original and is part
    of the observable result.  Targets are passed to is_blocked as lists,
    exactly as the original did.
    """
    x, y = position
    candidates = (
        ('d', [x + 1, y]),
        ('s', [x, y + 1]),
        ('a', [x - 1, y]),
        ('w', [x, y - 1]),
    )
    return [key for key, target in candidates if not mapa.is_blocked(target)]
def get_possible_ways(mapa, position):
    """Return the possible moves, blocking on tile value 1 or a wall tile.

    NOTE(review): horizontal neighbours are read straight from ``mapa.map``
    while vertical ones go through ``get_tile`` — preserved exactly as the
    original did it; confirm both access paths are equivalent in Map.
    """
    x, y = position
    moves = []
    right = mapa.map[x + 1][y]
    left = mapa.map[x - 1][y]
    below = mapa.get_tile((x, y + 1))
    above = mapa.get_tile((x, y - 1))
    if right != 1 and (x + 1, y) not in mapa.walls:
        moves.append('d')
    if below != 1 and (x, y + 1) not in mapa.walls:
        moves.append('s')
    if left != 1 and (x - 1, y) not in mapa.walls:
        moves.append('a')
    if above != 1 and (x, y - 1) not in mapa.walls:
        moves.append('w')
    return moves
# Return the distance between two positions.
def dist_to(pos1, pos2):
    """Euclidean distance between *pos1* and *pos2*.

    Returns the empty string (legacy sentinel kept for existing callers)
    when either position does not have exactly two coordinates.
    """
    # Bug fix: the original tested len(pos1) twice, so a malformed pos2
    # crashed on tuple unpacking instead of returning the sentinel.
    if len(pos1) != 2 or len(pos2) != 2:
        return ''
    x1, y1 = pos1
    x2, y2 = pos2
    return math.sqrt(math.pow(x2 - x1, 2) + math.pow(y2 - y1, 2))
# Check whether two positions lie along the same direction.
def check_same_direction(pos1, pos2):
    """True when the two 2-D positions share a row or a column.

    Malformed positions (length != 2) are reported as not aligned.
    """
    if len(pos1) != 2 or len(pos2) != 2:
        return False
    x1, y1 = pos1
    x2, y2 = pos2
    return x1 == x2 or y1 == y2
# Compute and return the closest wall (very inefficient).
def next_wall(bomberman_pos, walls):
    """Return the wall in *walls* closest to *bomberman_pos*.

    Returns None for an empty list.  Ties keep the earliest wall in the
    list, matching the original strict-< scan.
    """
    if walls == []:
        return
    return min(walls, key=lambda wall: dist_to(bomberman_pos, wall))
def in_range(bomberman_pos,raio,obstaculo,mapa):
    # True when bomberman_pos is inside blast radius `raio` of `obstaculo`
    # (a bomb position).  Each of the four axis scans stops at the first
    # stone, which shields everything behind it.  Note r starts at 0, so a
    # stone on the bomb tile itself blocks that whole direction.
    cx,cy = bomberman_pos
    if obstaculo == None:
        return False
    bx,by = obstaculo
    if by == cy:
        # Same row: scan right, then left, from the bomb outward.
        for r in range(raio + 1):
            if mapa.is_stone((bx + r, by)):
                break # protected by stone to the right
            if (cx, cy) == (bx + r, by):
                return True
        for r in range(raio + 1):
            if mapa.is_stone((bx - r, by)):
                break # protected by stone to the left
            if (cx, cy) == (bx - r, by):
                return True
    if bx == cx:
        # Same column: scan down, then up, from the bomb outward.
        for r in range(raio + 1):
            if mapa.is_stone((bx, by + r)):
                break # protected by stone in the bottom
            if (cx, cy) == (bx, by + r):
                return True
        for r in range(raio + 1):
            if mapa.is_stone((bx, by - r)):
                break # protected by stone in the top
            if (cx, cy) == (bx, by - r):
                return True
    return False | defs2.py | from mapa import Map
import math
import random
def find_corner(mapa):
    """Return the first free tile (x, y) in column-major scan order.

    Returns None when every tile is blocked.
    """
    free = (
        (x, y)
        for x in range(mapa.hor_tiles)
        for y in range(mapa.ver_tiles)
        if not mapa.is_blocked((x, y))
    )
    return next(free, None)
# For any position, return a list of the possible moves.
def get_possible_ways2(mapa, position):
    """List the moves ('d', 's', 'a', 'w') whose destination is not blocked.

    Destinations are handed to is_blocked as lists, and the right/down/
    left/up check order is preserved from the original.
    """
    col, row = position
    moves = []
    for key, dest in (('d', [col + 1, row]),
                      ('s', [col, row + 1]),
                      ('a', [col - 1, row]),
                      ('w', [col, row - 1])):
        if not mapa.is_blocked(dest):
            moves.append(key)
    return moves
def get_possible_ways(mapa, position):
    """Return the possible moves; tile value 1 and wall tiles are blocked.

    NOTE(review): horizontal neighbours use ``mapa.map`` directly while the
    vertical ones go through ``get_tile``; kept as-is — confirm the two
    access paths agree in Map.
    """
    x, y = position
    checks = (
        ('d', mapa.map[x + 1][y], (x + 1, y)),
        ('s', mapa.get_tile((x, y + 1)), (x, y + 1)),
        ('a', mapa.map[x - 1][y], (x - 1, y)),
        ('w', mapa.get_tile((x, y - 1)), (x, y - 1)),
    )
    return [key for key, tile, cell in checks
            if tile != 1 and cell not in mapa.walls]
# Return the distance between two positions.
def dist_to(pos1, pos2):
    """Return the Euclidean distance between two 2-D positions.

    Keeps the legacy '' sentinel return for positions that do not have
    exactly two coordinates.
    """
    # Bug fix: the original condition read `len(pos1) != 2` twice and never
    # validated pos2, so a bad pos2 raised on unpacking.
    if len(pos1) != 2 or len(pos2) != 2:
        return ''
    x1, y1 = pos1
    x2, y2 = pos2
    return math.sqrt(math.pow(x2 - x1, 2) + math.pow(y2 - y1, 2))
# Check whether two positions lie along the same direction.
def check_same_direction(pos1, pos2):
    """Return True when the two 2-D positions share an x or a y coordinate."""
    if len(pos1) != 2 or len(pos2) != 2:
        return False
    (x1, y1), (x2, y2) = pos1, pos2
    return (x1 == x2) or (y1 == y2)
# Compute and return the nearest wall (very inefficient).
def next_wall(bomberman_pos, walls):
    """Return the wall nearest to *bomberman_pos*, or None for an empty list.

    Ties keep the earliest wall in the list (strict-< update).
    """
    if walls == []:
        return
    best = walls[0]
    best_cost = dist_to(bomberman_pos, best)
    for candidate in walls[1:]:
        cost = dist_to(bomberman_pos, candidate)
        if cost < best_cost:
            best, best_cost = candidate, cost
    return best
def in_range(bomberman_pos,raio,obstaculo,mapa):
    # True when bomberman_pos is within blast radius `raio` of `obstaculo`
    # (a bomb position).  Each axis scan stops at the first stone, which
    # shields the tiles behind it; r starts at 0, so a stone on the bomb
    # tile itself blocks that direction entirely.
    cx,cy = bomberman_pos
    if obstaculo == None:
        return False
    bx,by = obstaculo
    if by == cy:
        # Same row: scan right, then left, from the bomb outward.
        for r in range(raio + 1):
            if mapa.is_stone((bx + r, by)):
                break # protected by stone to the right
            if (cx, cy) == (bx + r, by):
                return True
        for r in range(raio + 1):
            if mapa.is_stone((bx - r, by)):
                break # protected by stone to the left
            if (cx, cy) == (bx - r, by):
                return True
    if bx == cx:
        # Same column: scan down, then up, from the bomb outward.
        for r in range(raio + 1):
            if mapa.is_stone((bx, by + r)):
                break # protected by stone in the bottom
            if (cx, cy) == (bx, by + r):
                return True
        for r in range(raio + 1):
            if mapa.is_stone((bx, by - r)):
                break # protected by stone in the top
            if (cx, cy) == (bx, by - r):
                return True
    return False | 0.234319 | 0.485356 |
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'cloudbuild'
# NOTE: auto-generated protorpc message definitions for the Cloud Build API.
# Field numbers are part of the wire contract — do not renumber or retype.
class AddBitbucketServerConnectedRepositoryRequest(_messages.Message):
    r"""RPC request object accepted by the
    AddBitbucketServerConnectedRepository RPC method.

    Fields:
      connectedRepository: The connected repository to add.
    """

    connectedRepository = _messages.MessageField('BitbucketServerRepositoryId', 1)


class AddBitbucketServerConnectedRepositoryResponse(_messages.Message):
    r"""RPC request object returned by the
    AddBitbucketServerConnectedRepository RPC method.

    Fields:
      config: The name of the `BitbucketServerConfig` that added connected
        repository. Format:
        `projects/{project}/locations/{location}/bitbucketServerConfigs/{config}`
      connectedRepository: The connected repository.
    """

    config = _messages.StringField(1)
    connectedRepository = _messages.MessageField('BitbucketServerRepositoryId', 2)


class AnthosWorkerPool(_messages.Message):
    r"""Anthos CICD cluster option.

    Fields:
      membership: Membership of the GKE Hub registered cluster this build
        should execute on. Example:
        /projects/{project}/locations/{location}/memberships/{cluster_name}
        The cluster's project number must be the same project ID that is
        running the build.
    """

    membership = _messages.StringField(1)


class ApprovalConfig(_messages.Message):
    r"""ApprovalConfig describes configuration for manual approval of a build.

    Fields:
      approvalRequired: Whether or not approval is needed. If this is set on
        a build, it will become pending when created, and will need to be
        explicitly approved to start.
    """

    approvalRequired = _messages.BooleanField(1)
# NOTE: auto-generated protorpc message definitions — field numbers are the
# wire contract; do not renumber.
class ApprovalResult(_messages.Message):
    r"""ApprovalResult describes the decision and associated metadata of a
    manual approval of a build.

    Enums:
      DecisionValueValuesEnum: Required. The decision of this manual approval.

    Fields:
      approvalTime: Output only. The time when the approval decision was made.
      approverAccount: Output only. Email of the user that called the
        ApproveBuild API to approve or reject a build at the time that the
        API was called.
      comment: Optional. An optional comment for this manual approval result.
      decision: Required. The decision of this manual approval.
      url: Optional. An optional URL tied to this manual approval result.
        This field is essentially the same as comment, except that it will be
        rendered by the UI differently. An example use case is a link to an
        external job that approved this Build.
    """

    class DecisionValueValuesEnum(_messages.Enum):
        r"""Required. The decision of this manual approval.

        Values:
          DECISION_UNSPECIFIED: Default enum type. This should not be used.
          APPROVED: Build is approved.
          REJECTED: Build is rejected.
        """
        DECISION_UNSPECIFIED = 0
        APPROVED = 1
        REJECTED = 2

    approvalTime = _messages.StringField(1)
    approverAccount = _messages.StringField(2)
    comment = _messages.StringField(3)
    decision = _messages.EnumField('DecisionValueValuesEnum', 4)
    url = _messages.StringField(5)


class ApproveBuildRequest(_messages.Message):
    r"""Request to approve or reject a pending build.

    Fields:
      approvalResult: Approval decision and metadata.
    """

    approvalResult = _messages.MessageField('ApprovalResult', 1)


class ArtifactObjects(_messages.Message):
    r"""Files in the workspace to upload to Cloud Storage upon successful
    completion of all build steps.

    Fields:
      location: Cloud Storage bucket and optional object path, in the form
        "gs://bucket/path/to/somewhere/". (see [Bucket Name
        Requirements](https://cloud.google.com/storage/docs/bucket-
        naming#requirements)). Files in the workspace matching any path
        pattern will be uploaded to Cloud Storage with this location as a
        prefix.
      paths: Path globs used to match files in the build's workspace.
      timing: Output only. Stores timing information for pushing all artifact
        objects.
    """

    location = _messages.StringField(1)
    paths = _messages.StringField(2, repeated=True)
    timing = _messages.MessageField('TimeSpan', 3)


class ArtifactResult(_messages.Message):
    r"""An artifact that was uploaded during a build. This is a single record
    in the artifact manifest JSON file.

    Fields:
      fileHash: The file hash of the artifact.
      location: The path of an artifact in a Google Cloud Storage bucket,
        with the generation number. For example,
        `gs://mybucket/path/to/output.jar#generation`.
    """

    fileHash = _messages.MessageField('FileHashes', 1, repeated=True)
    location = _messages.StringField(2)


class Artifacts(_messages.Message):
    r"""Artifacts produced by a build that should be uploaded upon successful
    completion of all build steps.

    Fields:
      images: A list of images to be pushed upon the successful completion of
        all build steps. The images will be pushed using the builder service
        account's credentials. The digests of the pushed images will be
        stored in the Build resource's results field. If any of the images
        fail to be pushed, the build is marked FAILURE.
      objects: A list of objects to be uploaded to Cloud Storage upon
        successful completion of all build steps. Files in the workspace
        matching specified paths globs will be uploaded to the specified
        Cloud Storage location using the builder service account's
        credentials. The location and generation of the uploaded objects will
        be stored in the Build resource's results field. If any objects fail
        to be pushed, the build is marked FAILURE.
    """

    images = _messages.StringField(1, repeated=True)
    objects = _messages.MessageField('ArtifactObjects', 2)
# NOTE: auto-generated protorpc message definitions — field numbers are the
# wire contract; do not renumber.
class BatchCreateBitbucketServerConnectedRepositoriesRequest(_messages.Message):
    r"""RPC request object accepted by
    BatchCreateBitbucketServerConnectedRepositories RPC method.

    Fields:
      requests: Required. Requests to connect Bitbucket Server repositories.
    """

    requests = _messages.MessageField('CreateBitbucketServerConnectedRepositoryRequest', 1, repeated=True)


class BatchCreateBitbucketServerConnectedRepositoriesResponse(_messages.Message):
    r"""Response of BatchCreateBitbucketServerConnectedRepositories RPC
    method including all successfully connected Bitbucket Server
    repositories.

    Fields:
      bitbucketServerConnectedRepositories: The connected Bitbucket Server
        repositories.
    """

    bitbucketServerConnectedRepositories = _messages.MessageField('BitbucketServerConnectedRepository', 1, repeated=True)


class BatchCreateBitbucketServerConnectedRepositoriesResponseMetadata(_messages.Message):
    r"""Metadata for `BatchCreateBitbucketServerConnectedRepositories`
    operation.

    Fields:
      completeTime: Time the operation was completed.
      config: The name of the `BitbucketServerConfig` that added connected
        repositories. Format:
        `projects/{project}/locations/{location}/bitbucketServerConfigs/{config}`
      createTime: Time the operation was created.
    """

    completeTime = _messages.StringField(1)
    config = _messages.StringField(2)
    createTime = _messages.StringField(3)


class BitbucketServerConfig(_messages.Message):
    r"""BitbucketServerConfig represents the configuration for a Bitbucket
    Server.

    Fields:
      apiKey: Required. Immutable. API Key that will be attached to webhook.
        Once this field has been set, it cannot be changed. If you need to
        change it, please create another BitbucketServerConfig.
      connectedRepositories: Output only. Connected Bitbucket Server
        repositories for this config.
      createTime: Time when the config was created.
      hostUri: Required. Immutable. The URI of the Bitbucket Server host.
        Once this field has been set, it cannot be changed. If you need to
        change it, please create another BitbucketServerConfig.
      name: The resource name for the config.
      peeredNetwork: Optional. The network to be used when reaching out to
        the Bitbucket Server instance. The VPC network must be enabled for
        private service connection. This should be set if the Bitbucket
        Server instance is hosted on-premises and not reachable by public
        internet. If this field is left empty, no network peering will occur
        and calls to the Bitbucket Server instance will be made over the
        public internet. Must be in the format
        `projects/{project}/global/networks/{network}`, where {project} is a
        project number or id and {network} is the name of a VPC network in
        the project.
      secrets: Required. Secret Manager secrets needed by the config.
      sslCa: Optional. SSL certificate to use for requests to Bitbucket
        Server. The format should be PEM format but the extension can be one
        of .pem, .cer, or .crt.
      username: Username of the account Cloud Build will use on Bitbucket
        Server.
      webhookKey: Output only. UUID included in webhook requests. The UUID is
        used to look up the corresponding config.
    """

    apiKey = _messages.StringField(1)
    connectedRepositories = _messages.MessageField('BitbucketServerRepositoryId', 2, repeated=True)
    createTime = _messages.StringField(3)
    hostUri = _messages.StringField(4)
    name = _messages.StringField(5)
    peeredNetwork = _messages.StringField(6)
    secrets = _messages.MessageField('BitbucketServerSecrets', 7)
    sslCa = _messages.StringField(8)
    username = _messages.StringField(9)
    webhookKey = _messages.StringField(10)
# NOTE: auto-generated protorpc message definitions — field numbers are the
# wire contract; do not renumber.
class BitbucketServerConnectedRepository(_messages.Message):
    r"""BitbucketServerConnectedRepository represents a connected Bitbucket
    Server repository.

    Fields:
      parent: The name of the `BitbucketServerConfig` that added connected
        repository. Format:
        `projects/{project}/locations/{location}/bitbucketServerConfigs/{config}`
      repo: The Bitbucket Server repositories to connect.
      status: Output only. The status of the repo connection request.
    """

    parent = _messages.StringField(1)
    repo = _messages.MessageField('BitbucketServerRepositoryId', 2)
    status = _messages.MessageField('Status', 3)


class BitbucketServerRepository(_messages.Message):
    r"""BitbucketServerRepository represents a repository hosted on a
    Bitbucket Server.

    Fields:
      browseUri: Link to the browse repo page on the Bitbucket Server
        instance.
      description: Description of the repository.
      displayName: Display name of the repository.
      name: The resource name of the repository.
      repoId: Identifier for a repository hosted on a Bitbucket Server.
    """

    browseUri = _messages.StringField(1)
    description = _messages.StringField(2)
    displayName = _messages.StringField(3)
    name = _messages.StringField(4)
    repoId = _messages.MessageField('BitbucketServerRepositoryId', 5)


class BitbucketServerRepositoryId(_messages.Message):
    r"""BitbucketServerRepositoryId identifies a specific repository hosted
    on a Bitbucket Server.

    Fields:
      projectKey: Required. Identifier for the project storing the
        repository.
      repoSlug: Required. Identifier for the repository.
      webhookId: Output only. The ID of the webhook that was created for
        receiving events from this repo. We only create and manage a single
        webhook for each repo.
    """

    projectKey = _messages.StringField(1)
    repoSlug = _messages.StringField(2)
    webhookId = _messages.IntegerField(3, variant=_messages.Variant.INT32)


class BitbucketServerSecrets(_messages.Message):
    r"""BitbucketServerSecrets represents the secrets in Secret Manager for a
    Bitbucket Server.

    Fields:
      adminAccessTokenVersionName: Required. The resource name for the admin
        access token's secret version.
      readAccessTokenVersionName: Required. The resource name for the read
        access token's secret version.
      webhookSecretVersionName: Required. Immutable. The resource name for
        the webhook secret's secret version. Once this field has been set,
        it cannot be changed. If you need to change it, please create
        another BitbucketServerConfig.
    """

    adminAccessTokenVersionName = _messages.StringField(1)
    readAccessTokenVersionName = _messages.StringField(2)
    webhookSecretVersionName = _messages.StringField(3)


class BitbucketServerTriggerConfig(_messages.Message):
    r"""BitbucketServerTriggerConfig describes the configuration of a trigger
    that creates a build whenever a Bitbucket Server event is received.

    Fields:
      bitbucketServerConfig: Output only. The BitbucketServerConfig specified
        in the bitbucket_server_config_resource field.
      bitbucketServerConfigResource: Required. The Bitbucket server config
        resource that this trigger config maps to.
      projectKey: Required. Key of the project that the repo is in. For
        example: The key for
        http://mybitbucket.server/projects/TEST/repos/test-repo is "TEST".
      pullRequest: Filter to match changes in pull requests.
      push: Filter to match changes in refs like branches, tags.
      repoSlug: Required. Slug of the repository. A repository slug is a
        URL-friendly version of a repository name, automatically generated
        by Bitbucket for use in the URL. For example, if the repository name
        is 'test repo', in the URL it would become 'test-repo' as in
        http://mybitbucket.server/projects/TEST/repos/test-repo.
    """

    bitbucketServerConfig = _messages.MessageField('BitbucketServerConfig', 1)
    bitbucketServerConfigResource = _messages.StringField(2)
    projectKey = _messages.StringField(3)
    pullRequest = _messages.MessageField('PullRequestFilter', 4)
    push = _messages.MessageField('PushFilter', 5)
    repoSlug = _messages.StringField(6)
class Build(_messages.Message):
  r"""A build resource in the Cloud Build API. At a high level, a `Build`
  describes where to find source code, how to build it (for example, the
  builder image to run on the source), and where to store the built artifacts.
  Fields can include the following variables, which will be expanded when the
  build is created: - $PROJECT_ID: the project ID of the build. -
  $PROJECT_NUMBER: the project number of the build. - $LOCATION: the
  location/region of the build. - $BUILD_ID: the autogenerated ID of the
  build. - $REPO_NAME: the source repository name specified by RepoSource. -
  $BRANCH_NAME: the branch name specified by RepoSource. - $TAG_NAME: the tag
  name specified by RepoSource. - $REVISION_ID or $COMMIT_SHA: the commit SHA
  specified by RepoSource or resolved from the specified branch or tag. -
  $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA.

  Enums:
    StatusValueValuesEnum: Output only. Status of the build.

  Messages:
    SubstitutionsValue: Substitutions data for `Build` resource.
    TimingValue: Output only. Stores timing information for phases of the
      build. Valid keys are: * BUILD: time to execute all build steps. * PUSH:
      time to push all specified images. * FETCHSOURCE: time to fetch source.
      * SETUPBUILD: time to set up build. If the build does not specify source
      or images, these keys will not be included.

  Fields:
    approval: Output only. Describes this build's approval configuration,
      status, and result.
    artifacts: Artifacts produced by the build that should be uploaded upon
      successful completion of all build steps.
    availableSecrets: Secrets and secret environment variables.
    buildTriggerId: Output only. The ID of the `BuildTrigger` that triggered
      this build, if it was triggered automatically.
    createTime: Output only. Time at which the request to create the build was
      received.
    failureInfo: Output only. Contains information about the build when
      status=FAILURE.
    finishTime: Output only. Time at which execution of the build was
      finished. The difference between finish_time and start_time is the
      duration of the build's execution.
    id: Output only. Unique identifier of the build.
    images: A list of images to be pushed upon the successful completion of
      all build steps. The images are pushed using the builder service
      account's credentials. The digests of the pushed images will be stored
      in the `Build` resource's results field. If any of the images fail to be
      pushed, the build status is marked `FAILURE`.
    logUrl: Output only. URL to logs for this build in Google Cloud Console.
    logsBucket: Google Cloud Storage bucket where logs should be written (see
      [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)). Logs file names will be of the format
      `${logs_bucket}/log-${build_id}.txt`.
    name: Output only. The 'Build' name with format:
      `projects/{project}/locations/{location}/builds/{build}`, where {build}
      is a unique identifier generated by the service.
    options: Special options for this build.
    projectId: Output only. ID of the project.
    queueTtl: TTL in queue for this build. If provided and the build is
      enqueued longer than this value, the build will expire and the build
      status will be `EXPIRED`. The TTL starts ticking from create_time.
    results: Output only. Results of the build.
    secrets: Secrets to decrypt using Cloud Key Management Service. Note:
      Secret Manager is the recommended technique for managing sensitive data
      with Cloud Build. Use `available_secrets` to configure builds to access
      secrets from Secret Manager. For instructions, see:
      https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets
    serviceAccount: IAM service account whose credentials will be used at
      build runtime. Must be of the format
      `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. ACCOUNT can be email
      address or uniqueId of the service account.
    source: The location of the source files to build.
    sourceProvenance: Output only. A permanent fixed identifier for source.
    startTime: Output only. Time at which execution of the build was started.
    status: Output only. Status of the build.
    statusDetail: Output only. Customer-readable message about the current
      status.
    steps: Required. The operations to be performed on the workspace.
    substitutions: Substitutions data for `Build` resource.
    tags: Tags for annotation of a `Build`. These are not docker tags.
    timeout: Amount of time that this build should be allowed to run, to
      second granularity. If this amount of time elapses, work on the build
      will cease and the build status will be `TIMEOUT`. `timeout` starts
      ticking from `startTime`. Default time is ten minutes.
    timing: Output only. Stores timing information for phases of the build.
      Valid keys are: * BUILD: time to execute all build steps. * PUSH: time
      to push all specified images. * FETCHSOURCE: time to fetch source. *
      SETUPBUILD: time to set up build. If the build does not specify source
      or images, these keys will not be included.
    warnings: Output only. Non-fatal problems encountered during the execution
      of the build.
  """

  class StatusValueValuesEnum(_messages.Enum):
    r"""Output only. Status of the build.

    Values:
      STATUS_UNKNOWN: Status of the build is unknown.
      PENDING: Build has been created and is pending execution and queuing. It
        has not been queued.
      QUEUED: Build or step is queued; work has not yet begun.
      WORKING: Build or step is being executed.
      SUCCESS: Build or step finished successfully.
      FAILURE: Build or step failed to complete successfully.
      INTERNAL_ERROR: Build or step failed due to an internal cause.
      TIMEOUT: Build or step took longer than was allowed.
      CANCELLED: Build or step was canceled by a user.
      EXPIRED: Build was enqueued for longer than the value of `queue_ttl`.
    """
    STATUS_UNKNOWN = 0
    PENDING = 1
    QUEUED = 2
    WORKING = 3
    SUCCESS = 4
    FAILURE = 5
    INTERNAL_ERROR = 6
    TIMEOUT = 7
    CANCELLED = 8
    EXPIRED = 9

  # Maps with arbitrary string keys are modeled as a message holding repeated
  # AdditionalProperty entries; the decorator routes unrecognized JSON keys
  # into additionalProperties.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class SubstitutionsValue(_messages.Message):
    r"""Substitutions data for `Build` resource.

    Messages:
      AdditionalProperty: An additional property for a SubstitutionsValue
        object.

    Fields:
      additionalProperties: Additional properties of type SubstitutionsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SubstitutionsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """
      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  @encoding.MapUnrecognizedFields('additionalProperties')
  class TimingValue(_messages.Message):
    r"""Output only. Stores timing information for phases of the build. Valid
    keys are: * BUILD: time to execute all build steps. * PUSH: time to push
    all specified images. * FETCHSOURCE: time to fetch source. * SETUPBUILD:
    time to set up build. If the build does not specify source or images,
    these keys will not be included.

    Messages:
      AdditionalProperty: An additional property for a TimingValue object.

    Fields:
      additionalProperties: Additional properties of type TimingValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a TimingValue object.

      Fields:
        key: Name of the additional property.
        value: A TimeSpan attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('TimeSpan', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # NOTE: generated proto binding — the integer arguments are wire-format
  # field tags and must not be renumbered or reordered.
  approval = _messages.MessageField('BuildApproval', 1)
  artifacts = _messages.MessageField('Artifacts', 2)
  availableSecrets = _messages.MessageField('Secrets', 3)
  buildTriggerId = _messages.StringField(4)
  createTime = _messages.StringField(5)
  failureInfo = _messages.MessageField('FailureInfo', 6)
  finishTime = _messages.StringField(7)
  id = _messages.StringField(8)
  images = _messages.StringField(9, repeated=True)
  logUrl = _messages.StringField(10)
  logsBucket = _messages.StringField(11)
  name = _messages.StringField(12)
  options = _messages.MessageField('BuildOptions', 13)
  projectId = _messages.StringField(14)
  queueTtl = _messages.StringField(15)
  results = _messages.MessageField('Results', 16)
  secrets = _messages.MessageField('Secret', 17, repeated=True)
  serviceAccount = _messages.StringField(18)
  source = _messages.MessageField('Source', 19)
  sourceProvenance = _messages.MessageField('SourceProvenance', 20)
  startTime = _messages.StringField(21)
  status = _messages.EnumField('StatusValueValuesEnum', 22)
  statusDetail = _messages.StringField(23)
  steps = _messages.MessageField('BuildStep', 24, repeated=True)
  substitutions = _messages.MessageField('SubstitutionsValue', 25)
  tags = _messages.StringField(26, repeated=True)
  timeout = _messages.StringField(27)
  timing = _messages.MessageField('TimingValue', 28)
  warnings = _messages.MessageField('Warning', 29, repeated=True)
class BuildApproval(_messages.Message):
  r"""BuildApproval describes a build's approval configuration, state, and
  result.

  Enums:
    StateValueValuesEnum: Output only. The state of this build's approval.

  Fields:
    config: Output only. Configuration for manual approval of this build.
    result: Output only. Result of manual approval for this Build.
    state: Output only. The state of this build's approval.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The state of this build's approval.

    Values:
      STATE_UNSPECIFIED: Default enum type. This should not be used.
      PENDING: Build approval is pending.
      APPROVED: Build approval has been approved.
      REJECTED: Build approval has been rejected.
      CANCELLED: Build was cancelled while it was still pending approval.
    """
    STATE_UNSPECIFIED = 0
    PENDING = 1
    APPROVED = 2
    REJECTED = 3
    CANCELLED = 4

  # Field numbers are proto wire tags; do not renumber.
  config = _messages.MessageField('ApprovalConfig', 1)
  result = _messages.MessageField('ApprovalResult', 2)
  state = _messages.EnumField('StateValueValuesEnum', 3)
class BuildOperationMetadata(_messages.Message):
  r"""Metadata for build operations.

  Fields:
    build: The build that the operation is tracking.
  """

  build = _messages.MessageField('Build', 1)
class BuildOptions(_messages.Message):
  r"""Optional arguments to enable specific features of builds.

  Enums:
    DockerDaemonValueValuesEnum: Optional. Option to specify how (or if) a
      Docker daemon is provided for the build.
    LogStreamingOptionValueValuesEnum: Option to define build log streaming
      behavior to Google Cloud Storage.
    LoggingValueValuesEnum: Option to specify the logging mode, which
      determines if and where build logs are stored.
    MachineTypeValueValuesEnum: Compute Engine machine type on which to run
      the build.
    RequestedVerifyOptionValueValuesEnum: Requested verifiability options.
    SourceProvenanceHashValueListEntryValuesEnum:
    SubstitutionOptionValueValuesEnum: Option to specify behavior when there
      is an error in the substitution checks. NOTE: this is always set to
      ALLOW_LOOSE for triggered builds and cannot be overridden in the build
      configuration file.

  Fields:
    anthosCluster: Details about how this build should be executed on a Anthos
      cluster.
    cluster: Details about how this build should be executed on a GKE cluster.
    diskSizeGb: Requested disk size for the VM that runs the build. Note that
      this is *NOT* "disk free"; some of the space will be used by the
      operating system and build utilities. Also note that this is the minimum
      disk size that will be allocated for the build -- the build may run with
      a larger disk than requested. At present, the maximum disk size is
      1000GB; builds that request more than the maximum are rejected with an
      error.
    dockerDaemon: Optional. Option to specify how (or if) a Docker daemon is
      provided for the build.
    dynamicSubstitutions: Option to specify whether or not to apply bash style
      string operations to the substitutions. NOTE: this is always enabled for
      triggered builds and cannot be overridden in the build configuration
      file.
    env: A list of global environment variable definitions that will exist for
      all build steps in this build. If a variable is defined in both globally
      and in a build step, the variable will use the build step value. The
      elements are of the form "KEY=VALUE" for the environment variable "KEY"
      being given the value "VALUE".
    logStreamingOption: Option to define build log streaming behavior to
      Google Cloud Storage.
    logging: Option to specify the logging mode, which determines if and where
      build logs are stored.
    machineType: Compute Engine machine type on which to run the build.
    pool: Optional. Specification for execution on a `WorkerPool`. See
      [running builds in a private
      pool](https://cloud.google.com/build/docs/private-pools/run-builds-in-
      private-pool) for more information.
    requestedVerifyOption: Requested verifiability options.
    secretEnv: A list of global environment variables, which are encrypted
      using a Cloud Key Management Service crypto key. These values must be
      specified in the build's `Secret`. These variables will be available to
      all build steps in this build.
    sourceProvenanceHash: Requested hash for SourceProvenance.
    substitutionOption: Option to specify behavior when there is an error in
      the substitution checks. NOTE: this is always set to ALLOW_LOOSE for
      triggered builds and cannot be overridden in the build configuration
      file.
    volumes: Global list of volumes to mount for ALL build steps Each volume
      is created as an empty volume prior to starting the build process. Upon
      completion of the build, volumes and their contents are discarded.
      Global volume names and paths cannot conflict with the volumes defined a
      build step. Using a global volume in a build with only one step is not
      valid as it is indicative of a build request with an incorrect
      configuration.
    workerPool: This field deprecated; please use `pool.name` instead.
  """

  class DockerDaemonValueValuesEnum(_messages.Enum):
    r"""Optional. Option to specify how (or if) a Docker daemon is provided
    for the build.

    Values:
      DOCKER_DAEMON_UNSPECIFIED: If the option is unspecified, a default will
        be set based on the environment.
      NO_DOCKER: No Docker daemon or functionality will be provided to the
        build.
      NON_PRIVILEGED: A Docker daemon is available during the build that is
        running without privileged mode.
      PRIVILEGED: A Docker daemon will be available that is running in
        privileged mode. This is potentially a security vulnerability and
        should only be used if the user is fully aware of the associated
        risks.
    """
    DOCKER_DAEMON_UNSPECIFIED = 0
    NO_DOCKER = 1
    NON_PRIVILEGED = 2
    PRIVILEGED = 3

  class LogStreamingOptionValueValuesEnum(_messages.Enum):
    r"""Option to define build log streaming behavior to Google Cloud Storage.

    Values:
      STREAM_DEFAULT: Service may automatically determine build log streaming
        behavior.
      STREAM_ON: Build logs should be streamed to Google Cloud Storage.
      STREAM_OFF: Build logs should not be streamed to Google Cloud Storage;
        they will be written when the build is completed.
    """
    STREAM_DEFAULT = 0
    STREAM_ON = 1
    STREAM_OFF = 2

  class LoggingValueValuesEnum(_messages.Enum):
    r"""Option to specify the logging mode, which determines if and where
    build logs are stored.

    Values:
      LOGGING_UNSPECIFIED: The service determines the logging mode. The
        default is `LEGACY`. Do not rely on the default logging behavior as it
        may change in the future.
      LEGACY: Build logs are stored in Cloud Logging and Cloud Storage.
      GCS_ONLY: Build logs are stored in Cloud Storage.
      STACKDRIVER_ONLY: This option is the same as CLOUD_LOGGING_ONLY.
      CLOUD_LOGGING_ONLY: Build logs are stored in Cloud Logging. Selecting
        this option will not allow [logs
        streaming](https://cloud.google.com/sdk/gcloud/reference/builds/log).
      NONE: Turn off all logging. No build logs will be captured.
    """
    LOGGING_UNSPECIFIED = 0
    LEGACY = 1
    GCS_ONLY = 2
    STACKDRIVER_ONLY = 3
    CLOUD_LOGGING_ONLY = 4
    NONE = 5

  class MachineTypeValueValuesEnum(_messages.Enum):
    r"""Compute Engine machine type on which to run the build.

    Values:
      UNSPECIFIED: Standard machine type.
      N1_HIGHCPU_8: Highcpu machine with 8 CPUs.
      N1_HIGHCPU_32: Highcpu machine with 32 CPUs.
      E2_HIGHCPU_8: Highcpu e2 machine with 8 CPUs.
      E2_HIGHCPU_32: Highcpu e2 machine with 32 CPUs.
    """
    UNSPECIFIED = 0
    N1_HIGHCPU_8 = 1
    N1_HIGHCPU_32 = 2
    E2_HIGHCPU_8 = 3
    E2_HIGHCPU_32 = 4

  class RequestedVerifyOptionValueValuesEnum(_messages.Enum):
    r"""Requested verifiability options.

    Values:
      NOT_VERIFIED: Not a verifiable build. (default)
      VERIFIED: Verified build.
    """
    NOT_VERIFIED = 0
    VERIFIED = 1

  class SourceProvenanceHashValueListEntryValuesEnum(_messages.Enum):
    r"""SourceProvenanceHashValueListEntryValuesEnum enum type.

    Values:
      NONE: No hash requested.
      SHA256: Use a sha256 hash.
      MD5: Use a md5 hash.
    """
    NONE = 0
    SHA256 = 1
    MD5 = 2

  class SubstitutionOptionValueValuesEnum(_messages.Enum):
    r"""Option to specify behavior when there is an error in the substitution
    checks. NOTE: this is always set to ALLOW_LOOSE for triggered builds and
    cannot be overridden in the build configuration file.

    Values:
      MUST_MATCH: Fails the build if error in substitutions checks, like
        missing a substitution in the template or in the map.
      ALLOW_LOOSE: Do not fail the build if error in substitutions checks.
    """
    MUST_MATCH = 0
    ALLOW_LOOSE = 1

  # Field numbers are proto wire tags; do not renumber.
  anthosCluster = _messages.MessageField('AnthosWorkerPool', 1)
  cluster = _messages.MessageField('ClusterOptions', 2)
  diskSizeGb = _messages.IntegerField(3)
  dockerDaemon = _messages.EnumField('DockerDaemonValueValuesEnum', 4)
  dynamicSubstitutions = _messages.BooleanField(5)
  env = _messages.StringField(6, repeated=True)
  logStreamingOption = _messages.EnumField('LogStreamingOptionValueValuesEnum', 7)
  logging = _messages.EnumField('LoggingValueValuesEnum', 8)
  machineType = _messages.EnumField('MachineTypeValueValuesEnum', 9)
  pool = _messages.MessageField('PoolOption', 10)
  requestedVerifyOption = _messages.EnumField('RequestedVerifyOptionValueValuesEnum', 11)
  secretEnv = _messages.StringField(12, repeated=True)
  sourceProvenanceHash = _messages.EnumField('SourceProvenanceHashValueListEntryValuesEnum', 13, repeated=True)
  substitutionOption = _messages.EnumField('SubstitutionOptionValueValuesEnum', 14)
  volumes = _messages.MessageField('Volume', 15, repeated=True)
  workerPool = _messages.StringField(16)
class BuildStep(_messages.Message):
  r"""A step in the build pipeline.

  Enums:
    StatusValueValuesEnum: Output only. Status of the build step. At this
      time, build step status is only updated on build completion; step status
      is not updated in real-time as the build progresses.

  Fields:
    args: A list of arguments that will be presented to the step when it is
      started. If the image used to run the step's container has an
      entrypoint, the `args` are used as arguments to that entrypoint. If the
      image does not define an entrypoint, the first element in args is used
      as the entrypoint, and the remainder will be used as arguments.
    dir: Working directory to use when running this step's container. If this
      value is a relative path, it is relative to the build's working
      directory. If this value is absolute, it may be outside the build's
      working directory, in which case the contents of the path may not be
      persisted across build step executions, unless a `volume` for that path
      is specified. If the build specifies a `RepoSource` with `dir` and a
      step with a `dir`, which specifies an absolute path, the `RepoSource`
      `dir` is ignored for the step's execution.
    entrypoint: Entrypoint to be used instead of the build step image's
      default entrypoint. If unset, the image's default entrypoint is used.
    env: A list of environment variable definitions to be used when running a
      step. The elements are of the form "KEY=VALUE" for the environment
      variable "KEY" being given the value "VALUE".
    id: Unique identifier for this build step, used in `wait_for` to reference
      this build step as a dependency.
    name: Required. The name of the container image that will run this
      particular build step. If the image is available in the host's Docker
      daemon's cache, it will be run directly. If not, the host will attempt
      to pull the image first, using the builder service account's credentials
      if necessary. The Docker daemon's cache will already have the latest
      versions of all of the officially supported build steps
      ([https://github.com/GoogleCloudPlatform/cloud-
      builders](https://github.com/GoogleCloudPlatform/cloud-builders)). The
      Docker daemon will also have cached many of the layers for some popular
      images, like "ubuntu", "debian", but they will be refreshed at the time
      you attempt to use them. If you built an image in a previous build step,
      it will be stored in the host's Docker daemon's cache and is available
      to use as the name for a later build step.
    pullTiming: Output only. Stores timing information for pulling this build
      step's builder image only.
    script: A shell script to be executed in the step. When script is
      provided, the user cannot specify the entrypoint or args.
    secretEnv: A list of environment variables which are encrypted using a
      Cloud Key Management Service crypto key. These values must be specified
      in the build's `Secret`.
    status: Output only. Status of the build step. At this time, build step
      status is only updated on build completion; step status is not updated
      in real-time as the build progresses.
    timeout: Time limit for executing this build step. If not defined, the
      step has no time limit and will be allowed to continue to run until
      either it completes or the build itself times out.
    timing: Output only. Stores timing information for executing this build
      step.
    volumes: List of volumes to mount into the build step. Each volume is
      created as an empty volume prior to execution of the build step. Upon
      completion of the build, volumes and their contents are discarded. Using
      a named volume in only one step is not valid as it is indicative of a
      build request with an incorrect configuration.
    waitFor: The ID(s) of the step(s) that this build step depends on. This
      build step will not start until all the build steps in `wait_for` have
      completed successfully. If `wait_for` is empty, this build step will
      start when all previous build steps in the `Build.Steps` list have
      completed successfully.
  """

  class StatusValueValuesEnum(_messages.Enum):
    r"""Output only. Status of the build step. At this time, build step status
    is only updated on build completion; step status is not updated in real-
    time as the build progresses.

    Values:
      STATUS_UNKNOWN: Status of the build is unknown.
      PENDING: Build has been created and is pending execution and queuing. It
        has not been queued.
      QUEUED: Build or step is queued; work has not yet begun.
      WORKING: Build or step is being executed.
      SUCCESS: Build or step finished successfully.
      FAILURE: Build or step failed to complete successfully.
      INTERNAL_ERROR: Build or step failed due to an internal cause.
      TIMEOUT: Build or step took longer than was allowed.
      CANCELLED: Build or step was canceled by a user.
      EXPIRED: Build was enqueued for longer than the value of `queue_ttl`.
    """
    STATUS_UNKNOWN = 0
    PENDING = 1
    QUEUED = 2
    WORKING = 3
    SUCCESS = 4
    FAILURE = 5
    INTERNAL_ERROR = 6
    TIMEOUT = 7
    CANCELLED = 8
    EXPIRED = 9

  # Field numbers are proto wire tags; do not renumber.
  args = _messages.StringField(1, repeated=True)
  dir = _messages.StringField(2)
  entrypoint = _messages.StringField(3)
  env = _messages.StringField(4, repeated=True)
  id = _messages.StringField(5)
  name = _messages.StringField(6)
  pullTiming = _messages.MessageField('TimeSpan', 7)
  script = _messages.StringField(8)
  secretEnv = _messages.StringField(9, repeated=True)
  status = _messages.EnumField('StatusValueValuesEnum', 10)
  timeout = _messages.StringField(11)
  timing = _messages.MessageField('TimeSpan', 12)
  volumes = _messages.MessageField('Volume', 13, repeated=True)
  waitFor = _messages.StringField(14, repeated=True)
class BuildTrigger(_messages.Message):
  r"""Configuration for an automated build in response to source repository
  changes.

  Enums:
    EventTypeValueValuesEnum: EventType allows the user to explicitly set the
      type of event to which this BuildTrigger should respond. This field will
      be validated against the rest of the configuration if it is set.
    IncludeBuildLogsValueValuesEnum: If set to INCLUDE_BUILD_LOGS_WITH_STATUS,
      log url will be shown on GitHub page when build status is final. Setting
      this field to INCLUDE_BUILD_LOGS_WITH_STATUS for non GitHub triggers
      results in INVALID_ARGUMENT error.

  Messages:
    SubstitutionsValue: Substitutions for Build resource. The keys must match
      the following regular expression: `^_[A-Z0-9_]+$`.

  Fields:
    approvalConfig: Configuration for manual approval to start a build
      invocation of this BuildTrigger.
    autodetect: Autodetect build configuration. The following precedence is
      used (case insensitive): 1. cloudbuild.yaml 2. cloudbuild.yml 3.
      cloudbuild.json 4. Dockerfile Currently only available for GitHub App
      Triggers.
    bitbucketServerTriggerConfig: BitbucketServerTriggerConfig describes the
      configuration of a trigger that creates a build whenever a Bitbucket
      Server event is received.
    build: Contents of the build template.
    createTime: Output only. Time when the trigger was created.
    cron: CronConfig describes the configuration of a trigger that creates a
      build whenever a Cloud Scheduler event is received.
    description: Human-readable description of this trigger.
    disabled: If true, the trigger will never automatically execute a build.
    eventType: EventType allows the user to explicitly set the type of event
      to which this BuildTrigger should respond. This field will be validated
      against the rest of the configuration if it is set.
    filename: Path, from the source root, to the build configuration file
      (i.e. cloudbuild.yaml).
    filter: A Common Expression Language string.
    gitFileSource: The file source describing the local or remote Build
      template.
    github: GitHubEventsConfig describes the configuration of a trigger that
      creates a build whenever a GitHub event is received. Mutually exclusive
      with `trigger_template`.
    id: Output only. Unique identifier of the trigger.
    ignoredFiles: ignored_files and included_files are file glob matches using
      https://golang.org/pkg/path/filepath/#Match extended with support for
      "**". If ignored_files and changed files are both empty, then they are
      not used to determine whether or not to trigger a build. If
      ignored_files is not empty, then we ignore any files that match any of
      the ignored_file globs. If the change has no files that are outside of
      the ignored_files globs, then we do not trigger a build.
    includeBuildLogs: If set to INCLUDE_BUILD_LOGS_WITH_STATUS, log url will
      be shown on GitHub page when build status is final. Setting this field
      to INCLUDE_BUILD_LOGS_WITH_STATUS for non GitHub triggers results in
      INVALID_ARGUMENT error.
    includedFiles: If any of the files altered in the commit pass the
      ignored_files filter and included_files is empty, then as far as this
      filter is concerned, we should trigger the build. If any of the files
      altered in the commit pass the ignored_files filter and included_files
      is not empty, then we make sure that at least one of those files matches
      a included_files glob. If not, then we do not trigger a build.
    name: User-assigned name of the trigger. Must be unique within the
      project. Trigger names must meet the following requirements: + They must
      contain only alphanumeric characters and dashes. + They can be 1-64
      characters long. + They must begin and end with an alphanumeric
      character.
    pubsubConfig: PubsubConfig describes the configuration of a trigger that
      creates a build whenever a Pub/Sub message is published.
    resourceName: The `Trigger` name with format:
      `projects/{project}/locations/{location}/triggers/{trigger}`, where
      {trigger} is a unique identifier generated by the service.
    serviceAccount: The service account used for all user-controlled
      operations including UpdateBuildTrigger, RunBuildTrigger, CreateBuild,
      and CancelBuild. If no service account is set, then the standard Cloud
      Build service account ([PROJECT_NUM]@system.gserviceaccount.com) will be
      used instead. Format:
      `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}`
    sourceToBuild: The repo and ref of the repository from which to build.
      This field is used only for those triggers that do not respond to SCM
      events. Triggers that respond to such events build source at whatever
      commit caused the event. This field is currently only used by Webhook,
      Pub/Sub, Manual, and Cron triggers.
    substitutions: Substitutions for Build resource. The keys must match the
      following regular expression: `^_[A-Z0-9_]+$`.
    tags: Tags for annotation of a `BuildTrigger`
    triggerTemplate: Template describing the types of source changes to
      trigger a build. Branch and tag names in trigger templates are
      interpreted as regular expressions. Any branch or tag change that
      matches that regular expression will trigger a build. Mutually exclusive
      with `github`.
    webhookConfig: WebhookConfig describes the configuration of a trigger that
      creates a build whenever a webhook is sent to a trigger's webhook URL.
  """

  class EventTypeValueValuesEnum(_messages.Enum):
    r"""EventType allows the user to explicitly set the type of event to which
    this BuildTrigger should respond. This field will be validated against the
    rest of the configuration if it is set.

    Values:
      EVENT_TYPE_UNSPECIFIED: EVENT_TYPE_UNSPECIFIED event_types are ignored.
      REPO: REPO corresponds to the supported VCS integrations.
      WEBHOOK: WEBHOOK corresponds to webhook triggers.
      PUBSUB: PUBSUB corresponds to pubsub triggers.
      MANUAL: MANUAL corresponds to manual-only invoked triggers.
    """
    EVENT_TYPE_UNSPECIFIED = 0
    REPO = 1
    WEBHOOK = 2
    PUBSUB = 3
    MANUAL = 4

  class IncludeBuildLogsValueValuesEnum(_messages.Enum):
    r"""If set to INCLUDE_BUILD_LOGS_WITH_STATUS, log url will be shown on
    GitHub page when build status is final. Setting this field to
    INCLUDE_BUILD_LOGS_WITH_STATUS for non GitHub triggers results in
    INVALID_ARGUMENT error.

    Values:
      INCLUDE_BUILD_LOGS_UNSPECIFIED: Build logs will not be shown on GitHub.
      INCLUDE_BUILD_LOGS_WITH_STATUS: Build logs will be shown on GitHub.
    """
    INCLUDE_BUILD_LOGS_UNSPECIFIED = 0
    INCLUDE_BUILD_LOGS_WITH_STATUS = 1

  # Arbitrary-key map modeled as repeated AdditionalProperty entries; the
  # decorator routes unrecognized JSON keys into additionalProperties.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class SubstitutionsValue(_messages.Message):
    r"""Substitutions for Build resource. The keys must match the following
    regular expression: `^_[A-Z0-9_]+$`.

    Messages:
      AdditionalProperty: An additional property for a SubstitutionsValue
        object.

    Fields:
      additionalProperties: Additional properties of type SubstitutionsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SubstitutionsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """
      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # Field numbers are proto wire tags; do not renumber.
  approvalConfig = _messages.MessageField('ApprovalConfig', 1)
  autodetect = _messages.BooleanField(2)
  bitbucketServerTriggerConfig = _messages.MessageField('BitbucketServerTriggerConfig', 3)
  build = _messages.MessageField('Build', 4)
  createTime = _messages.StringField(5)
  cron = _messages.MessageField('CronConfig', 6)
  description = _messages.StringField(7)
  disabled = _messages.BooleanField(8)
  eventType = _messages.EnumField('EventTypeValueValuesEnum', 9)
  filename = _messages.StringField(10)
  filter = _messages.StringField(11)
  gitFileSource = _messages.MessageField('GitFileSource', 12)
  github = _messages.MessageField('GitHubEventsConfig', 13)
  id = _messages.StringField(14)
  ignoredFiles = _messages.StringField(15, repeated=True)
  includeBuildLogs = _messages.EnumField('IncludeBuildLogsValueValuesEnum', 16)
  includedFiles = _messages.StringField(17, repeated=True)
  name = _messages.StringField(18)
  pubsubConfig = _messages.MessageField('PubsubConfig', 19)
  resourceName = _messages.StringField(20)
  serviceAccount = _messages.StringField(21)
  sourceToBuild = _messages.MessageField('GitRepoSource', 22)
  substitutions = _messages.MessageField('SubstitutionsValue', 23)
  tags = _messages.StringField(24, repeated=True)
  triggerTemplate = _messages.MessageField('RepoSource', 25)
  webhookConfig = _messages.MessageField('WebhookConfig', 26)
# Generated message types: a built container image, and the build/operation
# cancellation request bodies.
class BuiltImage(_messages.Message):
r"""An image built by the pipeline.
Fields:
digest: Docker Registry 2.0 digest.
name: Name used to push the container image to Google Container Registry,
as presented to `docker push`.
pushTiming: Output only. Stores timing information for pushing the
specified image.
"""
digest = _messages.StringField(1)
name = _messages.StringField(2)
pushTiming = _messages.MessageField('TimeSpan', 3)
class CancelBuildRequest(_messages.Message):
r"""Request to cancel an ongoing build.
Fields:
id: Required. ID of the build.
name: The name of the `Build` to cancel. Format:
`projects/{project}/locations/{location}/builds/{build}`
projectId: Required. ID of the project.
"""
id = _messages.StringField(1)
name = _messages.StringField(2)
projectId = _messages.StringField(3)
# Intentionally empty: CancelOperation takes no request-body fields.
class CancelOperationRequest(_messages.Message):
r"""The request message for Operations.CancelOperation."""
# Generated per-URL request wrappers. Each class name encodes the REST route
# (service + resource path + verb); near-duplicate classes correspond to
# distinct routes that happen to take the same parameters.
class CloudbuildGithubInstallationsInstallationsListRequest(_messages.Message):
r"""A CloudbuildGithubInstallationsInstallationsListRequest object.
Fields:
installationId: Installation ID
"""
installationId = _messages.IntegerField(1, required=True)
class CloudbuildGithubInstallationsProjectsListRequest(_messages.Message):
r"""A CloudbuildGithubInstallationsProjectsListRequest object.
Fields:
installationId: Installation ID
"""
installationId = _messages.IntegerField(1, required=True)
class CloudbuildInstallationsInstallationsListRequest(_messages.Message):
r"""A CloudbuildInstallationsInstallationsListRequest object.
Fields:
installationId: Installation ID
"""
installationId = _messages.IntegerField(1, required=True)
class CloudbuildLocationsRegionalWebhookRequest(_messages.Message):
r"""A CloudbuildLocationsRegionalWebhookRequest object.
Fields:
httpBody: A HttpBody resource to be passed as the request body.
location: Required. The location where the webhook should be sent.
webhookKey: For GitHub Enterprise webhooks, this key is used to associate
the webhook request with the GitHubEnterpriseConfig to use for
validation.
"""
httpBody = _messages.MessageField('HttpBody', 1)
location = _messages.StringField(2, required=True)
webhookKey = _messages.StringField(3)
class CloudbuildOauthGetRegistrationRequest(_messages.Message):
r"""A CloudbuildOauthGetRegistrationRequest object.
Enums:
NamespaceValueValuesEnum: Required. The namespace that the credential
belongs to.
Fields:
authUser: For users who are logged in using multiple accounts, specify the
auth user parameter so that the registration url redirects back to the
cloud console using the proper account.
githubEnterpriseConfig: Optional. The full resource name of the github
enterprise resource if applicable.
hostUrl: Required. The host url that the oauth credentials are associated
with. For GitHub, this would be "https://github.com". For
GitHubEnterprise, this would be the host name of their github enterprise
instance.
namespace: Required. The namespace that the credential belongs to.
"""
class NamespaceValueValuesEnum(_messages.Enum):
r"""Required. The namespace that the credential belongs to.
Values:
NAMESPACE_UNSPECIFIED: The default namespace.
GITHUB_ENTERPRISE: A credential to be used with GitHub enterprise.
"""
NAMESPACE_UNSPECIFIED = 0
GITHUB_ENTERPRISE = 1
authUser = _messages.StringField(1)
githubEnterpriseConfig = _messages.StringField(2)
hostUrl = _messages.StringField(3)
namespace = _messages.EnumField('NamespaceValueValuesEnum', 4)
class CloudbuildOauthProcessOAuthCallbackRequest(_messages.Message):
r"""A CloudbuildOauthProcessOAuthCallbackRequest object.
Fields:
code: GitHub generated temproary authorization code.
githubEnterpriseConfig: For github enterprise, the full resource name of
the github enterprise resource.
hostUrl: The host url of the site that the OAuth token is issued for.
namespace: The namespace that the oauth callback credential should be
processed for. This should map to the string name of the enum defined in
the GetOAuthRegistrationURLRequest.
state: The XSRF token that was sent as part of the initial request to
start the OAuth flow.
"""
code = _messages.StringField(1)
githubEnterpriseConfig = _messages.StringField(2)
hostUrl = _messages.StringField(3)
namespace = _messages.StringField(4)
state = _messages.StringField(5)
# Generated request wrappers for the operations.* and projects.builds.* routes.
class CloudbuildOperationsCancelRequest(_messages.Message):
r"""A CloudbuildOperationsCancelRequest object.
Fields:
cancelOperationRequest: A CancelOperationRequest resource to be passed as
the request body.
name: The name of the operation resource to be cancelled.
"""
cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
name = _messages.StringField(2, required=True)
class CloudbuildOperationsGetRequest(_messages.Message):
r"""A CloudbuildOperationsGetRequest object.
Fields:
name: The name of the operation resource.
"""
name = _messages.StringField(1, required=True)
class CloudbuildProjectsBuildsApproveRequest(_messages.Message):
r"""A CloudbuildProjectsBuildsApproveRequest object.
Fields:
approveBuildRequest: A ApproveBuildRequest resource to be passed as the
request body.
name: Required. Name of the target build. For example:
"projects/{$project_id}/builds/{$build_id}"
"""
approveBuildRequest = _messages.MessageField('ApproveBuildRequest', 1)
name = _messages.StringField(2, required=True)
class CloudbuildProjectsBuildsCancelRequest(_messages.Message):
r"""A CloudbuildProjectsBuildsCancelRequest object.
Fields:
cancelBuildRequest: A CancelBuildRequest resource to be passed as the
request body.
id: Required. ID of the build.
projectId: Required. ID of the project.
"""
cancelBuildRequest = _messages.MessageField('CancelBuildRequest', 1)
id = _messages.StringField(2, required=True)
projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsBuildsCreateRequest object.
Fields:
build: A Build resource to be passed as the request body.
parent: The parent resource where this build will be created. Format:
`projects/{project}/locations/{location}`
projectId: Required. ID of the project.
"""
build = _messages.MessageField('Build', 1)
parent = _messages.StringField(2)
projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsGetRequest(_messages.Message):
r"""A CloudbuildProjectsBuildsGetRequest object.
Fields:
id: Required. ID of the build.
name: The name of the `Build` to retrieve. Format:
`projects/{project}/locations/{location}/builds/{build}`
projectId: Required. ID of the project.
"""
id = _messages.StringField(1, required=True)
name = _messages.StringField(2)
projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsListRequest(_messages.Message):
r"""A CloudbuildProjectsBuildsListRequest object.
Fields:
filter: The raw filter text to constrain the results.
pageSize: Number of results to return in the list.
pageToken: The page token for the next page of Builds. If unspecified, the
first page of results is returned. If the token is rejected for any
reason, INVALID_ARGUMENT will be thrown. In this case, the token should
be discarded, and pagination should be restarted from the first page of
results. See https://google.aip.dev/158 for more.
parent: The parent of the collection of `Builds`. Format:
`projects/{project}/locations/location`
projectId: Required. ID of the project.
"""
filter = _messages.StringField(1)
pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(3)
parent = _messages.StringField(4)
projectId = _messages.StringField(5, required=True)
# Generated request wrappers for the projects.githubEnterpriseConfigs.* routes.
class CloudbuildProjectsGithubEnterpriseConfigsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsGithubEnterpriseConfigsCreateRequest object.
Fields:
gheConfigId: Optional. The ID to use for the GithubEnterpriseConfig, which
will become the final component of the GithubEnterpriseConfig's resource
name. ghe_config_id must meet the following requirements: + They must
contain only alphanumeric characters and dashes. + They can be 1-64
characters long. + They must begin and end with an alphanumeric
character
gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
the request body.
parent: Name of the parent project. For example:
projects/{$project_number} or projects/{$project_id}
projectId: ID of the project.
"""
gheConfigId = _messages.StringField(1)
gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 2)
parent = _messages.StringField(3, required=True)
projectId = _messages.StringField(4)
class CloudbuildProjectsGithubEnterpriseConfigsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsGithubEnterpriseConfigsDeleteRequest object.
Fields:
configId: Unique identifier of the `GitHubEnterpriseConfig`
name: This field should contain the name of the enterprise config
resource. For example:
"projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
projectId: ID of the project
"""
configId = _messages.StringField(1)
name = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsGithubEnterpriseConfigsGetAppRequest(_messages.Message):
r"""A CloudbuildProjectsGithubEnterpriseConfigsGetAppRequest object.
Fields:
enterpriseConfigResource: Required. The name of the enterprise config
resource associated with the GitHub App. For example:
"projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
"""
enterpriseConfigResource = _messages.StringField(1, required=True)
class CloudbuildProjectsGithubEnterpriseConfigsGetRequest(_messages.Message):
r"""A CloudbuildProjectsGithubEnterpriseConfigsGetRequest object.
Fields:
configId: Unique identifier of the `GitHubEnterpriseConfig`
name: This field should contain the name of the enterprise config
resource. For example:
"projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
projectId: ID of the project
"""
configId = _messages.StringField(1)
name = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsGithubEnterpriseConfigsListRequest(_messages.Message):
r"""A CloudbuildProjectsGithubEnterpriseConfigsListRequest object.
Fields:
parent: Name of the parent project. For example:
projects/{$project_number} or projects/{$project_id}
projectId: ID of the project
"""
parent = _messages.StringField(1, required=True)
projectId = _messages.StringField(2)
class CloudbuildProjectsGithubEnterpriseConfigsPatchRequest(_messages.Message):
r"""A CloudbuildProjectsGithubEnterpriseConfigsPatchRequest object.
Fields:
gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
the request body.
name: Optional. The full resource name for the GitHubEnterpriseConfig For
example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
updateMask: Update mask for the resource. If this is set, the server will
only update the fields specified in the field mask. Otherwise, a full
update of the mutable resource fields will be performed.
"""
gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 1)
name = _messages.StringField(2, required=True)
updateMask = _messages.StringField(3)
# Generated request wrappers for the projects.githubInstallations.* and
# projects.installations.* routes. Fields with a numeric suffix (projectId1)
# are the generator's rename of a second parameter whose name collided with
# an existing field on the same request -- presumably body vs. URL parameter;
# verify against the service config before relying on either.
class CloudbuildProjectsGithubInstallationsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsGithubInstallationsCreateRequest object.
Fields:
installation: A Installation resource to be passed as the request body.
parent: The parent resource where this github installation will be
created. Format: `projects/{project}/locations/{location}`
projectId: The project ID of the GCP project the installation is
associated with.
projectId1: ID of the project.
userOauthCode: GitHub user code. If a GitHub credential is already
associated with the user this can be omitted, else the code is used to
exchange and store an OAuth token.
"""
installation = _messages.MessageField('Installation', 1)
parent = _messages.StringField(2)
projectId = _messages.StringField(3, required=True)
projectId1 = _messages.StringField(4)
userOauthCode = _messages.StringField(5)
class CloudbuildProjectsGithubInstallationsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsGithubInstallationsDeleteRequest object.
Fields:
installationId: GitHub app installation ID.
name: The name of the `GitHubInstallation` to delete. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: Cloud Project ID.
"""
installationId = _messages.IntegerField(1, required=True)
name = _messages.StringField(2)
projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsGithubInstallationsListRequest(_messages.Message):
r"""A CloudbuildProjectsGithubInstallationsListRequest object.
Fields:
parent: The parent resource where github installations for project will be
listed. Format: `projects/{project}/locations/{location}`
projectId: Project id
"""
parent = _messages.StringField(1)
projectId = _messages.StringField(2, required=True)
class CloudbuildProjectsGithubInstallationsPatchRequest(_messages.Message):
r"""A CloudbuildProjectsGithubInstallationsPatchRequest object.
Fields:
id: GitHub installation ID, created by GitHub.
installation: A Installation resource to be passed as the request body.
installationId: Unique identifier of the GitHub installation. Deprecated.
Should set installation.id
name: The name of the `GitHubInstallation` to update. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: The project ID of the GCP project the installation is
associated with.
projectId1: ID of the project.
updateMask: Update mask for the Installation resource. If this is set, the
server will only update the fields specified in the field mask.
Otherwise, a full update of the resource will be performed.
"""
id = _messages.IntegerField(1, required=True)
installation = _messages.MessageField('Installation', 2)
# Deprecated per the docstring above; prefer installation.id.
installationId = _messages.IntegerField(3)
name = _messages.StringField(4)
projectId = _messages.StringField(5, required=True)
projectId1 = _messages.StringField(6)
updateMask = _messages.StringField(7)
class CloudbuildProjectsInstallationsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsInstallationsCreateRequest object.
Fields:
installation: A Installation resource to be passed as the request body.
parent: The parent resource where this github installation will be
created. Format: `projects/{project}/locations/{location}`
projectId: ID of the project.
userOauthCode: GitHub user code. If a GitHub credential is already
associated with the user this can be omitted, else the code is used to
exchange and store an OAuth token.
"""
installation = _messages.MessageField('Installation', 1)
parent = _messages.StringField(2)
projectId = _messages.StringField(3, required=True)
userOauthCode = _messages.StringField(4)
class CloudbuildProjectsInstallationsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsInstallationsDeleteRequest object.
Fields:
installationId: GitHub app installation ID.
name: The name of the `GitHubInstallation` to delete. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: Cloud Project ID.
"""
installationId = _messages.IntegerField(1, required=True)
name = _messages.StringField(2)
projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsInstallationsListRequest(_messages.Message):
r"""A CloudbuildProjectsInstallationsListRequest object.
Fields:
parent: The parent resource where github installations for project will be
listed. Format: `projects/{project}/locations/{location}`
projectId: Project id
"""
parent = _messages.StringField(1)
projectId = _messages.StringField(2, required=True)
class CloudbuildProjectsInstallationsPatchRequest(_messages.Message):
r"""A CloudbuildProjectsInstallationsPatchRequest object.
Fields:
id: GitHub installation ID, created by GitHub.
installation: A Installation resource to be passed as the request body.
installationId: Unique identifier of the GitHub installation. Deprecated.
Should set installation.id
name: The name of the `GitHubInstallation` to update. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: ID of the project.
projectNum: Numerical ID of the project.
updateMask: Update mask for the Installation resource. If this is set, the
server will only update the fields specified in the field mask.
Otherwise, a full update of the resource will be performed.
"""
id = _messages.IntegerField(1, required=True)
installation = _messages.MessageField('Installation', 2)
# Deprecated per the docstring above; prefer installation.id.
installationId = _messages.IntegerField(3)
name = _messages.StringField(4)
projectId = _messages.StringField(5)
projectNum = _messages.IntegerField(6, required=True)
updateMask = _messages.StringField(7)
# Generated request wrappers for projects.locations.bitbucketServerConfigs.*.
class CloudbuildProjectsLocationsBitbucketServerConfigsAddBitbucketServerConnectedRepositoryRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsAddBitbucketServerCon
nectedRepositoryRequest object.
Fields:
addBitbucketServerConnectedRepositoryRequest: A
AddBitbucketServerConnectedRepositoryRequest resource to be passed as
the request body.
config: Required. The name of the `BitbucketServerConfig` to add a
connected repository. Format: `projects/{project}/locations/{location}/b
itbucketServerConfigs/{config}`
"""
addBitbucketServerConnectedRepositoryRequest = _messages.MessageField('AddBitbucketServerConnectedRepositoryRequest', 1)
config = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsConnectedRepositoriesBatchCreateRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsConnectedRepositories
BatchCreateRequest object.
Fields:
batchCreateBitbucketServerConnectedRepositoriesRequest: A
BatchCreateBitbucketServerConnectedRepositoriesRequest resource to be
passed as the request body.
parent: The name of the `BitbucketServerConfig` that added connected
repository. Format: `projects/{project}/locations/{location}/bitbucketSe
rverConfigs/{config}`
"""
batchCreateBitbucketServerConnectedRepositoriesRequest = _messages.MessageField('BatchCreateBitbucketServerConnectedRepositoriesRequest', 1)
parent = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsCreateRequest object.
Fields:
bitbucketServerConfig: A BitbucketServerConfig resource to be passed as
the request body.
bitbucketServerConfigId: Optional. The ID to use for the
BitbucketServerConfig, which will become the final component of the
BitbucketServerConfig's resource name. bitbucket_server_config_id must
meet the following requirements: + They must contain only alphanumeric
characters and dashes. + They can be 1-64 characters long. + They must
begin and end with an alphanumeric character.
parent: Required. Name of the parent resource.
"""
bitbucketServerConfig = _messages.MessageField('BitbucketServerConfig', 1)
bitbucketServerConfigId = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsDeleteRequest object.
Fields:
name: Required. The config resource name.
"""
name = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsGetRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsGetRequest object.
Fields:
name: Required. The config resource name.
"""
name = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsListRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsListRequest object.
Fields:
pageSize: The maximum number of configs to return. The service may return
fewer than this value. If unspecified, at most 50 configs will be
returned. The maximum value is 1000; values above 1000 will be coerced
to 1000.
pageToken: A page token, received from a previous
`ListBitbucketServerConfigsRequest` call. Provide this to retrieve the
subsequent page. When paginating, all other parameters provided to
`ListBitbucketServerConfigsRequest` must match the call that provided
the page token.
parent: Required. Name of the parent resource.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsPatchRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsPatchRequest object.
Fields:
bitbucketServerConfig: A BitbucketServerConfig resource to be passed as
the request body.
name: The resource name for the config.
updateMask: Update mask for the resource. If this is set, the server will
only update the fields specified in the field mask. Otherwise, a full
update of the mutable resource fields will be performed.
"""
bitbucketServerConfig = _messages.MessageField('BitbucketServerConfig', 1)
name = _messages.StringField(2, required=True)
updateMask = _messages.StringField(3)
class CloudbuildProjectsLocationsBitbucketServerConfigsRemoveBitbucketServerConnectedRepositoryRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsRemoveBitbucketServer
ConnectedRepositoryRequest object.
Fields:
config: Required. The name of the `BitbucketServerConfig` to remove a
connected repository. Format: `projects/{project}/locations/{location}/b
itbucketServerConfigs/{config}`
removeBitbucketServerConnectedRepositoryRequest: A
RemoveBitbucketServerConnectedRepositoryRequest resource to be passed as
the request body.
"""
config = _messages.StringField(1, required=True)
removeBitbucketServerConnectedRepositoryRequest = _messages.MessageField('RemoveBitbucketServerConnectedRepositoryRequest', 2)
class CloudbuildProjectsLocationsBitbucketServerConfigsReposListRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBitbucketServerConfigsReposListRequest
object.
Fields:
pageSize: The maximum number of configs to return. The service may return
fewer than this value. If unspecified, at most 50 configs will be
returned. The maximum value is 1000; values above 1000 will be coerced
to 1000.
pageToken: A page token, received from a previous
`ListBitbucketServerRepositoriesRequest` call. Provide this to retrieve
the subsequent page. When paginating, all other parameters provided to
`ListBitbucketServerConfigsRequest` must match the call that provided
the page token.
parent: Required. Name of the parent resource.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
# Generated request wrappers for projects.locations.builds.*. These mirror
# the projects.builds.* classes above, but with `parent`/`name` (the
# location-qualified resource path) required and `projectId` optional.
class CloudbuildProjectsLocationsBuildsApproveRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBuildsApproveRequest object.
Fields:
approveBuildRequest: A ApproveBuildRequest resource to be passed as the
request body.
name: Required. Name of the target build. For example:
"projects/{$project_id}/builds/{$build_id}"
"""
approveBuildRequest = _messages.MessageField('ApproveBuildRequest', 1)
name = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsBuildsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBuildsCreateRequest object.
Fields:
build: A Build resource to be passed as the request body.
parent: The parent resource where this build will be created. Format:
`projects/{project}/locations/{location}`
projectId: Required. ID of the project.
"""
build = _messages.MessageField('Build', 1)
parent = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsBuildsGetRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBuildsGetRequest object.
Fields:
id: Required. ID of the build.
name: The name of the `Build` to retrieve. Format:
`projects/{project}/locations/{location}/builds/{build}`
projectId: Required. ID of the project.
"""
id = _messages.StringField(1)
name = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsBuildsListRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsBuildsListRequest object.
Fields:
filter: The raw filter text to constrain the results.
pageSize: Number of results to return in the list.
pageToken: The page token for the next page of Builds. If unspecified, the
first page of results is returned. If the token is rejected for any
reason, INVALID_ARGUMENT will be thrown. In this case, the token should
be discarded, and pagination should be restarted from the first page of
results. See https://google.aip.dev/158 for more.
parent: The parent of the collection of `Builds`. Format:
`projects/{project}/locations/location`
projectId: Required. ID of the project.
"""
filter = _messages.StringField(1)
pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(3)
parent = _messages.StringField(4, required=True)
projectId = _messages.StringField(5)
# Generated request wrappers for projects.locations.githubEnterpriseConfigs.*.
class CloudbuildProjectsLocationsGithubEnterpriseConfigsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsCreateRequest
object.
Fields:
gheConfigId: Optional. The ID to use for the GithubEnterpriseConfig, which
will become the final component of the GithubEnterpriseConfig's resource
name. ghe_config_id must meet the following requirements: + They must
contain only alphanumeric characters and dashes. + They can be 1-64
characters long. + They must begin and end with an alphanumeric
character
gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
the request body.
parent: Name of the parent project. For example:
projects/{$project_number} or projects/{$project_id}
projectId: ID of the project.
"""
gheConfigId = _messages.StringField(1)
gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 2)
parent = _messages.StringField(3, required=True)
projectId = _messages.StringField(4)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsDeleteRequest
object.
Fields:
configId: Unique identifier of the `GitHubEnterpriseConfig`
name: This field should contain the name of the enterprise config
resource. For example:
"projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
projectId: ID of the project
"""
configId = _messages.StringField(1)
name = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsGetAppRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsGetAppRequest
object.
Fields:
enterpriseConfigResource: Required. The name of the enterprise config
resource associated with the GitHub App. For example:
"projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
"""
enterpriseConfigResource = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsGetRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsGetRequest object.
Fields:
configId: Unique identifier of the `GitHubEnterpriseConfig`
name: This field should contain the name of the enterprise config
resource. For example:
"projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
projectId: ID of the project
"""
configId = _messages.StringField(1)
name = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsListRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsListRequest object.
Fields:
parent: Name of the parent project. For example:
projects/{$project_number} or projects/{$project_id}
projectId: ID of the project
"""
parent = _messages.StringField(1, required=True)
projectId = _messages.StringField(2)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsPatchRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsPatchRequest object.
Fields:
gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
the request body.
name: Optional. The full resource name for the GitHubEnterpriseConfig For
example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
updateMask: Update mask for the resource. If this is set, the server will
only update the fields specified in the field mask. Otherwise, a full
update of the mutable resource fields will be performed.
"""
gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 1)
name = _messages.StringField(2, required=True)
updateMask = _messages.StringField(3)
class CloudbuildProjectsLocationsGithubInstallationsCreateRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubInstallationsCreateRequest object.
Fields:
installation: A Installation resource to be passed as the request body.
parent: The parent resource where this github installation will be
created. Format: `projects/{project}/locations/{location}`
projectId: ID of the project.
userOauthCode: GitHub user code. If a GitHub credential is already
associated with the user this can be omitted, else the code is used to
exchange and store an OAuth token.
"""
installation = _messages.MessageField('Installation', 1)
parent = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
userOauthCode = _messages.StringField(4)
class CloudbuildProjectsLocationsGithubInstallationsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubInstallationsDeleteRequest object.
Fields:
installationId: GitHub app installation ID.
installationsId: A string attribute.
name: The name of the `GitHubInstallation` to delete. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: Cloud Project ID.
"""
installationId = _messages.IntegerField(1)
installationsId = _messages.StringField(2, required=True)
name = _messages.StringField(3, required=True)
projectId = _messages.StringField(4)
class CloudbuildProjectsLocationsGithubInstallationsListRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubInstallationsListRequest object.
Fields:
parent: The parent resource where github installations for project will be
listed. Format: `projects/{project}/locations/{location}`
projectId: Project id
"""
parent = _messages.StringField(1, required=True)
projectId = _messages.StringField(2)
class CloudbuildProjectsLocationsGithubInstallationsPatchRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsGithubInstallationsPatchRequest object.
Fields:
installation: A Installation resource to be passed as the request body.
installationId: Unique identifier of the GitHub installation. Deprecated.
Should set installation.id
installationsId: A string attribute.
name: The `Installation` name with format:
`projects/{project}/locations/{location}/installations/{installation}`,
where {installation} is GitHub installation ID created by GitHub.
name1: The name of the `GitHubInstallation` to update. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: ID of the project.
updateMask: Update mask for the Installation resource. If this is set, the
server will only update the fields specified in the field mask.
Otherwise, a full update of the resource will be performed.
"""
installation = _messages.MessageField('Installation', 1)
installationId = _messages.IntegerField(2)
installationsId = _messages.StringField(3, required=True)
name = _messages.StringField(4, required=True)
name1 = _messages.StringField(5)
projectId = _messages.StringField(6)
updateMask = _messages.StringField(7)
class CloudbuildProjectsLocationsInstallationsDeleteRequest(_messages.Message):
r"""A CloudbuildProjectsLocationsInstallationsDeleteRequest object.
Fields:
installationId: GitHub app installation ID.
name: The name of the `GitHubInstallation` to delete. Format:
`projects/{project}/locations/{location}/installations/{installation}`
projectId: Cloud Project ID.
"""
installationId = _messages.IntegerField(1)
name = _messages.StringField(2, required=True)
projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsInstallationsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsInstallationsListRequest object.

  Fields:
    parent: The parent resource where github installations for project will be
      listed. Format: `projects/{project}/locations/{location}`
    projectId: Project id
  """

  parent = _messages.StringField(1, required=True)  # Required parent resource.
  projectId = _messages.StringField(2)
class CloudbuildProjectsLocationsInstallationsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsInstallationsPatchRequest object.

  Fields:
    installation: A Installation resource to be passed as the request body.
    installationId: Unique identifier of the GitHub installation. Deprecated.
      Should set installation.id
    name: The `Installation` name with format:
      `projects/{project}/locations/{location}/installations/{installation}`,
      where {installation} is GitHub installation ID created by GitHub.
    name1: The name of the `GitHubInstallation` to update. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: ID of the project.
    updateMask: Update mask for the Installation resource. If this is set, the
      server will only update the fields specified in the field mask.
      Otherwise, a full update of the resource will be performed.
  """

  installation = _messages.MessageField('Installation', 1)
  installationId = _messages.IntegerField(2)  # Deprecated per docstring; set installation.id instead.
  name = _messages.StringField(3, required=True)  # Required resource name.
  name1 = _messages.StringField(4)
  projectId = _messages.StringField(5)
  updateMask = _messages.StringField(6)
class CloudbuildProjectsLocationsOperationsCancelRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsOperationsCancelRequest object.

  Fields:
    cancelOperationRequest: A CancelOperationRequest resource to be passed as
      the request body.
    name: The name of the operation resource to be cancelled.
  """

  cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
  name = _messages.StringField(2, required=True)  # Required operation name.
class CloudbuildProjectsLocationsOperationsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsOperationsGetRequest object.

  Fields:
    name: The name of the operation resource.
  """

  name = _messages.StringField(1, required=True)  # Required operation name.
class CloudbuildProjectsLocationsTriggersCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersCreateRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    parent: The parent resource where this trigger will be created. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to configure automatic
      builds.
  """

  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  # Location-scoped variant: parent carries the path, projectId is auxiliary.
  parent = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsTriggersDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersDeleteRequest object.

  Fields:
    name: The name of the `Trigger` to delete. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. ID of the `BuildTrigger` to delete.
  """

  name = _messages.StringField(1, required=True)  # Required resource name.
  projectId = _messages.StringField(2)
  triggerId = _messages.StringField(3)
class CloudbuildProjectsLocationsTriggersGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersGetRequest object.

  Fields:
    name: The name of the `Trigger` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. Identifier (`id` or `name`) of the `BuildTrigger` to
      get.
  """

  name = _messages.StringField(1, required=True)  # Required resource name.
  projectId = _messages.StringField(2)
  triggerId = _messages.StringField(3)
class CloudbuildProjectsLocationsTriggersListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersListRequest object.

  Fields:
    pageSize: Number of results to return in the list.
    pageToken: Token to provide to skip to a particular spot in the list.
    parent: The parent of the collection of `Triggers`. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to list BuildTriggers.
  """

  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)  # int32 on the wire.
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)  # Required parent resource.
  projectId = _messages.StringField(4)
class CloudbuildProjectsLocationsTriggersPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersPatchRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    projectId: Required. ID of the project that owns the trigger.
    resourceName: The `Trigger` name with format:
      `projects/{project}/locations/{location}/triggers/{trigger}`, where
      {trigger} is a unique identifier generated by the service.
    triggerId: Required. ID of the `BuildTrigger` to update.
  """

  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  projectId = _messages.StringField(2)
  resourceName = _messages.StringField(3, required=True)  # Required resource name.
  triggerId = _messages.StringField(4)
class CloudbuildProjectsLocationsTriggersRunRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersRunRequest object.

  Fields:
    name: The name of the `Trigger` to run. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    runBuildTriggerRequest: A RunBuildTriggerRequest resource to be passed as
      the request body.
  """

  name = _messages.StringField(1, required=True)  # Required resource name.
  runBuildTriggerRequest = _messages.MessageField('RunBuildTriggerRequest', 2)
class CloudbuildProjectsLocationsTriggersWebhookRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    name: The name of the `ReceiveTriggerWebhook` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Project in which the specified trigger lives
    secret: Secret token used for authorization if an OAuth token isn't
      provided.
    trigger: Name of the trigger to run the payload against
  """

  httpBody = _messages.MessageField('HttpBody', 1)  # Raw webhook payload.
  name = _messages.StringField(2, required=True)  # Required resource name.
  projectId = _messages.StringField(3)
  secret = _messages.StringField(4)
  trigger = _messages.StringField(5)
class CloudbuildProjectsLocationsWorkerPoolsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsCreateRequest object.

  Fields:
    parent: Required. The parent resource where this worker pool will be
      created. Format: `projects/{project}/locations/{location}`.
    validateOnly: If set, validate the request and preview the response, but
      do not actually post it.
    workerPool: A WorkerPool resource to be passed as the request body.
    workerPoolId: Required. Immutable. The ID to use for the `WorkerPool`,
      which will become the final component of the resource name. This value
      should be 1-63 characters, and valid characters are /a-z-/.
  """

  parent = _messages.StringField(1, required=True)  # Required parent resource.
  validateOnly = _messages.BooleanField(2)  # Dry-run flag per docstring.
  workerPool = _messages.MessageField('WorkerPool', 3)
  workerPoolId = _messages.StringField(4)
class CloudbuildProjectsLocationsWorkerPoolsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsDeleteRequest object.

  Fields:
    allowMissing: If set to true, and the `WorkerPool` is not found, the
      request will succeed but no action will be taken on the server.
    etag: Optional. If provided, it must match the server's etag on the
      workerpool for the request to be processed.
    name: Required. The name of the `WorkerPool` to delete. Format:
      `projects/{project}/locations/{workerPool}/workerPools/{workerPool}`.
    validateOnly: If set, validate the request and preview the response, but
      do not actually post it.
  """

  allowMissing = _messages.BooleanField(1)
  etag = _messages.StringField(2)  # Optimistic-concurrency guard per docstring.
  name = _messages.StringField(3, required=True)  # Required resource name.
  validateOnly = _messages.BooleanField(4)
class CloudbuildProjectsLocationsWorkerPoolsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsGetRequest object.

  Fields:
    name: Required. The name of the `WorkerPool` to retrieve. Format:
      `projects/{project}/locations/{location}/workerPools/{workerPool}`.
  """

  name = _messages.StringField(1, required=True)  # Required resource name.
class CloudbuildProjectsLocationsWorkerPoolsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsListRequest object.

  Fields:
    pageSize: The maximum number of `WorkerPool`s to return. The service may
      return fewer than this value. If omitted, the server will use a sensible
      default.
    pageToken: A page token, received from a previous `ListWorkerPools` call.
      Provide this to retrieve the subsequent page.
    parent: Required. The parent of the collection of `WorkerPools`. Format:
      `projects/{project}/locations/{location}`.
  """

  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)  # int32 on the wire.
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)  # Required parent resource.
class CloudbuildProjectsLocationsWorkerPoolsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsPatchRequest object.

  Fields:
    name: Output only. The resource name of the `WorkerPool`, with format
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`. The
      value of `{worker_pool}` is provided by `worker_pool_id` in
      `CreateWorkerPool` request and the value of `{location}` is determined
      by the endpoint accessed.
    updateMask: A mask specifying which fields in `worker_pool` to update.
    validateOnly: If set, validate the request and preview the response, but
      do not actually post it.
    workerPool: A WorkerPool resource to be passed as the request body.
  """

  name = _messages.StringField(1, required=True)  # Required in URL; output only on resource.
  updateMask = _messages.StringField(2)
  validateOnly = _messages.BooleanField(3)
  workerPool = _messages.MessageField('WorkerPool', 4)
class CloudbuildProjectsTriggersCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersCreateRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    parent: The parent resource where this trigger will be created. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to configure automatic
      builds.
  """

  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  # Project-scoped variant: projectId carries the path, parent is auxiliary.
  parent = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsTriggersDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersDeleteRequest object.

  Fields:
    name: The name of the `Trigger` to delete. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. ID of the `BuildTrigger` to delete.
  """

  name = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)  # Required.
  triggerId = _messages.StringField(3, required=True)  # Required.
class CloudbuildProjectsTriggersGetRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersGetRequest object.

  Fields:
    name: The name of the `Trigger` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. Identifier (`id` or `name`) of the `BuildTrigger` to
      get.
  """

  name = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)  # Required.
  triggerId = _messages.StringField(3, required=True)  # Required.
class CloudbuildProjectsTriggersListRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersListRequest object.

  Fields:
    pageSize: Number of results to return in the list.
    pageToken: Token to provide to skip to a particular spot in the list.
    parent: The parent of the collection of `Triggers`. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to list BuildTriggers.
  """

  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)  # int32 on the wire.
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3)
  projectId = _messages.StringField(4, required=True)  # Required.
class CloudbuildProjectsTriggersPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersPatchRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. ID of the `BuildTrigger` to update.
  """

  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  projectId = _messages.StringField(2, required=True)  # Required.
  triggerId = _messages.StringField(3, required=True)  # Required.
class CloudbuildProjectsTriggersRunRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersRunRequest object.

  Fields:
    name: The name of the `Trigger` to run. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project.
    repoSource: A RepoSource resource to be passed as the request body.
    triggerId: Required. ID of the trigger.
  """

  name = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)  # Required.
  repoSource = _messages.MessageField('RepoSource', 3)
  triggerId = _messages.StringField(4, required=True)  # Required.
class CloudbuildProjectsTriggersWebhookRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    name: The name of the `ReceiveTriggerWebhook` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Project in which the specified trigger lives
    secret: Secret token used for authorization if an OAuth token isn't
      provided.
    trigger: Name of the trigger to run the payload against
  """

  httpBody = _messages.MessageField('HttpBody', 1)  # Raw webhook payload.
  name = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)  # Required.
  secret = _messages.StringField(4)
  trigger = _messages.StringField(5, required=True)  # Required.
class CloudbuildWebhookRequest(_messages.Message):
  r"""A CloudbuildWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    webhookKey: For GitHub Enterprise webhooks, this key is used to associate
      the webhook request with the GitHubEnterpriseConfig to use for
      validation.
  """

  httpBody = _messages.MessageField('HttpBody', 1)  # Raw webhook payload.
  webhookKey = _messages.StringField(2)
class ClusterOptions(_messages.Message):
  r"""Details of the GKE Cluster for builds that should execute on-cluster.

  Fields:
    name: Identifier of the GKE Cluster this build should execute on. Example:
      projects/{project_id}/locations/{location}/clusters/{cluster_name} The
      cluster's project ID must be the same project ID that is running the
      build. The cluster must exist and have the CloudBuild add-on enabled.
  """

  name = _messages.StringField(1)
class CreateBitbucketServerConfigOperationMetadata(_messages.Message):
  r"""Metadata for `CreateBitbucketServerConfig` operation.

  Fields:
    bitbucketServerConfig: The resource name of the BitbucketServerConfig to
      be created. Format:
      `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
  """

  bitbucketServerConfig = _messages.StringField(1)
  completeTime = _messages.StringField(2)  # Timestamp serialized as string.
  createTime = _messages.StringField(3)  # Timestamp serialized as string.
class CreateBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""Request to connect a repository from a connected Bitbucket Server host.

  Fields:
    bitbucketServerConnectedRepository: Required. The Bitbucket Server
      repository to connect.
    parent: Required. The name of the `BitbucketServerConfig` that added
      connected repository. Format: `projects/{project}/locations/{location}/b
      itbucketServerConfigs/{config}`
  """

  bitbucketServerConnectedRepository = _messages.MessageField('BitbucketServerConnectedRepository', 1)
  parent = _messages.StringField(2)
class CreateGitHubEnterpriseConfigOperationMetadata(_messages.Message):
  r"""Metadata for `CreateGithubEnterpriseConfig` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      created. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  completeTime = _messages.StringField(1)  # Timestamp serialized as string.
  createTime = _messages.StringField(2)  # Timestamp serialized as string.
  githubEnterpriseConfig = _messages.StringField(3)
class CreateWorkerPoolOperationMetadata(_messages.Message):
  r"""Metadata for the `CreateWorkerPool` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    workerPool: The resource name of the `WorkerPool` to create. Format:
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
  """

  completeTime = _messages.StringField(1)  # Timestamp serialized as string.
  createTime = _messages.StringField(2)  # Timestamp serialized as string.
  workerPool = _messages.StringField(3)
class CronConfig(_messages.Message):
  r"""CronConfig describes the configuration of a trigger that creates a build
  whenever a Cloud Scheduler event is received.

  Fields:
    enterpriseConfigResource: The GitHub Enterprise config resource name that
      is associated with this installation.
    schedule: Required. Describes the schedule on which the job will be
      executed. The schedule can be either of the following types: *
      [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like
      [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-
      schedules)
    timeZone: Specifies the time zone to be used in interpreting the schedule.
      The value of this field must be a time zone name from the [tz database]
      (http://en.wikipedia.org/wiki/Tz_database). Note that some time zones
      include a provision for daylight savings time. The rules for daylight
      saving time are determined by the chosen tz. For UTC use the string
      "utc". If a time zone is not specified, the default will be in UTC (also
      known as GMT).
  """

  enterpriseConfigResource = _messages.StringField(1)
  schedule = _messages.StringField(2)  # Required per docstring; crontab or English-like schedule.
  timeZone = _messages.StringField(3)  # tz database name; defaults to UTC per docstring.
class DeleteBitbucketServerConfigOperationMetadata(_messages.Message):
  r"""Metadata for `DeleteBitbucketServerConfig` operation.

  Fields:
    bitbucketServerConfig: The resource name of the BitbucketServerConfig to
      be deleted. Format:
      `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
  """

  bitbucketServerConfig = _messages.StringField(1)
  completeTime = _messages.StringField(2)  # Timestamp serialized as string.
  createTime = _messages.StringField(3)  # Timestamp serialized as string.
class DeleteGitHubEnterpriseConfigOperationMetadata(_messages.Message):
  r"""Metadata for `DeleteGitHubEnterpriseConfig` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      deleted. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  completeTime = _messages.StringField(1)  # Timestamp serialized as string.
  createTime = _messages.StringField(2)  # Timestamp serialized as string.
  githubEnterpriseConfig = _messages.StringField(3)
class DeleteWorkerPoolOperationMetadata(_messages.Message):
  r"""Metadata for the `DeleteWorkerPool` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    workerPool: The resource name of the `WorkerPool` being deleted. Format:
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
  """

  completeTime = _messages.StringField(1)  # Timestamp serialized as string.
  createTime = _messages.StringField(2)  # Timestamp serialized as string.
  workerPool = _messages.StringField(3)
class Empty(_messages.Message):
  r"""A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance: service Foo { rpc
  Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON
  representation for `Empty` is empty JSON object `{}`.
  """
  # Intentionally has no fields; mirrors google.protobuf.Empty.
class FailureInfo(_messages.Message):
  r"""A fatal problem encountered during the execution of the build.

  Enums:
    TypeValueValuesEnum: The name of the failure.

  Fields:
    detail: Explains the failure issue in more detail using hard-coded text.
    type: The name of the failure.
  """

  class TypeValueValuesEnum(_messages.Enum):
    r"""The name of the failure.

    Values:
      FAILURE_TYPE_UNSPECIFIED: Type unspecified
      PUSH_FAILED: Unable to push the image to the repository.
      PUSH_IMAGE_NOT_FOUND: Final image not found.
      PUSH_NOT_AUTHORIZED: Unauthorized push of the final image.
      LOGGING_FAILURE: Backend logging failures. Should retry.
      USER_BUILD_STEP: A build step has failed.
      FETCH_SOURCE_FAILED: The source fetching has failed.
    """
    # Numeric values are part of the enum wire contract; do not renumber.
    FAILURE_TYPE_UNSPECIFIED = 0
    PUSH_FAILED = 1
    PUSH_IMAGE_NOT_FOUND = 2
    PUSH_NOT_AUTHORIZED = 3
    LOGGING_FAILURE = 4
    USER_BUILD_STEP = 5
    FETCH_SOURCE_FAILED = 6

  detail = _messages.StringField(1)
  type = _messages.EnumField('TypeValueValuesEnum', 2)
class FileHashes(_messages.Message):
  r"""Container message for hashes of byte content of files, used in
  SourceProvenance messages to verify integrity of source input to the build.

  Fields:
    fileHash: Collection of file hashes.
  """

  fileHash = _messages.MessageField('Hash', 1, repeated=True)  # repeated Hash.
class GCSLocation(_messages.Message):
  r"""Represents a storage location in Cloud Storage

  Fields:
    bucket: Google Cloud Storage bucket. See
      https://cloud.google.com/storage/docs/naming#requirements
    generation: Google Cloud Storage generation for the object. If the
      generation is omitted, the latest generation will be used.
    object: Google Cloud Storage object. See
      https://cloud.google.com/storage/docs/naming#objectnames
  """

  bucket = _messages.StringField(1)
  generation = _messages.IntegerField(2)
  # NOTE(review): `object` shadows the builtin, but the name is fixed by the
  # API's JSON field name and cannot be changed without breaking serialization.
  object = _messages.StringField(3)
class GitFileSource(_messages.Message):
  r"""GitFileSource describes a file within a (possibly remote) code
  repository.

  Enums:
    RepoTypeValueValuesEnum: See RepoType above.

  Fields:
    path: The path of the file, with the repo root as the root of the path.
    repoType: See RepoType above.
    revision: The branch, tag, arbitrary ref, or SHA version of the repo to
      use when resolving the filename (optional). This field respects the same
      syntax/resolution as described here: https://git-
      scm.com/docs/gitrevisions If unspecified, the revision from which the
      trigger invocation originated is assumed to be the revision from which
      to read the specified path.
    uri: The URI of the repo (optional). If unspecified, the repo from which
      the trigger invocation originated is assumed to be the repo from which
      to read the specified path.
  """

  class RepoTypeValueValuesEnum(_messages.Enum):
    r"""See RepoType above.

    Values:
      UNKNOWN: The default, unknown repo type.
      CLOUD_SOURCE_REPOSITORIES: A Google Cloud Source Repositories-hosted
        repo.
      GITHUB: A GitHub-hosted repo not necessarily on "github.com" (i.e.
        GitHub Enterprise).
    """
    # Numeric values are part of the enum wire contract; do not renumber.
    UNKNOWN = 0
    CLOUD_SOURCE_REPOSITORIES = 1
    GITHUB = 2

  path = _messages.StringField(1)
  repoType = _messages.EnumField('RepoTypeValueValuesEnum', 2)
  revision = _messages.StringField(3)  # Optional; any gitrevisions-style ref.
  uri = _messages.StringField(4)  # Optional; defaults to originating repo per docstring.
class GitHubEnterpriseApp(_messages.Message):
  r"""RPC response object returned by the GetGitHubEnterpriseApp RPC method.

  Fields:
    name: Name of the GitHub App
    slug: Slug (URL friendly name) of the GitHub App. This can be found on the
      settings page for the GitHub App (e.g.
      https://github.com/settings/apps/:app_slug) GitHub docs:
      https://docs.github.com/en/free-pro-team@latest/rest/reference/apps#get-
      an-app
  """

  name = _messages.StringField(1)
  slug = _messages.StringField(2)  # URL-friendly app name.
class GitHubEnterpriseConfig(_messages.Message):
  r"""GitHubEnterpriseConfig represents a configuration for a GitHub
  Enterprise server.

  Fields:
    appConfigJson: Cloud Storage location of the encrypted GitHub App config
      information.
    appId: Required. The GitHub app id of the Cloud Build app on the GitHub
      Enterprise server.
    createTime: Output only. Time when the installation was associated with
      the project.
    displayName: Name to display for this config.
    hostUrl: The URL of the github enterprise host the configuration is for.
    name: Optional. The full resource name for the GitHubEnterpriseConfig For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    peeredNetwork: Optional. The network to be used when reaching out to the
      GitHub Enterprise server. The VPC network must be enabled for private
      service connection. This should be set if the GitHub Enterprise server
      is hosted on-premises and not reachable by public internet. If this
      field is left empty, no network peering will occur and calls to the
      GitHub Enterprise server will be made over the public internet. Must be
      in the format `projects/{project}/global/networks/{network}`, where
      {project} is a project number or id and {network} is the name of a VPC
      network in the project.
    secrets: Names of secrets in Secret Manager.
    sslCa: Optional. SSL certificate to use for requests to GitHub Enterprise.
    webhookKey: The key that should be attached to webhook calls to the
      ReceiveWebhook endpoint.
  """

  appConfigJson = _messages.MessageField('GCSLocation', 1)
  appId = _messages.IntegerField(2)  # Required per docstring.
  createTime = _messages.StringField(3)  # Output only per docstring.
  displayName = _messages.StringField(4)
  hostUrl = _messages.StringField(5)
  name = _messages.StringField(6)
  peeredNetwork = _messages.StringField(7)  # Optional VPC network for private service connection.
  secrets = _messages.MessageField('GitHubEnterpriseSecrets', 8)
  sslCa = _messages.StringField(9)
  webhookKey = _messages.StringField(10)
class GitHubEnterpriseSecrets(_messages.Message):
  r"""GitHubEnterpriseSecrets represents the names of all necessary secrets in
  Secret Manager for a GitHub Enterprise server. Format is:
  projects//secrets/.

  Fields:
    oauthClientIdName: The resource name for the OAuth client ID secret in
      Secret Manager.
    oauthClientIdVersionName: The resource name for the OAuth client ID secret
      version in Secret Manager.
    oauthSecretName: The resource name for the OAuth secret in Secret Manager.
    oauthSecretVersionName: The resource name for the OAuth secret secret
      version in Secret Manager.
    privateKeyName: The resource name for the private key secret.
    privateKeyVersionName: The resource name for the private key secret
      version.
    webhookSecretName: The resource name for the webhook secret in Secret
      Manager.
    webhookSecretVersionName: The resource name for the webhook secret secret
      version in Secret Manager.
  """

  # Each field is a Secret Manager resource name (secret or secret version).
  oauthClientIdName = _messages.StringField(1)
  oauthClientIdVersionName = _messages.StringField(2)
  oauthSecretName = _messages.StringField(3)
  oauthSecretVersionName = _messages.StringField(4)
  privateKeyName = _messages.StringField(5)
  privateKeyVersionName = _messages.StringField(6)
  webhookSecretName = _messages.StringField(7)
  webhookSecretVersionName = _messages.StringField(8)
class GitHubEventsConfig(_messages.Message):
  r"""GitHubEventsConfig describes the configuration of a trigger that creates
  a build whenever a GitHub event is received.

  Fields:
    enterpriseConfig: Output only. The GitHubEnterpriseConfig enterprise
      config specified in the enterprise_config_resource_name field.
    enterpriseConfigResourceName: Optional. The resource name of the github
      enterprise config that should be applied to this installation. For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    installationId: The installationID that emits the GitHub event.
    name: Name of the repository. For example: The name for
      https://github.com/googlecloudplatform/cloud-builders is "cloud-
      builders".
    owner: Owner of the repository. For example: The owner for
      https://github.com/googlecloudplatform/cloud-builders is
      "googlecloudplatform".
    pullRequest: filter to match changes in pull requests.
    push: filter to match changes in refs like branches, tags.
  """

  enterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 1)  # Output only per docstring.
  enterpriseConfigResourceName = _messages.StringField(2)
  installationId = _messages.IntegerField(3)
  name = _messages.StringField(4)
  owner = _messages.StringField(5)
  pullRequest = _messages.MessageField('PullRequestFilter', 6)
  push = _messages.MessageField('PushFilter', 7)
class GitHubRepositorySetting(_messages.Message):
  r"""Represents a GitHub repository setting.

  Fields:
    name: Name of the repository.
    owner: GitHub user or organization name.
  """

  name = _messages.StringField(1)
  owner = _messages.StringField(2)
class GitHubRepositorySettingList(_messages.Message):
  r"""A wrapper message for a list of GitHubRepositorySettings.

  Fields:
    repositorySettings: A list of GitHubRepositorySettings.
  """

  repositorySettings = _messages.MessageField('GitHubRepositorySetting', 1, repeated=True)  # repeated.
class GitRepoSource(_messages.Message):
  r"""GitRepoSource describes a repo and ref of a code repository.

  Enums:
    RepoTypeValueValuesEnum: See RepoType below.

  Fields:
    ref: The branch or tag to use. Must start with "refs/" (required).
    repoType: See RepoType below.
    uri: The URI of the repo (required).
  """

  class RepoTypeValueValuesEnum(_messages.Enum):
    r"""See RepoType below.

    Values:
      UNKNOWN: The default, unknown repo type.
      CLOUD_SOURCE_REPOSITORIES: A Google Cloud Source Repositories-hosted
        repo.
      GITHUB: A GitHub-hosted repo not necessarily on "github.com" (i.e.
        GitHub Enterprise).
    """
    # Numeric values are part of the enum wire contract; do not renumber.
    UNKNOWN = 0
    CLOUD_SOURCE_REPOSITORIES = 1
    GITHUB = 2

  ref = _messages.StringField(1)  # Must start with "refs/" per docstring.
  repoType = _messages.EnumField('RepoTypeValueValuesEnum', 2)
  uri = _messages.StringField(3)
class GitSource(_messages.Message):
  r"""Location of the source in any accessible Git repository.

  Fields:
    dir: Directory, relative to the source root, in which to run the build.
      This must be a relative path. If a step's `dir` is specified and is an
      absolute path, this value is ignored for that step's execution.
    revision: The revision to fetch from the Git repository such as a branch,
      a tag, a commit SHA, or any Git ref. Cloud Build uses `git fetch` to
      fetch the revision from the Git repository; therefore make sure that the
      string you provide for `revision` is parsable by the command. For
      information on string values accepted by `git fetch`, see https://git-
      scm.com/docs/gitrevisions#_specifying_revisions. For information on `git
      fetch`, see https://git-scm.com/docs/git-fetch.
    url: Location of the Git repo to build.
  """

  # NOTE: `dir` shadows the builtin, but the name is fixed by the API's JSON
  # field name and cannot be changed without breaking serialization.
  dir = _messages.StringField(1)
  revision = _messages.StringField(2)  # Any ref parsable by `git fetch`.
  url = _messages.StringField(3)
class GoogleDevtoolsCloudbuildV1BuildOptionsPoolOptionWorkerConfig(_messages.Message):
  r"""Configuration per workload for both Private Pools and Hybrid Pools.

  Fields:
    diskSizeGb: The disk size (in GB) which is requested for the build
      container. If unset, a value of 10 GB will be used.
    memoryGb: The memory (in GB) which is requested for the build container.
      If unset, a value of 4 GB will be used.
    vcpuCount: The number of vCPUs which are requested for the build
      container. If unset, a value of 1 will be used.
  """

  diskSizeGb = _messages.IntegerField(1)  # Server default 10 GB per docstring.
  memoryGb = _messages.FloatField(2, variant=_messages.Variant.FLOAT)  # Server default 4 GB per docstring.
  vcpuCount = _messages.FloatField(3, variant=_messages.Variant.FLOAT)  # Server default 1 per docstring.
class GoogleDevtoolsCloudbuildV2OperationMetadata(_messages.Message):
  r"""Represents the metadata of the long-running operation.

  Fields:
    apiVersion: Output only. API version used to start the operation.
    createTime: Output only. The time the operation was created.
    endTime: Output only. The time the operation finished running.
    requestedCancellation: Output only. Identifies whether the user has
      requested cancellation of the operation. Operations that have
      successfully been cancelled have Operation.error value with a
      google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
    statusMessage: Output only. Human-readable status of the operation, if
      any.
    target: Output only. Server-defined resource path for the target of the
      operation.
    verb: Output only. Name of the verb executed by the operation.
  """

  # All fields are output only per docstring.
  apiVersion = _messages.StringField(1)
  createTime = _messages.StringField(2)
  endTime = _messages.StringField(3)
  requestedCancellation = _messages.BooleanField(4)
  statusMessage = _messages.StringField(5)
  target = _messages.StringField(6)
  verb = _messages.StringField(7)
class HTTPDelivery(_messages.Message):
  r"""HTTPDelivery is the delivery configuration for an HTTP notification.

  Fields:
    uri: The URI to which JSON-containing HTTP POST requests should be sent.
  """

  uri = _messages.StringField(1)
class Hash(_messages.Message):
  r"""Container message for hash values.

  Enums:
    TypeValueValuesEnum: The type of hash that was performed.

  Fields:
    type: The type of hash that was performed.
    value: The hash value.
  """

  class TypeValueValuesEnum(_messages.Enum):
    r"""The type of hash that was performed.

    Values:
      NONE: No hash requested.
      SHA256: Use a sha256 hash.
      MD5: Use a md5 hash.
    """
    NONE = 0
    SHA256 = 1
    MD5 = 2

  # Enum fields reference their enum class by name; field tags are 1 and 2.
  type = _messages.EnumField('TypeValueValuesEnum', 1)
  # Raw digest bytes (base64-encoded in the JSON representation).
  value = _messages.BytesField(2)
class HttpBody(_messages.Message):
  r"""Message that represents an arbitrary HTTP body. It should only be used
  for payload formats that can't be represented as JSON, such as raw binary or
  an HTML page. This message can be used both in streaming and non-streaming
  API methods in the request as well as the response. It can be used as a top-
  level request field, which is convenient if one wants to extract parameters
  from either the URL or HTTP template into the request fields and also want
  access to the raw HTTP body. Example: message GetResourceRequest { // A
  unique request id. string request_id = 1; // The raw HTTP body is bound to
  this field. google.api.HttpBody http_body = 2; } service ResourceService {
  rpc GetResource(GetResourceRequest) returns (google.api.HttpBody); rpc
  UpdateResource(google.api.HttpBody) returns (google.protobuf.Empty); }
  Example with streaming methods: service CaldavService { rpc
  GetCalendar(stream google.api.HttpBody) returns (stream
  google.api.HttpBody); rpc UpdateCalendar(stream google.api.HttpBody) returns
  (stream google.api.HttpBody); } Use of this type only changes how the
  request and response bodies are handled, all other features will continue to
  work unchanged.

  Messages:
    ExtensionsValueListEntry: A ExtensionsValueListEntry object.

  Fields:
    contentType: The HTTP Content-Type header value specifying the content
      type of the body.
    data: The HTTP request/response body as raw binary.
    extensions: Application specific response metadata. Must be set in the
      first response for streaming APIs.
  """

  # Decorator routes unknown JSON keys into additionalProperties, giving this
  # nested class proto3 map-like semantics.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class ExtensionsValueListEntry(_messages.Message):
    r"""A ExtensionsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for a
        ExtensionsValueListEntry object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ExtensionsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  contentType = _messages.StringField(1)
  data = _messages.BytesField(2)
  extensions = _messages.MessageField('ExtensionsValueListEntry', 3, repeated=True)
class HybridPoolConfig(_messages.Message):
  r"""Configuration for a Hybrid Worker Pool Next ID: 6

  Enums:
    BuilderImageCachingValueValuesEnum: Immutable. Controls how the worker
      pool caches images. If unspecified during worker pool creation, this
      field is defaulted to CACHING_DISABLED.

  Fields:
    builderImageCaching: Immutable. Controls how the worker pool caches
      images. If unspecified during worker pool creation, this field is
      defaulted to CACHING_DISABLED.
    defaultWorkerConfig: Default settings which will be applied to builds on
      this worker pool if they are not specified in the build request.
    membership: Required. Immutable. The Anthos/GKE Hub membership of the
      cluster which will run the actual build operations. Example:
      projects/{project}/locations/{location}/memberships/{cluster_name}
  """

  class BuilderImageCachingValueValuesEnum(_messages.Enum):
    r"""Immutable. Controls how the worker pool caches images. If unspecified
    during worker pool creation, this field is defaulted to CACHING_DISABLED.

    Values:
      BUILDER_IMAGE_CACHING_UNSPECIFIED: Default enum type. This should not be
        used.
      CACHING_DISABLED: DinD caching is disabled and no caching resources are
        provisioned.
      VOLUME_CACHING: A PersistentVolumeClaim is provisioned for caching.
    """
    BUILDER_IMAGE_CACHING_UNSPECIFIED = 0
    CACHING_DISABLED = 1
    VOLUME_CACHING = 2

  builderImageCaching = _messages.EnumField('BuilderImageCachingValueValuesEnum', 1)
  # References the HybridWorkerConfig message defined later in this module.
  defaultWorkerConfig = _messages.MessageField('HybridWorkerConfig', 2)
  membership = _messages.StringField(3)
class HybridWorkerConfig(_messages.Message):
  r"""These settings can be applied to a user's build operations. Next ID: 4

  Fields:
    diskSizeGb: The disk size (in GB) which is requested for the build
      container. Defaults to 10 GB.
    memoryGb: The memory (in GB) which is requested for the build container.
      Defaults to 4 GB.
    vcpuCount: The number of vCPUs which are requested for the build
      container. Defaults to 1.
  """

  diskSizeGb = _messages.IntegerField(1)
  # Variant.FLOAT = 32-bit proto `float` rather than the default `double`.
  memoryGb = _messages.FloatField(2, variant=_messages.Variant.FLOAT)
  vcpuCount = _messages.FloatField(3, variant=_messages.Variant.FLOAT)
class InlineSecret(_messages.Message):
  r"""Pairs a set of secret environment variables mapped to encrypted values
  with the Cloud KMS key to use to decrypt the value.

  Messages:
    EnvMapValue: Map of environment variable name to its encrypted value.
      Secret environment variables must be unique across all of a build's
      secrets, and must be used by at least one build step. Values can be at
      most 64 KB in size. There can be at most 100 secret values across all of
      a build's secrets.

  Fields:
    envMap: Map of environment variable name to its encrypted value. Secret
      environment variables must be unique across all of a build's secrets,
      and must be used by at least one build step. Values can be at most 64 KB
      in size. There can be at most 100 secret values across all of a build's
      secrets.
    kmsKeyName: Resource name of Cloud KMS crypto key to decrypt the encrypted
      value. In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
  """

  # Map-style container: unknown JSON keys become AdditionalProperty entries.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class EnvMapValue(_messages.Message):
    r"""Map of environment variable name to its encrypted value. Secret
    environment variables must be unique across all of a build's secrets, and
    must be used by at least one build step. Values can be at most 64 KB in
    size. There can be at most 100 secret values across all of a build's
    secrets.

    Messages:
      AdditionalProperty: An additional property for a EnvMapValue object.

    Fields:
      additionalProperties: Additional properties of type EnvMapValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a EnvMapValue object.

      Fields:
        key: Name of the additional property.
        value: A byte attribute.
      """

      key = _messages.StringField(1)
      # Encrypted secret value; bytes are base64-encoded in JSON.
      value = _messages.BytesField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  envMap = _messages.MessageField('EnvMapValue', 1)
  kmsKeyName = _messages.StringField(2)
class Installation(_messages.Message):
  r"""A GitHub-app installation.

  Fields:
    createTime: Time when the installation was associated with the project.
      This field is immutable and cannot be updated.
    enterpriseConfig: Output only. The GitHubEnterpriseConfig enterprise
      config specified in the enterprise_config_resource_name field.
    enterpriseConfigResourceName: Optional: The resource name of the github
      enterprise config that should be applied to this installation. For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    id: GitHub installation ID, created by GitHub.
    name: The `Installation` name with format:
      `projects/{project}/locations/{location}/installations/{installation}`,
      where {installation} is GitHub installation ID created by GitHub.
    projectId: The project ID of the GCP project the installation is
      associated with.
    projectNum: Numerical ID of the project.
    repositorySettingList: The GitHub repositories that we should respond to
      for this installation. If this is not set, we will respect the
      default_check_suite_events boolean for any repository visible for that
      installation.
  """

  createTime = _messages.StringField(1)
  # GitHubEnterpriseConfig / GitHubRepositorySettingList are defined elsewhere
  # in this generated module; referenced by name.
  enterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 2)
  enterpriseConfigResourceName = _messages.StringField(3)
  id = _messages.IntegerField(4)
  name = _messages.StringField(5)
  projectId = _messages.StringField(6)
  projectNum = _messages.IntegerField(7)
  repositorySettingList = _messages.MessageField('GitHubRepositorySettingList', 8)
class ListBitbucketServerConfigsResponse(_messages.Message):
  r"""RPC response object returned by ListBitbucketServerConfigs RPC method.

  Fields:
    bitbucketServerConfigs: A list of BitbucketServerConfigs
    nextPageToken: A token that can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
  """

  # Standard paginated-list response shape: repeated items + page token.
  bitbucketServerConfigs = _messages.MessageField('BitbucketServerConfig', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListBitbucketServerRepositoriesResponse(_messages.Message):
  r"""RPC response object returned by the ListBitbucketServerRepositories RPC
  method.

  Fields:
    bitbucketServerRepositories: List of Bitbucket Server repositories.
    nextPageToken: A token that can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
  """

  # Standard paginated-list response shape: repeated items + page token.
  bitbucketServerRepositories = _messages.MessageField('BitbucketServerRepository', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListBuildTriggersResponse(_messages.Message):
  r"""Response containing existing `BuildTriggers`.

  Fields:
    nextPageToken: Token to receive the next page of results.
    triggers: `BuildTriggers` for the project, sorted by `create_time`
      descending.
  """

  # Note: here the page token is tag 1 and the items are tag 2 (order varies
  # between the List*Response messages in this module).
  nextPageToken = _messages.StringField(1)
  triggers = _messages.MessageField('BuildTrigger', 2, repeated=True)
class ListBuildsResponse(_messages.Message):
  r"""Response including listed builds.

  Fields:
    builds: Builds will be sorted by `create_time`, descending.
    nextPageToken: Token to receive the next page of results. This will be
      absent if the end of the response list has been reached.
  """

  builds = _messages.MessageField('Build', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListGitHubInstallationsForProjectResponse(_messages.Message):
  r"""RPC response object returned by the ListGitHubInstallations RPC method.

  Fields:
    installations: Installations belonging to the specified project_id.
  """

  # Unpaginated list: a single repeated field, no page token.
  installations = _messages.MessageField('Installation', 1, repeated=True)
class ListGitHubInstallationsResponse(_messages.Message):
  r"""RPC response object accepted by the ListGitHubInstallations RPC method.

  Fields:
    installations: Installations matching the requested installation ID.
  """

  # Unpaginated list: a single repeated field, no page token.
  installations = _messages.MessageField('Installation', 1, repeated=True)
class ListGithubEnterpriseConfigsResponse(_messages.Message):
  r"""RPC response object returned by ListGithubEnterpriseConfigs RPC method.

  Fields:
    configs: A list of GitHubEnterpriseConfigs
  """

  # Unpaginated list: a single repeated field, no page token.
  configs = _messages.MessageField('GitHubEnterpriseConfig', 1, repeated=True)
class ListWorkerPoolsResponse(_messages.Message):
  r"""Response containing existing `WorkerPools`.

  Fields:
    nextPageToken: Continuation token used to page through large result sets.
      Provide this value in a subsequent ListWorkerPoolsRequest to return the
      next page of results.
    workerPools: `WorkerPools` for the specified project.
  """

  nextPageToken = _messages.StringField(1)
  workerPools = _messages.MessageField('WorkerPool', 2, repeated=True)
class NetworkConfig(_messages.Message):
  r"""Defines the network configuration for the pool.

  Enums:
    EgressOptionValueValuesEnum: Option to configure network egress for the
      workers.

  Fields:
    egressOption: Option to configure network egress for the workers.
    peeredNetwork: Required. Immutable. The network definition that the
      workers are peered to. If this section is left empty, the workers will
      be peered to `WorkerPool.project_id` on the service producer network.
      Must be in the format `projects/{project}/global/networks/{network}`,
      where `{project}` is a project number, such as `12345`, and `{network}`
      is the name of a VPC network in the project. See [Understanding network
      configuration options](https://cloud.google.com/build/docs/private-
      pools/set-up-private-pool-environment)
  """

  class EgressOptionValueValuesEnum(_messages.Enum):
    r"""Option to configure network egress for the workers.

    Values:
      EGRESS_OPTION_UNSPECIFIED: If set, defaults to PUBLIC_EGRESS.
      NO_PUBLIC_EGRESS: If set, workers are created without any public
        address, which prevents network egress to public IPs unless a network
        proxy is configured.
      PUBLIC_EGRESS: If set, workers are created with a public address which
        allows for public internet egress.
    """
    EGRESS_OPTION_UNSPECIFIED = 0
    NO_PUBLIC_EGRESS = 1
    PUBLIC_EGRESS = 2

  egressOption = _messages.EnumField('EgressOptionValueValuesEnum', 1)
  peeredNetwork = _messages.StringField(2)
class Notification(_messages.Message):
  r"""Notification is the container which holds the data that is relevant to
  this particular notification.

  Messages:
    StructDeliveryValue: Escape hatch for users to supply custom delivery
      configs.

  Fields:
    filter: The filter string to use for notification filtering. Currently,
      this is assumed to be a CEL program. See
      https://opensource.google/projects/cel for more.
    httpDelivery: Configuration for HTTP delivery.
    slackDelivery: Configuration for Slack delivery.
    smtpDelivery: Configuration for SMTP (email) delivery.
    structDelivery: Escape hatch for users to supply custom delivery configs.
  """

  # Map-style container: unknown JSON keys become AdditionalProperty entries.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class StructDeliveryValue(_messages.Message):
    r"""Escape hatch for users to supply custom delivery configs.

    Messages:
      AdditionalProperty: An additional property for a StructDeliveryValue
        object.

    Fields:
      additionalProperties: Properties of the object.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a StructDeliveryValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # `filter` shadows the builtin inside the class namespace only; attribute
  # access (msg.filter) is unaffected.
  filter = _messages.StringField(1)
  httpDelivery = _messages.MessageField('HTTPDelivery', 2)
  slackDelivery = _messages.MessageField('SlackDelivery', 3)
  smtpDelivery = _messages.MessageField('SMTPDelivery', 4)
  structDelivery = _messages.MessageField('StructDeliveryValue', 5)
class NotifierConfig(_messages.Message):
  r"""NotifierConfig is the top-level configuration message.

  Fields:
    apiVersion: The API version of this configuration format.
    kind: The type of notifier to use (e.g. SMTPNotifier).
    metadata: Metadata for referring to/handling/deploying this notifier.
    spec: The actual configuration for this notifier.
  """

  # Kubernetes-style config envelope: apiVersion/kind/metadata/spec.
  apiVersion = _messages.StringField(1)
  kind = _messages.StringField(2)
  metadata = _messages.MessageField('NotifierMetadata', 3)
  spec = _messages.MessageField('NotifierSpec', 4)
class NotifierMetadata(_messages.Message):
  r"""NotifierMetadata contains the data which can be used to reference or
  describe this notifier.

  Fields:
    name: The human-readable and user-given name for the notifier. For
      example: "repo-merge-email-notifier".
    notifier: The string representing the name and version of notifier to
      deploy. Expected to be of the form of "/:". For example: "gcr.io/my-
      project/notifiers/smtp:1.2.34".
  """

  name = _messages.StringField(1)
  notifier = _messages.StringField(2)
class NotifierSecret(_messages.Message):
  r"""NotifierSecret is the container that maps a secret name (reference) to
  its Google Cloud Secret Manager resource path.

  Fields:
    name: Name is the local name of the secret, such as the verbatim string
      "my-smtp-password".
    value: Value is interpreted to be a resource path for fetching the actual
      (versioned) secret data for this secret. For example, this would be a
      Google Cloud Secret Manager secret version resource path like:
      "projects/my-project/secrets/my-secret/versions/latest".
  """

  name = _messages.StringField(1)
  value = _messages.StringField(2)
class NotifierSecretRef(_messages.Message):
  r"""NotifierSecretRef contains the reference to a secret stored in the
  corresponding NotifierSpec.

  Fields:
    secretRef: The value of `secret_ref` should be a `name` that is registered
      in a `Secret` in the `secrets` list of the `Spec`.
  """

  # Indirection: holds the `name` of a NotifierSecret, not the secret itself.
  secretRef = _messages.StringField(1)
class NotifierSpec(_messages.Message):
  r"""NotifierSpec is the configuration container for notifications.

  Fields:
    notification: The configuration of this particular notifier.
    secrets: Configurations for secret resources used by this particular
      notifier.
  """

  notification = _messages.MessageField('Notification', 1)
  secrets = _messages.MessageField('NotifierSecret', 2, repeated=True)
class OAuthRegistrationURI(_messages.Message):
  r"""RPC Response object returned by GetOAuthRegistrationURL

  Fields:
    registrationUri: The URL that the user should be redirected to in order to
      start the OAuth flow. When the user is redirected to this URL, they will
      be sent to the source provider specified in the request to authorize
      CloudBuild to access their oauth credentials. After the authorization is
      completed, the user will be redirected to the Cloud Build console.
  """

  registrationUri = _messages.StringField(1)
class Operation(_messages.Message):
  r"""This resource represents a long-running operation that is the result of
  a network API call.

  Messages:
    MetadataValue: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    ResponseValue: The normal response of the operation in case of success. If
      the original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.

  Fields:
    done: If the value is `false`, it means the operation is still in
      progress. If `true`, the operation is completed, and either `error` or
      `response` is available.
    error: The error result of the operation in case of failure or
      cancellation.
    metadata: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    name: The server-assigned name, which is only unique within the same
      service that originally returns it. If you use the default HTTP mapping,
      the `name` should be a resource name ending with
      `operations/{unique_id}`.
    response: The normal response of the operation in case of success. If the
      original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
  """

  # Open-ended `google.protobuf.Any`-style payload: unrecognized JSON keys are
  # captured as AdditionalProperty entries.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Service-specific metadata associated with the operation. It typically
    contains progress information and common metadata such as create time.
    Some services might not provide such metadata. Any method that returns a
    long-running operation should document the metadata type, if any.

    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  @encoding.MapUnrecognizedFields('additionalProperties')
  class ResponseValue(_messages.Message):
    r"""The normal response of the operation in case of success. If the
    original method returns no data on success, such as `Delete`, the response
    is `google.protobuf.Empty`. If the original method is standard
    `Get`/`Create`/`Update`, the response should be the resource. For other
    methods, the response should have the type `XxxResponse`, where `Xxx` is
    the original method name. For example, if the original method name is
    `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.

    Messages:
      AdditionalProperty: An additional property for a ResponseValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ResponseValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  done = _messages.BooleanField(1)
  # google.rpc.Status payload; only set on failure/cancellation.
  error = _messages.MessageField('Status', 2)
  metadata = _messages.MessageField('MetadataValue', 3)
  name = _messages.StringField(4)
  response = _messages.MessageField('ResponseValue', 5)
class OperationMetadata(_messages.Message):
  r"""Represents the metadata of the long-running operation.

  Fields:
    apiVersion: Output only. API version used to start the operation.
    cancelRequested: Output only. Identifies whether the user has requested
      cancellation of the operation. Operations that have been cancelled
      successfully have Operation.error value with a google.rpc.Status.code of
      1, corresponding to `Code.CANCELLED`.
    createTime: Output only. The time the operation was created.
    endTime: Output only. The time the operation finished running.
    statusDetail: Output only. Human-readable status of the operation, if any.
    target: Output only. Server-defined resource path for the target of the
      operation.
    verb: Output only. Name of the verb executed by the operation.
  """

  apiVersion = _messages.StringField(1)
  cancelRequested = _messages.BooleanField(2)
  createTime = _messages.StringField(3)
  endTime = _messages.StringField(4)
  statusDetail = _messages.StringField(5)
  target = _messages.StringField(6)
  verb = _messages.StringField(7)
class PoolOption(_messages.Message):
  r"""Details about how a build should be executed on a `WorkerPool`. See
  [running builds in a private
  pool](https://cloud.google.com/build/docs/private-pools/run-builds-in-
  private-pool) for more information.

  Fields:
    name: The `WorkerPool` resource to execute the build on. You must have
      `cloudbuild.workerpools.use` on the project hosting the WorkerPool.
      Format
      projects/{project}/locations/{location}/workerPools/{workerPoolId}
    workerConfig: Configuration per workload.
  """

  name = _messages.StringField(1)
  workerConfig = _messages.MessageField('GoogleDevtoolsCloudbuildV1BuildOptionsPoolOptionWorkerConfig', 2)
class PrivatePoolV1Config(_messages.Message):
  r"""Configuration for a V1 `PrivatePool`.

  Fields:
    networkConfig: Network configuration for the pool.
    workerConfig: Machine configuration for the workers in the pool.
  """

  networkConfig = _messages.MessageField('NetworkConfig', 1)
  workerConfig = _messages.MessageField('WorkerConfig', 2)
class ProcessAppManifestCallbackOperationMetadata(_messages.Message):
  r"""Metadata for `ProcessAppManifestCallback` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      created. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  githubEnterpriseConfig = _messages.StringField(3)
class PubsubConfig(_messages.Message):
  r"""PubsubConfig describes the configuration of a trigger that creates a
  build whenever a Pub/Sub message is published.

  Enums:
    StateValueValuesEnum: Potential issues with the underlying Pub/Sub
      subscription configuration. Only populated on get requests.

  Fields:
    serviceAccountEmail: Service account that will make the push request.
    state: Potential issues with the underlying Pub/Sub subscription
      configuration. Only populated on get requests.
    subscription: Output only. Name of the subscription. Format is
      `projects/{project}/subscriptions/{subscription}`.
    topic: The name of the topic from which this subscription is receiving
      messages. Format is `projects/{project}/topics/{topic}`.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Potential issues with the underlying Pub/Sub subscription
    configuration. Only populated on get requests.

    Values:
      STATE_UNSPECIFIED: The subscription configuration has not been checked.
      OK: The Pub/Sub subscription is properly configured.
      SUBSCRIPTION_DELETED: The subscription has been deleted.
      TOPIC_DELETED: The topic has been deleted.
      SUBSCRIPTION_MISCONFIGURED: Some of the subscription's field are
        misconfigured.
    """
    STATE_UNSPECIFIED = 0
    OK = 1
    SUBSCRIPTION_DELETED = 2
    TOPIC_DELETED = 3
    SUBSCRIPTION_MISCONFIGURED = 4

  serviceAccountEmail = _messages.StringField(1)
  state = _messages.EnumField('StateValueValuesEnum', 2)
  subscription = _messages.StringField(3)
  topic = _messages.StringField(4)
class PullRequestFilter(_messages.Message):
  r"""PullRequestFilter contains filter properties for matching GitHub Pull
  Requests.

  Enums:
    CommentControlValueValuesEnum: Configure builds to run whether a
      repository owner or collaborator need to comment `/gcbrun`.

  Fields:
    branch: Regex of branches to match. The syntax of the regular expressions
      accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
    commentControl: Configure builds to run whether a repository owner or
      collaborator need to comment `/gcbrun`.
    invertRegex: If true, branches that do NOT match the git_ref will trigger
      a build.
  """

  class CommentControlValueValuesEnum(_messages.Enum):
    r"""Configure builds to run whether a repository owner or collaborator
    need to comment `/gcbrun`.

    Values:
      COMMENTS_DISABLED: Do not require comments on Pull Requests before
        builds are triggered.
      COMMENTS_ENABLED: Enforce that repository owners or collaborators must
        comment on Pull Requests before builds are triggered.
      COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY: Enforce that repository
        owners or collaborators must comment on external contributors' Pull
        Requests before builds are triggered.
    """
    COMMENTS_DISABLED = 0
    COMMENTS_ENABLED = 1
    COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY = 2

  branch = _messages.StringField(1)
  commentControl = _messages.EnumField('CommentControlValueValuesEnum', 2)
  invertRegex = _messages.BooleanField(3)
class PushFilter(_messages.Message):
  r"""Push contains filter properties for matching GitHub git pushes.

  Fields:
    branch: Regexes matching branches to build. The syntax of the regular
      expressions accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
    invertRegex: When true, only trigger a build if the revision regex does
      NOT match the git_ref regex.
    tag: Regexes matching tags to build. The syntax of the regular expressions
      accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
  """

  branch = _messages.StringField(1)
  invertRegex = _messages.BooleanField(2)
  tag = _messages.StringField(3)
class ReceiveTriggerWebhookResponse(_messages.Message):
  r"""ReceiveTriggerWebhookResponse [Experimental] is the response object for
  the ReceiveTriggerWebhook method.
  """
  # Intentionally empty message (no response payload).
class RemoveBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""RPC request object accepted by RemoveBitbucketServerConnectedRepository
  RPC method.

  Fields:
    connectedRepository: The connected repository to remove.
  """

  connectedRepository = _messages.MessageField('BitbucketServerRepositoryId', 1)
class RepoSource(_messages.Message):
  r"""Location of the source in a Google Cloud Source Repository.

  Messages:
    SubstitutionsValue: Substitutions to use in a triggered build. Should only
      be used with RunBuildTrigger

  Fields:
    branchName: Regex matching branches to build. The syntax of the regular
      expressions accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
    commitSha: Explicit commit SHA to build.
    dir: Directory, relative to the source root, in which to run the build.
      This must be a relative path. If a step's `dir` is specified and is an
      absolute path, this value is ignored for that step's execution.
    invertRegex: Only trigger a build if the revision regex does NOT match the
      revision regex.
    projectId: ID of the project that owns the Cloud Source Repository. If
      omitted, the project ID requesting the build is assumed.
    repoName: Name of the Cloud Source Repository.
    substitutions: Substitutions to use in a triggered build. Should only be
      used with RunBuildTrigger
    tagName: Regex matching tags to build. The syntax of the regular
      expressions accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
  """

  # Map-style container: unknown JSON keys become AdditionalProperty entries.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class SubstitutionsValue(_messages.Message):
    r"""Substitutions to use in a triggered build. Should only be used with
    RunBuildTrigger

    Messages:
      AdditionalProperty: An additional property for a SubstitutionsValue
        object.

    Fields:
      additionalProperties: Additional properties of type SubstitutionsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SubstitutionsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # branchName / commitSha / tagName are alternative revision selectors.
  branchName = _messages.StringField(1)
  commitSha = _messages.StringField(2)
  dir = _messages.StringField(3)
  invertRegex = _messages.BooleanField(4)
  projectId = _messages.StringField(5)
  repoName = _messages.StringField(6)
  substitutions = _messages.MessageField('SubstitutionsValue', 7)
  tagName = _messages.StringField(8)
class Results(_messages.Message):
  r"""Artifacts created by the build pipeline.

  Fields:
    artifactManifest: Path to the artifact manifest. Only populated when
      artifacts are uploaded.
    artifactTiming: Time to push all non-container artifacts.
    buildStepImages: List of build step digests, in the order corresponding to
      build step indices.
    buildStepOutputs: List of build step outputs, produced by builder images,
      in the order corresponding to build step indices. [Cloud
      Builders](https://cloud.google.com/cloud-build/docs/cloud-builders) can
      produce this output by writing to `$BUILDER_OUTPUT/output`. Only the
      first 4KB of data is stored.
    images: Container images that were built as a part of the build.
    numArtifacts: Number of artifacts uploaded. Only populated when artifacts
      are uploaded.
  """

  artifactManifest = _messages.StringField(1)
  artifactTiming = _messages.MessageField('TimeSpan', 2)
  buildStepImages = _messages.StringField(3, repeated=True)
  # Repeated bytes: one raw output blob per build step.
  buildStepOutputs = _messages.BytesField(4, repeated=True)
  images = _messages.MessageField('BuiltImage', 5, repeated=True)
  numArtifacts = _messages.IntegerField(6)
class RetryBuildRequest(_messages.Message):
  r"""Specifies a build to retry.

  Fields:
    id: Required. Build ID of the original build.
    name: The name of the `Build` to retry. Format:
      `projects/{project}/locations/{location}/builds/{build}`
    projectId: Required. ID of the project.
  """

  id = _messages.StringField(1)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3)
class RunBuildTriggerRequest(_messages.Message):
  r"""Specifies a build trigger to run and the source to use.

  Fields:
    projectId: Required. ID of the project.
    source: Source to build against this trigger.
    triggerId: Required. ID of the trigger.
  """

  projectId = _messages.StringField(1)
  source = _messages.MessageField('RepoSource', 2)
  triggerId = _messages.StringField(3)
class SMTPDelivery(_messages.Message):
  r"""SMTPDelivery is the delivery configuration for an SMTP (email)
  notification.

  Fields:
    fromAddress: This is the SMTP account/email that appears in the `From:`
      of the email. If empty, it is assumed to be sender.
    password: The SMTP account password, held as a secret reference.
    port: The SMTP port of the server.
    recipientAddresses: This is the list of addresses to which we send the
      email (i.e. in the `To:` of the email).
    senderAddress: This is the SMTP account/email that is used to send the
      message.
    server: The address of the SMTP server.
  """

  fromAddress = _messages.StringField(1)
  # The message type name was mangled by a credential scrubber
  # ('Notifier<PASSWORD>Ref'); restored to 'NotifierSecretRef', the same
  # secret-reference type used by SlackDelivery.webhookUri.
  password = _messages.MessageField('NotifierSecretRef', 2)
  port = _messages.StringField(3)
  recipientAddresses = _messages.StringField(4, repeated=True)
  senderAddress = _messages.StringField(5)
  server = _messages.StringField(6)
class Secret(_messages.Message):
  r"""Pairs a set of secret environment variables containing encrypted
  values with the Cloud KMS key to use to decrypt the value. Note: Use
  `kmsKeyName` with `available_secrets` instead of using `kmsKeyName` with
  `secret`. For instructions see: https://cloud.google.com/cloud-
  build/docs/securing-builds/use-encrypted-credentials.

  Messages:
    SecretEnvValue: Map of environment variable name to its encrypted value.
      Secret environment variables must be unique across all of a build's
      secrets, and must be used by at least one build step. Values can be at
      most 64 KB in size. There can be at most 100 secret values across all
      of a build's secrets.

  Fields:
    kmsKeyName: Cloud KMS key name to use to decrypt these envs.
    secretEnv: Map of environment variable name to its encrypted value.
      Secret environment variables must be unique across all of a build's
      secrets, and must be used by at least one build step. Values can be at
      most 64 KB in size. There can be at most 100 secret values across all
      of a build's secrets.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class SecretEnvValue(_messages.Message):
    r"""Map of environment variable name to its encrypted value. Secret
    environment variables must be unique across all of a build's secrets,
    and must be used by at least one build step. Values can be at most 64 KB
    in size. There can be at most 100 secret values across all of a build's
    secrets.

    Messages:
      AdditionalProperty: An additional property for a SecretEnvValue object.

    Fields:
      additionalProperties: Additional properties of type SecretEnvValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SecretEnvValue object.

      Fields:
        key: Name of the additional property.
        value: A byte attribute.
      """

      key = _messages.StringField(1)
      value = _messages.BytesField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  kmsKeyName = _messages.StringField(1)
  secretEnv = _messages.MessageField('SecretEnvValue', 2)
class SecretManagerSecret(_messages.Message):
  r"""Pairs a secret environment variable with a SecretVersion in Secret
  Manager.

  Fields:
    env: Environment variable name to associate with the secret. Secret
      environment variables must be unique across all of a build's secrets,
      and must be used by at least one build step.
    versionName: Resource name of the SecretVersion. In format:
      projects/*/secrets/*/versions/*
  """

  env = _messages.StringField(1)
  versionName = _messages.StringField(2)
class Secrets(_messages.Message):
  r"""Secrets and secret environment variables.

  Fields:
    inline: Secrets encrypted with KMS key and the associated secret
      environment variable.
    secretManager: Secrets in Secret Manager and associated secret
      environment variable.
  """

  inline = _messages.MessageField('InlineSecret', 1, repeated=True)
  secretManager = _messages.MessageField('SecretManagerSecret', 2, repeated=True)
class SlackDelivery(_messages.Message):
  r"""SlackDelivery is the delivery configuration for delivering Slack
  messages via webhooks. See Slack webhook documentation at:
  https://api.slack.com/messaging/webhooks.

  Fields:
    webhookUri: The secret reference for the Slack webhook URI for sending
      messages to a channel.
  """

  webhookUri = _messages.MessageField('NotifierSecretRef', 1)
class Source(_messages.Message):
  r"""Location of the source in a supported storage service.

  Fields:
    gitSource: If provided, get the source from this Git repository.
    repoSource: If provided, get the source from this location in a Cloud
      Source Repository.
    storageSource: If provided, get the source from this location in Google
      Cloud Storage.
    storageSourceManifest: If provided, get the source from this manifest in
      Google Cloud Storage. This feature is in Preview; see description
      [here](https://github.com/GoogleCloudPlatform/cloud-
      builders/tree/master/gcs-fetcher).
  """

  gitSource = _messages.MessageField('GitSource', 1)
  repoSource = _messages.MessageField('RepoSource', 2)
  storageSource = _messages.MessageField('StorageSource', 3)
  storageSourceManifest = _messages.MessageField('StorageSourceManifest', 4)
class SourceProvenance(_messages.Message):
  r"""Provenance of the source. Ways to find the original source, or verify
  that some source was used for this build.

  Messages:
    FileHashesValue: Output only. Hash(es) of the build source, which can be
      used to verify that the original source integrity was maintained in
      the build. Note that `FileHashes` will only be populated if
      `BuildOptions` has requested a `SourceProvenanceHash`. The keys to
      this map are file paths used as build source and the values contain
      the hash values for those files. If the build source came in a single
      package such as a gzipped tarfile (`.tar.gz`), the `FileHash` will be
      for the single path to that file.

  Fields:
    fileHashes: Output only. Hash(es) of the build source, which can be used
      to verify that the original source integrity was maintained in the
      build. Note that `FileHashes` will only be populated if `BuildOptions`
      has requested a `SourceProvenanceHash`. The keys to this map are file
      paths used as build source and the values contain the hash values for
      those files. If the build source came in a single package such as a
      gzipped tarfile (`.tar.gz`), the `FileHash` will be for the single
      path to that file.
    resolvedRepoSource: A copy of the build's `source.repo_source`, if
      exists, with any revisions resolved.
    resolvedStorageSource: A copy of the build's `source.storage_source`, if
      exists, with any generations resolved.
    resolvedStorageSourceManifest: A copy of the build's
      `source.storage_source_manifest`, if exists, with any revisions
      resolved. This feature is in Preview.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class FileHashesValue(_messages.Message):
    r"""Output only. Hash(es) of the build source, which can be used to
    verify that the original source integrity was maintained in the build.
    Note that `FileHashes` will only be populated if `BuildOptions` has
    requested a `SourceProvenanceHash`. The keys to this map are file paths
    used as build source and the values contain the hash values for those
    files. If the build source came in a single package such as a gzipped
    tarfile (`.tar.gz`), the `FileHash` will be for the single path to that
    file.

    Messages:
      AdditionalProperty: An additional property for a FileHashesValue
        object.

    Fields:
      additionalProperties: Additional properties of type FileHashesValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a FileHashesValue object.

      Fields:
        key: Name of the additional property.
        value: A FileHashes attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('FileHashes', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  fileHashes = _messages.MessageField('FileHashesValue', 1)
  resolvedRepoSource = _messages.MessageField('RepoSource', 2)
  resolvedStorageSource = _messages.MessageField('StorageSource', 3)
  resolvedStorageSourceManifest = _messages.MessageField('StorageSourceManifest', 4)
class StandardQueryParameters(_messages.Message):
  r"""Query parameters accepted by all methods.

  Enums:
    FXgafvValueValuesEnum: V1 error format.
    AltValueValuesEnum: Data format for response.

  Fields:
    f__xgafv: V1 error format.
    access_token: OAuth access token.
    alt: Data format for response.
    callback: JSONP
    fields: Selector specifying which fields to include in a partial
      response.
    key: API key. Your API key identifies your project and provides you with
      API access, quota, and reports. Required unless you provide an OAuth
      2.0 token.
    oauth_token: OAuth 2.0 token for the current user.
    prettyPrint: Returns response with indentations and line breaks.
    quotaUser: Available to use for quota purposes for server-side
      applications. Can be any arbitrary string assigned to a user, but
      should not exceed 40 characters.
    trace: A tracing token of the form "token:<tokenid>" to include in api
      requests.
    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
  """

  class AltValueValuesEnum(_messages.Enum):
    r"""Data format for response.

    Values:
      json: Responses with Content-Type of application/json
      media: Media download with context-dependent Content-Type
      proto: Responses with Content-Type of application/x-protobuf
    """
    json = 0
    media = 1
    proto = 2

  class FXgafvValueValuesEnum(_messages.Enum):
    r"""V1 error format.

    Values:
      _1: v1 error format
      _2: v2 error format
    """
    _1 = 0
    _2 = 1

  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
  access_token = _messages.StringField(2)
  alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
  callback = _messages.StringField(4)
  fields = _messages.StringField(5)
  key = _messages.StringField(6)
  oauth_token = _messages.StringField(7)
  prettyPrint = _messages.BooleanField(8, default=True)
  quotaUser = _messages.StringField(9)
  trace = _messages.StringField(10)
  uploadType = _messages.StringField(11)
  upload_protocol = _messages.StringField(12)
class Status(_messages.Message):
  r"""The `Status` type defines a logical error model that is suitable for
  different programming environments, including REST APIs and RPC APIs. It
  is used by [gRPC](https://github.com/grpc). Each `Status` message contains
  three pieces of data: error code, error message, and error details. You
  can find out more about this error model and how to work with it in the
  [API Design Guide](https://cloud.google.com/apis/design/errors).

  Messages:
    DetailsValueListEntry: A DetailsValueListEntry object.

  Fields:
    code: The status code, which should be an enum value of google.rpc.Code.
    details: A list of messages that carry the error details. There is a
      common set of message types for APIs to use.
    message: A developer-facing error message, which should be in English.
      Any user-facing error message should be localized and sent in the
      google.rpc.Status.details field, or localized by the client.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValueListEntry(_messages.Message):
    r"""A DetailsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for a DetailsValueListEntry
        object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
  message = _messages.StringField(3)
class StorageSource(_messages.Message):
  r"""Location of the source in an archive file in Google Cloud Storage.

  Fields:
    bucket: Google Cloud Storage bucket containing the source (see [Bucket
      Name Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)).
    generation: Google Cloud Storage generation for the object. If the
      generation is omitted, the latest generation will be used.
    object: Google Cloud Storage object containing the source. This object
      must be a zipped (`.zip`) or gzipped archive file (`.tar.gz`)
      containing source to build.
  """

  bucket = _messages.StringField(1)
  generation = _messages.IntegerField(2)
  object = _messages.StringField(3)
class StorageSourceManifest(_messages.Message):
  r"""Location of the source manifest in Google Cloud Storage. This feature
  is in Preview; see description
  [here](https://github.com/GoogleCloudPlatform/cloud-
  builders/tree/master/gcs-fetcher).

  Fields:
    bucket: Google Cloud Storage bucket containing the source manifest (see
      [Bucket Name
      Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)).
    generation: Google Cloud Storage generation for the object. If the
      generation is omitted, the latest generation will be used.
    object: Google Cloud Storage object containing the source manifest. This
      object must be a JSON file.
  """

  bucket = _messages.StringField(1)
  generation = _messages.IntegerField(2)
  object = _messages.StringField(3)
class TimeSpan(_messages.Message):
  r"""Start and end times for a build execution phase.

  Fields:
    endTime: End of time span.
    startTime: Start of time span.
  """

  endTime = _messages.StringField(1)
  startTime = _messages.StringField(2)
class UpdateBitbucketServerConfigOperationMetadata(_messages.Message):
  r"""Metadata for `UpdateBitbucketServerConfig` operation.

  Fields:
    bitbucketServerConfig: The resource name of the BitbucketServerConfig to
      be updated. Format:
      `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
  """

  bitbucketServerConfig = _messages.StringField(1)
  completeTime = _messages.StringField(2)
  createTime = _messages.StringField(3)
class UpdateGitHubEnterpriseConfigOperationMetadata(_messages.Message):
  r"""Metadata for `UpdateGitHubEnterpriseConfig` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      updated. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  githubEnterpriseConfig = _messages.StringField(3)
class UpdateWorkerPoolOperationMetadata(_messages.Message):
  r"""Metadata for the `UpdateWorkerPool` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    workerPool: The resource name of the `WorkerPool` being updated. Format:
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
  """

  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  workerPool = _messages.StringField(3)
class Volume(_messages.Message):
  r"""Volume describes a Docker container volume which is mounted into build
  steps in order to persist files across build step execution.

  Fields:
    name: Name of the volume to mount. Volume names must be unique per build
      step and must be valid names for Docker volumes. Each named volume
      must be used by at least two build steps.
    path: Path at which to mount the volume. Paths must be absolute and
      cannot conflict with other volume paths on the same build step or with
      certain reserved volume paths.
  """

  name = _messages.StringField(1)
  path = _messages.StringField(2)
class Warning(_messages.Message):
  r"""A non-fatal problem encountered during the execution of the build.

  Enums:
    PriorityValueValuesEnum: The priority for this warning.

  Fields:
    priority: The priority for this warning.
    text: Explanation of the warning generated.
  """

  class PriorityValueValuesEnum(_messages.Enum):
    r"""The priority for this warning.

    Values:
      PRIORITY_UNSPECIFIED: Should not be used.
      INFO: e.g. deprecation warnings and alternative feature highlights.
      WARNING: e.g. automated detection of possible issues with the build.
      ALERT: e.g. alerts that a feature used in the build is pending removal
    """
    PRIORITY_UNSPECIFIED = 0
    INFO = 1
    WARNING = 2
    ALERT = 3

  priority = _messages.EnumField('PriorityValueValuesEnum', 1)
  text = _messages.StringField(2)
class WebhookConfig(_messages.Message):
  r"""WebhookConfig describes the configuration of a trigger that creates a
  build whenever a webhook is sent to a trigger's webhook URL.

  Enums:
    StateValueValuesEnum: Potential issues with the underlying Pub/Sub
      subscription configuration. Only populated on get requests.

  Fields:
    secret: Required. Resource name for the secret required as a URL
      parameter.
    state: Potential issues with the underlying Pub/Sub subscription
      configuration. Only populated on get requests.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Potential issues with the underlying Pub/Sub subscription
    configuration. Only populated on get requests.

    Values:
      STATE_UNSPECIFIED: The webhook auth configuration not been checked.
      OK: The auth configuration is properly setup.
      SECRET_DELETED: The secret provided in auth_method has been deleted.
    """
    STATE_UNSPECIFIED = 0
    OK = 1
    SECRET_DELETED = 2

  secret = _messages.StringField(1)
  state = _messages.EnumField('StateValueValuesEnum', 2)
class WorkerConfig(_messages.Message):
  r"""Defines the configuration to be used for creating workers in the pool.

  Fields:
    diskSizeGb: Size of the disk attached to the worker, in GB. See [Worker
      pool config file](https://cloud.google.com/build/docs/private-
      pools/worker-pool-config-file-schema). Specify a value of up to 1000.
      If `0` is specified, Cloud Build will use a standard disk size.
    machineType: Machine type of a worker, such as `e2-medium`. See [Worker
      pool config file](https://cloud.google.com/build/docs/private-
      pools/worker-pool-config-file-schema). If left blank, Cloud Build will
      use a sensible default.
  """

  diskSizeGb = _messages.IntegerField(1)
  machineType = _messages.StringField(2)
class WorkerPool(_messages.Message):
  r"""Configuration for a `WorkerPool`. Cloud Build owns and maintains a
  pool of workers for general use and have no access to a project's private
  network. By default, builds submitted to Cloud Build will use a worker
  from this pool. If your build needs access to resources on a private
  network, create and use a `WorkerPool` to run your builds. Private
  `WorkerPool`s give your builds access to any single VPC network that you
  administer, including any on-prem resources connected to that VPC network.
  For an overview of private pools, see [Private pools
  overview](https://cloud.google.com/build/docs/private-pools/private-pools-
  overview).

  Enums:
    StateValueValuesEnum: Output only. `WorkerPool` state.

  Messages:
    AnnotationsValue: User specified annotations. See
      https://google.aip.dev/128#annotations for more details such as format
      and size limitations.

  Fields:
    annotations: User specified annotations. See
      https://google.aip.dev/128#annotations for more details such as format
      and size limitations.
    createTime: Output only. Time at which the request to create the
      `WorkerPool` was received.
    deleteTime: Output only. Time at which the request to delete the
      `WorkerPool` was received.
    displayName: A user-specified, human-readable name for the `WorkerPool`.
      If provided, this value must be 1-63 characters.
    etag: Output only. Checksum computed by the server. May be sent on
      update and delete requests to ensure that the client has an up-to-date
      value before proceeding.
    hybridPoolConfig: Hybrid pool configuration
    name: Output only. The resource name of the `WorkerPool`, with format
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
      The value of `{worker_pool}` is provided by `worker_pool_id` in
      `CreateWorkerPool` request and the value of `{location}` is determined
      by the endpoint accessed.
    privatePoolV1Config: Legacy Private Pool configuration.
    state: Output only. `WorkerPool` state.
    uid: Output only. A unique identifier for the `WorkerPool`.
    updateTime: Output only. Time at which the request to update the
      `WorkerPool` was received.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. `WorkerPool` state.

    Values:
      STATE_UNSPECIFIED: State of the `WorkerPool` is unknown.
      CREATING: `WorkerPool` is being created.
      RUNNING: `WorkerPool` is running.
      DELETING: `WorkerPool` is being deleted: cancelling builds and
        draining workers.
      DELETED: `WorkerPool` is deleted.
      UPDATING: `WorkerPool` is being updated; new builds cannot be run.
    """
    STATE_UNSPECIFIED = 0
    CREATING = 1
    RUNNING = 2
    DELETING = 3
    DELETED = 4
    UPDATING = 5

  @encoding.MapUnrecognizedFields('additionalProperties')
  class AnnotationsValue(_messages.Message):
    r"""User specified annotations. See
    https://google.aip.dev/128#annotations for more details such as format
    and size limitations.

    Messages:
      AdditionalProperty: An additional property for a AnnotationsValue
        object.

    Fields:
      additionalProperties: Additional properties of type AnnotationsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a AnnotationsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  annotations = _messages.MessageField('AnnotationsValue', 1)
  createTime = _messages.StringField(2)
  deleteTime = _messages.StringField(3)
  displayName = _messages.StringField(4)
  etag = _messages.StringField(5)
  hybridPoolConfig = _messages.MessageField('HybridPoolConfig', 6)
  name = _messages.StringField(7)
  privatePoolV1Config = _messages.MessageField('PrivatePoolV1Config', 8)
  state = _messages.EnumField('StateValueValuesEnum', 9)
  uid = _messages.StringField(10)
  updateTime = _messages.StringField(11)
# Custom JSON wire mappings for the `$.xgafv` query parameter, whose JSON
# name and enum values ('1', '2') are not valid Python identifiers.
# (A dataset extraction artifact — a '| lib/googlecloudsdk/...' file-path
# fragment fused onto the last call — has been removed.)
encoding.AddCustomJsonFieldMapping(
    StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'cloudbuild'
class AddBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""RPC request object accepted by the
  AddBitbucketServerConnectedRepository RPC method.

  Fields:
    connectedRepository: The connected repository to add.
  """

  connectedRepository = _messages.MessageField('BitbucketServerRepositoryId', 1)
class AddBitbucketServerConnectedRepositoryResponse(_messages.Message):
  r"""RPC request object returned by the
  AddBitbucketServerConnectedRepository RPC method.

  Fields:
    config: The name of the `BitbucketServerConfig` that added connected
      repository. Format: `projects/{project}/locations/{location}/bitbucke
      tServerConfigs/{config}`
    connectedRepository: The connected repository.
  """

  config = _messages.StringField(1)
  connectedRepository = _messages.MessageField('BitbucketServerRepositoryId', 2)
class AnthosWorkerPool(_messages.Message):
  r"""Anthos CICD cluster option.

  Fields:
    membership: Membership of the GKE Hub registered cluster this build
      should execute on. Example:
      /projects/{project}/locations/{location}/memberships/{cluster_name}
      The cluster's project number must be the same project ID that is
      running the build.
  """

  membership = _messages.StringField(1)
class ApprovalConfig(_messages.Message):
  r"""ApprovalConfig describes configuration for manual approval of a build.

  Fields:
    approvalRequired: Whether or not approval is needed. If this is set on a
      build, it will become pending when created, and will need to be
      explicitly approved to start.
  """

  approvalRequired = _messages.BooleanField(1)
class ApprovalResult(_messages.Message):
  r"""ApprovalResult describes the decision and associated metadata of a
  manual approval of a build.

  Enums:
    DecisionValueValuesEnum: Required. The decision of this manual approval.

  Fields:
    approvalTime: Output only. The time when the approval decision was made.
    approverAccount: Output only. Email of the user that called the
      ApproveBuild API to approve or reject a build at the time that the API
      was called.
    comment: Optional. An optional comment for this manual approval result.
    decision: Required. The decision of this manual approval.
    url: Optional. An optional URL tied to this manual approval result. This
      field is essentially the same as comment, except that it will be
      rendered by the UI differently. An example use case is a link to an
      external job that approved this Build.
  """

  class DecisionValueValuesEnum(_messages.Enum):
    r"""Required. The decision of this manual approval.

    Values:
      DECISION_UNSPECIFIED: Default enum type. This should not be used.
      APPROVED: Build is approved.
      REJECTED: Build is rejected.
    """
    DECISION_UNSPECIFIED = 0
    APPROVED = 1
    REJECTED = 2

  approvalTime = _messages.StringField(1)
  approverAccount = _messages.StringField(2)
  comment = _messages.StringField(3)
  decision = _messages.EnumField('DecisionValueValuesEnum', 4)
  url = _messages.StringField(5)
class ApproveBuildRequest(_messages.Message):
  r"""Request to approve or reject a pending build.

  Fields:
    approvalResult: Approval decision and metadata.
  """

  approvalResult = _messages.MessageField('ApprovalResult', 1)
class ArtifactObjects(_messages.Message):
  r"""Files in the workspace to upload to Cloud Storage upon successful
  completion of all build steps.

  Fields:
    location: Cloud Storage bucket and optional object path, in the form
      "gs://bucket/path/to/somewhere/". (see [Bucket Name
      Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)). Files in the workspace matching any path
      pattern will be uploaded to Cloud Storage with this location as a
      prefix.
    paths: Path globs used to match files in the build's workspace.
    timing: Output only. Stores timing information for pushing all artifact
      objects.
  """

  location = _messages.StringField(1)
  paths = _messages.StringField(2, repeated=True)
  timing = _messages.MessageField('TimeSpan', 3)
class ArtifactResult(_messages.Message):
  r"""An artifact that was uploaded during a build. This is a single record
  in the artifact manifest JSON file.

  Fields:
    fileHash: The file hash of the artifact.
    location: The path of an artifact in a Google Cloud Storage bucket, with
      the generation number. For example,
      `gs://mybucket/path/to/output.jar#generation`.
  """

  fileHash = _messages.MessageField('FileHashes', 1, repeated=True)
  location = _messages.StringField(2)
class Artifacts(_messages.Message):
  r"""Artifacts produced by a build that should be uploaded upon successful
  completion of all build steps.

  Fields:
    images: A list of images to be pushed upon the successful completion of
      all build steps. The images will be pushed using the builder service
      account's credentials. The digests of the pushed images will be stored
      in the Build resource's results field. If any of the images fail to be
      pushed, the build is marked FAILURE.
    objects: A list of objects to be uploaded to Cloud Storage upon
      successful completion of all build steps. Files in the workspace
      matching specified paths globs will be uploaded to the specified Cloud
      Storage location using the builder service account's credentials. The
      location and generation of the uploaded objects will be stored in the
      Build resource's results field. If any objects fail to be pushed, the
      build is marked FAILURE.
  """

  images = _messages.StringField(1, repeated=True)
  objects = _messages.MessageField('ArtifactObjects', 2)
class BatchCreateBitbucketServerConnectedRepositoriesRequest(_messages.Message):
  r"""RPC request object accepted by
  BatchCreateBitbucketServerConnectedRepositories RPC method.

  Fields:
    requests: Required. Requests to connect Bitbucket Server repositories.
  """

  requests = _messages.MessageField('CreateBitbucketServerConnectedRepositoryRequest', 1, repeated=True)
class BatchCreateBitbucketServerConnectedRepositoriesResponse(_messages.Message):
  r"""Response of BatchCreateBitbucketServerConnectedRepositories RPC method
  including all successfully connected Bitbucket Server repositories.

  Fields:
    bitbucketServerConnectedRepositories: The connected Bitbucket Server
      repositories.
  """

  bitbucketServerConnectedRepositories = _messages.MessageField('BitbucketServerConnectedRepository', 1, repeated=True)
class BatchCreateBitbucketServerConnectedRepositoriesResponseMetadata(_messages.Message):
  r"""Metadata for `BatchCreateBitbucketServerConnectedRepositories`
  operation.

  Fields:
    completeTime: Time the operation was completed.
    config: The name of the `BitbucketServerConfig` that added connected
      repositories. Format: `projects/{project}/locations/{location}/bitbuc
      ketServerConfigs/{config}`
    createTime: Time the operation was created.
  """

  completeTime = _messages.StringField(1)
  config = _messages.StringField(2)
  createTime = _messages.StringField(3)
class BitbucketServerConfig(_messages.Message):
  r"""BitbucketServerConfig represents the configuration for a Bitbucket
  Server.

  Fields:
    apiKey: Required. Immutable. API Key that will be attached to webhook.
      Once this field has been set, it cannot be changed. If you need to
      change it, please create another BitbucketServerConfig.
    connectedRepositories: Output only. Connected Bitbucket Server
      repositories for this config.
    createTime: Time when the config was created.
    hostUri: Required. Immutable. The URI of the Bitbucket Server host. Once
      this field has been set, it cannot be changed. If you need to change
      it, please create another BitbucketServerConfig.
    name: The resource name for the config.
    peeredNetwork: Optional. The network to be used when reaching out to the
      Bitbucket Server instance. The VPC network must be enabled for private
      service connection. This should be set if the Bitbucket Server
      instance is hosted on-premises and not reachable by public internet.
      If this field is left empty, no network peering will occur and calls
      to the Bitbucket Server instance will be made over the public
      internet. Must be in the format
      `projects/{project}/global/networks/{network}`, where {project} is a
      project number or id and {network} is the name of a VPC network in the
      project.
    secrets: Required. Secret Manager secrets needed by the config.
    sslCa: Optional. SSL certificate to use for requests to Bitbucket
      Server. The format should be PEM format but the extension can be one
      of .pem, .cer, or .crt.
    username: Username of the account Cloud Build will use on Bitbucket
      Server.
    webhookKey: Output only. UUID included in webhook requests. The UUID is
      used to look up the corresponding config.
  """

  apiKey = _messages.StringField(1)
  connectedRepositories = _messages.MessageField('BitbucketServerRepositoryId', 2, repeated=True)
  createTime = _messages.StringField(3)
  hostUri = _messages.StringField(4)
  name = _messages.StringField(5)
  peeredNetwork = _messages.StringField(6)
  secrets = _messages.MessageField('BitbucketServerSecrets', 7)
  sslCa = _messages.StringField(8)
  username = _messages.StringField(9)
  webhookKey = _messages.StringField(10)
class BitbucketServerConnectedRepository(_messages.Message):
  r"""/ BitbucketServerConnectedRepository represents a connected Bitbucket
  Server / repository.

  Fields:
    parent: The name of the `BitbucketServerConfig` that added connected
      repository. Format: `projects/{project}/locations/{location}/bitbucke
      tServerConfigs/{config}`
    repo: The Bitbucket Server repositories to connect.
    status: Output only. The status of the repo connection request.
  """

  parent = _messages.StringField(1)
  repo = _messages.MessageField('BitbucketServerRepositoryId', 2)
  status = _messages.MessageField('Status', 3)
class BitbucketServerRepository(_messages.Message):
  r"""BitbucketServerRepository represents a repository hosted on a Bitbucket
  Server.

  Fields:
    browseUri: Link to the browse repo page on the Bitbucket Server instance.
    description: Description of the repository.
    displayName: Display name of the repository.
    name: The resource name of the repository.
    repoId: Identifier for a repository hosted on a Bitbucket Server.
  """

  # Field numbers map these attributes onto the API's wire format.
  browseUri = _messages.StringField(1)
  description = _messages.StringField(2)
  displayName = _messages.StringField(3)
  name = _messages.StringField(4)
  repoId = _messages.MessageField('BitbucketServerRepositoryId', 5)
class BitbucketServerRepositoryId(_messages.Message):
  r"""BitbucketServerRepositoryId identifies a specific repository hosted on a
  Bitbucket Server.

  Fields:
    projectKey: Required. Identifier for the project storing the repository.
    repoSlug: Required. Identifier for the repository.
    webhookId: Output only. The ID of the webhook that was created for
      receiving events from this repo. We only create and manage a single
      webhook for each repo.
  """

  projectKey = _messages.StringField(1)
  repoSlug = _messages.StringField(2)
  # INT32 variant: the webhook ID is serialized as a 32-bit integer.
  webhookId = _messages.IntegerField(3, variant=_messages.Variant.INT32)
class BitbucketServerSecrets(_messages.Message):
  r"""BitbucketServerSecrets represents the secrets in Secret Manager for a
  Bitbucket Server.

  Fields:
    adminAccessTokenVersionName: Required. The resource name for the admin
      access token's secret version.
    readAccessTokenVersionName: Required. The resource name for the read
      access token's secret version.
    webhookSecretVersionName: Required. Immutable. The resource name for the
      webhook secret's secret version. Once this field has been set, it cannot
      be changed. If you need to change it, please create another
      BitbucketServerConfig.
  """

  # These hold Secret Manager *version* resource names, not secret values.
  adminAccessTokenVersionName = _messages.StringField(1)
  readAccessTokenVersionName = _messages.StringField(2)
  webhookSecretVersionName = _messages.StringField(3)
class BitbucketServerTriggerConfig(_messages.Message):
  r"""BitbucketServerTriggerConfig describes the configuration of a trigger
  that creates a build whenever a Bitbucket Server event is received.

  Fields:
    bitbucketServerConfig: Output only. The BitbucketServerConfig specified in
      the bitbucket_server_config_resource field.
    bitbucketServerConfigResource: Required. The Bitbucket server config
      resource that this trigger config maps to.
    projectKey: Required. Key of the project that the repo is in. For example:
      The key for http://mybitbucket.server/projects/TEST/repos/test-repo is
      "TEST".
    pullRequest: Filter to match changes in pull requests.
    push: Filter to match changes in refs like branches, tags.
    repoSlug: Required. Slug of the repository. A repository slug is a URL-
      friendly version of a repository name, automatically generated by
      Bitbucket for use in the URL. For example, if the repository name is
      'test repo', in the URL it would become 'test-repo' as in
      http://mybitbucket.server/projects/TEST/repos/test-repo.
  """

  # `bitbucketServerConfig` is the resolved (output-only) message for the
  # resource name supplied in `bitbucketServerConfigResource`.
  bitbucketServerConfig = _messages.MessageField('BitbucketServerConfig', 1)
  bitbucketServerConfigResource = _messages.StringField(2)
  projectKey = _messages.StringField(3)
  pullRequest = _messages.MessageField('PullRequestFilter', 4)
  push = _messages.MessageField('PushFilter', 5)
  repoSlug = _messages.StringField(6)
class Build(_messages.Message):
  r"""A build resource in the Cloud Build API. At a high level, a `Build`
  describes where to find source code, how to build it (for example, the
  builder image to run on the source), and where to store the built artifacts.
  Fields can include the following variables, which will be expanded when the
  build is created: - $PROJECT_ID: the project ID of the build. -
  $PROJECT_NUMBER: the project number of the build. - $LOCATION: the
  location/region of the build. - $BUILD_ID: the autogenerated ID of the
  build. - $REPO_NAME: the source repository name specified by RepoSource. -
  $BRANCH_NAME: the branch name specified by RepoSource. - $TAG_NAME: the tag
  name specified by RepoSource. - $REVISION_ID or $COMMIT_SHA: the commit SHA
  specified by RepoSource or resolved from the specified branch or tag. -
  $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA.

  Enums:
    StatusValueValuesEnum: Output only. Status of the build.

  Messages:
    SubstitutionsValue: Substitutions data for `Build` resource.
    TimingValue: Output only. Stores timing information for phases of the
      build. Valid keys are: * BUILD: time to execute all build steps. * PUSH:
      time to push all specified images. * FETCHSOURCE: time to fetch source.
      * SETUPBUILD: time to set up build. If the build does not specify source
      or images, these keys will not be included.

  Fields:
    approval: Output only. Describes this build's approval configuration,
      status, and result.
    artifacts: Artifacts produced by the build that should be uploaded upon
      successful completion of all build steps.
    availableSecrets: Secrets and secret environment variables.
    buildTriggerId: Output only. The ID of the `BuildTrigger` that triggered
      this build, if it was triggered automatically.
    createTime: Output only. Time at which the request to create the build was
      received.
    failureInfo: Output only. Contains information about the build when
      status=FAILURE.
    finishTime: Output only. Time at which execution of the build was
      finished. The difference between finish_time and start_time is the
      duration of the build's execution.
    id: Output only. Unique identifier of the build.
    images: A list of images to be pushed upon the successful completion of
      all build steps. The images are pushed using the builder service
      account's credentials. The digests of the pushed images will be stored
      in the `Build` resource's results field. If any of the images fail to be
      pushed, the build status is marked `FAILURE`.
    logUrl: Output only. URL to logs for this build in Google Cloud Console.
    logsBucket: Google Cloud Storage bucket where logs should be written (see
      [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)). Logs file names will be of the format
      `${logs_bucket}/log-${build_id}.txt`.
    name: Output only. The 'Build' name with format:
      `projects/{project}/locations/{location}/builds/{build}`, where {build}
      is a unique identifier generated by the service.
    options: Special options for this build.
    projectId: Output only. ID of the project.
    queueTtl: TTL in queue for this build. If provided and the build is
      enqueued longer than this value, the build will expire and the build
      status will be `EXPIRED`. The TTL starts ticking from create_time.
    results: Output only. Results of the build.
    secrets: Secrets to decrypt using Cloud Key Management Service. Note:
      Secret Manager is the recommended technique for managing sensitive data
      with Cloud Build. Use `available_secrets` to configure builds to access
      secrets from Secret Manager. For instructions, see:
      https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets
    serviceAccount: IAM service account whose credentials will be used at
      build runtime. Must be of the format
      `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. ACCOUNT can be email
      address or uniqueId of the service account.
    source: The location of the source files to build.
    sourceProvenance: Output only. A permanent fixed identifier for source.
    startTime: Output only. Time at which execution of the build was started.
    status: Output only. Status of the build.
    statusDetail: Output only. Customer-readable message about the current
      status.
    steps: Required. The operations to be performed on the workspace.
    substitutions: Substitutions data for `Build` resource.
    tags: Tags for annotation of a `Build`. These are not docker tags.
    timeout: Amount of time that this build should be allowed to run, to
      second granularity. If this amount of time elapses, work on the build
      will cease and the build status will be `TIMEOUT`. `timeout` starts
      ticking from `startTime`. Default time is ten minutes.
    timing: Output only. Stores timing information for phases of the build.
      Valid keys are: * BUILD: time to execute all build steps. * PUSH: time
      to push all specified images. * FETCHSOURCE: time to fetch source. *
      SETUPBUILD: time to set up build. If the build does not specify source
      or images, these keys will not be included.
    warnings: Output only. Non-fatal problems encountered during the execution
      of the build.
  """

  class StatusValueValuesEnum(_messages.Enum):
    r"""Output only. Status of the build.

    Values:
      STATUS_UNKNOWN: Status of the build is unknown.
      PENDING: Build has been created and is pending execution and queuing. It
        has not been queued.
      QUEUED: Build or step is queued; work has not yet begun.
      WORKING: Build or step is being executed.
      SUCCESS: Build or step finished successfully.
      FAILURE: Build or step failed to complete successfully.
      INTERNAL_ERROR: Build or step failed due to an internal cause.
      TIMEOUT: Build or step took longer than was allowed.
      CANCELLED: Build or step was canceled by a user.
      EXPIRED: Build was enqueued for longer than the value of `queue_ttl`.
    """
    # Enum numbers are the proto enum values; they must match the API.
    STATUS_UNKNOWN = 0
    PENDING = 1
    QUEUED = 2
    WORKING = 3
    SUCCESS = 4
    FAILURE = 5
    INTERNAL_ERROR = 6
    TIMEOUT = 7
    CANCELLED = 8
    EXPIRED = 9

  # Collects substitution keys not declared as explicit fields into
  # `additionalProperties` (proto map emulation).
  @encoding.MapUnrecognizedFields('additionalProperties')
  class SubstitutionsValue(_messages.Message):
    r"""Substitutions data for `Build` resource.

    Messages:
      AdditionalProperty: An additional property for a SubstitutionsValue
        object.

    Fields:
      additionalProperties: Additional properties of type SubstitutionsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SubstitutionsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """
      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  @encoding.MapUnrecognizedFields('additionalProperties')
  class TimingValue(_messages.Message):
    r"""Output only. Stores timing information for phases of the build. Valid
    keys are: * BUILD: time to execute all build steps. * PUSH: time to push
    all specified images. * FETCHSOURCE: time to fetch source. * SETUPBUILD:
    time to set up build. If the build does not specify source or images,
    these keys will not be included.

    Messages:
      AdditionalProperty: An additional property for a TimingValue object.

    Fields:
      additionalProperties: Additional properties of type TimingValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a TimingValue object.

      Fields:
        key: Name of the additional property.
        value: A TimeSpan attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('TimeSpan', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # Proto field numbers 1-29; keep numbering in sync with the API definition.
  approval = _messages.MessageField('BuildApproval', 1)
  artifacts = _messages.MessageField('Artifacts', 2)
  availableSecrets = _messages.MessageField('Secrets', 3)
  buildTriggerId = _messages.StringField(4)
  createTime = _messages.StringField(5)
  failureInfo = _messages.MessageField('FailureInfo', 6)
  finishTime = _messages.StringField(7)
  id = _messages.StringField(8)
  images = _messages.StringField(9, repeated=True)
  logUrl = _messages.StringField(10)
  logsBucket = _messages.StringField(11)
  name = _messages.StringField(12)
  options = _messages.MessageField('BuildOptions', 13)
  projectId = _messages.StringField(14)
  queueTtl = _messages.StringField(15)
  results = _messages.MessageField('Results', 16)
  secrets = _messages.MessageField('Secret', 17, repeated=True)
  serviceAccount = _messages.StringField(18)
  source = _messages.MessageField('Source', 19)
  sourceProvenance = _messages.MessageField('SourceProvenance', 20)
  startTime = _messages.StringField(21)
  status = _messages.EnumField('StatusValueValuesEnum', 22)
  statusDetail = _messages.StringField(23)
  steps = _messages.MessageField('BuildStep', 24, repeated=True)
  substitutions = _messages.MessageField('SubstitutionsValue', 25)
  tags = _messages.StringField(26, repeated=True)
  timeout = _messages.StringField(27)
  timing = _messages.MessageField('TimingValue', 28)
  warnings = _messages.MessageField('Warning', 29, repeated=True)
class BuildApproval(_messages.Message):
  r"""BuildApproval describes a build's approval configuration, state, and
  result.

  Enums:
    StateValueValuesEnum: Output only. The state of this build's approval.

  Fields:
    config: Output only. Configuration for manual approval of this build.
    result: Output only. Result of manual approval for this Build.
    state: Output only. The state of this build's approval.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The state of this build's approval.

    Values:
      STATE_UNSPECIFIED: Default enum type. This should not be used.
      PENDING: Build approval is pending.
      APPROVED: Build approval has been approved.
      REJECTED: Build approval has been rejected.
      CANCELLED: Build was cancelled while it was still pending approval.
    """
    STATE_UNSPECIFIED = 0
    PENDING = 1
    APPROVED = 2
    REJECTED = 3
    CANCELLED = 4

  config = _messages.MessageField('ApprovalConfig', 1)
  result = _messages.MessageField('ApprovalResult', 2)
  state = _messages.EnumField('StateValueValuesEnum', 3)
class BuildOperationMetadata(_messages.Message):
  r"""Metadata for build operations.

  Fields:
    build: The build that the operation is tracking.
  """

  build = _messages.MessageField('Build', 1)
class BuildOptions(_messages.Message):
  r"""Optional arguments to enable specific features of builds.

  Enums:
    DockerDaemonValueValuesEnum: Optional. Option to specify how (or if) a
      Docker daemon is provided for the build.
    LogStreamingOptionValueValuesEnum: Option to define build log streaming
      behavior to Google Cloud Storage.
    LoggingValueValuesEnum: Option to specify the logging mode, which
      determines if and where build logs are stored.
    MachineTypeValueValuesEnum: Compute Engine machine type on which to run
      the build.
    RequestedVerifyOptionValueValuesEnum: Requested verifiability options.
    SourceProvenanceHashValueListEntryValuesEnum:
    SubstitutionOptionValueValuesEnum: Option to specify behavior when there
      is an error in the substitution checks. NOTE: this is always set to
      ALLOW_LOOSE for triggered builds and cannot be overridden in the build
      configuration file.

  Fields:
    anthosCluster: Details about how this build should be executed on a Anthos
      cluster.
    cluster: Details about how this build should be executed on a GKE cluster.
    diskSizeGb: Requested disk size for the VM that runs the build. Note that
      this is *NOT* "disk free"; some of the space will be used by the
      operating system and build utilities. Also note that this is the minimum
      disk size that will be allocated for the build -- the build may run with
      a larger disk than requested. At present, the maximum disk size is
      1000GB; builds that request more than the maximum are rejected with an
      error.
    dockerDaemon: Optional. Option to specify how (or if) a Docker daemon is
      provided for the build.
    dynamicSubstitutions: Option to specify whether or not to apply bash style
      string operations to the substitutions. NOTE: this is always enabled for
      triggered builds and cannot be overridden in the build configuration
      file.
    env: A list of global environment variable definitions that will exist for
      all build steps in this build. If a variable is defined in both globally
      and in a build step, the variable will use the build step value. The
      elements are of the form "KEY=VALUE" for the environment variable "KEY"
      being given the value "VALUE".
    logStreamingOption: Option to define build log streaming behavior to
      Google Cloud Storage.
    logging: Option to specify the logging mode, which determines if and where
      build logs are stored.
    machineType: Compute Engine machine type on which to run the build.
    pool: Optional. Specification for execution on a `WorkerPool`. See
      [running builds in a private
      pool](https://cloud.google.com/build/docs/private-pools/run-builds-in-
      private-pool) for more information.
    requestedVerifyOption: Requested verifiability options.
    secretEnv: A list of global environment variables, which are encrypted
      using a Cloud Key Management Service crypto key. These values must be
      specified in the build's `Secret`. These variables will be available to
      all build steps in this build.
    sourceProvenanceHash: Requested hash for SourceProvenance.
    substitutionOption: Option to specify behavior when there is an error in
      the substitution checks. NOTE: this is always set to ALLOW_LOOSE for
      triggered builds and cannot be overridden in the build configuration
      file.
    volumes: Global list of volumes to mount for ALL build steps Each volume
      is created as an empty volume prior to starting the build process. Upon
      completion of the build, volumes and their contents are discarded.
      Global volume names and paths cannot conflict with the volumes defined a
      build step. Using a global volume in a build with only one step is not
      valid as it is indicative of a build request with an incorrect
      configuration.
    workerPool: This field deprecated; please use `pool.name` instead.
  """

  class DockerDaemonValueValuesEnum(_messages.Enum):
    r"""Optional. Option to specify how (or if) a Docker daemon is provided
    for the build.

    Values:
      DOCKER_DAEMON_UNSPECIFIED: If the option is unspecified, a default will
        be set based on the environment.
      NO_DOCKER: No Docker daemon or functionality will be provided to the
        build.
      NON_PRIVILEGED: A Docker daemon is available during the build that is
        running without privileged mode.
      PRIVILEGED: A Docker daemon will be available that is running in
        privileged mode. This is potentially a security vulnerability and
        should only be used if the user is fully aware of the associated
        risks.
    """
    DOCKER_DAEMON_UNSPECIFIED = 0
    NO_DOCKER = 1
    NON_PRIVILEGED = 2
    PRIVILEGED = 3

  class LogStreamingOptionValueValuesEnum(_messages.Enum):
    r"""Option to define build log streaming behavior to Google Cloud Storage.

    Values:
      STREAM_DEFAULT: Service may automatically determine build log streaming
        behavior.
      STREAM_ON: Build logs should be streamed to Google Cloud Storage.
      STREAM_OFF: Build logs should not be streamed to Google Cloud Storage;
        they will be written when the build is completed.
    """
    STREAM_DEFAULT = 0
    STREAM_ON = 1
    STREAM_OFF = 2

  class LoggingValueValuesEnum(_messages.Enum):
    r"""Option to specify the logging mode, which determines if and where
    build logs are stored.

    Values:
      LOGGING_UNSPECIFIED: The service determines the logging mode. The
        default is `LEGACY`. Do not rely on the default logging behavior as it
        may change in the future.
      LEGACY: Build logs are stored in Cloud Logging and Cloud Storage.
      GCS_ONLY: Build logs are stored in Cloud Storage.
      STACKDRIVER_ONLY: This option is the same as CLOUD_LOGGING_ONLY.
      CLOUD_LOGGING_ONLY: Build logs are stored in Cloud Logging. Selecting
        this option will not allow [logs
        streaming](https://cloud.google.com/sdk/gcloud/reference/builds/log).
      NONE: Turn off all logging. No build logs will be captured.
    """
    LOGGING_UNSPECIFIED = 0
    LEGACY = 1
    GCS_ONLY = 2
    STACKDRIVER_ONLY = 3
    CLOUD_LOGGING_ONLY = 4
    NONE = 5

  class MachineTypeValueValuesEnum(_messages.Enum):
    r"""Compute Engine machine type on which to run the build.

    Values:
      UNSPECIFIED: Standard machine type.
      N1_HIGHCPU_8: Highcpu machine with 8 CPUs.
      N1_HIGHCPU_32: Highcpu machine with 32 CPUs.
      E2_HIGHCPU_8: Highcpu e2 machine with 8 CPUs.
      E2_HIGHCPU_32: Highcpu e2 machine with 32 CPUs.
    """
    UNSPECIFIED = 0
    N1_HIGHCPU_8 = 1
    N1_HIGHCPU_32 = 2
    E2_HIGHCPU_8 = 3
    E2_HIGHCPU_32 = 4

  class RequestedVerifyOptionValueValuesEnum(_messages.Enum):
    r"""Requested verifiability options.

    Values:
      NOT_VERIFIED: Not a verifiable build. (default)
      VERIFIED: Verified build.
    """
    NOT_VERIFIED = 0
    VERIFIED = 1

  class SourceProvenanceHashValueListEntryValuesEnum(_messages.Enum):
    r"""SourceProvenanceHashValueListEntryValuesEnum enum type.

    Values:
      NONE: No hash requested.
      SHA256: Use a sha256 hash.
      MD5: Use a md5 hash.
    """
    NONE = 0
    SHA256 = 1
    MD5 = 2

  class SubstitutionOptionValueValuesEnum(_messages.Enum):
    r"""Option to specify behavior when there is an error in the substitution
    checks. NOTE: this is always set to ALLOW_LOOSE for triggered builds and
    cannot be overridden in the build configuration file.

    Values:
      MUST_MATCH: Fails the build if error in substitutions checks, like
        missing a substitution in the template or in the map.
      ALLOW_LOOSE: Do not fail the build if error in substitutions checks.
    """
    MUST_MATCH = 0
    ALLOW_LOOSE = 1

  # Proto field numbers 1-16; keep numbering in sync with the API definition.
  anthosCluster = _messages.MessageField('AnthosWorkerPool', 1)
  cluster = _messages.MessageField('ClusterOptions', 2)
  diskSizeGb = _messages.IntegerField(3)
  dockerDaemon = _messages.EnumField('DockerDaemonValueValuesEnum', 4)
  dynamicSubstitutions = _messages.BooleanField(5)
  env = _messages.StringField(6, repeated=True)
  logStreamingOption = _messages.EnumField('LogStreamingOptionValueValuesEnum', 7)
  logging = _messages.EnumField('LoggingValueValuesEnum', 8)
  machineType = _messages.EnumField('MachineTypeValueValuesEnum', 9)
  pool = _messages.MessageField('PoolOption', 10)
  requestedVerifyOption = _messages.EnumField('RequestedVerifyOptionValueValuesEnum', 11)
  secretEnv = _messages.StringField(12, repeated=True)
  sourceProvenanceHash = _messages.EnumField('SourceProvenanceHashValueListEntryValuesEnum', 13, repeated=True)
  substitutionOption = _messages.EnumField('SubstitutionOptionValueValuesEnum', 14)
  volumes = _messages.MessageField('Volume', 15, repeated=True)
  # Deprecated in favor of `pool`; retained for wire compatibility.
  workerPool = _messages.StringField(16)
class BuildStep(_messages.Message):
  r"""A step in the build pipeline.

  Enums:
    StatusValueValuesEnum: Output only. Status of the build step. At this
      time, build step status is only updated on build completion; step status
      is not updated in real-time as the build progresses.

  Fields:
    args: A list of arguments that will be presented to the step when it is
      started. If the image used to run the step's container has an
      entrypoint, the `args` are used as arguments to that entrypoint. If the
      image does not define an entrypoint, the first element in args is used
      as the entrypoint, and the remainder will be used as arguments.
    dir: Working directory to use when running this step's container. If this
      value is a relative path, it is relative to the build's working
      directory. If this value is absolute, it may be outside the build's
      working directory, in which case the contents of the path may not be
      persisted across build step executions, unless a `volume` for that path
      is specified. If the build specifies a `RepoSource` with `dir` and a
      step with a `dir`, which specifies an absolute path, the `RepoSource`
      `dir` is ignored for the step's execution.
    entrypoint: Entrypoint to be used instead of the build step image's
      default entrypoint. If unset, the image's default entrypoint is used.
    env: A list of environment variable definitions to be used when running a
      step. The elements are of the form "KEY=VALUE" for the environment
      variable "KEY" being given the value "VALUE".
    id: Unique identifier for this build step, used in `wait_for` to reference
      this build step as a dependency.
    name: Required. The name of the container image that will run this
      particular build step. If the image is available in the host's Docker
      daemon's cache, it will be run directly. If not, the host will attempt
      to pull the image first, using the builder service account's credentials
      if necessary. The Docker daemon's cache will already have the latest
      versions of all of the officially supported build steps
      ([https://github.com/GoogleCloudPlatform/cloud-
      builders](https://github.com/GoogleCloudPlatform/cloud-builders)). The
      Docker daemon will also have cached many of the layers for some popular
      images, like "ubuntu", "debian", but they will be refreshed at the time
      you attempt to use them. If you built an image in a previous build step,
      it will be stored in the host's Docker daemon's cache and is available
      to use as the name for a later build step.
    pullTiming: Output only. Stores timing information for pulling this build
      step's builder image only.
    script: A shell script to be executed in the step. When script is
      provided, the user cannot specify the entrypoint or args.
    secretEnv: A list of environment variables which are encrypted using a
      Cloud Key Management Service crypto key. These values must be specified
      in the build's `Secret`.
    status: Output only. Status of the build step. At this time, build step
      status is only updated on build completion; step status is not updated
      in real-time as the build progresses.
    timeout: Time limit for executing this build step. If not defined, the
      step has no time limit and will be allowed to continue to run until
      either it completes or the build itself times out.
    timing: Output only. Stores timing information for executing this build
      step.
    volumes: List of volumes to mount into the build step. Each volume is
      created as an empty volume prior to execution of the build step. Upon
      completion of the build, volumes and their contents are discarded. Using
      a named volume in only one step is not valid as it is indicative of a
      build request with an incorrect configuration.
    waitFor: The ID(s) of the step(s) that this build step depends on. This
      build step will not start until all the build steps in `wait_for` have
      completed successfully. If `wait_for` is empty, this build step will
      start when all previous build steps in the `Build.Steps` list have
      completed successfully.
  """

  class StatusValueValuesEnum(_messages.Enum):
    r"""Output only. Status of the build step. At this time, build step status
    is only updated on build completion; step status is not updated in real-
    time as the build progresses.

    Values:
      STATUS_UNKNOWN: Status of the build is unknown.
      PENDING: Build has been created and is pending execution and queuing. It
        has not been queued.
      QUEUED: Build or step is queued; work has not yet begun.
      WORKING: Build or step is being executed.
      SUCCESS: Build or step finished successfully.
      FAILURE: Build or step failed to complete successfully.
      INTERNAL_ERROR: Build or step failed due to an internal cause.
      TIMEOUT: Build or step took longer than was allowed.
      CANCELLED: Build or step was canceled by a user.
      EXPIRED: Build was enqueued for longer than the value of `queue_ttl`.
    """
    # Mirrors Build.StatusValueValuesEnum; values shared between builds and
    # steps in the API.
    STATUS_UNKNOWN = 0
    PENDING = 1
    QUEUED = 2
    WORKING = 3
    SUCCESS = 4
    FAILURE = 5
    INTERNAL_ERROR = 6
    TIMEOUT = 7
    CANCELLED = 8
    EXPIRED = 9

  # Proto field numbers 1-14; keep numbering in sync with the API definition.
  args = _messages.StringField(1, repeated=True)
  dir = _messages.StringField(2)
  entrypoint = _messages.StringField(3)
  env = _messages.StringField(4, repeated=True)
  id = _messages.StringField(5)
  name = _messages.StringField(6)
  pullTiming = _messages.MessageField('TimeSpan', 7)
  script = _messages.StringField(8)
  secretEnv = _messages.StringField(9, repeated=True)
  status = _messages.EnumField('StatusValueValuesEnum', 10)
  timeout = _messages.StringField(11)
  timing = _messages.MessageField('TimeSpan', 12)
  volumes = _messages.MessageField('Volume', 13, repeated=True)
  waitFor = _messages.StringField(14, repeated=True)
class BuildTrigger(_messages.Message):
r"""Configuration for an automated build in response to source repository
changes.
Enums:
EventTypeValueValuesEnum: EventType allows the user to explicitly set the
type of event to which this BuildTrigger should respond. This field will
be validated against the rest of the configuration if it is set.
IncludeBuildLogsValueValuesEnum: If set to INCLUDE_BUILD_LOGS_WITH_STATUS,
log url will be shown on GitHub page when build status is final. Setting
this field to INCLUDE_BUILD_LOGS_WITH_STATUS for non GitHub triggers
results in INVALID_ARGUMENT error.
Messages:
SubstitutionsValue: Substitutions for Build resource. The keys must match
the following regular expression: `^_[A-Z0-9_]+$`.
Fields:
approvalConfig: Configuration for manual approval to start a build
invocation of this BuildTrigger.
autodetect: Autodetect build configuration. The following precedence is
used (case insensitive): 1. cloudbuild.yaml 2. cloudbuild.yml 3.
cloudbuild.json 4. Dockerfile Currently only available for GitHub App
Triggers.
bitbucketServerTriggerConfig: BitbucketServerTriggerConfig describes the
configuration of a trigger that creates a build whenever a Bitbucket
Server event is received.
build: Contents of the build template.
createTime: Output only. Time when the trigger was created.
cron: CronConfig describes the configuration of a trigger that creates a
build whenever a Cloud Scheduler event is received.
description: Human-readable description of this trigger.
disabled: If true, the trigger will never automatically execute a build.
eventType: EventType allows the user to explicitly set the type of event
to which this BuildTrigger should respond. This field will be validated
against the rest of the configuration if it is set.
filename: Path, from the source root, to the build configuration file
(i.e. cloudbuild.yaml).
filter: A Common Expression Language string.
gitFileSource: The file source describing the local or remote Build
template.
github: GitHubEventsConfig describes the configuration of a trigger that
creates a build whenever a GitHub event is received. Mutually exclusive
with `trigger_template`.
id: Output only. Unique identifier of the trigger.
ignoredFiles: ignored_files and included_files are file glob matches using
https://golang.org/pkg/path/filepath/#Match extended with support for
"**". If ignored_files and changed files are both empty, then they are
not used to determine whether or not to trigger a build. If
ignored_files is not empty, then we ignore any files that match any of
the ignored_file globs. If the change has no files that are outside of
the ignored_files globs, then we do not trigger a build.
includeBuildLogs: If set to INCLUDE_BUILD_LOGS_WITH_STATUS, log url will
be shown on GitHub page when build status is final. Setting this field
to INCLUDE_BUILD_LOGS_WITH_STATUS for non GitHub triggers results in
INVALID_ARGUMENT error.
includedFiles: If any of the files altered in the commit pass the
ignored_files filter and included_files is empty, then as far as this
filter is concerned, we should trigger the build. If any of the files
altered in the commit pass the ignored_files filter and included_files
is not empty, then we make sure that at least one of those files matches
a included_files glob. If not, then we do not trigger a build.
name: User-assigned name of the trigger. Must be unique within the
project. Trigger names must meet the following requirements: + They must
contain only alphanumeric characters and dashes. + They can be 1-64
characters long. + They must begin and end with an alphanumeric
character.
pubsubConfig: PubsubConfig describes the configuration of a trigger that
creates a build whenever a Pub/Sub message is published.
resourceName: The `Trigger` name with format:
`projects/{project}/locations/{location}/triggers/{trigger}`, where
{trigger} is a unique identifier generated by the service.
serviceAccount: The service account used for all user-controlled
operations including UpdateBuildTrigger, RunBuildTrigger, CreateBuild,
and CancelBuild. If no service account is set, then the standard Cloud
Build service account ([PROJECT_NUM]@system.gserviceaccount.com) will be
used instead. Format:
`projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}`
sourceToBuild: The repo and ref of the repository from which to build.
This field is used only for those triggers that do not respond to SCM
events. Triggers that respond to such events build source at whatever
commit caused the event. This field is currently only used by Webhook,
Pub/Sub, Manual, and Cron triggers.
substitutions: Substitutions for Build resource. The keys must match the
following regular expression: `^_[A-Z0-9_]+$`.
tags: Tags for annotation of a `BuildTrigger`
triggerTemplate: Template describing the types of source changes to
trigger a build. Branch and tag names in trigger templates are
interpreted as regular expressions. Any branch or tag change that
matches that regular expression will trigger a build. Mutually exclusive
with `github`.
webhookConfig: WebhookConfig describes the configuration of a trigger that
creates a build whenever a webhook is sent to a trigger's webhook URL.
"""
class EventTypeValueValuesEnum(_messages.Enum):
r"""EventType allows the user to explicitly set the type of event to which
this BuildTrigger should respond. This field will be validated against the
rest of the configuration if it is set.
Values:
EVENT_TYPE_UNSPECIFIED: EVENT_TYPE_UNSPECIFIED event_types are ignored.
REPO: REPO corresponds to the supported VCS integrations.
WEBHOOK: WEBHOOK corresponds to webhook triggers.
PUBSUB: PUBSUB corresponds to pubsub triggers.
MANUAL: MANUAL corresponds to manual-only invoked triggers.
"""
EVENT_TYPE_UNSPECIFIED = 0
REPO = 1
WEBHOOK = 2
PUBSUB = 3
MANUAL = 4
class IncludeBuildLogsValueValuesEnum(_messages.Enum):
r"""If set to INCLUDE_BUILD_LOGS_WITH_STATUS, log url will be shown on
GitHub page when build status is final. Setting this field to
INCLUDE_BUILD_LOGS_WITH_STATUS for non GitHub triggers results in
INVALID_ARGUMENT error.
Values:
INCLUDE_BUILD_LOGS_UNSPECIFIED: Build logs will not be shown on GitHub.
INCLUDE_BUILD_LOGS_WITH_STATUS: Build logs will be shown on GitHub.
"""
INCLUDE_BUILD_LOGS_UNSPECIFIED = 0
INCLUDE_BUILD_LOGS_WITH_STATUS = 1
@encoding.MapUnrecognizedFields('additionalProperties')
class SubstitutionsValue(_messages.Message):
r"""Substitutions for Build resource. The keys must match the following
regular expression: `^_[A-Z0-9_]+$`.
Messages:
AdditionalProperty: An additional property for a SubstitutionsValue
object.
Fields:
additionalProperties: Additional properties of type SubstitutionsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a SubstitutionsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
approvalConfig = _messages.MessageField('ApprovalConfig', 1)
autodetect = _messages.BooleanField(2)
bitbucketServerTriggerConfig = _messages.MessageField('BitbucketServerTriggerConfig', 3)
build = _messages.MessageField('Build', 4)
createTime = _messages.StringField(5)
cron = _messages.MessageField('CronConfig', 6)
description = _messages.StringField(7)
disabled = _messages.BooleanField(8)
eventType = _messages.EnumField('EventTypeValueValuesEnum', 9)
filename = _messages.StringField(10)
filter = _messages.StringField(11)
gitFileSource = _messages.MessageField('GitFileSource', 12)
github = _messages.MessageField('GitHubEventsConfig', 13)
id = _messages.StringField(14)
ignoredFiles = _messages.StringField(15, repeated=True)
includeBuildLogs = _messages.EnumField('IncludeBuildLogsValueValuesEnum', 16)
includedFiles = _messages.StringField(17, repeated=True)
name = _messages.StringField(18)
pubsubConfig = _messages.MessageField('PubsubConfig', 19)
resourceName = _messages.StringField(20)
serviceAccount = _messages.StringField(21)
sourceToBuild = _messages.MessageField('GitRepoSource', 22)
substitutions = _messages.MessageField('SubstitutionsValue', 23)
tags = _messages.StringField(24, repeated=True)
triggerTemplate = _messages.MessageField('RepoSource', 25)
webhookConfig = _messages.MessageField('WebhookConfig', 26)
class BuiltImage(_messages.Message):
  r"""An image built by the pipeline.

  Fields:
    digest: Docker Registry 2.0 digest.
    name: Name used to push the container image to Google Container Registry,
      as presented to `docker push`.
    pushTiming: Output only. Stores timing information for pushing the
      specified image.
  """

  # NOTE: field numbers are serialization tags for the generated message —
  # do not renumber.
  digest = _messages.StringField(1)
  name = _messages.StringField(2)
  pushTiming = _messages.MessageField('TimeSpan', 3)
class CancelBuildRequest(_messages.Message):
  r"""Request to cancel an ongoing build.

  Fields:
    id: Required. ID of the build.
    name: The name of the `Build` to cancel. Format:
      `projects/{project}/locations/{location}/builds/{build}`
    projectId: Required. ID of the project.
  """

  id = _messages.StringField(1)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3)
class CancelOperationRequest(_messages.Message):
  r"""The request message for Operations.CancelOperation."""
class CloudbuildGithubInstallationsInstallationsListRequest(_messages.Message):
  r"""A CloudbuildGithubInstallationsInstallationsListRequest object.

  Fields:
    installationId: Installation ID
  """

  installationId = _messages.IntegerField(1, required=True)
class CloudbuildGithubInstallationsProjectsListRequest(_messages.Message):
  r"""A CloudbuildGithubInstallationsProjectsListRequest object.

  Fields:
    installationId: Installation ID
  """

  installationId = _messages.IntegerField(1, required=True)
class CloudbuildInstallationsInstallationsListRequest(_messages.Message):
  r"""A CloudbuildInstallationsInstallationsListRequest object.

  Fields:
    installationId: Installation ID
  """

  installationId = _messages.IntegerField(1, required=True)
class CloudbuildLocationsRegionalWebhookRequest(_messages.Message):
  r"""A CloudbuildLocationsRegionalWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    location: Required. The location where the webhook should be sent.
    webhookKey: For GitHub Enterprise webhooks, this key is used to associate
      the webhook request with the GitHubEnterpriseConfig to use for
      validation.
  """

  httpBody = _messages.MessageField('HttpBody', 1)
  location = _messages.StringField(2, required=True)
  webhookKey = _messages.StringField(3)
class CloudbuildOauthGetRegistrationRequest(_messages.Message):
  r"""A CloudbuildOauthGetRegistrationRequest object.

  Enums:
    NamespaceValueValuesEnum: Required. The namespace that the credential
      belongs to.

  Fields:
    authUser: For users who are logged in using multiple accounts, specify the
      auth user parameter so that the registration url redirects back to the
      cloud console using the proper account.
    githubEnterpriseConfig: Optional. The full resource name of the github
      enterprise resource if applicable.
    hostUrl: Required. The host url that the oauth credentials are associated
      with. For GitHub, this would be "https://github.com". For
      GitHubEnterprise, this would be the host name of their github enterprise
      instance.
    namespace: Required. The namespace that the credential belongs to.
  """

  class NamespaceValueValuesEnum(_messages.Enum):
    r"""Required. The namespace that the credential belongs to.

    Values:
      NAMESPACE_UNSPECIFIED: The default namespace.
      GITHUB_ENTERPRISE: A credential to be used with GitHub enterprise.
    """
    NAMESPACE_UNSPECIFIED = 0
    GITHUB_ENTERPRISE = 1

  authUser = _messages.StringField(1)
  githubEnterpriseConfig = _messages.StringField(2)
  hostUrl = _messages.StringField(3)
  namespace = _messages.EnumField('NamespaceValueValuesEnum', 4)
class CloudbuildOauthProcessOAuthCallbackRequest(_messages.Message):
  r"""A CloudbuildOauthProcessOAuthCallbackRequest object.

  Fields:
    code: GitHub generated temporary authorization code.
    githubEnterpriseConfig: For github enterprise, the full resource name of
      the github enterprise resource.
    hostUrl: The host url of the site that the OAuth token is issued for.
    namespace: The namespace that the oauth callback credential should be
      processed for. This should map to the string name of the enum defined in
      the GetOAuthRegistrationURLRequest.
    state: The XSRF token that was sent as part of the initial request to
      start the OAuth flow.
  """

  code = _messages.StringField(1)
  githubEnterpriseConfig = _messages.StringField(2)
  hostUrl = _messages.StringField(3)
  namespace = _messages.StringField(4)
  state = _messages.StringField(5)
class CloudbuildOperationsCancelRequest(_messages.Message):
  r"""A CloudbuildOperationsCancelRequest object.

  Fields:
    cancelOperationRequest: A CancelOperationRequest resource to be passed as
      the request body.
    name: The name of the operation resource to be cancelled.
  """

  cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
  name = _messages.StringField(2, required=True)
class CloudbuildOperationsGetRequest(_messages.Message):
  r"""A CloudbuildOperationsGetRequest object.

  Fields:
    name: The name of the operation resource.
  """

  name = _messages.StringField(1, required=True)
class CloudbuildProjectsBuildsApproveRequest(_messages.Message):
  r"""A CloudbuildProjectsBuildsApproveRequest object.

  Fields:
    approveBuildRequest: A ApproveBuildRequest resource to be passed as the
      request body.
    name: Required. Name of the target build. For example:
      "projects/{$project_id}/builds/{$build_id}"
  """

  approveBuildRequest = _messages.MessageField('ApproveBuildRequest', 1)
  name = _messages.StringField(2, required=True)
class CloudbuildProjectsBuildsCancelRequest(_messages.Message):
  r"""A CloudbuildProjectsBuildsCancelRequest object.

  Fields:
    cancelBuildRequest: A CancelBuildRequest resource to be passed as the
      request body.
    id: Required. ID of the build.
    projectId: Required. ID of the project.
  """

  cancelBuildRequest = _messages.MessageField('CancelBuildRequest', 1)
  id = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsBuildsCreateRequest object.

  Fields:
    build: A Build resource to be passed as the request body.
    parent: The parent resource where this build will be created. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project.
  """

  build = _messages.MessageField('Build', 1)
  parent = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsBuildsGetRequest object.

  Fields:
    id: Required. ID of the build.
    name: The name of the `Build` to retrieve. Format:
      `projects/{project}/locations/{location}/builds/{build}`
    projectId: Required. ID of the project.
  """

  id = _messages.StringField(1, required=True)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsListRequest(_messages.Message):
  r"""A CloudbuildProjectsBuildsListRequest object.

  Fields:
    filter: The raw filter text to constrain the results.
    pageSize: Number of results to return in the list.
    pageToken: The page token for the next page of Builds. If unspecified, the
      first page of results is returned. If the token is rejected for any
      reason, INVALID_ARGUMENT will be thrown. In this case, the token should
      be discarded, and pagination should be restarted from the first page of
      results. See https://google.aip.dev/158 for more.
    parent: The parent of the collection of `Builds`. Format:
      `projects/{project}/locations/location`
    projectId: Required. ID of the project.
  """

  filter = _messages.StringField(1)
  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(3)
  parent = _messages.StringField(4)
  projectId = _messages.StringField(5, required=True)
class CloudbuildProjectsGithubEnterpriseConfigsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubEnterpriseConfigsCreateRequest object.

  Fields:
    gheConfigId: Optional. The ID to use for the GithubEnterpriseConfig, which
      will become the final component of the GithubEnterpriseConfig's resource
      name. ghe_config_id must meet the following requirements: + They must
      contain only alphanumeric characters and dashes. + They can be 1-64
      characters long. + They must begin and end with an alphanumeric
      character
    gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
      the request body.
    parent: Name of the parent project. For example:
      projects/{$project_number} or projects/{$project_id}
    projectId: ID of the project.
  """

  gheConfigId = _messages.StringField(1)
  gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 2)
  parent = _messages.StringField(3, required=True)
  projectId = _messages.StringField(4)
class CloudbuildProjectsGithubEnterpriseConfigsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubEnterpriseConfigsDeleteRequest object.

  Fields:
    configId: Unique identifier of the `GitHubEnterpriseConfig`
    name: This field should contain the name of the enterprise config
      resource. For example:
      "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    projectId: ID of the project
  """

  configId = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsGithubEnterpriseConfigsGetAppRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubEnterpriseConfigsGetAppRequest object.

  Fields:
    enterpriseConfigResource: Required. The name of the enterprise config
      resource associated with the GitHub App. For example:
      "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
  """

  enterpriseConfigResource = _messages.StringField(1, required=True)
class CloudbuildProjectsGithubEnterpriseConfigsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubEnterpriseConfigsGetRequest object.

  Fields:
    configId: Unique identifier of the `GitHubEnterpriseConfig`
    name: This field should contain the name of the enterprise config
      resource. For example:
      "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    projectId: ID of the project
  """

  configId = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsGithubEnterpriseConfigsListRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubEnterpriseConfigsListRequest object.

  Fields:
    parent: Name of the parent project. For example:
      projects/{$project_number} or projects/{$project_id}
    projectId: ID of the project
  """

  parent = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2)
class CloudbuildProjectsGithubEnterpriseConfigsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubEnterpriseConfigsPatchRequest object.

  Fields:
    gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
      the request body.
    name: Optional. The full resource name for the GitHubEnterpriseConfig For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    updateMask: Update mask for the resource. If this is set, the server will
      only update the fields specified in the field mask. Otherwise, a full
      update of the mutable resource fields will be performed.
  """

  gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 1)
  name = _messages.StringField(2, required=True)
  updateMask = _messages.StringField(3)
class CloudbuildProjectsGithubInstallationsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubInstallationsCreateRequest object.

  Fields:
    installation: A Installation resource to be passed as the request body.
    parent: The parent resource where this github installation will be
      created. Format: `projects/{project}/locations/{location}`
    projectId: The project ID of the GCP project the installation is
      associated with.
    projectId1: ID of the project.
    userOauthCode: GitHub user code. If a GitHub credential is already
      associated with the user this can be omitted, else the code is used to
      exchange and store an OAuth token.
  """

  installation = _messages.MessageField('Installation', 1)
  parent = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
  projectId1 = _messages.StringField(4)
  userOauthCode = _messages.StringField(5)
class CloudbuildProjectsGithubInstallationsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubInstallationsDeleteRequest object.

  Fields:
    installationId: GitHub app installation ID.
    name: The name of the `GitHubInstallation` to delete. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: Cloud Project ID.
  """

  installationId = _messages.IntegerField(1, required=True)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsGithubInstallationsListRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubInstallationsListRequest object.

  Fields:
    parent: The parent resource where github installations for project will be
      listed. Format: `projects/{project}/locations/{location}`
    projectId: Project id
  """

  parent = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)
class CloudbuildProjectsGithubInstallationsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsGithubInstallationsPatchRequest object.

  Fields:
    id: GitHub installation ID, created by GitHub.
    installation: A Installation resource to be passed as the request body.
    installationId: Unique identifier of the GitHub installation. Deprecated.
      Should set installation.id
    name: The name of the `GitHubInstallation` to update. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: The project ID of the GCP project the installation is
      associated with.
    projectId1: ID of the project.
    updateMask: Update mask for the Installation resource. If this is set, the
      server will only update the fields specified in the field mask.
      Otherwise, a full update of the resource will be performed.
  """

  id = _messages.IntegerField(1, required=True)
  installation = _messages.MessageField('Installation', 2)
  installationId = _messages.IntegerField(3)  # deprecated; prefer installation.id
  name = _messages.StringField(4)
  projectId = _messages.StringField(5, required=True)
  projectId1 = _messages.StringField(6)
  updateMask = _messages.StringField(7)
class CloudbuildProjectsInstallationsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsInstallationsCreateRequest object.

  Fields:
    installation: A Installation resource to be passed as the request body.
    parent: The parent resource where this github installation will be
      created. Format: `projects/{project}/locations/{location}`
    projectId: ID of the project.
    userOauthCode: GitHub user code. If a GitHub credential is already
      associated with the user this can be omitted, else the code is used to
      exchange and store an OAuth token.
  """

  installation = _messages.MessageField('Installation', 1)
  parent = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
  userOauthCode = _messages.StringField(4)
class CloudbuildProjectsInstallationsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsInstallationsDeleteRequest object.

  Fields:
    installationId: GitHub app installation ID.
    name: The name of the `GitHubInstallation` to delete. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: Cloud Project ID.
  """

  installationId = _messages.IntegerField(1, required=True)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsInstallationsListRequest(_messages.Message):
  r"""A CloudbuildProjectsInstallationsListRequest object.

  Fields:
    parent: The parent resource where github installations for project will be
      listed. Format: `projects/{project}/locations/{location}`
    projectId: Project id
  """

  parent = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)
class CloudbuildProjectsInstallationsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsInstallationsPatchRequest object.

  Fields:
    id: GitHub installation ID, created by GitHub.
    installation: A Installation resource to be passed as the request body.
    installationId: Unique identifier of the GitHub installation. Deprecated.
      Should set installation.id
    name: The name of the `GitHubInstallation` to update. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: ID of the project.
    projectNum: Numerical ID of the project.
    updateMask: Update mask for the Installation resource. If this is set, the
      server will only update the fields specified in the field mask.
      Otherwise, a full update of the resource will be performed.
  """

  id = _messages.IntegerField(1, required=True)
  installation = _messages.MessageField('Installation', 2)
  installationId = _messages.IntegerField(3)  # deprecated; prefer installation.id
  name = _messages.StringField(4)
  projectId = _messages.StringField(5)
  projectNum = _messages.IntegerField(6, required=True)
  updateMask = _messages.StringField(7)
class CloudbuildProjectsLocationsBitbucketServerConfigsAddBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsAddBitbucketServerCon
  nectedRepositoryRequest object.

  Fields:
    addBitbucketServerConnectedRepositoryRequest: A
      AddBitbucketServerConnectedRepositoryRequest resource to be passed as
      the request body.
    config: Required. The name of the `BitbucketServerConfig` to add a
      connected repository. Format: `projects/{project}/locations/{location}/b
      itbucketServerConfigs/{config}`
  """

  addBitbucketServerConnectedRepositoryRequest = _messages.MessageField('AddBitbucketServerConnectedRepositoryRequest', 1)
  config = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsConnectedRepositoriesBatchCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsConnectedRepositories
  BatchCreateRequest object.

  Fields:
    batchCreateBitbucketServerConnectedRepositoriesRequest: A
      BatchCreateBitbucketServerConnectedRepositoriesRequest resource to be
      passed as the request body.
    parent: The name of the `BitbucketServerConfig` that added connected
      repository. Format: `projects/{project}/locations/{location}/bitbucketSe
      rverConfigs/{config}`
  """

  batchCreateBitbucketServerConnectedRepositoriesRequest = _messages.MessageField('BatchCreateBitbucketServerConnectedRepositoriesRequest', 1)
  parent = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsCreateRequest object.

  Fields:
    bitbucketServerConfig: A BitbucketServerConfig resource to be passed as
      the request body.
    bitbucketServerConfigId: Optional. The ID to use for the
      BitbucketServerConfig, which will become the final component of the
      BitbucketServerConfig's resource name. bitbucket_server_config_id must
      meet the following requirements: + They must contain only alphanumeric
      characters and dashes. + They can be 1-64 characters long. + They must
      begin and end with an alphanumeric character.
    parent: Required. Name of the parent resource.
  """

  bitbucketServerConfig = _messages.MessageField('BitbucketServerConfig', 1)
  bitbucketServerConfigId = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsDeleteRequest object.

  Fields:
    name: Required. The config resource name.
  """

  name = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsGetRequest object.

  Fields:
    name: Required. The config resource name.
  """

  name = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsListRequest object.

  Fields:
    pageSize: The maximum number of configs to return. The service may return
      fewer than this value. If unspecified, at most 50 configs will be
      returned. The maximum value is 1000; values above 1000 will be coerced
      to 1000.
    pageToken: A page token, received from a previous
      `ListBitbucketServerConfigsRequest` call. Provide this to retrieve the
      subsequent page. When paginating, all other parameters provided to
      `ListBitbucketServerConfigsRequest` must match the call that provided
      the page token.
    parent: Required. Name of the parent resource.
  """

  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
class CloudbuildProjectsLocationsBitbucketServerConfigsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsPatchRequest object.

  Fields:
    bitbucketServerConfig: A BitbucketServerConfig resource to be passed as
      the request body.
    name: The resource name for the config.
    updateMask: Update mask for the resource. If this is set, the server will
      only update the fields specified in the field mask. Otherwise, a full
      update of the mutable resource fields will be performed.
  """

  bitbucketServerConfig = _messages.MessageField('BitbucketServerConfig', 1)
  name = _messages.StringField(2, required=True)
  updateMask = _messages.StringField(3)
class CloudbuildProjectsLocationsBitbucketServerConfigsRemoveBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsRemoveBitbucketServer
  ConnectedRepositoryRequest object.

  Fields:
    config: Required. The name of the `BitbucketServerConfig` to remove a
      connected repository. Format: `projects/{project}/locations/{location}/b
      itbucketServerConfigs/{config}`
    removeBitbucketServerConnectedRepositoryRequest: A
      RemoveBitbucketServerConnectedRepositoryRequest resource to be passed as
      the request body.
  """

  config = _messages.StringField(1, required=True)
  removeBitbucketServerConnectedRepositoryRequest = _messages.MessageField('RemoveBitbucketServerConnectedRepositoryRequest', 2)
class CloudbuildProjectsLocationsBitbucketServerConfigsReposListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBitbucketServerConfigsReposListRequest
  object.

  Fields:
    pageSize: The maximum number of configs to return. The service may return
      fewer than this value. If unspecified, at most 50 configs will be
      returned. The maximum value is 1000; values above 1000 will be coerced
      to 1000.
    pageToken: A page token, received from a previous
      `ListBitbucketServerRepositoriesRequest` call. Provide this to retrieve
      the subsequent page. When paginating, all other parameters provided to
      `ListBitbucketServerConfigsRequest` must match the call that provided
      the page token.
    parent: Required. Name of the parent resource.
  """

  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
class CloudbuildProjectsLocationsBuildsApproveRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBuildsApproveRequest object.

  Fields:
    approveBuildRequest: A ApproveBuildRequest resource to be passed as the
      request body.
    name: Required. Name of the target build. For example:
      "projects/{$project_id}/builds/{$build_id}"
  """

  approveBuildRequest = _messages.MessageField('ApproveBuildRequest', 1)
  name = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsBuildsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBuildsCreateRequest object.

  Fields:
    build: A Build resource to be passed as the request body.
    parent: The parent resource where this build will be created. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project.
  """

  build = _messages.MessageField('Build', 1)
  parent = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsBuildsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBuildsGetRequest object.

  Fields:
    id: Required. ID of the build.
    name: The name of the `Build` to retrieve. Format:
      `projects/{project}/locations/{location}/builds/{build}`
    projectId: Required. ID of the project.
  """

  id = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsBuildsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsBuildsListRequest object.

  Fields:
    filter: The raw filter text to constrain the results.
    pageSize: Number of results to return in the list.
    pageToken: The page token for the next page of Builds. If unspecified, the
      first page of results is returned. If the token is rejected for any
      reason, INVALID_ARGUMENT will be thrown. In this case, the token should
      be discarded, and pagination should be restarted from the first page of
      results. See https://google.aip.dev/158 for more.
    parent: The parent of the collection of `Builds`. Format:
      `projects/{project}/locations/location`
    projectId: Required. ID of the project.
  """

  filter = _messages.StringField(1)
  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(3)
  parent = _messages.StringField(4, required=True)
  projectId = _messages.StringField(5)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsCreateRequest
  object.

  Fields:
    gheConfigId: Optional. The ID to use for the GithubEnterpriseConfig, which
      will become the final component of the GithubEnterpriseConfig's resource
      name. ghe_config_id must meet the following requirements: + They must
      contain only alphanumeric characters and dashes. + They can be 1-64
      characters long. + They must begin and end with an alphanumeric
      character
    gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
      the request body.
    parent: Name of the parent project. For example:
      projects/{$project_number} or projects/{$project_id}
    projectId: ID of the project.
  """

  gheConfigId = _messages.StringField(1)
  gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 2)
  parent = _messages.StringField(3, required=True)
  projectId = _messages.StringField(4)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsDeleteRequest
  object.

  Fields:
    configId: Unique identifier of the `GitHubEnterpriseConfig`
    name: This field should contain the name of the enterprise config
      resource. For example:
      "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    projectId: ID of the project
  """

  configId = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsGetAppRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsGetAppRequest
  object.

  Fields:
    enterpriseConfigResource: Required. The name of the enterprise config
      resource associated with the GitHub App. For example:
      "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
  """

  enterpriseConfigResource = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsGetRequest object.

  Fields:
    configId: Unique identifier of the `GitHubEnterpriseConfig`
    name: This field should contain the name of the enterprise config
      resource. For example:
      "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    projectId: ID of the project
  """

  configId = _messages.StringField(1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsListRequest object.

  Fields:
    parent: Name of the parent project. For example:
      projects/{$project_number} or projects/{$project_id}
    projectId: ID of the project
  """

  parent = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2)
class CloudbuildProjectsLocationsGithubEnterpriseConfigsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubEnterpriseConfigsPatchRequest object.

  Fields:
    gitHubEnterpriseConfig: A GitHubEnterpriseConfig resource to be passed as
      the request body.
    name: Optional. The full resource name for the GitHubEnterpriseConfig For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    updateMask: Update mask for the resource. If this is set, the server will
      only update the fields specified in the field mask. Otherwise, a full
      update of the mutable resource fields will be performed.
  """

  gitHubEnterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 1)
  name = _messages.StringField(2, required=True)
  updateMask = _messages.StringField(3)
class CloudbuildProjectsLocationsGithubInstallationsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubInstallationsCreateRequest object.

  Fields:
    installation: A Installation resource to be passed as the request body.
    parent: The parent resource where this github installation will be
      created. Format: `projects/{project}/locations/{location}`
    projectId: ID of the project.
    userOauthCode: GitHub user code. If a GitHub credential is already
      associated with the user this can be omitted, else the code is used to
      exchange and store an OAuth token.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  installation = _messages.MessageField('Installation', 1)
  parent = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
  userOauthCode = _messages.StringField(4)
class CloudbuildProjectsLocationsGithubInstallationsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubInstallationsDeleteRequest object.

  Fields:
    installationId: GitHub app installation ID.
    installationsId: A string attribute.
    name: The name of the `GitHubInstallation` to delete. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: Cloud Project ID.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  installationId = _messages.IntegerField(1)
  installationsId = _messages.StringField(2, required=True)
  name = _messages.StringField(3, required=True)
  projectId = _messages.StringField(4)
class CloudbuildProjectsLocationsGithubInstallationsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubInstallationsListRequest object.

  Fields:
    parent: The parent resource where github installations for project will be
      listed. Format: `projects/{project}/locations/{location}`
    projectId: Project id
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  parent = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2)
class CloudbuildProjectsLocationsGithubInstallationsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsGithubInstallationsPatchRequest object.

  Fields:
    installation: A Installation resource to be passed as the request body.
    installationId: Unique identifier of the GitHub installation. Deprecated.
      Should set installation.id
    installationsId: A string attribute.
    name: The `Installation` name with format:
      `projects/{project}/locations/{location}/installations/{installation}`,
      where {installation} is GitHub installation ID created by GitHub.
    name1: The name of the `GitHubInstallation` to update. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: ID of the project.
    updateMask: Update mask for the Installation resource. If this is set, the
      server will only update the fields specified in the field mask.
      Otherwise, a full update of the resource will be performed.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  installation = _messages.MessageField('Installation', 1)
  # Deprecated per the API surface above; kept for wire compatibility.
  installationId = _messages.IntegerField(2)
  installationsId = _messages.StringField(3, required=True)
  name = _messages.StringField(4, required=True)
  # `name1` is the generator's rename of a second `name` parameter in the
  # discovery document; the awkward identifier is part of the API surface.
  name1 = _messages.StringField(5)
  projectId = _messages.StringField(6)
  updateMask = _messages.StringField(7)
class CloudbuildProjectsLocationsInstallationsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsInstallationsDeleteRequest object.

  Fields:
    installationId: GitHub app installation ID.
    name: The name of the `GitHubInstallation` to delete. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: Cloud Project ID.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  installationId = _messages.IntegerField(1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsInstallationsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsInstallationsListRequest object.

  Fields:
    parent: The parent resource where github installations for project will be
      listed. Format: `projects/{project}/locations/{location}`
    projectId: Project id
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  parent = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2)
class CloudbuildProjectsLocationsInstallationsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsInstallationsPatchRequest object.

  Fields:
    installation: A Installation resource to be passed as the request body.
    installationId: Unique identifier of the GitHub installation. Deprecated.
      Should set installation.id
    name: The `Installation` name with format:
      `projects/{project}/locations/{location}/installations/{installation}`,
      where {installation} is GitHub installation ID created by GitHub.
    name1: The name of the `GitHubInstallation` to update. Format:
      `projects/{project}/locations/{location}/installations/{installation}`
    projectId: ID of the project.
    updateMask: Update mask for the Installation resource. If this is set, the
      server will only update the fields specified in the field mask.
      Otherwise, a full update of the resource will be performed.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  installation = _messages.MessageField('Installation', 1)
  # Deprecated per the API surface above; kept for wire compatibility.
  installationId = _messages.IntegerField(2)
  name = _messages.StringField(3, required=True)
  # `name1` is the generator's rename of a second `name` parameter in the
  # discovery document; the awkward identifier is part of the API surface.
  name1 = _messages.StringField(4)
  projectId = _messages.StringField(5)
  updateMask = _messages.StringField(6)
class CloudbuildProjectsLocationsOperationsCancelRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsOperationsCancelRequest object.

  Fields:
    cancelOperationRequest: A CancelOperationRequest resource to be passed as
      the request body.
    name: The name of the operation resource to be cancelled.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
  name = _messages.StringField(2, required=True)
class CloudbuildProjectsLocationsOperationsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsOperationsGetRequest object.

  Fields:
    name: The name of the operation resource.
  """

  # Integer argument is the protobuf field number; do not renumber.
  name = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsTriggersCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersCreateRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    parent: The parent resource where this trigger will be created. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to configure automatic
      builds.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  parent = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
class CloudbuildProjectsLocationsTriggersDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersDeleteRequest object.

  Fields:
    name: The name of the `Trigger` to delete. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. ID of the `BuildTrigger` to delete.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2)
  triggerId = _messages.StringField(3)
class CloudbuildProjectsLocationsTriggersGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersGetRequest object.

  Fields:
    name: The name of the `Trigger` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. Identifier (`id` or `name`) of the `BuildTrigger` to
      get.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1, required=True)
  projectId = _messages.StringField(2)
  triggerId = _messages.StringField(3)
class CloudbuildProjectsLocationsTriggersListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersListRequest object.

  Fields:
    pageSize: Number of results to return in the list.
    pageToken: Token to provide to skip to a particular spot in the list.
    parent: The parent of the collection of `Triggers`. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to list BuildTriggers.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  # INT32 variant matches the API's declared 32-bit page size.
  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
  projectId = _messages.StringField(4)
class CloudbuildProjectsLocationsTriggersPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersPatchRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    projectId: Required. ID of the project that owns the trigger.
    resourceName: The `Trigger` name with format:
      `projects/{project}/locations/{location}/triggers/{trigger}`, where
      {trigger} is a unique identifier generated by the service.
    triggerId: Required. ID of the `BuildTrigger` to update.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  projectId = _messages.StringField(2)
  resourceName = _messages.StringField(3, required=True)
  triggerId = _messages.StringField(4)
class CloudbuildProjectsLocationsTriggersRunRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersRunRequest object.

  Fields:
    name: The name of the `Trigger` to run. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    runBuildTriggerRequest: A RunBuildTriggerRequest resource to be passed as
      the request body.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1, required=True)
  runBuildTriggerRequest = _messages.MessageField('RunBuildTriggerRequest', 2)
class CloudbuildProjectsLocationsTriggersWebhookRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsTriggersWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    name: The name of the `ReceiveTriggerWebhook` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Project in which the specified trigger lives
    secret: Secret token used for authorization if an OAuth token isn't
      provided.
    trigger: Name of the trigger to run the payload against
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  httpBody = _messages.MessageField('HttpBody', 1)
  name = _messages.StringField(2, required=True)
  projectId = _messages.StringField(3)
  secret = _messages.StringField(4)
  trigger = _messages.StringField(5)
class CloudbuildProjectsLocationsWorkerPoolsCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsCreateRequest object.

  Fields:
    parent: Required. The parent resource where this worker pool will be
      created. Format: `projects/{project}/locations/{location}`.
    validateOnly: If set, validate the request and preview the response, but
      do not actually post it.
    workerPool: A WorkerPool resource to be passed as the request body.
    workerPoolId: Required. Immutable. The ID to use for the `WorkerPool`,
      which will become the final component of the resource name. This value
      should be 1-63 characters, and valid characters are /a-z-/.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  parent = _messages.StringField(1, required=True)
  validateOnly = _messages.BooleanField(2)
  workerPool = _messages.MessageField('WorkerPool', 3)
  workerPoolId = _messages.StringField(4)
class CloudbuildProjectsLocationsWorkerPoolsDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsDeleteRequest object.

  Fields:
    allowMissing: If set to true, and the `WorkerPool` is not found, the
      request will succeed but no action will be taken on the server.
    etag: Optional. If provided, it must match the server's etag on the
      workerpool for the request to be processed.
    name: Required. The name of the `WorkerPool` to delete. Format:
      `projects/{project}/locations/{workerPool}/workerPools/{workerPool}`.
    validateOnly: If set, validate the request and preview the response, but
      do not actually post it.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  allowMissing = _messages.BooleanField(1)
  etag = _messages.StringField(2)
  name = _messages.StringField(3, required=True)
  validateOnly = _messages.BooleanField(4)
class CloudbuildProjectsLocationsWorkerPoolsGetRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsGetRequest object.

  Fields:
    name: Required. The name of the `WorkerPool` to retrieve. Format:
      `projects/{project}/locations/{location}/workerPools/{workerPool}`.
  """

  # Integer argument is the protobuf field number; do not renumber.
  name = _messages.StringField(1, required=True)
class CloudbuildProjectsLocationsWorkerPoolsListRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsListRequest object.

  Fields:
    pageSize: The maximum number of `WorkerPool`s to return. The service may
      return fewer than this value. If omitted, the server will use a sensible
      default.
    pageToken: A page token, received from a previous `ListWorkerPools` call.
      Provide this to retrieve the subsequent page.
    parent: Required. The parent of the collection of `WorkerPools`. Format:
      `projects/{project}/locations/{location}`.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  # INT32 variant matches the API's declared 32-bit page size.
  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
class CloudbuildProjectsLocationsWorkerPoolsPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsLocationsWorkerPoolsPatchRequest object.

  Fields:
    name: Output only. The resource name of the `WorkerPool`, with format
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`. The
      value of `{worker_pool}` is provided by `worker_pool_id` in
      `CreateWorkerPool` request and the value of `{location}` is determined
      by the endpoint accessed.
    updateMask: A mask specifying which fields in `worker_pool` to update.
    validateOnly: If set, validate the request and preview the response, but
      do not actually post it.
    workerPool: A WorkerPool resource to be passed as the request body.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1, required=True)
  updateMask = _messages.StringField(2)
  validateOnly = _messages.BooleanField(3)
  workerPool = _messages.MessageField('WorkerPool', 4)
class CloudbuildProjectsTriggersCreateRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersCreateRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    parent: The parent resource where this trigger will be created. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to configure automatic
      builds.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  # Unlike the Locations variant, the project-scoped method requires
  # projectId (path parameter) rather than parent.
  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  parent = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsTriggersDeleteRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersDeleteRequest object.

  Fields:
    name: The name of the `Trigger` to delete. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. ID of the `BuildTrigger` to delete.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)
  triggerId = _messages.StringField(3, required=True)
class CloudbuildProjectsTriggersGetRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersGetRequest object.

  Fields:
    name: The name of the `Trigger` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. Identifier (`id` or `name`) of the `BuildTrigger` to
      get.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)
  triggerId = _messages.StringField(3, required=True)
class CloudbuildProjectsTriggersListRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersListRequest object.

  Fields:
    pageSize: Number of results to return in the list.
    pageToken: Token to provide to skip to a particular spot in the list.
    parent: The parent of the collection of `Triggers`. Format:
      `projects/{project}/locations/{location}`
    projectId: Required. ID of the project for which to list BuildTriggers.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  # INT32 variant matches the API's declared 32-bit page size.
  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3)
  projectId = _messages.StringField(4, required=True)
class CloudbuildProjectsTriggersPatchRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersPatchRequest object.

  Fields:
    buildTrigger: A BuildTrigger resource to be passed as the request body.
    projectId: Required. ID of the project that owns the trigger.
    triggerId: Required. ID of the `BuildTrigger` to update.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  buildTrigger = _messages.MessageField('BuildTrigger', 1)
  projectId = _messages.StringField(2, required=True)
  triggerId = _messages.StringField(3, required=True)
class CloudbuildProjectsTriggersRunRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersRunRequest object.

  Fields:
    name: The name of the `Trigger` to run. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Required. ID of the project.
    repoSource: A RepoSource resource to be passed as the request body.
    triggerId: Required. ID of the trigger.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1)
  projectId = _messages.StringField(2, required=True)
  repoSource = _messages.MessageField('RepoSource', 3)
  triggerId = _messages.StringField(4, required=True)
class CloudbuildProjectsTriggersWebhookRequest(_messages.Message):
  r"""A CloudbuildProjectsTriggersWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    name: The name of the `ReceiveTriggerWebhook` to retrieve. Format:
      `projects/{project}/locations/{location}/triggers/{trigger}`
    projectId: Project in which the specified trigger lives
    secret: Secret token used for authorization if an OAuth token isn't
      provided.
    trigger: Name of the trigger to run the payload against
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  httpBody = _messages.MessageField('HttpBody', 1)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3, required=True)
  secret = _messages.StringField(4)
  trigger = _messages.StringField(5, required=True)
class CloudbuildWebhookRequest(_messages.Message):
  r"""A CloudbuildWebhookRequest object.

  Fields:
    httpBody: A HttpBody resource to be passed as the request body.
    webhookKey: For GitHub Enterprise webhooks, this key is used to associate
      the webhook request with the GitHubEnterpriseConfig to use for
      validation.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  httpBody = _messages.MessageField('HttpBody', 1)
  webhookKey = _messages.StringField(2)
class ClusterOptions(_messages.Message):
  r"""Details of the GKE Cluster for builds that should execute on-cluster.

  Fields:
    name: Identifier of the GKE Cluster this build should execute on. Example:
      projects/{project_id}/locations/{location}/clusters/{cluster_name} The
      cluster's project ID must be the same project ID that is running the
      build. The cluster must exist and have the CloudBuild add-on enabled.
  """

  # Integer argument is the protobuf field number; do not renumber.
  name = _messages.StringField(1)
class CreateBitbucketServerConfigOperationMetadata(_messages.Message):
  r"""Metadata for `CreateBitbucketServerConfig` operation.

  Fields:
    bitbucketServerConfig: The resource name of the BitbucketServerConfig to
      be created. Format:
      `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  bitbucketServerConfig = _messages.StringField(1)
  completeTime = _messages.StringField(2)
  createTime = _messages.StringField(3)
class CreateBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""Request to connect a repository from a connected Bitbucket Server host.

  Fields:
    bitbucketServerConnectedRepository: Required. The Bitbucket Server
      repository to connect.
    parent: Required. The name of the `BitbucketServerConfig` that added
      connected repository. Format: `projects/{project}/locations/{location}/b
      itbucketServerConfigs/{config}`
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  bitbucketServerConnectedRepository = _messages.MessageField('BitbucketServerConnectedRepository', 1)
  parent = _messages.StringField(2)
class CreateGitHubEnterpriseConfigOperationMetadata(_messages.Message):
  r"""Metadata for `CreateGithubEnterpriseConfig` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      created. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  githubEnterpriseConfig = _messages.StringField(3)
class CreateWorkerPoolOperationMetadata(_messages.Message):
  r"""Metadata for the `CreateWorkerPool` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    workerPool: The resource name of the `WorkerPool` to create. Format:
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  workerPool = _messages.StringField(3)
class CronConfig(_messages.Message):
  r"""CronConfig describes the configuration of a trigger that creates a build
  whenever a Cloud Scheduler event is received.

  Fields:
    enterpriseConfigResource: The GitHub Enterprise config resource name that
      is associated with this installation.
    schedule: Required. Describes the schedule on which the job will be
      executed. The schedule can be either of the following types: *
      [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like
      [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-
      schedules)
    timeZone: Specifies the time zone to be used in interpreting the schedule.
      The value of this field must be a time zone name from the [tz database]
      (http://en.wikipedia.org/wiki/Tz_database). Note that some time zones
      include a provision for daylight savings time. The rules for daylight
      saving time are determined by the chosen tz. For UTC use the string
      "utc". If a time zone is not specified, the default will be in UTC (also
      known as GMT).
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  enterpriseConfigResource = _messages.StringField(1)
  schedule = _messages.StringField(2)
  timeZone = _messages.StringField(3)
class DeleteBitbucketServerConfigOperationMetadata(_messages.Message):
  r"""Metadata for `DeleteBitbucketServerConfig` operation.

  Fields:
    bitbucketServerConfig: The resource name of the BitbucketServerConfig to
      be deleted. Format:
      `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  bitbucketServerConfig = _messages.StringField(1)
  completeTime = _messages.StringField(2)
  createTime = _messages.StringField(3)
class DeleteGitHubEnterpriseConfigOperationMetadata(_messages.Message):
  r"""Metadata for `DeleteGitHubEnterpriseConfig` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      deleted. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  githubEnterpriseConfig = _messages.StringField(3)
class DeleteWorkerPoolOperationMetadata(_messages.Message):
  r"""Metadata for the `DeleteWorkerPool` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    workerPool: The resource name of the `WorkerPool` being deleted. Format:
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  workerPool = _messages.StringField(3)
class Empty(_messages.Message):
  r"""A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance: service Foo { rpc
  Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON
  representation for `Empty` is empty JSON object `{}`.
  """
  # Intentionally has no fields: mirrors google.protobuf.Empty.
class FailureInfo(_messages.Message):
  r"""A fatal problem encountered during the execution of the build.

  Enums:
    TypeValueValuesEnum: The name of the failure.

  Fields:
    detail: Explains the failure issue in more detail using hard-coded text.
    type: The name of the failure.
  """

  class TypeValueValuesEnum(_messages.Enum):
    r"""The name of the failure.

    Values:
      FAILURE_TYPE_UNSPECIFIED: Type unspecified
      PUSH_FAILED: Unable to push the image to the repository.
      PUSH_IMAGE_NOT_FOUND: Final image not found.
      PUSH_NOT_AUTHORIZED: Unauthorized push of the final image.
      LOGGING_FAILURE: Backend logging failures. Should retry.
      USER_BUILD_STEP: A build step has failed.
      FETCH_SOURCE_FAILED: The source fetching has failed.
    """
    # Numeric values are the enum's wire values; do not renumber.
    FAILURE_TYPE_UNSPECIFIED = 0
    PUSH_FAILED = 1
    PUSH_IMAGE_NOT_FOUND = 2
    PUSH_NOT_AUTHORIZED = 3
    LOGGING_FAILURE = 4
    USER_BUILD_STEP = 5
    FETCH_SOURCE_FAILED = 6

  detail = _messages.StringField(1)
  # `type` shadows the builtin, but the name is fixed by the API surface.
  type = _messages.EnumField('TypeValueValuesEnum', 2)
class FileHashes(_messages.Message):
  r"""Container message for hashes of byte content of files, used in
  SourceProvenance messages to verify integrity of source input to the build.

  Fields:
    fileHash: Collection of file hashes.
  """

  # repeated=True: serialized as a repeated protobuf field (list of Hash).
  fileHash = _messages.MessageField('Hash', 1, repeated=True)
class GCSLocation(_messages.Message):
  r"""Represents a storage location in Cloud Storage

  Fields:
    bucket: Google Cloud Storage bucket. See
      https://cloud.google.com/storage/docs/naming#requirements
    generation: Google Cloud Storage generation for the object. If the
      generation is omitted, the latest generation will be used.
    object: Google Cloud Storage object. See
      https://cloud.google.com/storage/docs/naming#objectnames
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  bucket = _messages.StringField(1)
  generation = _messages.IntegerField(2)
  # `object` shadows the builtin, but the name is fixed by the API surface.
  object = _messages.StringField(3)
class GitFileSource(_messages.Message):
  r"""GitFileSource describes a file within a (possibly remote) code
  repository.

  Enums:
    RepoTypeValueValuesEnum: See RepoType above.

  Fields:
    path: The path of the file, with the repo root as the root of the path.
    repoType: See RepoType above.
    revision: The branch, tag, arbitrary ref, or SHA version of the repo to
      use when resolving the filename (optional). This field respects the same
      syntax/resolution as described here: https://git-
      scm.com/docs/gitrevisions If unspecified, the revision from which the
      trigger invocation originated is assumed to be the revision from which
      to read the specified path.
    uri: The URI of the repo (optional). If unspecified, the repo from which
      the trigger invocation originated is assumed to be the repo from which
      to read the specified path.
  """

  class RepoTypeValueValuesEnum(_messages.Enum):
    r"""See RepoType above.

    Values:
      UNKNOWN: The default, unknown repo type.
      CLOUD_SOURCE_REPOSITORIES: A Google Cloud Source Repositories-hosted
        repo.
      GITHUB: A GitHub-hosted repo not necessarily on "github.com" (i.e.
        GitHub Enterprise).
    """
    # Numeric values are the enum's wire values; do not renumber.
    UNKNOWN = 0
    CLOUD_SOURCE_REPOSITORIES = 1
    GITHUB = 2

  path = _messages.StringField(1)
  repoType = _messages.EnumField('RepoTypeValueValuesEnum', 2)
  revision = _messages.StringField(3)
  uri = _messages.StringField(4)
class GitHubEnterpriseApp(_messages.Message):
  r"""RPC response object returned by the GetGitHubEnterpriseApp RPC method.

  Fields:
    name: Name of the GitHub App
    slug: Slug (URL friendly name) of the GitHub App. This can be found on the
      settings page for the GitHub App (e.g.
      https://github.com/settings/apps/:app_slug) GitHub docs:
      https://docs.github.com/en/free-pro-team@latest/rest/reference/apps#get-
      an-app
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1)
  slug = _messages.StringField(2)
class GitHubEnterpriseConfig(_messages.Message):
  r"""GitHubEnterpriseConfig represents a configuration for a GitHub
  Enterprise server.

  Fields:
    appConfigJson: Cloud Storage location of the encrypted GitHub App config
      information.
    appId: Required. The GitHub app id of the Cloud Build app on the GitHub
      Enterprise server.
    createTime: Output only. Time when the installation was associated with
      the project.
    displayName: Name to display for this config.
    hostUrl: The URL of the github enterprise host the configuration is for.
    name: Optional. The full resource name for the GitHubEnterpriseConfig For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    peeredNetwork: Optional. The network to be used when reaching out to the
      GitHub Enterprise server. The VPC network must be enabled for private
      service connection. This should be set if the GitHub Enterprise server
      is hosted on-premises and not reachable by public internet. If this
      field is left empty, no network peering will occur and calls to the
      GitHub Enterprise server will be made over the public internet. Must be
      in the format `projects/{project}/global/networks/{network}`, where
      {project} is a project number or id and {network} is the name of a VPC
      network in the project.
    secrets: Names of secrets in Secret Manager.
    sslCa: Optional. SSL certificate to use for requests to GitHub Enterprise.
    webhookKey: The key that should be attached to webhook calls to the
      ReceiveWebhook endpoint.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  appConfigJson = _messages.MessageField('GCSLocation', 1)
  appId = _messages.IntegerField(2)
  createTime = _messages.StringField(3)
  displayName = _messages.StringField(4)
  hostUrl = _messages.StringField(5)
  name = _messages.StringField(6)
  peeredNetwork = _messages.StringField(7)
  secrets = _messages.MessageField('GitHubEnterpriseSecrets', 8)
  sslCa = _messages.StringField(9)
  webhookKey = _messages.StringField(10)
class GitHubEnterpriseSecrets(_messages.Message):
  r"""GitHubEnterpriseSecrets represents the names of all necessary secrets in
  Secret Manager for a GitHub Enterprise server. Format is:
  projects//secrets/.

  Fields:
    oauthClientIdName: The resource name for the OAuth client ID secret in
      Secret Manager.
    oauthClientIdVersionName: The resource name for the OAuth client ID secret
      version in Secret Manager.
    oauthSecretName: The resource name for the OAuth secret in Secret Manager.
    oauthSecretVersionName: The resource name for the OAuth secret secret
      version in Secret Manager.
    privateKeyName: The resource name for the private key secret.
    privateKeyVersionName: The resource name for the private key secret
      version.
    webhookSecretName: The resource name for the webhook secret in Secret
      Manager.
    webhookSecretVersionName: The resource name for the webhook secret secret
      version in Secret Manager.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  oauthClientIdName = _messages.StringField(1)
  oauthClientIdVersionName = _messages.StringField(2)
  oauthSecretName = _messages.StringField(3)
  oauthSecretVersionName = _messages.StringField(4)
  privateKeyName = _messages.StringField(5)
  privateKeyVersionName = _messages.StringField(6)
  webhookSecretName = _messages.StringField(7)
  webhookSecretVersionName = _messages.StringField(8)
class GitHubEventsConfig(_messages.Message):
  r"""GitHubEventsConfig describes the configuration of a trigger that creates
  a build whenever a GitHub event is received.

  Fields:
    enterpriseConfig: Output only. The GitHubEnterpriseConfig enterprise
      config specified in the enterprise_config_resource_name field.
    enterpriseConfigResourceName: Optional. The resource name of the github
      enterprise config that should be applied to this installation. For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    installationId: The installationID that emits the GitHub event.
    name: Name of the repository. For example: The name for
      https://github.com/googlecloudplatform/cloud-builders is "cloud-
      builders".
    owner: Owner of the repository. For example: The owner for
      https://github.com/googlecloudplatform/cloud-builders is
      "googlecloudplatform".
    pullRequest: filter to match changes in pull requests.
    push: filter to match changes in refs like branches, tags.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  enterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 1)
  enterpriseConfigResourceName = _messages.StringField(2)
  installationId = _messages.IntegerField(3)
  name = _messages.StringField(4)
  owner = _messages.StringField(5)
  pullRequest = _messages.MessageField('PullRequestFilter', 6)
  push = _messages.MessageField('PushFilter', 7)
class GitHubRepositorySetting(_messages.Message):
  r"""Represents a GitHub repository setting.

  Fields:
    name: Name of the repository.
    owner: GitHub user or organization name.
  """

  # Integer arguments are protobuf field numbers; do not renumber.
  name = _messages.StringField(1)
  owner = _messages.StringField(2)
class GitHubRepositorySettingList(_messages.Message):
  r"""A wrapper message for a list of GitHubRepositorySettings.

  Fields:
    repositorySettings: A list of GitHubRepositorySettings.
  """

  # repeated=True: serialized as a repeated protobuf field.
  repositorySettings = _messages.MessageField('GitHubRepositorySetting', 1, repeated=True)
class GitRepoSource(_messages.Message):
  r"""GitRepoSource describes a repo and ref of a code repository.

  Enums:
    RepoTypeValueValuesEnum: See RepoType below.

  Fields:
    ref: The branch or tag to use. Must start with "refs/" (required).
    repoType: See RepoType below.
    uri: The URI of the repo (required).
  """

  class RepoTypeValueValuesEnum(_messages.Enum):
    r"""See RepoType below.

    Values:
      UNKNOWN: The default, unknown repo type.
      CLOUD_SOURCE_REPOSITORIES: A Google Cloud Source Repositories-hosted
        repo.
      GITHUB: A GitHub-hosted repo not necessarily on "github.com" (i.e.
        GitHub Enterprise).
    """
    # Numeric values are the enum's wire values; do not renumber.
    UNKNOWN = 0
    CLOUD_SOURCE_REPOSITORIES = 1
    GITHUB = 2

  ref = _messages.StringField(1)
  repoType = _messages.EnumField('RepoTypeValueValuesEnum', 2)
  uri = _messages.StringField(3)
class GitSource(_messages.Message):
  r"""Location of the source in any accessible Git repository.

  Fields:
    dir: Directory, relative to the source root, in which to run the build.
      This must be a relative path. If a step's `dir` is specified and is an
      absolute path, this value is ignored for that step's execution.
    revision: The revision to fetch from the Git repository such as a branch,
      a tag, a commit SHA, or any Git ref. Cloud Build uses `git fetch` to
      fetch the revision from the Git repository; therefore make sure that the
      string you provide for `revision` is parsable by the command. For
      information on string values accepted by `git fetch`, see https://git-
      scm.com/docs/gitrevisions#_specifying_revisions. For information on `git
      fetch`, see https://git-scm.com/docs/git-fetch.
    url: Location of the Git repo to build.
  """

  dir = _messages.StringField(1)
  revision = _messages.StringField(2)
  url = _messages.StringField(3)
class GoogleDevtoolsCloudbuildV1BuildOptionsPoolOptionWorkerConfig(_messages.Message):
  r"""Configuration per workload for both Private Pools and Hybrid Pools.

  Fields:
    diskSizeGb: The disk size (in GB) which is requested for the build
      container. If unset, a value of 10 GB will be used.
    memoryGb: The memory (in GB) which is requested for the build container.
      If unset, a value of 4 GB will be used.
    vcpuCount: The number of vCPUs which are requested for the build
      container. If unset, a value of 1 will be used.
  """

  diskSizeGb = _messages.IntegerField(1)
  # Explicit FLOAT variant (the protorpc default for FloatField is DOUBLE).
  memoryGb = _messages.FloatField(2, variant=_messages.Variant.FLOAT)
  vcpuCount = _messages.FloatField(3, variant=_messages.Variant.FLOAT)
class GoogleDevtoolsCloudbuildV2OperationMetadata(_messages.Message):
  r"""Represents the metadata of the long-running operation.

  Fields:
    apiVersion: Output only. API version used to start the operation.
    createTime: Output only. The time the operation was created.
    endTime: Output only. The time the operation finished running.
    requestedCancellation: Output only. Identifies whether the user has
      requested cancellation of the operation. Operations that have
      successfully been cancelled have Operation.error value with a
      google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
    statusMessage: Output only. Human-readable status of the operation, if
      any.
    target: Output only. Server-defined resource path for the target of the
      operation.
    verb: Output only. Name of the verb executed by the operation.
  """

  apiVersion = _messages.StringField(1)
  createTime = _messages.StringField(2)
  endTime = _messages.StringField(3)
  requestedCancellation = _messages.BooleanField(4)
  statusMessage = _messages.StringField(5)
  target = _messages.StringField(6)
  verb = _messages.StringField(7)
class HTTPDelivery(_messages.Message):
  r"""HTTPDelivery is the delivery configuration for an HTTP notification.

  Fields:
    uri: The URI to which JSON-containing HTTP POST requests should be sent.
  """

  uri = _messages.StringField(1)
class Hash(_messages.Message):
  r"""Container message for hash values.

  Enums:
    TypeValueValuesEnum: The type of hash that was performed.

  Fields:
    type: The type of hash that was performed.
    value: The hash value.
  """

  class TypeValueValuesEnum(_messages.Enum):
    r"""The type of hash that was performed.

    Values:
      NONE: No hash requested.
      SHA256: Use a sha256 hash.
      MD5: Use a md5 hash.
    """
    NONE = 0
    SHA256 = 1
    MD5 = 2

  type = _messages.EnumField('TypeValueValuesEnum', 1)
  value = _messages.BytesField(2)
class HttpBody(_messages.Message):
  r"""Message that represents an arbitrary HTTP body. It should only be used
  for payload formats that can't be represented as JSON, such as raw binary or
  an HTML page. This message can be used both in streaming and non-streaming
  API methods in the request as well as the response. It can be used as a top-
  level request field, which is convenient if one wants to extract parameters
  from either the URL or HTTP template into the request fields and also want
  access to the raw HTTP body. Example: message GetResourceRequest { // A
  unique request id. string request_id = 1; // The raw HTTP body is bound to
  this field. google.api.HttpBody http_body = 2; } service ResourceService {
  rpc GetResource(GetResourceRequest) returns (google.api.HttpBody); rpc
  UpdateResource(google.api.HttpBody) returns (google.protobuf.Empty); }
  Example with streaming methods: service CaldavService { rpc
  GetCalendar(stream google.api.HttpBody) returns (stream
  google.api.HttpBody); rpc UpdateCalendar(stream google.api.HttpBody) returns
  (stream google.api.HttpBody); } Use of this type only changes how the
  request and response bodies are handled, all other features will continue to
  work unchanged.

  Messages:
    ExtensionsValueListEntry: A ExtensionsValueListEntry object.

  Fields:
    contentType: The HTTP Content-Type header value specifying the content
      type of the body.
    data: The HTTP request/response body as raw binary.
    extensions: Application specific response metadata. Must be set in the
      first response for streaming APIs.
  """

  # Captures arbitrary JSON keys that are not declared fields of the entry.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class ExtensionsValueListEntry(_messages.Message):
    r"""A ExtensionsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for a
        ExtensionsValueListEntry object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ExtensionsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  contentType = _messages.StringField(1)
  data = _messages.BytesField(2)
  extensions = _messages.MessageField('ExtensionsValueListEntry', 3, repeated=True)
class HybridPoolConfig(_messages.Message):
  r"""Configuration for a Hybrid Worker Pool Next ID: 6

  Enums:
    BuilderImageCachingValueValuesEnum: Immutable. Controls how the worker
      pool caches images. If unspecified during worker pool creation, this
      field is defaulted to CACHING_DISABLED.

  Fields:
    builderImageCaching: Immutable. Controls how the worker pool caches
      images. If unspecified during worker pool creation, this field is
      defaulted to CACHING_DISABLED.
    defaultWorkerConfig: Default settings which will be applied to builds on
      this worker pool if they are not specified in the build request.
    membership: Required. Immutable. The Anthos/GKE Hub membership of the
      cluster which will run the actual build operations. Example:
      projects/{project}/locations/{location}/memberships/{cluster_name}
  """

  class BuilderImageCachingValueValuesEnum(_messages.Enum):
    r"""Immutable. Controls how the worker pool caches images. If unspecified
    during worker pool creation, this field is defaulted to CACHING_DISABLED.

    Values:
      BUILDER_IMAGE_CACHING_UNSPECIFIED: Default enum type. This should not be
        used.
      CACHING_DISABLED: DinD caching is disabled and no caching resources are
        provisioned.
      VOLUME_CACHING: A PersistentVolumeClaim is provisioned for caching.
    """
    BUILDER_IMAGE_CACHING_UNSPECIFIED = 0
    CACHING_DISABLED = 1
    VOLUME_CACHING = 2

  builderImageCaching = _messages.EnumField('BuilderImageCachingValueValuesEnum', 1)
  defaultWorkerConfig = _messages.MessageField('HybridWorkerConfig', 2)
  membership = _messages.StringField(3)
class HybridWorkerConfig(_messages.Message):
  r"""These settings can be applied to a user's build operations. Next ID: 4

  Fields:
    diskSizeGb: The disk size (in GB) which is requested for the build
      container. Defaults to 10 GB.
    memoryGb: The memory (in GB) which is requested for the build container.
      Defaults to 4 GB.
    vcpuCount: The number of vCPUs which are requested for the build
      container. Defaults to 1.
  """

  diskSizeGb = _messages.IntegerField(1)
  # Explicit FLOAT variant (the protorpc default for FloatField is DOUBLE).
  memoryGb = _messages.FloatField(2, variant=_messages.Variant.FLOAT)
  vcpuCount = _messages.FloatField(3, variant=_messages.Variant.FLOAT)
class InlineSecret(_messages.Message):
  r"""Pairs a set of secret environment variables mapped to encrypted values
  with the Cloud KMS key to use to decrypt the value.

  Messages:
    EnvMapValue: Map of environment variable name to its encrypted value.
      Secret environment variables must be unique across all of a build's
      secrets, and must be used by at least one build step. Values can be at
      most 64 KB in size. There can be at most 100 secret values across all of
      a build's secrets.

  Fields:
    envMap: Map of environment variable name to its encrypted value. Secret
      environment variables must be unique across all of a build's secrets,
      and must be used by at least one build step. Values can be at most 64 KB
      in size. There can be at most 100 secret values across all of a build's
      secrets.
    kmsKeyName: Resource name of Cloud KMS crypto key to decrypt the encrypted
      value. In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
  """

  # Map-typed proto field: arbitrary env-var names become AdditionalProperty
  # entries rather than declared fields.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class EnvMapValue(_messages.Message):
    r"""Map of environment variable name to its encrypted value. Secret
    environment variables must be unique across all of a build's secrets, and
    must be used by at least one build step. Values can be at most 64 KB in
    size. There can be at most 100 secret values across all of a build's
    secrets.

    Messages:
      AdditionalProperty: An additional property for a EnvMapValue object.

    Fields:
      additionalProperties: Additional properties of type EnvMapValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a EnvMapValue object.

      Fields:
        key: Name of the additional property.
        value: A byte attribute.
      """

      key = _messages.StringField(1)
      value = _messages.BytesField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  envMap = _messages.MessageField('EnvMapValue', 1)
  kmsKeyName = _messages.StringField(2)
class Installation(_messages.Message):
  r"""A GitHub-app installation.

  Fields:
    createTime: Time when the installation was associated with the project.
      This field is immutable and cannot be updated.
    enterpriseConfig: Output only. The GitHubEnterpriseConfig enterprise
      config specified in the enterprise_config_resource_name field.
    enterpriseConfigResourceName: Optional: The resource name of the github
      enterprise config that should be applied to this installation. For
      example: "projects/{$project_id}/githubEnterpriseConfigs/{$config_id}"
    id: GitHub installation ID, created by GitHub.
    name: The `Installation` name with format:
      `projects/{project}/locations/{location}/installations/{installation}`,
      where {installation} is GitHub installation ID created by GitHub.
    projectId: The project ID of the GCP project the installation is
      associated with.
    projectNum: Numerical ID of the project.
    repositorySettingList: The GitHub repositories that we should respond to
      for this installation. If this is not set, we will respect the
      default_check_suite_events boolean for any repository visible for that
      installation.
  """

  createTime = _messages.StringField(1)
  enterpriseConfig = _messages.MessageField('GitHubEnterpriseConfig', 2)
  enterpriseConfigResourceName = _messages.StringField(3)
  id = _messages.IntegerField(4)
  name = _messages.StringField(5)
  projectId = _messages.StringField(6)
  projectNum = _messages.IntegerField(7)
  repositorySettingList = _messages.MessageField('GitHubRepositorySettingList', 8)
class ListBitbucketServerConfigsResponse(_messages.Message):
  r"""RPC response object returned by ListBitbucketServerConfigs RPC method.

  Fields:
    bitbucketServerConfigs: A list of BitbucketServerConfigs
    nextPageToken: A token that can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
  """

  bitbucketServerConfigs = _messages.MessageField('BitbucketServerConfig', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListBitbucketServerRepositoriesResponse(_messages.Message):
  r"""RPC response object returned by the ListBitbucketServerRepositories RPC
  method.

  Fields:
    bitbucketServerRepositories: List of Bitbucket Server repositories.
    nextPageToken: A token that can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
  """

  bitbucketServerRepositories = _messages.MessageField('BitbucketServerRepository', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListBuildTriggersResponse(_messages.Message):
  r"""Response containing existing `BuildTriggers`.

  Fields:
    nextPageToken: Token to receive the next page of results.
    triggers: `BuildTriggers` for the project, sorted by `create_time`
      descending.
  """

  nextPageToken = _messages.StringField(1)
  triggers = _messages.MessageField('BuildTrigger', 2, repeated=True)
class ListBuildsResponse(_messages.Message):
  r"""Response including listed builds.

  Fields:
    builds: Builds will be sorted by `create_time`, descending.
    nextPageToken: Token to receive the next page of results. This will be
      absent if the end of the response list has been reached.
  """

  builds = _messages.MessageField('Build', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListGitHubInstallationsForProjectResponse(_messages.Message):
  r"""RPC response object returned by the ListGitHubInstallations RPC method.

  Fields:
    installations: Installations belonging to the specified project_id.
  """

  installations = _messages.MessageField('Installation', 1, repeated=True)
class ListGitHubInstallationsResponse(_messages.Message):
  r"""RPC response object returned by the ListGitHubInstallations RPC method.

  Fields:
    installations: Installations matching the requested installation ID.
  """

  installations = _messages.MessageField('Installation', 1, repeated=True)
class ListGithubEnterpriseConfigsResponse(_messages.Message):
  r"""RPC response object returned by ListGithubEnterpriseConfigs RPC method.

  Fields:
    configs: A list of GitHubEnterpriseConfigs
  """

  configs = _messages.MessageField('GitHubEnterpriseConfig', 1, repeated=True)
class ListWorkerPoolsResponse(_messages.Message):
  r"""Response containing existing `WorkerPools`.

  Fields:
    nextPageToken: Continuation token used to page through large result sets.
      Provide this value in a subsequent ListWorkerPoolsRequest to return the
      next page of results.
    workerPools: `WorkerPools` for the specified project.
  """

  nextPageToken = _messages.StringField(1)
  workerPools = _messages.MessageField('WorkerPool', 2, repeated=True)
class NetworkConfig(_messages.Message):
  r"""Defines the network configuration for the pool.

  Enums:
    EgressOptionValueValuesEnum: Option to configure network egress for the
      workers.

  Fields:
    egressOption: Option to configure network egress for the workers.
    peeredNetwork: Required. Immutable. The network definition that the
      workers are peered to. If this section is left empty, the workers will
      be peered to `WorkerPool.project_id` on the service producer network.
      Must be in the format `projects/{project}/global/networks/{network}`,
      where `{project}` is a project number, such as `12345`, and `{network}`
      is the name of a VPC network in the project. See [Understanding network
      configuration options](https://cloud.google.com/build/docs/private-
      pools/set-up-private-pool-environment)
  """

  class EgressOptionValueValuesEnum(_messages.Enum):
    r"""Option to configure network egress for the workers.

    Values:
      EGRESS_OPTION_UNSPECIFIED: If set, defaults to PUBLIC_EGRESS.
      NO_PUBLIC_EGRESS: If set, workers are created without any public
        address, which prevents network egress to public IPs unless a network
        proxy is configured.
      PUBLIC_EGRESS: If set, workers are created with a public address which
        allows for public internet egress.
    """
    EGRESS_OPTION_UNSPECIFIED = 0
    NO_PUBLIC_EGRESS = 1
    PUBLIC_EGRESS = 2

  egressOption = _messages.EnumField('EgressOptionValueValuesEnum', 1)
  peeredNetwork = _messages.StringField(2)
class Notification(_messages.Message):
  r"""Notification is the container which holds the data that is relevant to
  this particular notification.

  Messages:
    StructDeliveryValue: Escape hatch for users to supply custom delivery
      configs.

  Fields:
    filter: The filter string to use for notification filtering. Currently,
      this is assumed to be a CEL program. See
      https://opensource.google/projects/cel for more.
    httpDelivery: Configuration for HTTP delivery.
    slackDelivery: Configuration for Slack delivery.
    smtpDelivery: Configuration for SMTP (email) delivery.
    structDelivery: Escape hatch for users to supply custom delivery configs.
  """

  # Free-form struct: unrecognized JSON keys are kept as AdditionalProperty
  # entries instead of being dropped.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class StructDeliveryValue(_messages.Message):
    r"""Escape hatch for users to supply custom delivery configs.

    Messages:
      AdditionalProperty: An additional property for a StructDeliveryValue
        object.

    Fields:
      additionalProperties: Properties of the object.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a StructDeliveryValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  filter = _messages.StringField(1)
  httpDelivery = _messages.MessageField('HTTPDelivery', 2)
  slackDelivery = _messages.MessageField('SlackDelivery', 3)
  smtpDelivery = _messages.MessageField('SMTPDelivery', 4)
  structDelivery = _messages.MessageField('StructDeliveryValue', 5)
class NotifierConfig(_messages.Message):
  r"""NotifierConfig is the top-level configuration message.

  Fields:
    apiVersion: The API version of this configuration format.
    kind: The type of notifier to use (e.g. SMTPNotifier).
    metadata: Metadata for referring to/handling/deploying this notifier.
    spec: The actual configuration for this notifier.
  """

  apiVersion = _messages.StringField(1)
  kind = _messages.StringField(2)
  metadata = _messages.MessageField('NotifierMetadata', 3)
  spec = _messages.MessageField('NotifierSpec', 4)
class NotifierMetadata(_messages.Message):
  r"""NotifierMetadata contains the data which can be used to reference or
  describe this notifier.

  Fields:
    name: The human-readable and user-given name for the notifier. For
      example: "repo-merge-email-notifier".
    notifier: The string representing the name and version of notifier to
      deploy. Expected to be of the form of "/:". For example: "gcr.io/my-
      project/notifiers/smtp:1.2.34".
  """

  name = _messages.StringField(1)
  notifier = _messages.StringField(2)
class NotifierSecret(_messages.Message):
  r"""NotifierSecret is the container that maps a secret name (reference) to
  its Google Cloud Secret Manager resource path.

  Fields:
    name: Name is the local name of the secret, such as the verbatim string
      "my-smtp-password".
    value: Value is interpreted to be a resource path for fetching the actual
      (versioned) secret data for this secret. For example, this would be a
      Google Cloud Secret Manager secret version resource path like:
      "projects/my-project/secrets/my-secret/versions/latest".
  """

  name = _messages.StringField(1)
  value = _messages.StringField(2)
class NotifierSecretRef(_messages.Message):
  r"""NotifierSecretRef contains the reference to a secret stored in the
  corresponding NotifierSpec.

  Fields:
    secretRef: The value of `secret_ref` should be a `name` that is registered
      in a `Secret` in the `secrets` list of the `Spec`.
  """

  secretRef = _messages.StringField(1)
class NotifierSpec(_messages.Message):
  r"""NotifierSpec is the configuration container for notifications.

  Fields:
    notification: The configuration of this particular notifier.
    secrets: Configurations for secret resources used by this particular
      notifier.
  """

  notification = _messages.MessageField('Notification', 1)
  secrets = _messages.MessageField('NotifierSecret', 2, repeated=True)
class OAuthRegistrationURI(_messages.Message):
  r"""RPC Response object returned by GetOAuthRegistrationURL

  Fields:
    registrationUri: The URL that the user should be redirected to in order to
      start the OAuth flow. When the user is redirected to this URL, they will
      be sent to the source provider specified in the request to authorize
      CloudBuild to access their oauth credentials. After the authorization is
      completed, the user will be redirected to the Cloud Build console.
  """

  registrationUri = _messages.StringField(1)
class Operation(_messages.Message):
  r"""This resource represents a long-running operation that is the result of
  a network API call.

  Messages:
    MetadataValue: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    ResponseValue: The normal response of the operation in case of success. If
      the original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.

  Fields:
    done: If the value is `false`, it means the operation is still in
      progress. If `true`, the operation is completed, and either `error` or
      `response` is available.
    error: The error result of the operation in case of failure or
      cancellation.
    metadata: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    name: The server-assigned name, which is only unique within the same
      service that originally returns it. If you use the default HTTP mapping,
      the `name` should be a resource name ending with
      `operations/{unique_id}`.
    response: The normal response of the operation in case of success. If the
      original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
  """

  # google.protobuf.Any-style payload: arbitrary keys (including @type) are
  # preserved as AdditionalProperty entries.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Service-specific metadata associated with the operation. It typically
    contains progress information and common metadata such as create time.
    Some services might not provide such metadata. Any method that returns a
    long-running operation should document the metadata type, if any.

    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  @encoding.MapUnrecognizedFields('additionalProperties')
  class ResponseValue(_messages.Message):
    r"""The normal response of the operation in case of success. If the
    original method returns no data on success, such as `Delete`, the response
    is `google.protobuf.Empty`. If the original method is standard
    `Get`/`Create`/`Update`, the response should be the resource. For other
    methods, the response should have the type `XxxResponse`, where `Xxx` is
    the original method name. For example, if the original method name is
    `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.

    Messages:
      AdditionalProperty: An additional property for a ResponseValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ResponseValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  done = _messages.BooleanField(1)
  error = _messages.MessageField('Status', 2)
  metadata = _messages.MessageField('MetadataValue', 3)
  name = _messages.StringField(4)
  response = _messages.MessageField('ResponseValue', 5)
class OperationMetadata(_messages.Message):
  r"""Represents the metadata of the long-running operation.

  Fields:
    apiVersion: Output only. API version used to start the operation.
    cancelRequested: Output only. Identifies whether the user has requested
      cancellation of the operation. Operations that have been cancelled
      successfully have Operation.error value with a google.rpc.Status.code of
      1, corresponding to `Code.CANCELLED`.
    createTime: Output only. The time the operation was created.
    endTime: Output only. The time the operation finished running.
    statusDetail: Output only. Human-readable status of the operation, if any.
    target: Output only. Server-defined resource path for the target of the
      operation.
    verb: Output only. Name of the verb executed by the operation.
  """

  apiVersion = _messages.StringField(1)
  cancelRequested = _messages.BooleanField(2)
  createTime = _messages.StringField(3)
  endTime = _messages.StringField(4)
  statusDetail = _messages.StringField(5)
  target = _messages.StringField(6)
  verb = _messages.StringField(7)
class PoolOption(_messages.Message):
  r"""Details about how a build should be executed on a `WorkerPool`. See
  [running builds in a private
  pool](https://cloud.google.com/build/docs/private-pools/run-builds-in-
  private-pool) for more information.

  Fields:
    name: The `WorkerPool` resource to execute the build on. You must have
      `cloudbuild.workerpools.use` on the project hosting the WorkerPool.
      Format
      projects/{project}/locations/{location}/workerPools/{workerPoolId}
    workerConfig: Configuration per workload.
  """

  name = _messages.StringField(1)
  workerConfig = _messages.MessageField('GoogleDevtoolsCloudbuildV1BuildOptionsPoolOptionWorkerConfig', 2)
class PrivatePoolV1Config(_messages.Message):
  r"""Configuration for a V1 `PrivatePool`.

  Fields:
    networkConfig: Network configuration for the pool.
    workerConfig: Machine configuration for the workers in the pool.
  """

  networkConfig = _messages.MessageField('NetworkConfig', 1)
  workerConfig = _messages.MessageField('WorkerConfig', 2)
class ProcessAppManifestCallbackOperationMetadata(_messages.Message):
  r"""Metadata for `ProcessAppManifestCallback` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      created. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  githubEnterpriseConfig = _messages.StringField(3)
class PubsubConfig(_messages.Message):
  r"""PubsubConfig describes the configuration of a trigger that creates a
  build whenever a Pub/Sub message is published.

  Enums:
    StateValueValuesEnum: Potential issues with the underlying Pub/Sub
      subscription configuration. Only populated on get requests.

  Fields:
    serviceAccountEmail: Service account that will make the push request.
    state: Potential issues with the underlying Pub/Sub subscription
      configuration. Only populated on get requests.
    subscription: Output only. Name of the subscription. Format is
      `projects/{project}/subscriptions/{subscription}`.
    topic: The name of the topic from which this subscription is receiving
      messages. Format is `projects/{project}/topics/{topic}`.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Potential issues with the underlying Pub/Sub subscription
    configuration. Only populated on get requests.

    Values:
      STATE_UNSPECIFIED: The subscription configuration has not been checked.
      OK: The Pub/Sub subscription is properly configured.
      SUBSCRIPTION_DELETED: The subscription has been deleted.
      TOPIC_DELETED: The topic has been deleted.
      SUBSCRIPTION_MISCONFIGURED: Some of the subscription's field are
        misconfigured.
    """
    STATE_UNSPECIFIED = 0
    OK = 1
    SUBSCRIPTION_DELETED = 2
    TOPIC_DELETED = 3
    SUBSCRIPTION_MISCONFIGURED = 4

  serviceAccountEmail = _messages.StringField(1)
  state = _messages.EnumField('StateValueValuesEnum', 2)
  subscription = _messages.StringField(3)
  topic = _messages.StringField(4)
class PullRequestFilter(_messages.Message):
  r"""PullRequestFilter contains filter properties for matching GitHub Pull
  Requests.

  Enums:
    CommentControlValueValuesEnum: Configure builds to run whether a
      repository owner or collaborator need to comment `/gcbrun`.

  Fields:
    branch: Regex of branches to match. The syntax of the regular expressions
      accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
    commentControl: Configure builds to run whether a repository owner or
      collaborator need to comment `/gcbrun`.
    invertRegex: If true, branches that do NOT match the git_ref will trigger
      a build.
  """

  class CommentControlValueValuesEnum(_messages.Enum):
    r"""Configure builds to run whether a repository owner or collaborator
    need to comment `/gcbrun`.

    Values:
      COMMENTS_DISABLED: Do not require comments on Pull Requests before
        builds are triggered.
      COMMENTS_ENABLED: Enforce that repository owners or collaborators must
        comment on Pull Requests before builds are triggered.
      COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY: Enforce that repository
        owners or collaborators must comment on external contributors' Pull
        Requests before builds are triggered.
    """
    COMMENTS_DISABLED = 0
    COMMENTS_ENABLED = 1
    COMMENTS_ENABLED_FOR_EXTERNAL_CONTRIBUTORS_ONLY = 2

  branch = _messages.StringField(1)
  commentControl = _messages.EnumField('CommentControlValueValuesEnum', 2)
  invertRegex = _messages.BooleanField(3)
class PushFilter(_messages.Message):
  r"""Push contains filter properties for matching GitHub git pushes.

  Fields:
    branch: Regexes matching branches to build. The syntax of the regular
      expressions accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
    invertRegex: When true, only trigger a build if the revision regex does
      NOT match the git_ref regex.
    tag: Regexes matching tags to build. The syntax of the regular expressions
      accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
  """

  branch = _messages.StringField(1)
  invertRegex = _messages.BooleanField(2)
  tag = _messages.StringField(3)
class ReceiveTriggerWebhookResponse(_messages.Message):
  r"""ReceiveTriggerWebhookResponse [Experimental] is the response object for
  the ReceiveTriggerWebhook method.
  """
  # Intentionally empty: this response message carries no fields.
class RemoveBitbucketServerConnectedRepositoryRequest(_messages.Message):
  r"""RPC request object accepted by RemoveBitbucketServerConnectedRepository
  RPC method.

  Fields:
    connectedRepository: The connected repository to remove.
  """

  connectedRepository = _messages.MessageField('BitbucketServerRepositoryId', 1)
class RepoSource(_messages.Message):
  r"""Location of the source in a Google Cloud Source Repository.

  Messages:
    SubstitutionsValue: Substitutions to use in a triggered build. Should only
      be used with RunBuildTrigger

  Fields:
    branchName: Regex matching branches to build. The syntax of the regular
      expressions accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
    commitSha: Explicit commit SHA to build.
    dir: Directory, relative to the source root, in which to run the build.
      This must be a relative path. If a step's `dir` is specified and is an
      absolute path, this value is ignored for that step's execution.
    invertRegex: Only trigger a build if the revision regex does NOT match the
      revision regex.
    projectId: ID of the project that owns the Cloud Source Repository. If
      omitted, the project ID requesting the build is assumed.
    repoName: Name of the Cloud Source Repository.
    substitutions: Substitutions to use in a triggered build. Should only be
      used with RunBuildTrigger
    tagName: Regex matching tags to build. The syntax of the regular
      expressions accepted is the syntax accepted by RE2 and described at
      https://github.com/google/re2/wiki/Syntax
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class SubstitutionsValue(_messages.Message):
    r"""Substitutions to use in a triggered build. Should only be used with
    RunBuildTrigger

    Messages:
      AdditionalProperty: An additional property for a SubstitutionsValue
        object.

    Fields:
      additionalProperties: Additional properties of type SubstitutionsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SubstitutionsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  branchName = _messages.StringField(1)
  commitSha = _messages.StringField(2)
  # `dir` intentionally shadows the builtin: the attribute name must match
  # the JSON field name of the API.
  dir = _messages.StringField(3)
  invertRegex = _messages.BooleanField(4)
  projectId = _messages.StringField(5)
  repoName = _messages.StringField(6)
  substitutions = _messages.MessageField('SubstitutionsValue', 7)
  tagName = _messages.StringField(8)
class Results(_messages.Message):
  r"""Artifacts created by the build pipeline.

  Fields:
    artifactManifest: Path to the artifact manifest. Only populated when
      artifacts are uploaded.
    artifactTiming: Time to push all non-container artifacts.
    buildStepImages: List of build step digests, in the order corresponding to
      build step indices.
    buildStepOutputs: List of build step outputs, produced by builder images,
      in the order corresponding to build step indices. [Cloud
      Builders](https://cloud.google.com/cloud-build/docs/cloud-builders) can
      produce this output by writing to `$BUILDER_OUTPUT/output`. Only the
      first 4KB of data is stored.
    images: Container images that were built as a part of the build.
    numArtifacts: Number of artifacts uploaded. Only populated when artifacts
      are uploaded.
  """

  artifactManifest = _messages.StringField(1)
  artifactTiming = _messages.MessageField('TimeSpan', 2)
  buildStepImages = _messages.StringField(3, repeated=True)
  # Raw bytes (not text): truncated to the first 4KB per step by the service.
  buildStepOutputs = _messages.BytesField(4, repeated=True)
  images = _messages.MessageField('BuiltImage', 5, repeated=True)
  numArtifacts = _messages.IntegerField(6)


class RetryBuildRequest(_messages.Message):
  r"""Specifies a build to retry.

  Fields:
    id: Required. Build ID of the original build.
    name: The name of the `Build` to retry. Format:
      `projects/{project}/locations/{location}/builds/{build}`
    projectId: Required. ID of the project.
  """

  id = _messages.StringField(1)
  name = _messages.StringField(2)
  projectId = _messages.StringField(3)


class RunBuildTriggerRequest(_messages.Message):
  r"""Specifies a build trigger to run and the source to use.

  Fields:
    projectId: Required. ID of the project.
    source: Source to build against this trigger.
    triggerId: Required. ID of the trigger.
  """

  projectId = _messages.StringField(1)
  source = _messages.MessageField('RepoSource', 2)
  triggerId = _messages.StringField(3)
class SMTPDelivery(_messages.Message):
  r"""SMTPDelivery is the delivery configuration for an SMTP (email)
  notification.

  Fields:
    fromAddress: This is the SMTP account/email that appears in the `From:` of
      the email. If empty, it is assumed to be sender.
    password: The SMTP sender's password, supplied as a reference to a stored
      secret rather than inline.
    port: The SMTP port of the server.
    recipientAddresses: This is the list of addresses to which we send the
      email (i.e. in the `To:` of the email).
    senderAddress: This is the SMTP account/email that is used to send the
      message.
    server: The address of the SMTP server.
  """

  fromAddress = _messages.StringField(1)
  # Fixed: the referenced message type had been corrupted by a credential-
  # scrubbing pass ('Notifier<PASSWORD>Ref'). The correct type is
  # NotifierSecretRef — the same secret-reference message used by
  # SlackDelivery.webhookUri elsewhere in this file.
  password = _messages.MessageField('NotifierSecretRef', 2)
  port = _messages.StringField(3)
  recipientAddresses = _messages.StringField(4, repeated=True)
  senderAddress = _messages.StringField(5)
  server = _messages.StringField(6)
class Secret(_messages.Message):
  r"""Pairs a set of secret environment variables containing encrypted values
  with the Cloud KMS key to use to decrypt the value. Note: Use `kmsKeyName`
  with `available_secrets` instead of using `kmsKeyName` with `secret`. For
  instructions see: https://cloud.google.com/cloud-build/docs/securing-
  builds/use-encrypted-credentials.

  Messages:
    SecretEnvValue: Map of environment variable name to its encrypted value.
      Secret environment variables must be unique across all of a build's
      secrets, and must be used by at least one build step. Values can be at
      most 64 KB in size. There can be at most 100 secret values across all of
      a build's secrets.

  Fields:
    kmsKeyName: Cloud KMS key name to use to decrypt these envs.
    secretEnv: Map of environment variable name to its encrypted value. Secret
      environment variables must be unique across all of a build's secrets,
      and must be used by at least one build step. Values can be at most 64 KB
      in size. There can be at most 100 secret values across all of a build's
      secrets.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class SecretEnvValue(_messages.Message):
    r"""Map of environment variable name to its encrypted value. Secret
    environment variables must be unique across all of a build's secrets, and
    must be used by at least one build step. Values can be at most 64 KB in
    size. There can be at most 100 secret values across all of a build's
    secrets.

    Messages:
      AdditionalProperty: An additional property for a SecretEnvValue object.

    Fields:
      additionalProperties: Additional properties of type SecretEnvValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SecretEnvValue object.

      Fields:
        key: Name of the additional property.
        value: A byte attribute.
      """

      key = _messages.StringField(1)
      # Encrypted value: raw bytes, decrypted by KMS at build time.
      value = _messages.BytesField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  kmsKeyName = _messages.StringField(1)
  secretEnv = _messages.MessageField('SecretEnvValue', 2)
class SecretManagerSecret(_messages.Message):
  r"""Pairs a secret environment variable with a SecretVersion in Secret
  Manager.

  Fields:
    env: Environment variable name to associate with the secret. Secret
      environment variables must be unique across all of a build's secrets,
      and must be used by at least one build step.
    versionName: Resource name of the SecretVersion. In format:
      projects/*/secrets/*/versions/*
  """

  env = _messages.StringField(1)
  versionName = _messages.StringField(2)


class Secrets(_messages.Message):
  r"""Secrets and secret environment variables.

  Fields:
    inline: Secrets encrypted with KMS key and the associated secret
      environment variable.
    secretManager: Secrets in Secret Manager and associated secret environment
      variable.
  """

  inline = _messages.MessageField('InlineSecret', 1, repeated=True)
  secretManager = _messages.MessageField('SecretManagerSecret', 2, repeated=True)


class SlackDelivery(_messages.Message):
  r"""SlackDelivery is the delivery configuration for delivering Slack
  messages via webhooks. See Slack webhook documentation at:
  https://api.slack.com/messaging/webhooks.

  Fields:
    webhookUri: The secret reference for the Slack webhook URI for sending
      messages to a channel.
  """

  webhookUri = _messages.MessageField('NotifierSecretRef', 1)


class Source(_messages.Message):
  r"""Location of the source in a supported storage service.

  Fields:
    gitSource: If provided, get the source from this Git repository.
    repoSource: If provided, get the source from this location in a Cloud
      Source Repository.
    storageSource: If provided, get the source from this location in Google
      Cloud Storage.
    storageSourceManifest: If provided, get the source from this manifest in
      Google Cloud Storage. This feature is in Preview; see description
      [here](https://github.com/GoogleCloudPlatform/cloud-
      builders/tree/master/gcs-fetcher).
  """

  # The four source kinds are mutually exclusive alternatives.
  gitSource = _messages.MessageField('GitSource', 1)
  repoSource = _messages.MessageField('RepoSource', 2)
  storageSource = _messages.MessageField('StorageSource', 3)
  storageSourceManifest = _messages.MessageField('StorageSourceManifest', 4)
class SourceProvenance(_messages.Message):
  r"""Provenance of the source. Ways to find the original source, or verify
  that some source was used for this build.

  Messages:
    FileHashesValue: Output only. Hash(es) of the build source, which can be
      used to verify that the original source integrity was maintained in the
      build. Note that `FileHashes` will only be populated if `BuildOptions`
      has requested a `SourceProvenanceHash`. The keys to this map are file
      paths used as build source and the values contain the hash values for
      those files. If the build source came in a single package such as a
      gzipped tarfile (`.tar.gz`), the `FileHash` will be for the single path
      to that file.

  Fields:
    fileHashes: Output only. Hash(es) of the build source, which can be used
      to verify that the original source integrity was maintained in the
      build. Note that `FileHashes` will only be populated if `BuildOptions`
      has requested a `SourceProvenanceHash`. The keys to this map are file
      paths used as build source and the values contain the hash values for
      those files. If the build source came in a single package such as a
      gzipped tarfile (`.tar.gz`), the `FileHash` will be for the single path
      to that file.
    resolvedRepoSource: A copy of the build's `source.repo_source`, if exists,
      with any revisions resolved.
    resolvedStorageSource: A copy of the build's `source.storage_source`, if
      exists, with any generations resolved.
    resolvedStorageSourceManifest: A copy of the build's
      `source.storage_source_manifest`, if exists, with any revisions
      resolved. This feature is in Preview.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class FileHashesValue(_messages.Message):
    r"""Output only. Hash(es) of the build source, which can be used to verify
    that the original source integrity was maintained in the build. Note that
    `FileHashes` will only be populated if `BuildOptions` has requested a
    `SourceProvenanceHash`. The keys to this map are file paths used as build
    source and the values contain the hash values for those files. If the
    build source came in a single package such as a gzipped tarfile
    (`.tar.gz`), the `FileHash` will be for the single path to that file.

    Messages:
      AdditionalProperty: An additional property for a FileHashesValue object.

    Fields:
      additionalProperties: Additional properties of type FileHashesValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a FileHashesValue object.

      Fields:
        key: Name of the additional property.
        value: A FileHashes attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('FileHashes', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  fileHashes = _messages.MessageField('FileHashesValue', 1)
  resolvedRepoSource = _messages.MessageField('RepoSource', 2)
  resolvedStorageSource = _messages.MessageField('StorageSource', 3)
  resolvedStorageSourceManifest = _messages.MessageField('StorageSourceManifest', 4)
class StandardQueryParameters(_messages.Message):
  r"""Query parameters accepted by all methods.

  Enums:
    FXgafvValueValuesEnum: V1 error format.
    AltValueValuesEnum: Data format for response.

  Fields:
    f__xgafv: V1 error format.
    access_token: OAuth access token.
    alt: Data format for response.
    callback: JSONP
    fields: Selector specifying which fields to include in a partial response.
    key: API key. Your API key identifies your project and provides you with
      API access, quota, and reports. Required unless you provide an OAuth 2.0
      token.
    oauth_token: OAuth 2.0 token for the current user.
    prettyPrint: Returns response with indentations and line breaks.
    quotaUser: Available to use for quota purposes for server-side
      applications. Can be any arbitrary string assigned to a user, but should
      not exceed 40 characters.
    trace: A tracing token of the form "token:<tokenid>" to include in api
      requests.
    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
  """

  class AltValueValuesEnum(_messages.Enum):
    r"""Data format for response.

    Values:
      json: Responses with Content-Type of application/json
      media: Media download with context-dependent Content-Type
      proto: Responses with Content-Type of application/x-protobuf
    """
    # Lower-case member names are intentional: they mirror the wire values.
    json = 0
    media = 1
    proto = 2

  class FXgafvValueValuesEnum(_messages.Enum):
    r"""V1 error format.

    Values:
      _1: v1 error format
      _2: v2 error format
    """
    # Leading underscores keep the identifiers Python-legal; the module-level
    # AddCustomJsonEnumMapping calls at the bottom of this file map them back
    # to "1" and "2" on the wire.
    _1 = 0
    _2 = 1

  # Serialized as "$.xgafv" via the AddCustomJsonFieldMapping call at the
  # bottom of this file.
  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
  access_token = _messages.StringField(2)
  alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
  callback = _messages.StringField(4)
  fields = _messages.StringField(5)
  key = _messages.StringField(6)
  oauth_token = _messages.StringField(7)
  prettyPrint = _messages.BooleanField(8, default=True)
  quotaUser = _messages.StringField(9)
  trace = _messages.StringField(10)
  uploadType = _messages.StringField(11)
  upload_protocol = _messages.StringField(12)
class Status(_messages.Message):
  r"""The `Status` type defines a logical error model that is suitable for
  different programming environments, including REST APIs and RPC APIs. It is
  used by [gRPC](https://github.com/grpc). Each `Status` message contains
  three pieces of data: error code, error message, and error details. You can
  find out more about this error model and how to work with it in the [API
  Design Guide](https://cloud.google.com/apis/design/errors).

  Messages:
    DetailsValueListEntry: A DetailsValueListEntry object.

  Fields:
    code: The status code, which should be an enum value of google.rpc.Code.
    details: A list of messages that carry the error details. There is a
      common set of message types for APIs to use.
    message: A developer-facing error message, which should be in English. Any
      user-facing error message should be localized and sent in the
      google.rpc.Status.details field, or localized by the client.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValueListEntry(_messages.Message):
    r"""A DetailsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for a DetailsValueListEntry
        object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  # INT32 variant: matches google.rpc.Code's 32-bit wire representation.
  code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
  message = _messages.StringField(3)
class StorageSource(_messages.Message):
  r"""Location of the source in an archive file in Google Cloud Storage.

  Fields:
    bucket: Google Cloud Storage bucket containing the source (see [Bucket
      Name Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)).
    generation: Google Cloud Storage generation for the object. If the
      generation is omitted, the latest generation will be used.
    object: Google Cloud Storage object containing the source. This object
      must be a zipped (`.zip`) or gzipped archive file (`.tar.gz`) containing
      source to build.
  """

  bucket = _messages.StringField(1)
  generation = _messages.IntegerField(2)
  # `object` intentionally shadows the builtin: the attribute name must match
  # the JSON field name of the API.
  object = _messages.StringField(3)


class StorageSourceManifest(_messages.Message):
  r"""Location of the source manifest in Google Cloud Storage. This feature is
  in Preview; see description
  [here](https://github.com/GoogleCloudPlatform/cloud-
  builders/tree/master/gcs-fetcher).

  Fields:
    bucket: Google Cloud Storage bucket containing the source manifest (see
      [Bucket Name Requirements](https://cloud.google.com/storage/docs/bucket-
      naming#requirements)).
    generation: Google Cloud Storage generation for the object. If the
      generation is omitted, the latest generation will be used.
    object: Google Cloud Storage object containing the source manifest. This
      object must be a JSON file.
  """

  bucket = _messages.StringField(1)
  generation = _messages.IntegerField(2)
  object = _messages.StringField(3)


class TimeSpan(_messages.Message):
  r"""Start and end times for a build execution phase.

  Fields:
    endTime: End of time span.
    startTime: Start of time span.
  """

  # Timestamps travel as RFC 3339 strings in the JSON API surface.
  endTime = _messages.StringField(1)
  startTime = _messages.StringField(2)
class UpdateBitbucketServerConfigOperationMetadata(_messages.Message):
  r"""Metadata for `UpdateBitbucketServerConfig` operation.

  Fields:
    bitbucketServerConfig: The resource name of the BitbucketServerConfig to
      be updated. Format:
      `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
  """

  bitbucketServerConfig = _messages.StringField(1)
  completeTime = _messages.StringField(2)
  createTime = _messages.StringField(3)


class UpdateGitHubEnterpriseConfigOperationMetadata(_messages.Message):
  r"""Metadata for `UpdateGitHubEnterpriseConfig` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    githubEnterpriseConfig: The resource name of the GitHubEnterprise to be
      updated. Format:
      `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`.
  """

  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  githubEnterpriseConfig = _messages.StringField(3)


class UpdateWorkerPoolOperationMetadata(_messages.Message):
  r"""Metadata for the `UpdateWorkerPool` operation.

  Fields:
    completeTime: Time the operation was completed.
    createTime: Time the operation was created.
    workerPool: The resource name of the `WorkerPool` being updated. Format:
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`.
  """

  completeTime = _messages.StringField(1)
  createTime = _messages.StringField(2)
  workerPool = _messages.StringField(3)
class Volume(_messages.Message):
  r"""Volume describes a Docker container volume which is mounted into build
  steps in order to persist files across build step execution.

  Fields:
    name: Name of the volume to mount. Volume names must be unique per build
      step and must be valid names for Docker volumes. Each named volume must
      be used by at least two build steps.
    path: Path at which to mount the volume. Paths must be absolute and cannot
      conflict with other volume paths on the same build step or with certain
      reserved volume paths.
  """

  name = _messages.StringField(1)
  path = _messages.StringField(2)


# `Warning` intentionally shadows the builtin exception class: the name must
# match the API message type.
class Warning(_messages.Message):
  r"""A non-fatal problem encountered during the execution of the build.

  Enums:
    PriorityValueValuesEnum: The priority for this warning.

  Fields:
    priority: The priority for this warning.
    text: Explanation of the warning generated.
  """

  class PriorityValueValuesEnum(_messages.Enum):
    r"""The priority for this warning.

    Values:
      PRIORITY_UNSPECIFIED: Should not be used.
      INFO: e.g. deprecation warnings and alternative feature highlights.
      WARNING: e.g. automated detection of possible issues with the build.
      ALERT: e.g. alerts that a feature used in the build is pending removal
    """
    PRIORITY_UNSPECIFIED = 0
    INFO = 1
    WARNING = 2
    ALERT = 3

  priority = _messages.EnumField('PriorityValueValuesEnum', 1)
  text = _messages.StringField(2)
class WebhookConfig(_messages.Message):
  r"""WebhookConfig describes the configuration of a trigger that creates a
  build whenever a webhook is sent to a trigger's webhook URL.

  Enums:
    StateValueValuesEnum: Potential issues with the underlying Pub/Sub
      subscription configuration. Only populated on get requests.

  Fields:
    secret: Required. Resource name for the secret required as a URL
      parameter.
    state: Potential issues with the underlying Pub/Sub subscription
      configuration. Only populated on get requests.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Potential issues with the underlying Pub/Sub subscription
    configuration. Only populated on get requests.

    Values:
      STATE_UNSPECIFIED: The webhook auth configuration not been checked.
      OK: The auth configuration is properly setup.
      SECRET_DELETED: The secret provided in auth_method has been deleted.
    """
    STATE_UNSPECIFIED = 0
    OK = 1
    SECRET_DELETED = 2

  secret = _messages.StringField(1)
  state = _messages.EnumField('StateValueValuesEnum', 2)


class WorkerConfig(_messages.Message):
  r"""Defines the configuration to be used for creating workers in the pool.

  Fields:
    diskSizeGb: Size of the disk attached to the worker, in GB. See [Worker
      pool config file](https://cloud.google.com/build/docs/private-
      pools/worker-pool-config-file-schema). Specify a value of up to 1000. If
      `0` is specified, Cloud Build will use a standard disk size.
    machineType: Machine type of a worker, such as `e2-medium`. See [Worker
      pool config file](https://cloud.google.com/build/docs/private-
      pools/worker-pool-config-file-schema). If left blank, Cloud Build will
      use a sensible default.
  """

  diskSizeGb = _messages.IntegerField(1)
  machineType = _messages.StringField(2)
class WorkerPool(_messages.Message):
  r"""Configuration for a `WorkerPool`. Cloud Build owns and maintains a pool
  of workers for general use and have no access to a project's private
  network. By default, builds submitted to Cloud Build will use a worker from
  this pool. If your build needs access to resources on a private network,
  create and use a `WorkerPool` to run your builds. Private `WorkerPool`s give
  your builds access to any single VPC network that you administer, including
  any on-prem resources connected to that VPC network. For an overview of
  private pools, see [Private pools
  overview](https://cloud.google.com/build/docs/private-pools/private-pools-
  overview).

  Enums:
    StateValueValuesEnum: Output only. `WorkerPool` state.

  Messages:
    AnnotationsValue: User specified annotations. See
      https://google.aip.dev/128#annotations for more details such as format
      and size limitations.

  Fields:
    annotations: User specified annotations. See
      https://google.aip.dev/128#annotations for more details such as format
      and size limitations.
    createTime: Output only. Time at which the request to create the
      `WorkerPool` was received.
    deleteTime: Output only. Time at which the request to delete the
      `WorkerPool` was received.
    displayName: A user-specified, human-readable name for the `WorkerPool`.
      If provided, this value must be 1-63 characters.
    etag: Output only. Checksum computed by the server. May be sent on update
      and delete requests to ensure that the client has an up-to-date value
      before proceeding.
    hybridPoolConfig: Hybrid pool configuration
    name: Output only. The resource name of the `WorkerPool`, with format
      `projects/{project}/locations/{location}/workerPools/{worker_pool}`. The
      value of `{worker_pool}` is provided by `worker_pool_id` in
      `CreateWorkerPool` request and the value of `{location}` is determined
      by the endpoint accessed.
    privatePoolV1Config: Legacy Private Pool configuration.
    state: Output only. `WorkerPool` state.
    uid: Output only. A unique identifier for the `WorkerPool`.
    updateTime: Output only. Time at which the request to update the
      `WorkerPool` was received.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. `WorkerPool` state.

    Values:
      STATE_UNSPECIFIED: State of the `WorkerPool` is unknown.
      CREATING: `WorkerPool` is being created.
      RUNNING: `WorkerPool` is running.
      DELETING: `WorkerPool` is being deleted: cancelling builds and draining
        workers.
      DELETED: `WorkerPool` is deleted.
      UPDATING: `WorkerPool` is being updated; new builds cannot be run.
    """
    STATE_UNSPECIFIED = 0
    CREATING = 1
    RUNNING = 2
    DELETING = 3
    DELETED = 4
    UPDATING = 5

  @encoding.MapUnrecognizedFields('additionalProperties')
  class AnnotationsValue(_messages.Message):
    r"""User specified annotations. See https://google.aip.dev/128#annotations
    for more details such as format and size limitations.

    Messages:
      AdditionalProperty: An additional property for a AnnotationsValue
        object.

    Fields:
      additionalProperties: Additional properties of type AnnotationsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a AnnotationsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  annotations = _messages.MessageField('AnnotationsValue', 1)
  createTime = _messages.StringField(2)
  deleteTime = _messages.StringField(3)
  displayName = _messages.StringField(4)
  etag = _messages.StringField(5)
  hybridPoolConfig = _messages.MessageField('HybridPoolConfig', 6)
  name = _messages.StringField(7)
  privatePoolV1Config = _messages.MessageField('PrivatePoolV1Config', 8)
  state = _messages.EnumField('StateValueValuesEnum', 9)
  uid = _messages.StringField(10)
  updateTime = _messages.StringField(11)
# Wire up the custom JSON names the API expects: the Python-safe identifiers
# `f__xgafv`, `_1` and `_2` serialize as `$.xgafv`, `1` and `2` respectively.
encoding.AddCustomJsonFieldMapping(
    StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
import re
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
from core.parser import Parser
from core.display import Display, ProgressBar
class Gather():
    """Collects search-engine result pages that mention a target domain.

    Queries several public search engines for "@<domain>" and accumulates the
    raw response bodies into self.results, which is then handed to
    core.parser.Parser to extract hosts and e-mail addresses.
    """

    def __init__(self, domain, display=None):
        # display is an optional core.display.Display used for error output;
        # it may be None, so every use of it must be guarded.
        self.domain = domain
        self.display = display
        self.results = ""
        self.user_agent = "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)"
        self.p = ProgressBar(display=self.display)
        # Scrape first, then parse the accumulated text.
        self.gather()
        self.parser = Parser(self.results, self.domain)

    def hosts(self):
        """Return the hosts extracted from the gathered pages."""
        return self.parser.hosts()

    def emails(self):
        """Return the e-mail addresses extracted from the gathered pages."""
        return self.parser.emails()

    @staticmethod
    def get_sources():
        """Return a human-readable list of the engines that are queried."""
        return "Currently searching [google, bing, ask, dogpile, yandex, baidu, yahoo, duckduckgo]"

    def search(self, url, offset=1, maxoffset=0, title=""):
        """Fetch `url` repeatedly, substituting [[OFFSET]] with 0, offset, 2*offset, ...

        Returns the concatenated response bodies. On a URL error the partial
        result collected so far is returned.
        """
        current_offset = 0
        data = ""
        self.p.reset(title=title)
        while current_offset <= maxoffset:
            self.p.rotate()
            temp_url = re.sub(r'\[\[OFFSET\]\]', str(current_offset), url)
            try:
                headers = {'User-Agent': self.user_agent}
                req = urllib.request.Request(str(temp_url), None, headers)
                data += str(urllib.request.urlopen(req).read())
            except urllib.error.URLError:
                # Fixed: previously crashed with AttributeError when no
                # Display was supplied (display defaults to None), and the
                # early return skipped the progress-bar cleanup.
                if self.display is not None:
                    self.display.error("Could not access [%s]" % (title))
                self.p.done()
                return data
            except Exception as e:
                # Best-effort: log unexpected errors and keep paging.
                print(e)
            current_offset += offset
        self.p.done()
        return data

    def gather(self, maxoffset=500):
        """Run the search against every supported engine, appending to self.results."""
        self.results += self.search(title="Google", url="http://www.google.com/search?num=100&start=[[OFFSET]]&hl=en&meta=&q=%40\"" + self.domain + "\"", offset=100, maxoffset=maxoffset)
        self.results += self.search(title="Bing", url="http://www.bing.com/search?q=%40" + self.domain + "&count=50&first=[[OFFSET]]", offset=50, maxoffset=maxoffset)
        self.results += self.search(title="Ask", url="http://www.ask.com/web?q=%40" + self.domain + "&pu=100&page=[[OFFSET]]", offset=100, maxoffset=maxoffset)
        self.results += self.search(title="Dogpile", url="http://www.dogpile.com/search/web?qsi=[[OFFSET]]&q=\"%40" + self.domain + "\"", offset=10, maxoffset=maxoffset/10)
        self.results += self.search(title="Yandex", url="http://www.yandex.com/search?text=%40" + self.domain + "&numdoc=50&lr=[[OFFSET]]", offset=50, maxoffset=maxoffset)
        self.results += self.search(title="Baidu", url="http://www.baidu.com/s?wd=%40" + self.domain + "&pn=[[OFFSET]]", offset=10, maxoffset=maxoffset/10)
        self.results += self.search(title="Yahoo", url="https://search.yahoo.com/search?p=\"%40" + self.domain + "\"&b=[[OFFSET]]&pz=10", offset=10, maxoffset=maxoffset/10)
        self.results += self.search(title="DuckDuckGo", url="https://duckduckgo.com/lite?q=\"%40" + self.domain + "\"")
import re
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
from core.parser import Parser
from core.display import Display, ProgressBar
class Gather():
    """Harvests raw search-engine result pages that mention an e-mail domain.

    The accumulated page text ends up in self.results and is parsed by
    core.parser.Parser for hosts and e-mail addresses.
    """

    def __init__(self, domain, display=None):
        self.domain = domain
        self.display = display
        self.results = ""
        self.user_agent = "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)"
        self.p = ProgressBar(display=self.display)
        self.gather()
        self.parser = Parser(self.results, self.domain)

    def hosts(self):
        """Hosts found in the gathered pages."""
        return self.parser.hosts()

    def emails(self):
        """E-mail addresses found in the gathered pages."""
        return self.parser.emails()

    @staticmethod
    def get_sources():
        """Describe which engines are queried."""
        return "Currently searching [google, bing, ask, dogpile, yandex, baidu, yahoo, duckduckgo]"

    def search(self, url, offset=1, maxoffset=0, title=""):
        """Page through `url`, replacing [[OFFSET]] with successive offsets.

        Returns the concatenated response bodies collected so far.
        """
        collected = ""
        position = 0
        self.p.reset(title=title)
        while True:
            if position > maxoffset:
                break
            self.p.rotate()
            # [[OFFSET]] is a literal placeholder, so a plain string
            # substitution is equivalent to the regex form.
            page_url = url.replace('[[OFFSET]]', str(position))
            try:
                request = urllib.request.Request(
                    str(page_url), None, {'User-Agent': self.user_agent})
                collected += str(urllib.request.urlopen(request).read())
            except urllib.error.URLError:
                self.display.error("Could not access [%s]" % (title))
                return collected
            except Exception as err:
                print(err)
            position += offset
        self.p.done()
        return collected

    def gather(self, maxoffset=500):
        """Query every engine in turn, appending each result to self.results."""
        engines = (
            ("Google", "http://www.google.com/search?num=100&start=[[OFFSET]]&hl=en&meta=&q=%40\"" + self.domain + "\"", 100, maxoffset),
            ("Bing", "http://www.bing.com/search?q=%40" + self.domain + "&count=50&first=[[OFFSET]]", 50, maxoffset),
            ("Ask", "http://www.ask.com/web?q=%40" + self.domain + "&pu=100&page=[[OFFSET]]", 100, maxoffset),
            ("Dogpile", "http://www.dogpile.com/search/web?qsi=[[OFFSET]]&q=\"%40" + self.domain + "\"", 10, maxoffset/10),
            ("Yandex", "http://www.yandex.com/search?text=%40" + self.domain + "&numdoc=50&lr=[[OFFSET]]", 50, maxoffset),
            ("Baidu", "http://www.baidu.com/s?wd=%40" + self.domain + "&pn=[[OFFSET]]", 10, maxoffset/10),
            ("Yahoo", "https://search.yahoo.com/search?p=\"%40" + self.domain + "\"&b=[[OFFSET]]&pz=10", 10, maxoffset/10),
            ("DuckDuckGo", "https://duckduckgo.com/lite?q=\"%40" + self.domain + "\"", 1, 0),
        )
        for engine_name, engine_url, step, limit in engines:
            self.results += self.search(
                title=engine_name, url=engine_url, offset=step, maxoffset=limit)
import os, sys
from lxml.etree import Element, ElementTree
class XmlBase (object):
    """Shared XML namespace prefixes used by every schema helper below."""

    # Maps prefix -> namespace URI for use as the `namespaces` argument of
    # Element.find/findall.
    nsmap = dict(
        ds='http://schema.programmfabrik.de/database-schema/0.1',
        es='http://schema.programmfabrik.de/easydb-database-schema/0.1',
        em='http://schema.programmfabrik.de/easydb-mask-schema/0.1',
    )
class Searchable (XmlBase):
    """Reads the em:search flags (expert/facet/fulltext) of a mask node."""

    def __init__(self, xml):
        def enabled(mode):
            # enabled="1" marks the search mode as active on this node.
            return xml.find('em:search/em:' + mode, self.nsmap).attrib['enabled'] == '1'

        self.search_expert = enabled('expert')
        self.search_facet = enabled('facet')
        self.search_fulltext = enabled('fulltext')
        # Compact flag string, e.g. "EFV" when every mode is enabled.
        flags = ''
        if self.search_expert:
            flags += 'E'
        if self.search_facet:
            flags += 'F'
        if self.search_fulltext:
            flags += 'V'
        self.search_flags = flags
class Field (Searchable):
    """A plain mask field; its name comes from the column-name-hint attribute."""

    def __init__(self, xml):
        super().__init__(xml)
        # Element.get is equivalent to attrib.get (returns None if absent).
        self.name = xml.get('column-name-hint')
class LinkedTable (Searchable):
    """A link to another table, named after the other-table hint."""
    def __init__(self, xml):
        super(LinkedTable, self).__init__(xml)
        # Element.get is the conventional shorthand for attrib.get.
        self.name = xml.get('other-table-hint')
class ReverseLinkedTable (LinkedTable):
    """A reverse link; behaves exactly like LinkedTable.

    The subclass exists only so callers can label reverse links
    differently. The redundant __init__ that merely forwarded to
    super() has been dropped; the inherited constructor is used.
    """
class Analyzer (XmlBase):
    """Walks a mask XML file and reports which fields/links are searchable."""

    @classmethod
    def analyze_masks(cls, maskxmlfile, mask_name):
        """Analyze one named mask, or every top-level mask when mask_name is None.

        Exits the process with status 1 when the requested mask is missing.
        """
        tree = ElementTree()
        tree.parse(maskxmlfile)
        root = tree.getroot()
        if mask_name is not None:
            mask = root.find("em:mask[@name='{0}']".format(mask_name), cls.nsmap)
            if mask is None:
                sys.stderr.write("failed to find mask '{0}'\n".format(mask_name))
                sys.exit(1)
            cls._analyze_mask(mask)
        else:
            for mask in root.findall('em:mask', cls.nsmap):
                cls._analyze_mask(mask)

    @classmethod
    def _analyze_mask(cls, mask, indent = ''):
        """Print one mask plus its searchable links and fields, recursing into sub-masks."""
        print("{0}M:{1}".format(indent, mask.get('name', '<unnamed>')))
        # The reverse-linked and linked loops were duplicates differing only
        # in tag path, wrapper class and report label; share one helper.
        cls._analyze_links(mask, 'em:fields/em:reverse-linked-table',
                           ReverseLinkedTable, 'R', indent)
        cls._analyze_links(mask, 'em:fields/em:linked-table',
                           LinkedTable, 'N', indent)
        for fieldxml in mask.findall('em:fields/em:field', cls.nsmap):
            field = Field(fieldxml)
            if len(field.search_flags):
                print("{0} F:{1} ({2})".format(indent, field.name, field.search_flags))

    @classmethod
    def _analyze_links(cls, mask, path, link_cls, label, indent):
        """Report searchable links of one kind; always recurse into embedded masks."""
        for linkxml in mask.findall(path, cls.nsmap):
            link = link_cls(linkxml)
            # Only searchable links are printed, but recursion is
            # unconditional (matches the original behavior).
            if len(link.search_flags):
                print("{0} {1}:{2} ({3})".format(indent, label, link.name, link.search_flags))
            maskxml = linkxml.find('em:mask', cls.nsmap)
            if maskxml is not None:
                cls._analyze_mask(maskxml, indent + ' ')
if __name__ == '__main__':
    # Usage: analyze-mask.py <maskset.xml> [<mask name>]
    if len(sys.argv) < 2:
        sys.stderr.write('usage: {0} <maskset.xml> [<mask name>]\n'.format(sys.argv[0]))
        sys.exit(1)
    if not os.path.isfile(sys.argv[1]):
        sys.stderr.write('failed to find {0}\n'.format(sys.argv[1]))
        sys.exit(1)
    # Conditional expression replaces the error-prone `cond and x or None`.
    Analyzer.analyze_masks(sys.argv[1], sys.argv[2] if len(sys.argv) > 2 else None)
import os, sys
from lxml.etree import Element, ElementTree
class XmlBase (object):
    """Base class holding the shared XML namespace prefix map.

    The map is passed as the ``namespaces`` argument to etree
    ``find``/``findall`` calls throughout this module.
    """
    # Prefix -> namespace URI for the database/easydb/mask schemas.
    nsmap = {
        'ds': 'http://schema.programmfabrik.de/database-schema/0.1',
        'es': 'http://schema.programmfabrik.de/easydb-database-schema/0.1',
        'em': 'http://schema.programmfabrik.de/easydb-mask-schema/0.1',
    }
class Searchable (XmlBase):
    """Mixin that extracts the search switches from an <em:search> element.

    Builds ``search_flags``: one letter per enabled search mode —
    'E' (expert), 'F' (facet), 'V' (fulltext).
    """
    def __init__(self, xml):
        # Each <em:search> child carries an enabled="0"/"1" attribute.
        self.search_expert = xml.find('em:search/em:expert', self.nsmap).attrib['enabled'] == '1'
        self.search_facet = xml.find('em:search/em:facet', self.nsmap).attrib['enabled'] == '1'
        self.search_fulltext = xml.find('em:search/em:fulltext', self.nsmap).attrib['enabled'] == '1'
        # Conditional expressions replace the fragile `flag and 'X' or ''`
        # idiom (which breaks whenever the truthy branch can be falsy).
        self.search_flags = (
            ('E' if self.search_expert else '')
            + ('F' if self.search_facet else '')
            + ('V' if self.search_fulltext else '')
        )
class Field (Searchable):
    """A plain mask field, named after its column-name hint."""
    def __init__(self, xml):
        super(Field, self).__init__(xml)
        # Element.get is the conventional shorthand for attrib.get.
        self.name = xml.get('column-name-hint')
class LinkedTable (Searchable):
    """A link to another table, named after the other-table hint."""
    def __init__(self, xml):
        super(LinkedTable, self).__init__(xml)
        # Element.get is the conventional shorthand for attrib.get.
        self.name = xml.get('other-table-hint')
class ReverseLinkedTable (LinkedTable):
    """A reverse link; behaves exactly like LinkedTable.

    The subclass exists only so callers can label reverse links
    differently. The redundant __init__ that merely forwarded to
    super() has been dropped; the inherited constructor is used.
    """
class Analyzer (XmlBase):
    """Walks a mask XML file and reports which fields/links are searchable."""

    @classmethod
    def analyze_masks(cls, maskxmlfile, mask_name):
        """Analyze one named mask, or every top-level mask when mask_name is None.

        Exits the process with status 1 when the requested mask is missing.
        """
        tree = ElementTree()
        tree.parse(maskxmlfile)
        root = tree.getroot()
        if mask_name is not None:
            mask = root.find("em:mask[@name='{0}']".format(mask_name), cls.nsmap)
            if mask is None:
                sys.stderr.write("failed to find mask '{0}'\n".format(mask_name))
                sys.exit(1)
            cls._analyze_mask(mask)
        else:
            for mask in root.findall('em:mask', cls.nsmap):
                cls._analyze_mask(mask)

    @classmethod
    def _analyze_mask(cls, mask, indent = ''):
        """Print one mask plus its searchable links and fields, recursing into sub-masks."""
        print("{0}M:{1}".format(indent, mask.get('name', '<unnamed>')))
        # The reverse-linked and linked loops were duplicates differing only
        # in tag path, wrapper class and report label; share one helper.
        cls._analyze_links(mask, 'em:fields/em:reverse-linked-table',
                           ReverseLinkedTable, 'R', indent)
        cls._analyze_links(mask, 'em:fields/em:linked-table',
                           LinkedTable, 'N', indent)
        for fieldxml in mask.findall('em:fields/em:field', cls.nsmap):
            field = Field(fieldxml)
            if len(field.search_flags):
                print("{0} F:{1} ({2})".format(indent, field.name, field.search_flags))

    @classmethod
    def _analyze_links(cls, mask, path, link_cls, label, indent):
        """Report searchable links of one kind; always recurse into embedded masks."""
        for linkxml in mask.findall(path, cls.nsmap):
            link = link_cls(linkxml)
            # Only searchable links are printed, but recursion is
            # unconditional (matches the original behavior).
            if len(link.search_flags):
                print("{0} {1}:{2} ({3})".format(indent, label, link.name, link.search_flags))
            maskxml = linkxml.find('em:mask', cls.nsmap)
            if maskxml is not None:
                cls._analyze_mask(maskxml, indent + ' ')
if __name__ == '__main__':
    # Usage: analyze-mask.py <maskset.xml> [<mask name>]
    if len(sys.argv) < 2:
        sys.stderr.write('usage: {0} <maskset.xml> [<mask name>]\n'.format(sys.argv[0]))
        sys.exit(1)
    if not os.path.isfile(sys.argv[1]):
        sys.stderr.write('failed to find {0}\n'.format(sys.argv[1]))
        sys.exit(1)
    # Conditional expression replaces the error-prone `cond and x or None`.
    Analyzer.analyze_masks(sys.argv[1], sys.argv[2] if len(sys.argv) > 2 else None)
# $Id$
# Copyright (c) 2004 <NAME>
# (Royal Institute of Technology, Stockholm, Sweden).
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Institute nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import re
import string
import sys
import generate
import rfc3454
import rfc4518
import stringprep
# Python 2 generator script. Usage: gen-errorlist.py <rfc3454.txt> <out-dir>
# Emits errorlist_table.h / errorlist_table.c for libwind from the
# stringprep tables of RFC 3454, overlaid with the RFC 4518 tables.
if len(sys.argv) != 3:
    print "usage: %s rfc3454.txt out-dir" % sys.argv[0]
    sys.exit(1)
# Read the RFC 3454 tables, then overlay/replace with the RFC 4518 ones.
tables = rfc3454.read(sys.argv[1])
t2 = rfc4518.read()
for x in t2.iterkeys():
    tables[x] = t2[x]
# Table name -> flag-symbol mapping; its keys select which tables are emitted.
error_list = stringprep.get_errorlist()
errorlist_h = generate.Header('%s/errorlist_table.h' % sys.argv[2])
errorlist_c = generate.Implementation('%s/errorlist_table.c' % sys.argv[2])
# Fixed header boilerplate: entry struct plus the exported table symbols.
errorlist_h.file.write(
'''
#include "windlocl.h"
struct error_entry {
  uint32_t start;
  unsigned len;
  wind_profile_flags flags;
};
extern const struct error_entry _wind_errorlist_table[];
extern const size_t _wind_errorlist_table_size;
''')
errorlist_c.file.write(
'''
#include <stdlib.h>
#include "errorlist_table.h"
const struct error_entry _wind_errorlist_table[] = {
''')
# Collect [start, length, description, [table names]] rows for every
# code point or code-point range mentioned in a selected table.
trans=[]
for t in error_list.iterkeys():
    for l in tables[t]:
        # Range form: "XXXX-YYYY; description".
        m = re.search('^ *([0-9A-F]+)-([0-9A-F]+); *(.*) *$', l)
        if m:
            start = int(m.group(1), 0x10)
            end = int(m.group(2), 0x10)
            desc = m.group(3)
            trans.append([start, end - start + 1, desc, [t]])
        else:
            # Single code point form: "XXXX; description".
            m = re.search('^ *([0-9A-F]+); *(.*) *$', l)
            if m:
                trans.append([int(m.group(1), 0x10), 1, m.group(2), [t]])
# Sort by start and merge ranges (see stringprep.sort_merge_trans).
trans = stringprep.sort_merge_trans(trans)
for x in trans:
    # NOTE(review): `tables` here shadows the module-level dict of the same
    # name; harmless because the dict is not used past this point, but
    # worth renaming if this script is ever touched again.
    (start, length, description, tables) = x
    symbols = stringprep.symbols(error_list, tables)
    if len(symbols) == 0:
        print "no symbol for %s" % description
        sys.exit(1)
    errorlist_c.file.write(" {0x%x, 0x%x, %s}, /* %s: %s */\n"
                           % (start, length, symbols, ",".join(tables), description))
errorlist_c.file.write(
'''};
''')
errorlist_c.file.write(
    "const size_t _wind_errorlist_table_size = %u;\n" % len(trans))
errorlist_h.close()
errorlist_c.close()
# $Id$
# Copyright (c) 2004 <NAME>
# (Royal Institute of Technology, Stockholm, Sweden).
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Institute nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import re
import string
import sys
import generate
import rfc3454
import rfc4518
import stringprep
# Python 2 generator script. Usage: gen-errorlist.py <rfc3454.txt> <out-dir>
# Emits errorlist_table.h / errorlist_table.c for libwind from the
# stringprep tables of RFC 3454, overlaid with the RFC 4518 tables.
if len(sys.argv) != 3:
    print "usage: %s rfc3454.txt out-dir" % sys.argv[0]
    sys.exit(1)
# Read the RFC 3454 tables, then overlay/replace with the RFC 4518 ones.
tables = rfc3454.read(sys.argv[1])
t2 = rfc4518.read()
for x in t2.iterkeys():
    tables[x] = t2[x]
# Table name -> flag-symbol mapping; its keys select which tables are emitted.
error_list = stringprep.get_errorlist()
errorlist_h = generate.Header('%s/errorlist_table.h' % sys.argv[2])
errorlist_c = generate.Implementation('%s/errorlist_table.c' % sys.argv[2])
# Fixed header boilerplate: entry struct plus the exported table symbols.
errorlist_h.file.write(
'''
#include "windlocl.h"
struct error_entry {
  uint32_t start;
  unsigned len;
  wind_profile_flags flags;
};
extern const struct error_entry _wind_errorlist_table[];
extern const size_t _wind_errorlist_table_size;
''')
errorlist_c.file.write(
'''
#include <stdlib.h>
#include "errorlist_table.h"
const struct error_entry _wind_errorlist_table[] = {
''')
# Collect [start, length, description, [table names]] rows for every
# code point or code-point range mentioned in a selected table.
trans=[]
for t in error_list.iterkeys():
    for l in tables[t]:
        # Range form: "XXXX-YYYY; description".
        m = re.search('^ *([0-9A-F]+)-([0-9A-F]+); *(.*) *$', l)
        if m:
            start = int(m.group(1), 0x10)
            end = int(m.group(2), 0x10)
            desc = m.group(3)
            trans.append([start, end - start + 1, desc, [t]])
        else:
            # Single code point form: "XXXX; description".
            m = re.search('^ *([0-9A-F]+); *(.*) *$', l)
            if m:
                trans.append([int(m.group(1), 0x10), 1, m.group(2), [t]])
# Sort by start and merge ranges (see stringprep.sort_merge_trans).
trans = stringprep.sort_merge_trans(trans)
for x in trans:
    # NOTE(review): `tables` here shadows the module-level dict of the same
    # name; harmless because the dict is not used past this point, but
    # worth renaming if this script is ever touched again.
    (start, length, description, tables) = x
    symbols = stringprep.symbols(error_list, tables)
    if len(symbols) == 0:
        print "no symbol for %s" % description
        sys.exit(1)
    errorlist_c.file.write(" {0x%x, 0x%x, %s}, /* %s: %s */\n"
                           % (start, length, symbols, ",".join(tables), description))
errorlist_c.file.write(
'''};
''')
errorlist_c.file.write(
    "const size_t _wind_errorlist_table_size = %u;\n" % len(trans))
errorlist_h.close()
errorlist_c.close()
import discord
from discord.ext import commands
from discord.ext.commands import has_permissions, BucketType, cooldown
from datetime import datetime, timedelta
import asyncio
import json
import pymongo
class Moderation(commands.Cog):
    """Moderation commands: mute/unmute, kick/ban/unban, warn, purge,
    channel nuke/lock/slowmode, nicknames and welcome-channel config."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(aliases=['m'])
    @commands.has_permissions(manage_roles = True)
    async def mute(self, ctx, user:discord.Member = None, time:int = None):
        """Give *user* the "Muted" role, optionally removing it after *time* minutes."""
        if not user:
            await ctx.send("Please specify whom to mute")
        else:
            role = discord.utils.get(ctx.guild.roles, name="Muted")
            if not role:
                # NOTE(review): these are *role* permissions, not per-channel
                # overwrites - channels that explicitly allow send_messages
                # will still let a "Muted" member talk. Confirm intent.
                perms = discord.Permissions(send_messages=False, read_messages=True)
                await ctx.guild.create_role(name="Muted", permissions = perms)
                role = discord.utils.get(ctx.guild.roles, name="Muted")
            await user.add_roles(role)
            if not time:
                embed = discord.Embed(description = "Has been muted!" )
                embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed, delete_after=5)
            else:
                embed1 = discord.Embed(description = f"Has been muted for {time} minutes!" )
                embed1.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed1.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed1, delete_after=5)
                # Timed unmute: block this command coroutine for the whole
                # duration, then drop the role if the member still has it.
                if role in user.roles:
                    await asyncio.sleep(time*60)
                    await user.remove_roles(role)
                    embed2 = discord.Embed(description = f"Has been unmuted after {time} minutes!" )
                    embed2.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                    embed2.set_footer(text=f'Requested by {ctx.author}')
                    await ctx.send(embed=embed2, delete_after=5)
        # Remove the invoking command message after a short delay.
        await asyncio.sleep(4)
        Message = ctx.message
        await Message.delete()

    @commands.command(aliases=['um'])
    @commands.has_permissions(manage_roles = True)
    async def unmute(self, ctx, user:discord.Member = None):
        """Remove the "Muted" role from *user* if they currently have it."""
        role = discord.utils.get(ctx.guild.roles, name="Muted")
        if not user:
            await ctx.send("Please mention whom to unmute!")
        else:
            if role in user.roles:
                await user.remove_roles(role)
                embed = discord.Embed(description = "Has been unmuted!" )
                embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed, delete_after=5)
            else:
                embed = discord.Embed(description = "Hasn't been muted yet!" )
                embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed, delete_after=5)
        # Remove the invoking command message after a short delay.
        await asyncio.sleep(4)
        Message = ctx.message
        await Message.delete()

    @commands.command(aliases=['k'])
    @has_permissions(kick_members = True)
    async def kick(self, ctx, user:discord.Member = None,*, reason = "No Reason Specified"):
        """Kick *user* with an optional reason and try to DM them about it."""
        if not user:
            await ctx.send("Please specify whom to kick!")
        else:
            await user.kick(reason=reason)
            embed = discord.Embed(description = f"Because {reason}")
            embed.set_author(name= f"{user} has been kick", url=user.avatar_url, icon_url=user.avatar_url)
            embed.set_footer(text=f'Requested by {ctx.author}')
            await ctx.send(embed=embed)
            # Best-effort DM; fails (e.g. Forbidden) when DMs are closed.
            try:
                await user.send(f"You have been kicked. Reason {reason}")
            except Exception:
                await ctx.send(f"{user.mention}'s DM is closed!", delete_after = 5)

    @commands.command(aliases=['b'])
    @commands.has_permissions(ban_members=True)
    async def ban(self, ctx, user: discord.Member, *, reason=None):
        """Ban *user* with an optional reason and try to DM them about it."""
        await user.ban(reason=reason)
        embed = discord.Embed(description = f"Has been Banned, because {reason}")
        embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
        embed.set_footer(text=f'Requested by {ctx.author}')
        await ctx.send(embed=embed)
        # NOTE(review): DMing *after* the ban may fail once no mutual guild
        # remains; DMing before banning would be more reliable. Confirm.
        try:
            await user.send(f"You have been banned. Reason {reason}")
        except Exception:
            await ctx.send(f"{user.mention}'s DM is closed!", delete_after = 5)

    # The below code unbans player.
    @commands.command(aliases=['ub'])
    @commands.has_permissions(administrator=True)
    async def unban(self, ctx, *, member):
        """Unban a user given as "Name#1234"."""
        banned_users = await ctx.guild.bans()
        try:
            member_name, member_discriminator = member.split("#")
        except Exception:
            # NOTE(review): on a malformed argument this only sends a help
            # message - member_name stays unbound and the loop below raises
            # NameError. An early `return` here is probably intended.
            await ctx.send("Use command properly! eg: `.unban MEE6#4876`")
        for ban_entry in banned_users:
            user = ban_entry.user
            if (user.name, user.discriminator) == (member_name, member_discriminator):
                await ctx.guild.unban(user)
                await ctx.send(f'Unbanned {user.mention}')

    @commands.command(aliases=['w'])
    @commands.has_permissions(kick_members = True)
    async def warn(self, ctx, user: discord.Member = None, *, reason = "No reason provided"):
        """Post a warning embed for *user* and try to DM them the reason."""
        if not user:
            await ctx.send("DUMBASS mention whom to warn!")
        else:
            embed = discord.Embed(description = f"Because {reason}")
            embed.set_author(name= f"{user} <NAME> warned", url=user.avatar_url, icon_url=user.avatar_url)
            embed.set_footer(text=f'Requested by {ctx.author}')
            await ctx.send(embed=embed)
            # Best-effort DM; fails when DMs are closed.
            try:
                await user.send(f"You have been warned. Reason {reason}")
            except Exception:
                await ctx.send(f"{user.mention}'s DM is closed!", delete_after = 5)

    @commands.command(name='purge')
    async def purge(self, ctx, num_messages: int = 10, user:discord.Member = None):
        """
        Clear <n> messages from current channel
        """
        # When a user is given, delete only that user's messages.
        if user:
            channel = ctx.message.channel
            def check(msg):
                return msg.author.id == user.id
            await ctx.message.delete()
            await channel.purge(limit=num_messages, check=check, before=None)
            await ctx.send(f"`{num_messages} messages from {user} deleted!`", delete_after=5)
            return
        channel = ctx.message.channel
        await ctx.message.delete()
        await channel.purge(limit=num_messages, check=None, before=None)
        await ctx.send(f"`{num_messages} messages has been deleted!`", delete_after=5)

    @commands.command()
    @cooldown(1, 300, BucketType.user)
    @commands.is_owner()
    async def nuke(self, ctx, channels : discord.TextChannel=None):
        """Clone *channels*, delete the original, and post a nuke GIF.

        Requires two "yes" confirmations; restricted to the guild owner
        (checked in-body) on top of the bot-owner decorator check.
        """
        if channels == None:
            await ctx.send('Give a channel')
            return
        if ctx.author != ctx.guild.owner:
            await ctx.send('Only **{}** Can use this Command'.format(ctx.guild.owner))
        else:
            verif = await ctx.send('Are you sure!')
            await ctx.send('Type in `yes`. To proceed')
            # Accept only a literal "yes" from the invoking user.
            def check(m):
                user = ctx.author
                return m.author.id == user.id and m.content == 'yes'
            msg = await self.bot.wait_for('message', check=check)
            await ctx.channel.send('Theres no going back!\n**Are you sure.** \n Type in `yes` to proceed!')
            msg = await self.bot.wait_for('message', check=check)
            new = await channels.clone()
            await channels.delete()
            await new.send('https://media1.tenor.com/images/6c485efad8b910e5289fc7968ea1d22f/tenor.gif?itemid=5791468')
            await asyncio.sleep(2)
            await new.send(f'**{self.bot.user.name}** has nuked this channel!')

    @commands.command(aliases=['nick'])
    @commands.has_guild_permissions(manage_nicknames=True)
    async def nickname(self, ctx, member : discord.Member, *args):
        """Set *member*'s nickname to the remaining words of the message."""
        if member == None:
            await ctx.send('Give me a user dumbass')
        elif member == ctx.guild.owner:
            await ctx.send('You cant name the owner!')
        else:
            x = ' '.join(map(str, args))
            await member.edit(nick=f'{x}')
            await ctx.send(f'{member.name} has been changed to {x}')

    @commands.command()
    @commands.has_guild_permissions(manage_channels=True)
    @commands.cooldown(1, 60, BucketType.user)
    async def slowmode(self, ctx, time : int=0):
        """Set this channel's slowmode delay in seconds (0..21600)."""
        if time < 0:
            await ctx.send('Give a positive number.')
            return
        try:
            if time > 21600:
                await ctx.send('Number is too large. You can only have a maximum time of `21600` seconds (6 Hours)')
            else:
                await ctx.channel.edit(slowmode_delay=time)
                await ctx.send(f'The channel {ctx.channel.name} now has a slowmode of {time} seconds')
        except Exception:
            await ctx.send('Not a number!')

    @commands.command()
    @commands.has_permissions(manage_channels=True)
    async def lock(self, ctx, channel: discord.TextChannel=None):
        """Toggle @everyone's send_messages overwrite on *channel*.

        Despite the name this also unlocks: when the channel is already
        locked the overwrite is flipped back to allow.
        """
        channel = channel or ctx.channel
        if ctx.guild.default_role not in channel.overwrites:
            # No @everyone overwrite yet: create one denying sending.
            overwrites = {
                ctx.guild.default_role: discord.PermissionOverwrite(send_messages=False)
            }
            await channel.edit(overwrites=overwrites)
            await ctx.send("**The channel `{}` has successfully been locked!**".format(ctx.channel.name))
        elif channel.overwrites[ctx.guild.default_role].send_messages == True or channel.overwrites[ctx.guild.default_role].send_messages == None:
            # Overwrite exists but allows (or is neutral): deny sending.
            overwrites = channel.overwrites[ctx.guild.default_role]
            overwrites.send_messages = False
            await channel.set_permissions(ctx.guild.default_role, overwrite=overwrites)
            await ctx.send("**The channel `{}` has successfully been locked!**".format(ctx.channel.name))
        else:
            # Already denied: unlock.
            overwrites = channel.overwrites[ctx.guild.default_role]
            overwrites.send_messages = True
            await channel.set_permissions(ctx.guild.default_role, overwrite=overwrites)
            await ctx.send('**The channel `{}` has now been unlocked!**'.format(ctx.channel.name))

    @commands.command(aliases=['sw', 'setwelcome', 'set_w'])
    async def set_welcome(self, ctx, channel : discord.TextChannel=None):
        """Store *channel* as this guild's welcome channel in welcome.json."""
        if channel == None:
            await ctx.send('You havent provided a valid channel!')
        else:
            # Read-modify-write of the JSON store keyed by guild id.
            with open('./Other/json/welcome.json', 'r') as f:
                welcome_id = json.load(f)
            welcome_id[str(ctx.guild.id)] = f'{channel.id}'
            with open('./Other/json/welcome.json', 'w') as f:
                json.dump(welcome_id, f, indent=4)
            await ctx.send(f'The welcomes channel has been set as `{channel.name}`.')

    @commands.command(aliases=['rw', 'remove_w', 'r_welcome', 'removewelcome', 'rwelcome'])
    async def remove_welcome(self, ctx):
        """Mark this guild's welcome channel as "Not Set" in welcome.json."""
        with open('./Other/json/welcome.json', 'r') as f:
            welcome_id = json.load(f)
        welcome_id[str(ctx.guild.id)] = f'Not Set'
        with open('./Other/json/welcome.json', 'w') as f:
            json.dump(welcome_id, f, indent=4)
        await ctx.send(f'You have removed the welcome messages!')
def setup(bot):
    """discord.py extension entry point: register the Moderation cog.

    Fixes the original line, which had unrelated artifact text fused
    onto the end of the add_cog call.
    """
    bot.add_cog(Moderation(bot))
from discord.ext import commands
from discord.ext.commands import has_permissions, BucketType, cooldown
from datetime import datetime, timedelta
import asyncio
import json
import pymongo
class Moderation(commands.Cog):
    """Moderation commands: mute/unmute, kick/ban/unban, warn, purge,
    channel nuke/lock/slowmode, nicknames and welcome-channel config."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(aliases=['m'])
    @commands.has_permissions(manage_roles = True)
    async def mute(self, ctx, user:discord.Member = None, time:int = None):
        """Give *user* the "Muted" role, optionally removing it after *time* minutes."""
        if not user:
            await ctx.send("Please specify whom to mute")
        else:
            role = discord.utils.get(ctx.guild.roles, name="Muted")
            if not role:
                # NOTE(review): these are *role* permissions, not per-channel
                # overwrites - channels that explicitly allow send_messages
                # will still let a "Muted" member talk. Confirm intent.
                perms = discord.Permissions(send_messages=False, read_messages=True)
                await ctx.guild.create_role(name="Muted", permissions = perms)
                role = discord.utils.get(ctx.guild.roles, name="Muted")
            await user.add_roles(role)
            if not time:
                embed = discord.Embed(description = "Has been muted!" )
                embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed, delete_after=5)
            else:
                embed1 = discord.Embed(description = f"Has been muted for {time} minutes!" )
                embed1.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed1.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed1, delete_after=5)
                # Timed unmute: block this command coroutine for the whole
                # duration, then drop the role if the member still has it.
                if role in user.roles:
                    await asyncio.sleep(time*60)
                    await user.remove_roles(role)
                    embed2 = discord.Embed(description = f"Has been unmuted after {time} minutes!" )
                    embed2.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                    embed2.set_footer(text=f'Requested by {ctx.author}')
                    await ctx.send(embed=embed2, delete_after=5)
        # Remove the invoking command message after a short delay.
        await asyncio.sleep(4)
        Message = ctx.message
        await Message.delete()

    @commands.command(aliases=['um'])
    @commands.has_permissions(manage_roles = True)
    async def unmute(self, ctx, user:discord.Member = None):
        """Remove the "Muted" role from *user* if they currently have it."""
        role = discord.utils.get(ctx.guild.roles, name="Muted")
        if not user:
            await ctx.send("Please mention whom to unmute!")
        else:
            if role in user.roles:
                await user.remove_roles(role)
                embed = discord.Embed(description = "Has been unmuted!" )
                embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed, delete_after=5)
            else:
                embed = discord.Embed(description = "Hasn't been muted yet!" )
                embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
                embed.set_footer(text=f'Requested by {ctx.author}')
                await ctx.send(embed=embed, delete_after=5)
        # Remove the invoking command message after a short delay.
        await asyncio.sleep(4)
        Message = ctx.message
        await Message.delete()

    @commands.command(aliases=['k'])
    @has_permissions(kick_members = True)
    async def kick(self, ctx, user:discord.Member = None,*, reason = "No Reason Specified"):
        """Kick *user* with an optional reason and try to DM them about it."""
        if not user:
            await ctx.send("Please specify whom to kick!")
        else:
            await user.kick(reason=reason)
            embed = discord.Embed(description = f"Because {reason}")
            embed.set_author(name= f"{user} has been kick", url=user.avatar_url, icon_url=user.avatar_url)
            embed.set_footer(text=f'Requested by {ctx.author}')
            await ctx.send(embed=embed)
            # Best-effort DM; fails (e.g. Forbidden) when DMs are closed.
            try:
                await user.send(f"You have been kicked. Reason {reason}")
            except Exception:
                await ctx.send(f"{user.mention}'s DM is closed!", delete_after = 5)

    @commands.command(aliases=['b'])
    @commands.has_permissions(ban_members=True)
    async def ban(self, ctx, user: discord.Member, *, reason=None):
        """Ban *user* with an optional reason and try to DM them about it."""
        await user.ban(reason=reason)
        embed = discord.Embed(description = f"Has been Banned, because {reason}")
        embed.set_author(name= user, url=user.avatar_url, icon_url=user.avatar_url)
        embed.set_footer(text=f'Requested by {ctx.author}')
        await ctx.send(embed=embed)
        # NOTE(review): DMing *after* the ban may fail once no mutual guild
        # remains; DMing before banning would be more reliable. Confirm.
        try:
            await user.send(f"You have been banned. Reason {reason}")
        except Exception:
            await ctx.send(f"{user.mention}'s DM is closed!", delete_after = 5)

    # The below code unbans player.
    @commands.command(aliases=['ub'])
    @commands.has_permissions(administrator=True)
    async def unban(self, ctx, *, member):
        """Unban a user given as "Name#1234"."""
        banned_users = await ctx.guild.bans()
        try:
            member_name, member_discriminator = member.split("#")
        except Exception:
            # NOTE(review): on a malformed argument this only sends a help
            # message - member_name stays unbound and the loop below raises
            # NameError. An early `return` here is probably intended.
            await ctx.send("Use command properly! eg: `.unban MEE6#4876`")
        for ban_entry in banned_users:
            user = ban_entry.user
            if (user.name, user.discriminator) == (member_name, member_discriminator):
                await ctx.guild.unban(user)
                await ctx.send(f'Unbanned {user.mention}')

    @commands.command(aliases=['w'])
    @commands.has_permissions(kick_members = True)
    async def warn(self, ctx, user: discord.Member = None, *, reason = "No reason provided"):
        """Post a warning embed for *user* and try to DM them the reason."""
        if not user:
            await ctx.send("DUMBASS mention whom to warn!")
        else:
            embed = discord.Embed(description = f"Because {reason}")
            embed.set_author(name= f"{user} <NAME> warned", url=user.avatar_url, icon_url=user.avatar_url)
            embed.set_footer(text=f'Requested by {ctx.author}')
            await ctx.send(embed=embed)
            # Best-effort DM; fails when DMs are closed.
            try:
                await user.send(f"You have been warned. Reason {reason}")
            except Exception:
                await ctx.send(f"{user.mention}'s DM is closed!", delete_after = 5)

    @commands.command(name='purge')
    async def purge(self, ctx, num_messages: int = 10, user:discord.Member = None):
        """
        Clear <n> messages from current channel
        """
        # When a user is given, delete only that user's messages.
        if user:
            channel = ctx.message.channel
            def check(msg):
                return msg.author.id == user.id
            await ctx.message.delete()
            await channel.purge(limit=num_messages, check=check, before=None)
            await ctx.send(f"`{num_messages} messages from {user} deleted!`", delete_after=5)
            return
        channel = ctx.message.channel
        await ctx.message.delete()
        await channel.purge(limit=num_messages, check=None, before=None)
        await ctx.send(f"`{num_messages} messages has been deleted!`", delete_after=5)

    @commands.command()
    @cooldown(1, 300, BucketType.user)
    @commands.is_owner()
    async def nuke(self, ctx, channels : discord.TextChannel=None):
        """Clone *channels*, delete the original, and post a nuke GIF.

        Requires two "yes" confirmations; restricted to the guild owner
        (checked in-body) on top of the bot-owner decorator check.
        """
        if channels == None:
            await ctx.send('Give a channel')
            return
        if ctx.author != ctx.guild.owner:
            await ctx.send('Only **{}** Can use this Command'.format(ctx.guild.owner))
        else:
            verif = await ctx.send('Are you sure!')
            await ctx.send('Type in `yes`. To proceed')
            # Accept only a literal "yes" from the invoking user.
            def check(m):
                user = ctx.author
                return m.author.id == user.id and m.content == 'yes'
            msg = await self.bot.wait_for('message', check=check)
            await ctx.channel.send('Theres no going back!\n**Are you sure.** \n Type in `yes` to proceed!')
            msg = await self.bot.wait_for('message', check=check)
            new = await channels.clone()
            await channels.delete()
            await new.send('https://media1.tenor.com/images/6c485efad8b910e5289fc7968ea1d22f/tenor.gif?itemid=5791468')
            await asyncio.sleep(2)
            await new.send(f'**{self.bot.user.name}** has nuked this channel!')

    @commands.command(aliases=['nick'])
    @commands.has_guild_permissions(manage_nicknames=True)
    async def nickname(self, ctx, member : discord.Member, *args):
        """Set *member*'s nickname to the remaining words of the message."""
        if member == None:
            await ctx.send('Give me a user dumbass')
        elif member == ctx.guild.owner:
            await ctx.send('You cant name the owner!')
        else:
            x = ' '.join(map(str, args))
            await member.edit(nick=f'{x}')
            await ctx.send(f'{member.name} has been changed to {x}')

    @commands.command()
    @commands.has_guild_permissions(manage_channels=True)
    @commands.cooldown(1, 60, BucketType.user)
    async def slowmode(self, ctx, time : int=0):
        """Set this channel's slowmode delay in seconds (0..21600)."""
        if time < 0:
            await ctx.send('Give a positive number.')
            return
        try:
            if time > 21600:
                await ctx.send('Number is too large. You can only have a maximum time of `21600` seconds (6 Hours)')
            else:
                await ctx.channel.edit(slowmode_delay=time)
                await ctx.send(f'The channel {ctx.channel.name} now has a slowmode of {time} seconds')
        except Exception:
            await ctx.send('Not a number!')

    @commands.command()
    @commands.has_permissions(manage_channels=True)
    async def lock(self, ctx, channel: discord.TextChannel=None):
        """Toggle @everyone's send_messages overwrite on *channel*.

        Despite the name this also unlocks: when the channel is already
        locked the overwrite is flipped back to allow.
        """
        channel = channel or ctx.channel
        if ctx.guild.default_role not in channel.overwrites:
            # No @everyone overwrite yet: create one denying sending.
            overwrites = {
                ctx.guild.default_role: discord.PermissionOverwrite(send_messages=False)
            }
            await channel.edit(overwrites=overwrites)
            await ctx.send("**The channel `{}` has successfully been locked!**".format(ctx.channel.name))
        elif channel.overwrites[ctx.guild.default_role].send_messages == True or channel.overwrites[ctx.guild.default_role].send_messages == None:
            # Overwrite exists but allows (or is neutral): deny sending.
            overwrites = channel.overwrites[ctx.guild.default_role]
            overwrites.send_messages = False
            await channel.set_permissions(ctx.guild.default_role, overwrite=overwrites)
            await ctx.send("**The channel `{}` has successfully been locked!**".format(ctx.channel.name))
        else:
            # Already denied: unlock.
            overwrites = channel.overwrites[ctx.guild.default_role]
            overwrites.send_messages = True
            await channel.set_permissions(ctx.guild.default_role, overwrite=overwrites)
            await ctx.send('**The channel `{}` has now been unlocked!**'.format(ctx.channel.name))

    @commands.command(aliases=['sw', 'setwelcome', 'set_w'])
    async def set_welcome(self, ctx, channel : discord.TextChannel=None):
        """Store *channel* as this guild's welcome channel in welcome.json."""
        if channel == None:
            await ctx.send('You havent provided a valid channel!')
        else:
            # Read-modify-write of the JSON store keyed by guild id.
            with open('./Other/json/welcome.json', 'r') as f:
                welcome_id = json.load(f)
            welcome_id[str(ctx.guild.id)] = f'{channel.id}'
            with open('./Other/json/welcome.json', 'w') as f:
                json.dump(welcome_id, f, indent=4)
            await ctx.send(f'The welcomes channel has been set as `{channel.name}`.')

    @commands.command(aliases=['rw', 'remove_w', 'r_welcome', 'removewelcome', 'rwelcome'])
    async def remove_welcome(self, ctx):
        """Mark this guild's welcome channel as "Not Set" in welcome.json."""
        with open('./Other/json/welcome.json', 'r') as f:
            welcome_id = json.load(f)
        welcome_id[str(ctx.guild.id)] = f'Not Set'
        with open('./Other/json/welcome.json', 'w') as f:
            json.dump(welcome_id, f, indent=4)
        await ctx.send(f'You have removed the welcome messages!')
def setup(bot):
    """discord.py extension entry point: register the Moderation cog."""
    bot.add_cog(Moderation(bot))
from collections.abc import Sequence
import urwid
import blinker
import textwrap
import pprint
from typing import Optional
from mitmproxy import exceptions
from mitmproxy import optmanager
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import signals
from mitmproxy.tools.console import overlay
HELP_HEIGHT = 5  # Rows for the option-help area (name suggests; used by layout code outside this chunk - confirm).
def can_edit_inplace(opt):
    """Return True if *opt*'s value can be edited inline as text.

    Options with a fixed choice set use a selector instead, and only
    plain/optional str and int values are text-editable.
    """
    if opt.choices:
        return False
    # Explicit bool result: the original implicitly returned None for
    # non-editable typespecs, relying on falsiness.
    return opt.typespec in [str, int, Optional[str], Optional[int]]
def fcol(s, width, attr):
    """Return a fixed-width urwid column tuple rendering *s* with *attr*."""
    return ("fixed", width, urwid.Text((attr, str(s))))
# Fired with the focused option's help text (see OptionListWalker.set_focus).
option_focus_change = blinker.Signal()
class OptionItem(urwid.WidgetWrap):
    """One row of the options list: option name plus its current value.

    Renders either a read-only styled value or an urwid.Edit widget
    while the value is being edited in place.
    """
    def __init__(self, walker, opt, focused, namewidth, editing):
        self.walker, self.opt, self.focused = walker, opt, focused
        self.namewidth = namewidth
        self.editing = editing
        super().__init__(None)
        self._w = self.get_widget()

    def get_widget(self):
        """Build the two-column (name, value) widget for this row."""
        val = self.opt.current()
        # Display form of the value: booleans as true/false, empty/falsy
        # values as "", string sequences pretty-printed, else str().
        if self.opt.typespec == bool:
            displayval = "true" if val else "false"
        elif not val:
            displayval = ""
        elif self.opt.typespec == Sequence[str]:
            displayval = pprint.pformat(val, indent=1)
        else:
            displayval = str(val)
        changed = self.walker.master.options.has_changed(self.opt.name)
        # Style combines focus state with whether the option has changed
        # (per options.has_changed).
        if self.focused:
            valstyle = "option_active_selected" if changed else "option_selected"
        else:
            valstyle = "option_active" if changed else "text"
        if self.editing:
            valw = urwid.Edit(edit_text=displayval)
        else:
            valw = urwid.AttrMap(
                urwid.Padding(urwid.Text([(valstyle, displayval)])), valstyle
            )
        return urwid.Columns(
            [
                (
                    self.namewidth,
                    urwid.Text([("title", self.opt.name.ljust(self.namewidth))]),
                ),
                valw,
            ],
            dividechars=2,
            focus_column=1,
        )

    def get_edit_text(self):
        # Column 1 is the Edit widget while editing is active.
        return self._w[1].get_edit_text()

    def selectable(self):
        return True

    def keypress(self, size, key):
        # While editing, forward keys to the Edit widget and consume
        # them; otherwise let the container handle the key.
        if self.editing:
            self._w[1].keypress(size, key)
            return
        return key
class OptionListWalker(urwid.ListWalker):
def __init__(self, master):
self.master = master
self.index = 0
self.focusobj = None
self.opts = sorted(master.options.keys())
self.maxlen = max(len(i) for i in self.opts)
self.editing = False
self.set_focus(0)
self.master.options.changed.connect(self.sig_mod)
def sig_mod(self, *args, **kwargs):
self.opts = sorted(self.master.options.keys())
self.maxlen = max(len(i) for i in self.opts)
self._modified()
self.set_focus(self.index)
def start_editing(self):
self.editing = True
self.focus_obj = self._get(self.index, True)
self._modified()
def stop_editing(self):
self.editing = False
self.focus_obj = self._get(self.index, False)
self.set_focus(self.index)
self._modified()
def get_edit_text(self):
return self.focus_obj.get_edit_text()
def _get(self, pos, editing):
name = self.opts[pos]
opt = self.master.options._options[name]
return OptionItem(self, opt, pos == self.index, self.maxlen, editing)
def get_focus(self):
return self.focus_obj, self.index
def set_focus(self, index):
self.editing = False
name = self.opts[index]
opt = self.master.options._options[name]
self.index = index
self.focus_obj = self._get(self.index, self.editing)
option_focus_change.send(opt.help)
self._modified()
def get_next(self, pos):
if pos >= len(self.opts) - 1:
return None, None
pos = pos + 1
return self._get(pos, False), pos
def get_prev(self, pos):
pos = pos - 1
if pos < 0:
return None, None
return self._get(pos, False), pos
def positions(self, reverse=False):
if reverse:
return reversed(range(len(self.opts)))
else:
return range(len(self.opts))
class OptionsList(urwid.ListBox):
def __init__(self, master):
self.master = master
self.walker = OptionListWalker(master)
super().__init__(self.walker)
def save_config(self, path):
try:
optmanager.save(self.master.options, path)
except exceptions.OptionsError as e:
signals.status_message.send(message=str(e))
def keypress(self, size, key):
if self.walker.editing:
if key == "enter":
foc, idx = self.get_focus()
v = self.walker.get_edit_text()
try:
self.master.options.set(f"{foc.opt.name}={v}")
except exceptions.OptionsError as v:
signals.status_message.send(message=str(v))
self.walker.stop_editing()
return None
elif key == "esc":
self.walker.stop_editing()
return None
else:
if key == "m_start":
self.set_focus(0)
self.walker._modified()
elif key == "m_end":
self.set_focus(len(self.walker.opts) - 1)
self.walker._modified()
elif key == "m_select":
foc, idx = self.get_focus()
if foc.opt.typespec == bool:
self.master.options.toggler(foc.opt.name)()
# Bust the focus widget cache
self.set_focus(self.walker.index)
elif can_edit_inplace(foc.opt):
self.walker.start_editing()
self.walker._modified()
elif foc.opt.choices:
self.master.overlay(
overlay.Chooser(
self.master,
foc.opt.name,
foc.opt.choices,
foc.opt.current(),
self.master.options.setter(foc.opt.name),
)
)
elif foc.opt.typespec == Sequence[str]:
self.master.overlay(
overlay.OptionsOverlay(
self.master,
foc.opt.name,
foc.opt.current(),
HELP_HEIGHT + 5,
),
valign="top",
)
else:
raise NotImplementedError()
return super().keypress(size, key)
class OptionHelp(urwid.Frame):
def __init__(self, master):
self.master = master
super().__init__(self.widget(""))
self.set_active(False)
option_focus_change.connect(self.sig_mod)
def set_active(self, val):
h = urwid.Text("Option Help")
style = "heading" if val else "heading_inactive"
self.header = urwid.AttrWrap(h, style)
def widget(self, txt):
cols, _ = self.master.ui.get_cols_rows()
return urwid.ListBox([urwid.Text(i) for i in textwrap.wrap(txt, cols)])
def sig_mod(self, txt):
self.set_body(self.widget(txt))
class Options(urwid.Pile, layoutwidget.LayoutWidget):
title = "Options"
keyctx = "options"
def __init__(self, master):
oh = OptionHelp(master)
self.optionslist = OptionsList(master)
super().__init__(
[
self.optionslist,
(HELP_HEIGHT, oh),
]
)
self.master = master
def current_name(self):
foc, idx = self.optionslist.get_focus()
return foc.opt.name
def keypress(self, size, key):
if key == "m_next":
self.focus_position = (self.focus_position + 1) % len(self.widget_list)
self.widget_list[1].set_active(self.focus_position == 1)
key = None
# This is essentially a copypasta from urwid.Pile's keypress handler.
# So much for "closed for modification, but open for extension".
item_rows = None
if len(size) == 2:
item_rows = self.get_item_rows(size, focus=True)
i = self.widget_list.index(self.focus_item)
tsize = self.get_item_size(size, i, True, item_rows)
return self.focus_item.keypress(tsize, key) | mitmproxy/tools/console/options.py | from collections.abc import Sequence
import urwid
import blinker
import textwrap
import pprint
from typing import Optional
from mitmproxy import exceptions
from mitmproxy import optmanager
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import signals
from mitmproxy.tools.console import overlay
HELP_HEIGHT = 5
def can_edit_inplace(opt):
if opt.choices:
return False
if opt.typespec in [str, int, Optional[str], Optional[int]]:
return True
def fcol(s, width, attr):
s = str(s)
return ("fixed", width, urwid.Text((attr, s)))
option_focus_change = blinker.Signal()
class OptionItem(urwid.WidgetWrap):
def __init__(self, walker, opt, focused, namewidth, editing):
self.walker, self.opt, self.focused = walker, opt, focused
self.namewidth = namewidth
self.editing = editing
super().__init__(None)
self._w = self.get_widget()
def get_widget(self):
val = self.opt.current()
if self.opt.typespec == bool:
displayval = "true" if val else "false"
elif not val:
displayval = ""
elif self.opt.typespec == Sequence[str]:
displayval = pprint.pformat(val, indent=1)
else:
displayval = str(val)
changed = self.walker.master.options.has_changed(self.opt.name)
if self.focused:
valstyle = "option_active_selected" if changed else "option_selected"
else:
valstyle = "option_active" if changed else "text"
if self.editing:
valw = urwid.Edit(edit_text=displayval)
else:
valw = urwid.AttrMap(
urwid.Padding(urwid.Text([(valstyle, displayval)])), valstyle
)
return urwid.Columns(
[
(
self.namewidth,
urwid.Text([("title", self.opt.name.ljust(self.namewidth))]),
),
valw,
],
dividechars=2,
focus_column=1,
)
def get_edit_text(self):
return self._w[1].get_edit_text()
def selectable(self):
return True
def keypress(self, size, key):
if self.editing:
self._w[1].keypress(size, key)
return
return key
class OptionListWalker(urwid.ListWalker):
def __init__(self, master):
self.master = master
self.index = 0
self.focusobj = None
self.opts = sorted(master.options.keys())
self.maxlen = max(len(i) for i in self.opts)
self.editing = False
self.set_focus(0)
self.master.options.changed.connect(self.sig_mod)
def sig_mod(self, *args, **kwargs):
self.opts = sorted(self.master.options.keys())
self.maxlen = max(len(i) for i in self.opts)
self._modified()
self.set_focus(self.index)
def start_editing(self):
self.editing = True
self.focus_obj = self._get(self.index, True)
self._modified()
def stop_editing(self):
self.editing = False
self.focus_obj = self._get(self.index, False)
self.set_focus(self.index)
self._modified()
def get_edit_text(self):
return self.focus_obj.get_edit_text()
def _get(self, pos, editing):
name = self.opts[pos]
opt = self.master.options._options[name]
return OptionItem(self, opt, pos == self.index, self.maxlen, editing)
def get_focus(self):
return self.focus_obj, self.index
def set_focus(self, index):
self.editing = False
name = self.opts[index]
opt = self.master.options._options[name]
self.index = index
self.focus_obj = self._get(self.index, self.editing)
option_focus_change.send(opt.help)
self._modified()
def get_next(self, pos):
if pos >= len(self.opts) - 1:
return None, None
pos = pos + 1
return self._get(pos, False), pos
def get_prev(self, pos):
pos = pos - 1
if pos < 0:
return None, None
return self._get(pos, False), pos
def positions(self, reverse=False):
if reverse:
return reversed(range(len(self.opts)))
else:
return range(len(self.opts))
class OptionsList(urwid.ListBox):
def __init__(self, master):
self.master = master
self.walker = OptionListWalker(master)
super().__init__(self.walker)
def save_config(self, path):
try:
optmanager.save(self.master.options, path)
except exceptions.OptionsError as e:
signals.status_message.send(message=str(e))
def keypress(self, size, key):
if self.walker.editing:
if key == "enter":
foc, idx = self.get_focus()
v = self.walker.get_edit_text()
try:
self.master.options.set(f"{foc.opt.name}={v}")
except exceptions.OptionsError as v:
signals.status_message.send(message=str(v))
self.walker.stop_editing()
return None
elif key == "esc":
self.walker.stop_editing()
return None
else:
if key == "m_start":
self.set_focus(0)
self.walker._modified()
elif key == "m_end":
self.set_focus(len(self.walker.opts) - 1)
self.walker._modified()
elif key == "m_select":
foc, idx = self.get_focus()
if foc.opt.typespec == bool:
self.master.options.toggler(foc.opt.name)()
# Bust the focus widget cache
self.set_focus(self.walker.index)
elif can_edit_inplace(foc.opt):
self.walker.start_editing()
self.walker._modified()
elif foc.opt.choices:
self.master.overlay(
overlay.Chooser(
self.master,
foc.opt.name,
foc.opt.choices,
foc.opt.current(),
self.master.options.setter(foc.opt.name),
)
)
elif foc.opt.typespec == Sequence[str]:
self.master.overlay(
overlay.OptionsOverlay(
self.master,
foc.opt.name,
foc.opt.current(),
HELP_HEIGHT + 5,
),
valign="top",
)
else:
raise NotImplementedError()
return super().keypress(size, key)
class OptionHelp(urwid.Frame):
def __init__(self, master):
self.master = master
super().__init__(self.widget(""))
self.set_active(False)
option_focus_change.connect(self.sig_mod)
def set_active(self, val):
h = urwid.Text("Option Help")
style = "heading" if val else "heading_inactive"
self.header = urwid.AttrWrap(h, style)
def widget(self, txt):
cols, _ = self.master.ui.get_cols_rows()
return urwid.ListBox([urwid.Text(i) for i in textwrap.wrap(txt, cols)])
def sig_mod(self, txt):
self.set_body(self.widget(txt))
class Options(urwid.Pile, layoutwidget.LayoutWidget):
title = "Options"
keyctx = "options"
def __init__(self, master):
oh = OptionHelp(master)
self.optionslist = OptionsList(master)
super().__init__(
[
self.optionslist,
(HELP_HEIGHT, oh),
]
)
self.master = master
def current_name(self):
foc, idx = self.optionslist.get_focus()
return foc.opt.name
def keypress(self, size, key):
if key == "m_next":
self.focus_position = (self.focus_position + 1) % len(self.widget_list)
self.widget_list[1].set_active(self.focus_position == 1)
key = None
# This is essentially a copypasta from urwid.Pile's keypress handler.
# So much for "closed for modification, but open for extension".
item_rows = None
if len(size) == 2:
item_rows = self.get_item_rows(size, focus=True)
i = self.widget_list.index(self.focus_item)
tsize = self.get_item_size(size, i, True, item_rows)
return self.focus_item.keypress(tsize, key) | 0.721547 | 0.138841 |
# Emulate Python 3 style unicode string literals.
from __future__ import unicode_literals
__author__ = '<EMAIL> (<NAME>)'
import icu
from . import abstract_translator
# To allow the rest of the file to assume Python 3 strings, we will assign str
# to unicode for Python 2. This will error in 3 and be ignored.
try:
str = unicode # pylint: disable=redefined-builtin, invalid-name
except NameError:
pass
_NUM_FORMAT_PATTERNS = {
'currency': '${0:,.2f}',
'decimal': '{0:,.3f}',
'percent': '{0:,.0%}',
'scientific': '{0:.0E}'
}
_SHORT_SUFFIXES = {
1E3: 'K',
1E6: 'M',
1E9: 'B',
1E12: 'T'
}
_LONG_SUFFIXES = {
1E3: ' thousand',
1E6: ' million',
1E9: ' billion',
1E12: ' trillion'
}
class SimpleTranslator(abstract_translator.AbstractTranslator):
"""Minimal implementation of the i18n extension API.
This is a minimal implementation of the core API for demo purpose.
"""
def is_msg_available(self, msg_id):
return True
def prepare_literal(self, msg_id, msg_text):
# use the string itself as the opaque object
return msg_text
def render_literal(self, msg, is_html=False):
# Calling format() to apply the same escape mechanism for '{' and '} as
# formatted string
return _maybe_escape(msg, is_html).format()
def prepare(self, msg_id, msg_text, msg_placeholders):
return msg_text
def render(self, msg, values, is_html=False):
return _maybe_escape(msg, is_html).format(**values)
def prepare_plural(self, msg_id, msg_cases, msg_placeholders):
return msg_cases
def render_plural(self, msg, case_value, values, is_html=False):
msg_text = (msg.get('=%d' % case_value)
if int(case_value) == case_value else None) or msg.get('other')
return _maybe_escape(msg_text, is_html).format(**values)
def prepare_icu(self, msg_id, msg_text, msg_fields, is_html=False):
return icu.MessageFormat(_maybe_escape(msg_text, is_html), icu.Locale('en'))
def render_icu(self, msg, values):
return msg.format(values.keys(), map(_format_icu, values.values()))
def format_num(self,
value,
target_format='decimal',
min_fraction_digits=None,
max_fraction_digits=None):
if min_fraction_digits is not None or max_fraction_digits is not None:
raise ValueError(
'Min and max fraction digits arguments are not supported in python')
if target_format in _NUM_FORMAT_PATTERNS:
result = _NUM_FORMAT_PATTERNS[target_format].format(value)
if target_format == 'decimal':
result = result.rstrip('0').rstrip('.')
return result
elif target_format == 'compact_short':
return _format_compact(value, short=True)
elif target_format == 'compact_long':
return _format_compact(value, short=False)
return str(value)
def _format_icu(value):
try:
return icu.Formattable(value)
except:
return icu.Formattable(str(value))
def _format_compact(value, short=True):
"""Compact number formatting using proper suffixes based on magnitude.
Compact number formatting has slightly idiosyncratic behavior mainly due to
two rules. First, if the value is below 1000, the formatting should just be a
2 digit decimal formatting. Second, the number is always truncated to leave at
least 2 digits. This means that a number with one digit more than the
magnitude, such as 1250, is still left with 1.2K, whereas one more digit would
leave it without the decimal, such as 12500 becoming 12K.
Args:
value: The value to format.
short: Whether to use the short form suffixes or long form suffixes.
Returns:
A formatted number as a string.
"""
if value < 1000:
return '{0:.2f}'.format(value).rstrip('0').rstrip('.')
suffixes = _SHORT_SUFFIXES if short else _LONG_SUFFIXES
for key, suffix in sorted(suffixes.items(), reverse=True):
if value >= key:
value = value / float(key)
if value >= 10:
pattern = '{0:,.0f}' + suffix
else:
pattern = '{0:.1f}' + suffix
return pattern.format(value)
def _maybe_escape(msg, is_html):
if is_html:
# Note that '&' is not replaced because the translation can contain HTML
# entities.
return msg.replace('<', '<')
return msg | python/simple_translator.py | # Emulate Python 3 style unicode string literals.
from __future__ import unicode_literals
__author__ = '<EMAIL> (<NAME>)'
import icu
from . import abstract_translator
# To allow the rest of the file to assume Python 3 strings, we will assign str
# to unicode for Python 2. This will error in 3 and be ignored.
try:
str = unicode # pylint: disable=redefined-builtin, invalid-name
except NameError:
pass
_NUM_FORMAT_PATTERNS = {
'currency': '${0:,.2f}',
'decimal': '{0:,.3f}',
'percent': '{0:,.0%}',
'scientific': '{0:.0E}'
}
_SHORT_SUFFIXES = {
1E3: 'K',
1E6: 'M',
1E9: 'B',
1E12: 'T'
}
_LONG_SUFFIXES = {
1E3: ' thousand',
1E6: ' million',
1E9: ' billion',
1E12: ' trillion'
}
class SimpleTranslator(abstract_translator.AbstractTranslator):
"""Minimal implementation of the i18n extension API.
This is a minimal implementation of the core API for demo purpose.
"""
def is_msg_available(self, msg_id):
return True
def prepare_literal(self, msg_id, msg_text):
# use the string itself as the opaque object
return msg_text
def render_literal(self, msg, is_html=False):
# Calling format() to apply the same escape mechanism for '{' and '} as
# formatted string
return _maybe_escape(msg, is_html).format()
def prepare(self, msg_id, msg_text, msg_placeholders):
return msg_text
def render(self, msg, values, is_html=False):
return _maybe_escape(msg, is_html).format(**values)
def prepare_plural(self, msg_id, msg_cases, msg_placeholders):
return msg_cases
def render_plural(self, msg, case_value, values, is_html=False):
msg_text = (msg.get('=%d' % case_value)
if int(case_value) == case_value else None) or msg.get('other')
return _maybe_escape(msg_text, is_html).format(**values)
def prepare_icu(self, msg_id, msg_text, msg_fields, is_html=False):
return icu.MessageFormat(_maybe_escape(msg_text, is_html), icu.Locale('en'))
def render_icu(self, msg, values):
return msg.format(values.keys(), map(_format_icu, values.values()))
def format_num(self,
value,
target_format='decimal',
min_fraction_digits=None,
max_fraction_digits=None):
if min_fraction_digits is not None or max_fraction_digits is not None:
raise ValueError(
'Min and max fraction digits arguments are not supported in python')
if target_format in _NUM_FORMAT_PATTERNS:
result = _NUM_FORMAT_PATTERNS[target_format].format(value)
if target_format == 'decimal':
result = result.rstrip('0').rstrip('.')
return result
elif target_format == 'compact_short':
return _format_compact(value, short=True)
elif target_format == 'compact_long':
return _format_compact(value, short=False)
return str(value)
def _format_icu(value):
try:
return icu.Formattable(value)
except:
return icu.Formattable(str(value))
def _format_compact(value, short=True):
"""Compact number formatting using proper suffixes based on magnitude.
Compact number formatting has slightly idiosyncratic behavior mainly due to
two rules. First, if the value is below 1000, the formatting should just be a
2 digit decimal formatting. Second, the number is always truncated to leave at
least 2 digits. This means that a number with one digit more than the
magnitude, such as 1250, is still left with 1.2K, whereas one more digit would
leave it without the decimal, such as 12500 becoming 12K.
Args:
value: The value to format.
short: Whether to use the short form suffixes or long form suffixes.
Returns:
A formatted number as a string.
"""
if value < 1000:
return '{0:.2f}'.format(value).rstrip('0').rstrip('.')
suffixes = _SHORT_SUFFIXES if short else _LONG_SUFFIXES
for key, suffix in sorted(suffixes.items(), reverse=True):
if value >= key:
value = value / float(key)
if value >= 10:
pattern = '{0:,.0f}' + suffix
else:
pattern = '{0:.1f}' + suffix
return pattern.format(value)
def _maybe_escape(msg, is_html):
if is_html:
# Note that '&' is not replaced because the translation can contain HTML
# entities.
return msg.replace('<', '<')
return msg | 0.831417 | 0.384739 |
import re
import os
import json
import glob
import logging
from time import sleep
from ulauncher.api.shared.action.CopyToClipboardAction import CopyToClipboardAction
from ulauncher.search.SortedList import SortedList
from ulauncher.utils.SortedCollection import SortedCollection
from ulauncher.api.client.Extension import Extension, PreferencesEventListener
from ulauncher.api.client.EventListener import EventListener
from ulauncher.api.shared.event import (
KeywordQueryEvent,
ItemEnterEvent,
PreferencesEvent,
PreferencesUpdateEvent,
)
from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem
from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction
from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction
from ulauncher.api.shared.action.HideWindowAction import HideWindowAction
from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction
from ulauncher.utils.fuzzy_search import get_score
logging.basicConfig()
logger = logging.getLogger(__name__)
DEFAULT_VOCABURARIES = [
"english",
"english_uk",
]
DEFAULT_DICTIONARY = (
"https://translate.google.com/#view=home&op=translate&sl=auto&tl=en&text=%s"
)
class Word:
def __init__(self, word, vocabulary):
self._word = word
self._vocabulary = vocabulary
def __repr__(self):
return "{}/{}".format(self._word, self._vocabulary)
def get_search_name(self):
return self._word
def load_words(vocabularies):
base_dir = os.path.dirname(os.path.abspath(__file__))
words = []
for vocabulary in vocabularies:
filename = os.path.join(base_dir, "vocabularies", "{}.txt".format(vocabulary))
with open(filename, "r", encoding="ISO 8859-1") as dict_file:
words += [Word(word.strip(), vocabulary) for word in dict_file.readlines()]
return words
class OneDictExtension(Extension):
def __init__(self):
super(OneDictExtension, self).__init__()
self.subscribe(KeywordQueryEvent, KeywordQueryEventListener())
self.subscribe(ItemEnterEvent, ItemEnterEventListener())
self.word_list = []
def run(self):
self.subscribe(PreferencesEvent, PreferencesEventListener())
self.subscribe(PreferencesUpdateEvent, PreferencesUpdateEventListener())
self._client.connect()
class PreferencesEventListener(EventListener):
def on_event(self, event, extension):
extension.preferences.update(event.preferences)
vocabularies = [
voc.rstrip().lstrip().lower()
for voc in extension.preferences["vocabulary"].split(",")
]
extension.word_list = load_words(vocabularies)
class PreferencesUpdateEventListener(EventListener):
def on_event(self, event, extension):
extension.preferences[event.id] = event.new_value
vocabularies = [
voc.rstrip().lstrip()
for voc in extension.preferences["vocabulary"].split(",")
]
extension.word_list = load_words(vocabularies)
class KeywordQueryEventListener(EventListener):
def on_event(self, event, extension):
items = []
query = event.get_argument()
if query:
dictionaries = get_dictionaries(extension.preferences)
if extension.preferences["matching"] == "regex":
result_list = [
w
for w in extension.word_list
if re.search(r"^{}".format(query), w.get_search_name())
]
else:
result_list = CustomSortedList(query, min_score=65)
result_list.extend(extension.word_list)
for result in result_list[:9]:
word, language = str(result).split("/")
items.append(
ExtensionResultItem(
icon="images/icon.png",
name=word,
description="Language: {}".format(language),
on_enter=CopyToClipboardAction(word),
)
)
else:
items.append(
ExtensionResultItem(
icon="images/icon.png",
name="Type in the word...",
description="",
)
)
return RenderResultListAction(items)
class ItemEnterEventListener(EventListener):
def on_event(self, event, extension):
data = event.get_data()
return RenderResultListAction(
[
ExtensionResultItem(
icon="images/icon.png",
name=data["new_name"],
on_enter=HideWindowAction(),
)
]
)
class CustomSortedList(SortedList):
def __init__(self, query, min_score):
super(CustomSortedList, self).__init__(query, min_score, limit=9)
self._items = SortedCollection(
key=lambda i: (i.score, abs(len(self._query) - len(i.get_search_name())))
)
def get_dictionaries(preferences):
dictionaries = {}
for voc in DEFAULT_VOCABURARIES:
dictionaries[voc] = preferences.get(voc, DEFAULT_DICTIONARY)
return dictionaries
if __name__ == "__main__":
OneDictExtension().run() | main.py | import re
import os
import json
import glob
import logging
from time import sleep
from ulauncher.api.shared.action.CopyToClipboardAction import CopyToClipboardAction
from ulauncher.search.SortedList import SortedList
from ulauncher.utils.SortedCollection import SortedCollection
from ulauncher.api.client.Extension import Extension, PreferencesEventListener
from ulauncher.api.client.EventListener import EventListener
from ulauncher.api.shared.event import (
KeywordQueryEvent,
ItemEnterEvent,
PreferencesEvent,
PreferencesUpdateEvent,
)
from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem
from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction
from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction
from ulauncher.api.shared.action.HideWindowAction import HideWindowAction
from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction
from ulauncher.utils.fuzzy_search import get_score
logging.basicConfig()
logger = logging.getLogger(__name__)
DEFAULT_VOCABURARIES = [
"english",
"english_uk",
]
DEFAULT_DICTIONARY = (
"https://translate.google.com/#view=home&op=translate&sl=auto&tl=en&text=%s"
)
class Word:
def __init__(self, word, vocabulary):
self._word = word
self._vocabulary = vocabulary
def __repr__(self):
return "{}/{}".format(self._word, self._vocabulary)
def get_search_name(self):
return self._word
def load_words(vocabularies):
base_dir = os.path.dirname(os.path.abspath(__file__))
words = []
for vocabulary in vocabularies:
filename = os.path.join(base_dir, "vocabularies", "{}.txt".format(vocabulary))
with open(filename, "r", encoding="ISO 8859-1") as dict_file:
words += [Word(word.strip(), vocabulary) for word in dict_file.readlines()]
return words
class OneDictExtension(Extension):
def __init__(self):
super(OneDictExtension, self).__init__()
self.subscribe(KeywordQueryEvent, KeywordQueryEventListener())
self.subscribe(ItemEnterEvent, ItemEnterEventListener())
self.word_list = []
def run(self):
self.subscribe(PreferencesEvent, PreferencesEventListener())
self.subscribe(PreferencesUpdateEvent, PreferencesUpdateEventListener())
self._client.connect()
class PreferencesEventListener(EventListener):
def on_event(self, event, extension):
extension.preferences.update(event.preferences)
vocabularies = [
voc.rstrip().lstrip().lower()
for voc in extension.preferences["vocabulary"].split(",")
]
extension.word_list = load_words(vocabularies)
class PreferencesUpdateEventListener(EventListener):
def on_event(self, event, extension):
extension.preferences[event.id] = event.new_value
vocabularies = [
voc.rstrip().lstrip()
for voc in extension.preferences["vocabulary"].split(",")
]
extension.word_list = load_words(vocabularies)
class KeywordQueryEventListener(EventListener):
def on_event(self, event, extension):
items = []
query = event.get_argument()
if query:
dictionaries = get_dictionaries(extension.preferences)
if extension.preferences["matching"] == "regex":
result_list = [
w
for w in extension.word_list
if re.search(r"^{}".format(query), w.get_search_name())
]
else:
result_list = CustomSortedList(query, min_score=65)
result_list.extend(extension.word_list)
for result in result_list[:9]:
word, language = str(result).split("/")
items.append(
ExtensionResultItem(
icon="images/icon.png",
name=word,
description="Language: {}".format(language),
on_enter=CopyToClipboardAction(word),
)
)
else:
items.append(
ExtensionResultItem(
icon="images/icon.png",
name="Type in the word...",
description="",
)
)
return RenderResultListAction(items)
class ItemEnterEventListener(EventListener):
def on_event(self, event, extension):
data = event.get_data()
return RenderResultListAction(
[
ExtensionResultItem(
icon="images/icon.png",
name=data["new_name"],
on_enter=HideWindowAction(),
)
]
)
class CustomSortedList(SortedList):
def __init__(self, query, min_score):
super(CustomSortedList, self).__init__(query, min_score, limit=9)
self._items = SortedCollection(
key=lambda i: (i.score, abs(len(self._query) - len(i.get_search_name())))
)
def get_dictionaries(preferences):
dictionaries = {}
for voc in DEFAULT_VOCABURARIES:
dictionaries[voc] = preferences.get(voc, DEFAULT_DICTIONARY)
return dictionaries
if __name__ == "__main__":
OneDictExtension().run() | 0.399109 | 0.069226 |
import io
from datetime import datetime
from enum import IntEnum, auto
from pathlib import Path
from typing import Union
import pptx
from PIL import Image
from pptx.presentation import Presentation
# Bundled .pptx template whose slide layouts are indexed by SlideMaster below.
# Resolved relative to this source file: <project root>/resources/template.pptx.
# (The original had a redundant "./" component; pathlib normalizes it away.)
TEMPLATE = Path(__file__).parents[2] / "resources" / "template.pptx"
class SlideMaster(IntEnum):
    """Index of each slide layout in the bundled template, in master order.

    Values double as positions into ``Presentation.slide_layouts``.
    """

    Title = 0
    Image = 1
    Contents = 2
    ThreeComparisons = 3
    TwoContents = 4
    TwoComparisons = 5
    TitleOnly = 6
    Blank = 7
# Accepted non-PIL image sources: a filesystem path (str or Path) or an
# in-memory PNG/JPEG byte stream.
ImageFileType = Union[str, io.BytesIO, Path]
class PowerPoint(Presentation):
    """A pptx ``Presentation`` pre-loaded from the bundled template.

    Slides are appended from the template's layouts (see ``SlideMaster``).
    ``last_slide`` records which layout the newest slide used so the
    ``update_*`` helpers can assert they are editing a matching slide.
    """

    def __init__(self):
        # Load the template, then adopt its XML element and package part as
        # this instance's own state.
        prs = pptx.Presentation(TEMPLATE)
        super().__init__(prs.element, prs.part)
        # The template is assumed to open on a Title slide -- TODO confirm.
        self.last_slide = SlideMaster.Title

    def save(self, file: Union[str, Path]) -> None:
        """Write the presentation, appending a ``.pptx`` suffix if missing."""
        if Path(file).suffix != ".pptx":
            file = str(file) + ".pptx"
        super().save(str(file))

    def add_slide(self, slide: SlideMaster):
        """Append a new slide using the layout indexed by *slide*."""
        slide_layout: pptx.slide.SlideLayout = self.slide_layouts[slide]
        self.slides.add_slide(slide_layout)
        # Remember the layout so the update_* methods can verify it.
        self.last_slide = slide

    def add_title(self, title: str = "title", name: str = "") -> None:
        """Append a title slide and fill in its heading and byline."""
        self.add_slide(SlideMaster.Title)
        self.update_title(title, name)

    def update_title(self, title: str = "title", name: str = "") -> None:
        """Set heading/byline text on the latest slide (must be Title)."""
        assert self.last_slide == SlideMaster.Title
        self.slides[-1].placeholders[0].text = title
        # Byline defaults to today's date when no name is supplied.
        self.slides[-1].placeholders[1].text = (
            name if name else str(datetime.now().date())
        )

    def add_contents(self, title: str = "title", contents: str = "") -> None:
        """Append a contents slide and fill in its text placeholders."""
        self.add_slide(SlideMaster.Contents)
        self.update_contents(title, contents)

    def update_contents(self, title: str = "title", contents: str = "") -> None:
        """Set title/body text on the latest slide (must be Contents)."""
        assert self.last_slide == SlideMaster.Contents
        self.slides[-1].placeholders[0].text = title
        self.slides[-1].placeholders[1].text = contents

    def add_image(self, image: Union[Image.Image, ImageFileType]) -> None:
        """Append an image slide and place *image* on it."""
        self.add_slide(SlideMaster.Image)
        self.update_image(image)

    def update_image(self, image: Union[Image.Image, ImageFileType]) -> None:
        """Place *image* on the latest slide (must be an Image slide)."""
        assert self.last_slide == SlideMaster.Image
        self._plot_image(image)

    def _save_inmemory_PIL(self, image: Image.Image) -> io.BytesIO:
        """Serialize a PIL image to an in-memory PNG stream, rewound to 0."""
        item = io.BytesIO()
        image.save(item, "png")
        item.seek(0)
        return item

    def _plot_image(self, image: Union[Image.Image, Path]) -> None:
        """Insert *image* into the slide's picture placeholder, letterboxed.

        The picture is resized to fit inside the placeholder while keeping
        its aspect ratio, then centered along the slack axis via left/top.
        """
        # PIL images are serialized in memory; paths are handed over as str.
        if isinstance(image, Image.Image):
            item: Union[str, io.BytesIO] = self._save_inmemory_PIL(image)
        else:
            item = str(image)
        # Placeholder idx 13 is assumed to be the picture placeholder of the
        # Image layout in this template -- TODO confirm against template.pptx.
        pixture_placeholder: pptx.shapes.placeholder.PicturePlaceholder = self.slides[
            -1
        ].placeholders[13]
        assert isinstance(
            pixture_placeholder, pptx.shapes.placeholder.PicturePlaceholder
        )
        # Insert the picture
        placeholder: pptx.shapes.placeholder.PlaceholderPicture = (
            pixture_placeholder.insert_picture(item)
        )
        # Calculate the image size of the image
        width, height = placeholder.image.size
        # Calculate ratios and compare
        image_ratio = width / height
        placeholder_ratio = placeholder.width / placeholder.height
        ratio_difference = placeholder_ratio - image_ratio
        # Clear any cropping so the full image stays visible; the fit is
        # controlled below by resizing the placeholder itself.
        placeholder.crop_top = 0
        placeholder.crop_left = 0
        placeholder.crop_bottom = 0
        placeholder.crop_right = 0
        if ratio_difference > 0:
            # Placeholder is relatively wider than the image: keep height,
            # shrink width, and center horizontally.
            width_slide = placeholder.width
            # NOTE(review): self-assignment looks like a no-op -- presumably
            # meant to pin an explicit height; confirm whether it is needed.
            placeholder.height = placeholder.height
            placeholder.width = int((placeholder.height) * image_ratio)
            placeholder.left = (width_slide - placeholder.width) // 2
        else:
            # Placeholder is relatively taller than the image: keep width,
            # shrink height, and center vertically.
            height_slide = placeholder.height
            # NOTE(review): self-assignment looks like a no-op (see above).
            placeholder.width = placeholder.width
            placeholder.height = int(placeholder.width / image_ratio)
            placeholder.top = (height_slide - placeholder.height) // 2
        # Sanity checks. NOTE(review): asserts vanish under ``python -O``.
        assert placeholder.width > 0
        assert placeholder.height > 0
        assert placeholder.top > 0 or placeholder.left > 0

    def _current_placeholder_id(self):
        """Debug helper: print idx/type of each placeholder on the last slide."""
        for shape in self.slides[-1].shapes:
            if shape.is_placeholder:
                phf = shape.placeholder_format
                print(phf.idx, phf.type)
def paste_image(
    input_dir: Union[str, Path],
    output_file: Union[str, Path],
    title: str = "",
    glob_pattern: str = "*.png",
) -> None:
    """Build a presentation with one image slide per matching file.

    Args:
        input_dir: Directory searched for image files.
        output_file: Destination path; ``.pptx`` is appended if missing.
        title: Text placed on the template's pre-existing title slide.
        glob_pattern: Pattern selecting the image files (default ``*.png``).
    """
    ppt = PowerPoint()
    ppt.update_title(title)
    # glob() order is filesystem-dependent; sort for a deterministic
    # slide order across runs and machines.
    for file in sorted(Path(input_dir).glob(glob_pattern)):
        ppt.add_image(file)
    ppt.save(output_file)
from datetime import datetime
from enum import IntEnum, auto
from pathlib import Path
from typing import Union
import pptx
from PIL import Image
from pptx.presentation import Presentation
# Bundled .pptx template whose slide layouts are indexed by SlideMaster below.
# Resolved relative to this source file: <project root>/resources/template.pptx.
# (The original had a redundant "./" component; pathlib normalizes it away.)
TEMPLATE = Path(__file__).parents[2] / "resources" / "template.pptx"
class SlideMaster(IntEnum):
    """Index of each slide layout in the bundled template, in master order.

    Values double as positions into ``Presentation.slide_layouts``.
    """

    Title = 0
    Image = 1
    Contents = 2
    ThreeComparisons = 3
    TwoContents = 4
    TwoComparisons = 5
    TitleOnly = 6
    Blank = 7
# Accepted non-PIL image sources: a filesystem path (str or Path) or an
# in-memory PNG/JPEG byte stream.
ImageFileType = Union[str, io.BytesIO, Path]
class PowerPoint(Presentation):
    def __init__(self):
        """Load the bundled template and adopt its XML tree as this presentation."""
        prs = pptx.Presentation(TEMPLATE)
        # Re-parent the loaded element/part into this subclass instance.
        super().__init__(prs.element, prs.part)
        # Tracks the layout of the most recent slide; the template is assumed
        # to open on a Title slide -- TODO confirm.
        self.last_slide = SlideMaster.Title
def save(self, file: Union[str, Path]) -> None:
if Path(file).suffix != ".pptx":
file = str(file) + ".pptx"
super().save(str(file))
def add_slide(self, slide: SlideMaster):
slide_layout: pptx.slide.SlideLayout = self.slide_layouts[slide]
self.slides.add_slide(slide_layout)
self.last_slide = slide
def add_title(self, title: str = "title", name: str = "") -> None:
self.add_slide(SlideMaster.Title)
self.update_title(title, name)
def update_title(self, title: str = "title", name: str = "") -> None:
assert self.last_slide == SlideMaster.Title
self.slides[-1].placeholders[0].text = title
self.slides[-1].placeholders[1].text = (
name if name else str(datetime.now().date())
)
def add_contents(self, title: str = "title", contents: str = "") -> None:
self.add_slide(SlideMaster.Contents)
self.update_contents(title, contents)
def update_contents(self, title: str = "title", contents: str = "") -> None:
assert self.last_slide == SlideMaster.Contents
self.slides[-1].placeholders[0].text = title
self.slides[-1].placeholders[1].text = contents
def add_image(self, image: Union[Image.Image, ImageFileType]) -> None:
self.add_slide(SlideMaster.Image)
self.update_image(image)
def update_image(self, image: Union[Image.Image, ImageFileType]) -> None:
assert self.last_slide == SlideMaster.Image
self._plot_image(image)
def _save_inmemory_PIL(self, image: Image.Image) -> io.BytesIO:
item = io.BytesIO()
image.save(item, "png")
item.seek(0)
return item
def _plot_image(self, image: Union[Image.Image, Path]) -> None:
if isinstance(image, Image.Image):
item: Union[str, io.BytesIO] = self._save_inmemory_PIL(image)
else:
item = str(image)
pixture_placeholder: pptx.shapes.placeholder.PicturePlaceholder = self.slides[
-1
].placeholders[13]
assert isinstance(
pixture_placeholder, pptx.shapes.placeholder.PicturePlaceholder
)
# Insert the picture
placeholder: pptx.shapes.placeholder.PlaceholderPicture = (
pixture_placeholder.insert_picture(item)
)
# Calculate the image size of the image
width, height = placeholder.image.size
# Calculate ratios and compare
image_ratio = width / height
placeholder_ratio = placeholder.width / placeholder.height
ratio_difference = placeholder_ratio - image_ratio
placeholder.crop_top = 0
placeholder.crop_left = 0
placeholder.crop_bottom = 0
placeholder.crop_right = 0
if ratio_difference > 0:
width_slide = placeholder.width
placeholder.height = placeholder.height
placeholder.width = int((placeholder.height) * image_ratio)
placeholder.left = (width_slide - placeholder.width) // 2
else:
height_slide = placeholder.height
placeholder.width = placeholder.width
placeholder.height = int(placeholder.width / image_ratio)
placeholder.top = (height_slide - placeholder.height) // 2
assert placeholder.width > 0
assert placeholder.height > 0
assert placeholder.top > 0 or placeholder.left > 0
def _current_placeholder_id(self):
for shape in self.slides[-1].shapes:
if shape.is_placeholder:
phf = shape.placeholder_format
print(phf.idx, phf.type)
def paste_image(
    input_dir: Union[str, Path],
    output_file: Union[str, Path],
    title="",
    glob_pattern: str = "*.png",
) -> None:
    """Build a deck titled *title* with one image slide per file matching
    *glob_pattern* under *input_dir*, then save it to *output_file*."""
    deck = PowerPoint()
    deck.update_title(title)
    matches = Path(input_dir).glob(glob_pattern)
    for image_path in matches:
        deck.add_image(image_path)
    deck.save(output_file)
import socket
import logging
import click
import kazoo
import websocket as ws_client
from treadmill import cli
from treadmill import context
from treadmill import admin
from treadmill import restclient
from treadmill import zknamespace as z
_LOGGER = logging.getLogger(__name__)
def check(func, message):
    """Run *func* and log '<message>: ok' on a truthy result, error otherwise."""
    passed = func()
    if not passed:
        _LOGGER.error('%s: failed', message)
    else:
        _LOGGER.info('%s: ok', message)
def _zkadmin(hostname, port, command):
    """Netcat-style probe: send a Zookeeper four-letter *command* and return
    the full reply as text.

    Fixes two Python 3 defects in the original: ``sendall`` was given a str
    (TypeError on py3) and ``''.join`` was applied to bytes chunks; the socket
    also leaked when connect/recv raised.
    """
    if isinstance(command, str):
        command = command.encode('utf-8')
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((hostname, port))
        sock.sendall(command)
        # Half-close so the server sees EOF and replies.
        sock.shutdown(socket.SHUT_WR)
        chunks = []
        while True:
            chunk = sock.recv(1024)
            if not chunk:
                break
            chunks.append(chunk)
    finally:
        sock.close()
    # Keep the historical str return type for callers that log/compare text.
    return b''.join(chunks).decode('utf-8')
def check_zk():
    """Check Zookeeper ensemble health by sending 'ruok' to every master."""
    admin_cell = admin.Cell(context.GLOBAL.ldap.conn)
    cell = admin_cell.get(context.GLOBAL.cell)
    healthy = True
    for master in cell['masters']:
        hostname = master['hostname']
        port = master['zk-client-port']
        try:
            status = _zkadmin(hostname, port, 'ruok\n')
        except Exception as err:  # pylint: disable=W0703
            _LOGGER.error('%s:%s - %s', hostname, port, str(err))
            healthy = False
        else:
            _LOGGER.debug('%s:%s - %s', hostname, port, status)
    return healthy
def _check_api(apis):
    """Check API status: at least two endpoints, each answering GET /."""
    success = True
    api_count = len(apis)
    if api_count < 2:
        _LOGGER.error('API is under capacity: expected 2, running: %s',
                      api_count)
        success = False
    for endpoint in apis:
        try:
            response = restclient.get(endpoint, '/', retries=0)
        except restclient.MaxRequestRetriesError as err:
            _LOGGER.error('%s - %s', endpoint, str(err))
            success = False
        else:
            _LOGGER.debug('%s - %r', endpoint, response.status_code)
    return success
def check_cell_api():
    """Resolve and probe the cell API; False when unresolvable or unhealthy."""
    try:
        result = _check_api(context.GLOBAL.cell_api(None))
    except context.ContextError as err:
        _LOGGER.error('Unable to resolve cell api: %r', str(err))
        result = False
    return result
def check_state_api():
    """Resolve and probe the state API; False when unresolvable or unhealthy."""
    try:
        result = _check_api(context.GLOBAL.state_api(None))
    except context.ContextError as err:
        _LOGGER.error('Unable to resolve state api: %r', str(err))
        result = False
    return result
def check_admin_api():
    """Resolve and probe the admin API; False when unresolvable or unhealthy."""
    try:
        result = _check_api(context.GLOBAL.admin_api(None))
    except context.ContextError as err:
        _LOGGER.error('Unable to resolve admin api: %r', str(err))
        result = False
    return result
def check_ws_api():
    """Check websocket API: connect to each endpoint and close the probe.

    Fix: the original never closed the connection returned by
    ``create_connection``, leaking one socket per healthy endpoint.
    """
    success = True
    try:
        for api in context.GLOBAL.ws_api(None):
            try:
                conn = ws_client.create_connection(api)
                # Close the probe connection so we do not leak sockets.
                conn.close()
                _LOGGER.debug('%s - ok.', api)
            except socket.error:
                _LOGGER.error('%s - failed.', api)
                success = False
    except context.ContextError as err:
        _LOGGER.error('Unable to resolve websocket api: %r', str(err))
        success = False
    return success
def check_blackouts():
    """Log a warning for every server currently blacked out in Zookeeper.

    Fixes: ``_LOGGER.warn`` is deprecated (use ``warning``), and
    ``kazoo.client.NoNodeError`` relied on the ``kazoo.client`` submodule
    being loaded even though the file only does ``import kazoo``.
    """
    # Import the exception explicitly instead of reaching through
    # the possibly-unloaded kazoo.client submodule.
    from kazoo.exceptions import NoNodeError
    zkclient = context.GLOBAL.zk.conn
    try:
        blacked_out_nodes = zkclient.get_children(z.BLACKEDOUT_SERVERS)
    except NoNodeError:
        # No blackout node at all means nothing is blacked out.
        return
    for server in blacked_out_nodes:
        _LOGGER.warning('Server blackedout: %s', server)
def check_capacity():
    """Log configured/blacked-out/present server counts, then blackout detail."""
    zkclient = context.GLOBAL.zk.conn
    counts = {
        'configured': len(zkclient.get_children(z.SERVERS)),
        'blacked_out': len(zkclient.get_children(z.BLACKEDOUT_SERVERS)),
        'present': len(zkclient.get_children(z.SERVER_PRESENCE)),
    }
    _LOGGER.info('Server capacity - total: %s, blacked-out: %s, up: %s',
                 counts['configured'], counts['blacked_out'], counts['present'])
    check_blackouts()
def init():
    """Return top level command handler."""
    @click.command(name='ok')
    @click.option('--cell', required=True,
                  envvar='TREADMILL_CELL',
                  callback=cli.handle_context_opt,
                  expose_value=False)
    def _ok():
        """Check status of Zookeeper ensemble."""
        log_level = logging.INFO
        root_logger = logging.getLogger()
        if not root_logger.isEnabledFor(log_level):
            logging.getLogger('treadmill').setLevel(log_level)
            root_logger.setLevel(log_level)
        check(check_zk, 'Zookeeper ensemble')
        check_capacity()
        # Probe each HTTP-style API with the shared check helper.
        for probe, label in ((check_state_api, 'State api'),
                             (check_cell_api, 'Cell api'),
                             (check_admin_api, 'Admin api'),
                             (check_ws_api, 'Websocket api')):
            check(probe, label)
    return _ok
import socket
import logging
import click
import kazoo
import websocket as ws_client
from treadmill import cli
from treadmill import context
from treadmill import admin
from treadmill import restclient
from treadmill import zknamespace as z
_LOGGER = logging.getLogger(__name__)
def check(func, message):
"""Check function, output status message."""
if func():
_LOGGER.info('%s: ok', message)
else:
_LOGGER.error('%s: failed', message)
def _zkadmin(hostname, port, command):
"""Netcat."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((hostname, port))
sock.sendall(command)
sock.shutdown(socket.SHUT_WR)
data = []
while True:
chunk = sock.recv(1024)
if not chunk:
break
data.append(chunk)
sock.close()
return ''.join(data)
def check_zk():
"""Check Zookeeper ensemble health."""
admin_cell = admin.Cell(context.GLOBAL.ldap.conn)
cell = admin_cell.get(context.GLOBAL.cell)
success = True
for master in cell['masters']:
hostname = master['hostname']
port = master['zk-client-port']
try:
zk_status = _zkadmin(hostname, port, 'ruok\n')
_LOGGER.debug('%s:%s - %s', hostname, port, zk_status)
except Exception as err: # pylint: disable=W0703
_LOGGER.error('%s:%s - %s', hostname, port, str(err))
success = False
return success
def _check_api(apis):
"""Check API status."""
success = True
if len(apis) < 2:
_LOGGER.error('API is under capacity: expected 2, running: %s',
len(apis))
success = False
for api in apis:
try:
resp = restclient.get(api, '/', retries=0)
_LOGGER.debug('%s - %r', api, resp.status_code)
except restclient.MaxRequestRetriesError as err:
_LOGGER.error('%s - %s', api, str(err))
success = False
return success
def check_cell_api():
"""Check API status."""
try:
return _check_api(context.GLOBAL.cell_api(None))
except context.ContextError as err:
_LOGGER.error('Unable to resolve cell api: %r', str(err))
return False
def check_state_api():
"""Check API status."""
try:
return _check_api(context.GLOBAL.state_api(None))
except context.ContextError as err:
_LOGGER.error('Unable to resolve state api: %r', str(err))
return False
def check_admin_api():
"""Check admin API."""
try:
return _check_api(context.GLOBAL.admin_api(None))
except context.ContextError as err:
_LOGGER.error('Unable to resolve admin api: %r', str(err))
return False
def check_ws_api():
"""Check websocket API."""
success = True
try:
for api in context.GLOBAL.ws_api(None):
try:
ws_client.create_connection(api)
_LOGGER.debug('%s - ok.', api)
except socket.error:
_LOGGER.error('%s - failed.', api)
success = False
except context.ContextError as err:
_LOGGER.error('Unable to resolve websocket api: %r', str(err))
success = False
return success
def check_blackouts():
"""Check blacked-out servers."""
zkclient = context.GLOBAL.zk.conn
try:
blacked_out_nodes = zkclient.get_children(z.BLACKEDOUT_SERVERS)
for server in blacked_out_nodes:
_LOGGER.warn('Server blackedout: %s', server)
except kazoo.client.NoNodeError:
pass
def check_capacity():
"""Check cell capacity."""
zkclient = context.GLOBAL.zk.conn
configured = len(zkclient.get_children(z.SERVERS))
blacked_out = len(zkclient.get_children(z.BLACKEDOUT_SERVERS))
present = len(zkclient.get_children(z.SERVER_PRESENCE))
_LOGGER.info('Server capacity - total: %s, blacked-out: %s, up: %s',
configured, blacked_out, present)
check_blackouts()
def init():
"""Return top level command handler."""
@click.command(name='ok')
@click.option('--cell', required=True,
envvar='TREADMILL_CELL',
callback=cli.handle_context_opt,
expose_value=False)
def _ok():
"""Check status of Zookeeper ensemble."""
log_level = logging.INFO
if not logging.getLogger().isEnabledFor(log_level):
logging.getLogger('treadmill').setLevel(log_level)
logging.getLogger().setLevel(log_level)
check(check_zk, 'Zookeeper ensemble')
check_capacity()
check(check_state_api, 'State api')
check(check_cell_api, 'Cell api')
check(check_admin_api, 'Admin api')
check(check_ws_api, 'Websocket api')
return _ok | 0.475605 | 0.070144 |
from mmcv import Config
from .base import BaseArgConverter
from ..registry import ARG_CONVERTERS
@ARG_CONVERTERS.register_module()
class MMDetectionArgsConverter(BaseArgConverter):
    """Maps generic OTE CLI argument names onto mmdetection config paths."""

    # NB: compress_update_args_map is the same as train_update_args_map,
    # but without base_learning_rate and epochs
    # TODO(LeonidBeynenson): replace the dicts by a function that returns dicts to avoid copying of code
    compress_update_args_map = {
        'train_ann_files': 'data.train.dataset.ann_file',
        'train_data_roots': 'data.train.dataset.img_prefix',
        'val_ann_files': 'data.val.ann_file',
        'val_data_roots': 'data.val.img_prefix',
        'resume_from': 'resume_from',
        'load_weights': 'load_from',
        'save_checkpoints_to': 'work_dir',
        'batch_size': 'data.samples_per_gpu',
    }
    train_update_args_map = {
        'train_ann_files': 'data.train.dataset.ann_file',
        'train_data_roots': 'data.train.dataset.img_prefix',
        'val_ann_files': 'data.val.ann_file',
        'val_data_roots': 'data.val.img_prefix',
        'resume_from': 'resume_from',
        'load_weights': 'load_from',
        'save_checkpoints_to': 'work_dir',
        'batch_size': 'data.samples_per_gpu',
        'base_learning_rate': 'optimizer.lr',
        'epochs': 'total_epochs',
    }
    train_to_compress_update_args_map = {
        'train_ann_files': 'data.train.dataset.ann_file',
        'train_data_roots': 'data.train.dataset.img_prefix',
        'val_ann_files': 'data.val.ann_file',
        'val_data_roots': 'data.val.img_prefix',
        # the only difference w.r.t compress_update_args_map
        # 'resume_from': 'resume_from',
        # 'load_weights': 'load_from',
        'save_checkpoints_to': 'work_dir',
        'batch_size': 'data.samples_per_gpu',
    }
    test_update_args_map = {
        'test_ann_files': 'data.test.ann_file',
        'test_data_roots': 'data.test.img_prefix',
    }

    def __init__(self):
        super(MMDetectionArgsConverter, self).__init__()

    @staticmethod
    def _get_classes_extra_args(args, classes_keys):
        """Build config overrides for a user-supplied comma-separated class list.

        args: the CLI arguments mapping (expects 'classes' and 'config').
        classes_keys: config paths that should receive the classes list
            (e.g. ['data.train.dataset.classes', 'data.val.classes']).
        Returns an (possibly empty) dict of config-path -> value overrides.
        """
        out_args = {}
        if 'classes' in args and args['classes']:
            class_names = args['classes'].split(',')
            classes = '[' + ','.join(f'"{x}"' for x in class_names) + ']'
            num_classes = len(class_names)
            for key in classes_keys:
                out_args[key] = classes
            out_args['model.bbox_head.num_classes'] = num_classes
            # Mask-based models (e.g. Mask R-CNN) carry their own class count.
            if 'mask_head' in Config.fromfile(args['config']).model.roi_head.keys():
                # BUG fix: was `update_config[...]` -- an undefined name that
                # raised NameError whenever the model had a mask head.
                out_args['model.roi_head.mask_head.num_classes'] = num_classes
        return out_args

    def _get_extra_train_args(self, args):
        """Class-list overrides for the train/val pipelines."""
        return self._get_classes_extra_args(
            args, ['data.train.dataset.classes', 'data.val.classes'])

    def _get_extra_test_args(self, args):
        """Class-list overrides for the test pipeline."""
        return self._get_classes_extra_args(args, ['data.test.classes'])
from .base import BaseArgConverter
from ..registry import ARG_CONVERTERS
@ARG_CONVERTERS.register_module()
class MMDetectionArgsConverter(BaseArgConverter):
# NB: compress_update_args_map is the same as train_update_args_map,
# but without base_learning_rate and epochs
# TODO(LeonidBeynenson): replace the dicts by a function that returns dicts to avoid copying of code
compress_update_args_map = {
'train_ann_files': 'data.train.dataset.ann_file',
'train_data_roots': 'data.train.dataset.img_prefix',
'val_ann_files': 'data.val.ann_file',
'val_data_roots': 'data.val.img_prefix',
'resume_from': 'resume_from',
'load_weights': 'load_from',
'save_checkpoints_to': 'work_dir',
'batch_size': 'data.samples_per_gpu',
}
train_update_args_map = {
'train_ann_files': 'data.train.dataset.ann_file',
'train_data_roots': 'data.train.dataset.img_prefix',
'val_ann_files': 'data.val.ann_file',
'val_data_roots': 'data.val.img_prefix',
'resume_from': 'resume_from',
'load_weights': 'load_from',
'save_checkpoints_to': 'work_dir',
'batch_size': 'data.samples_per_gpu',
'base_learning_rate': 'optimizer.lr',
'epochs': 'total_epochs',
}
train_to_compress_update_args_map = {
'train_ann_files': 'data.train.dataset.ann_file',
'train_data_roots': 'data.train.dataset.img_prefix',
'val_ann_files': 'data.val.ann_file',
'val_data_roots': 'data.val.img_prefix',
# the only difference w.r.t compress_update_args_map
# 'resume_from': 'resume_from',
# 'load_weights': 'load_from',
'save_checkpoints_to': 'work_dir',
'batch_size': 'data.samples_per_gpu',
}
test_update_args_map = {
'test_ann_files': 'data.test.ann_file',
'test_data_roots': 'data.test.img_prefix',
}
def __init__(self):
super(MMDetectionArgsConverter, self).__init__()
def _get_extra_train_args(self, args):
out_args = {}
if 'classes' in args and args['classes']:
classes = '[' + ','.join(f'"{x}"' for x in args['classes'].split(',')) + ']'
num_classes = len(args['classes'].split(','))
out_args['data.train.dataset.classes'] = classes
out_args['data.val.classes'] = classes
out_args['model.bbox_head.num_classes'] = num_classes
if 'mask_head' in Config.fromfile(args['config']).model.roi_head.keys():
update_config['model.roi_head.mask_head.num_classes'] = num_classes
return out_args
def _get_extra_test_args(self, args):
out_args = {}
if 'classes' in args and args['classes']:
classes = '[' + ','.join(f'"{x}"' for x in args['classes'].split(',')) + ']'
num_classes = len(args['classes'].split(','))
out_args['data.test.classes'] = classes
out_args['model.bbox_head.num_classes'] = num_classes
if 'mask_head' in Config.fromfile(args['config']).model.roi_head.keys():
update_config['model.roi_head.mask_head.num_classes'] = num_classes
return out_args | 0.272411 | 0.214331 |
from __future__ import print_function
import numpy as np
import six
import os
import inspect
from paddle.fluid.layer_helper import LayerHelper
__all__ = ['fused_elemwise_activation', ]
def fused_elemwise_activation(x,
                              y,
                              functor_list,
                              axis=-1,
                              scale=0.0,
                              save_intermediate_out=True):
    """Fuse an elementwise_add/mul with an activation into one op.

    Computes either ``out = Unary(Binary(x, y))`` or
    ``out = Binary(x, Unary(y))`` depending on the order of *functor_list*.
    Unary operators: ``scale``, ``relu``, ``tanh``; binary operators:
    ``elementwise_add``, ``elementwise_mul``.

    Args:
        x (Variable): left operand of the binary operator.
        y (Variable): right operand of the binary operator.
        functor_list (list of str | str): the two operators to fuse, e.g.
            ``['elementwise_add', 'relu']`` -> out = elementwise_add(x, relu(y)),
            or a single comma-separated string.
        axis (int32, default -1): broadcast axis of the elementwise op.
        scale (float32, default 0): parameter of the scale op.
        save_intermediate_out (bool, default True): whether to keep the
            intermediate result, Unary(y) or Binary(x, y).

    Returns:
        Variable: The computation result.

    Raises:
        ValueError: if *functor_list* does not name exactly two operators.
    """
    if isinstance(functor_list, str):
        functor_list = functor_list.split(',')
    # De Morgan form of the original validity check.
    if not (isinstance(functor_list, list) and len(functor_list) == 2):
        raise ValueError(
            'functor_list should be a list of str, and the length should be 2.')
    # NOTE: **locals() must run before any extra locals are introduced,
    # since LayerHelper receives every local name as a keyword argument.
    helper = LayerHelper('fused_elemwise_activation', **locals())
    output = helper.create_variable_for_type_inference(dtype=x.dtype)
    side_output = helper.create_variable_for_type_inference(dtype=x.dtype)
    helper.append_op(
        type='fused_elemwise_activation',
        inputs={'X': x, 'Y': y},
        outputs={'Out': output, 'IntermediateOut': side_output},
        attrs={
            'axis': axis,
            'scale': scale,
            'save_intermediate_out': save_intermediate_out,
            'functor_list': functor_list,
        })
    return output
import numpy as np
import six
import os
import inspect
from paddle.fluid.layer_helper import LayerHelper
__all__ = ['fused_elemwise_activation', ]
def fused_elemwise_activation(x,
y,
functor_list,
axis=-1,
scale=0.0,
save_intermediate_out=True):
"""
**Fused elementwise_add/mul and activation layers**
This function computes an elementwise_add/mul cooperated with an activation.
.. math::
out = Unary(Binary(x, y))
or
.. math::
out = Binary(x, Unary(y))
Unary operators can be: `scale`, `relu`, `tanh`. Binary operators can be:
`elementwise_add`, `elementwise_mul`.
Args:
x (Variable): left operation of the binary operator.
y (Variable): right operator of the binary operator.
functor_list (list of str): types of operator which will be executed
by this layer. For example, ['elementwise_add', 'relu']
(out = elementwise_add(x, relu(y))),
or ['relu', 'elemmentwise_add'] (out = relu(elementwise_add(x, y))).
axis (int32, default -1): axis of elementwise op.
scale (float32, default 0): parameter of scale op.
save_intermediate_out (bool, default True): whether to save the
intermediate result, Unary(y) or Binary(x, y).
Returns:
Variable: The computation result.
"""
if isinstance(functor_list, str):
functor_list = functor_list.split(',')
if not isinstance(functor_list, list) or len(functor_list) != 2:
raise ValueError(
'functor_list should be a list of str, and the length should be 2.')
helper = LayerHelper('fused_elemwise_activation', **locals())
out = helper.create_variable_for_type_inference(dtype=x.dtype)
intermediate_out = helper.create_variable_for_type_inference(dtype=x.dtype)
helper.append_op(
type='fused_elemwise_activation',
inputs={'X': x,
'Y': y},
outputs={'Out': out,
'IntermediateOut': intermediate_out},
attrs={
'axis': axis,
'scale': scale,
'save_intermediate_out': save_intermediate_out,
'functor_list': functor_list
})
return out | 0.877214 | 0.248888 |
import os
from .base_video_dataset import BaseVideoDataset
from lib.train.data import jpeg4py_loader
import xml.etree.ElementTree as ET
import json
import torch
from collections import OrderedDict
from lib.train.admin import env_settings
def get_target_to_image_ratio(seq):
    """Return sqrt(first-frame target area / image area) for a sequence dict.

    seq['anno'] rows are [x, y, w, h]; seq['image_size'] is [width, height].
    """
    first_box_wh = torch.Tensor(seq['anno'])[0, 2:4]
    image_area = torch.Tensor(seq['image_size']).prod()
    return (first_box_wh.prod() / image_area).sqrt()
class ImagenetVID(BaseVideoDataset):
    """ Imagenet VID dataset.

    Publication:
        ImageNet Large Scale Visual Recognition Challenge
        <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
        <NAME>, <NAME>, <NAME> and <NAME>
        IJCV, 2015
        https://arxiv.org/pdf/1409.0575.pdf

    Download the dataset from http://image-net.org/
    """
    def __init__(self, root=None, image_loader=jpeg4py_loader, min_length=0, max_target_area=1):
        """
        args:
            root - path to the imagenet vid dataset.
            image_loader (default_image_loader) - The function to read the images. If installed,
                jpeg4py (https://github.com/ajkxyz/jpeg4py) is used by default. Else,
                opencv's imread is used.
            min_length - Minimum allowed sequence length.
            max_target_area - max allowed ratio between target area and image area. Can be used to filter out
                targets which cover the complete image.
        """
        root = env_settings().imagenet_dir if root is None else root
        super().__init__("imagenetvid", root, image_loader)
        cache_file = os.path.join(root, 'cache.json')
        if os.path.isfile(cache_file):
            # If available, load the pre-processed cache file containing meta-info for each sequence
            with open(cache_file, 'r') as f:
                sequence_list_dict = json.load(f)
            self.sequence_list = sequence_list_dict
        else:
            # Else process the imagenet annotations and generate the cache file
            self.sequence_list = self._process_anno(root)
            with open(cache_file, 'w') as f:
                json.dump(self.sequence_list, f)
        # Filter the sequences based on min_length and max_target_area in the first frame
        self.sequence_list = [x for x in self.sequence_list if len(x['anno']) >= min_length and
                              get_target_to_image_ratio(x) < max_target_area]
    def get_name(self):
        """Return the dataset identifier used by the training framework."""
        return 'imagenetvid'
    def get_num_sequences(self):
        """Return the number of tracklets that survived filtering."""
        return len(self.sequence_list)
    def get_sequence_info(self, seq_id):
        """Return per-frame boxes plus validity/visibility masks for a tracklet.

        'bbox' rows are [x, y, w, h]; a frame is 'valid' when its box has
        positive width and height, and 'visible' when it is both valid and
        annotated as not occluded.
        """
        bb_anno = torch.Tensor(self.sequence_list[seq_id]['anno'])
        valid = (bb_anno[:, 2] > 0) & (bb_anno[:, 3] > 0)
        visible = torch.ByteTensor(self.sequence_list[seq_id]['target_visible']) & valid.byte()
        return {'bbox': bb_anno, 'valid': valid, 'visible': visible}
    def _get_frame(self, sequence, frame_id):
        """Load one frame image; frame_id is relative to the tracklet start."""
        set_name = 'ILSVRC2015_VID_train_{:04d}'.format(sequence['set_id'])
        vid_name = 'ILSVRC2015_train_{:08d}'.format(sequence['vid_id'])
        frame_number = frame_id + sequence['start_frame']
        frame_path = os.path.join(self.root, 'Data', 'VID', 'train', set_name, vid_name,
                                  '{:06d}.JPEG'.format(frame_number))
        return self.image_loader(frame_path)
    def get_frames(self, seq_id, frame_ids, anno=None):
        """Return (frames, per-frame annotation dict, object meta) for the
        requested tracklet-relative frame ids."""
        sequence = self.sequence_list[seq_id]
        frame_list = [self._get_frame(sequence, f) for f in frame_ids]
        if anno is None:
            anno = self.get_sequence_info(seq_id)
        # Create anno dict
        anno_frames = {}
        for key, value in anno.items():
            anno_frames[key] = [value[f_id, ...].clone() for f_id in frame_ids]
        # added the class info to the meta info
        object_meta = OrderedDict({'object_class': sequence['class_name'],
                                   'motion_class': None,
                                   'major_class': None,
                                   'root_class': None,
                                   'motion_adverb': None})
        return frame_list, anno_frames, object_meta
    def _process_anno(self, root):
        # Builds individual tracklets by walking the per-frame VID XML
        # annotation files once per video.
        base_vid_anno_path = os.path.join(root, 'Annotations', 'VID', 'train')
        all_sequences = []
        # NOTE(review): loop variable `set` shadows the builtin `set`.
        for set in sorted(os.listdir(base_vid_anno_path)):
            set_id = int(set.split('_')[-1])
            for vid in sorted(os.listdir(os.path.join(base_vid_anno_path, set))):
                vid_id = int(vid.split('_')[-1])
                anno_files = sorted(os.listdir(os.path.join(base_vid_anno_path, set, vid)))
                frame1_anno = ET.parse(os.path.join(base_vid_anno_path, set, vid, anno_files[0]))
                image_size = [int(frame1_anno.find('size/width').text), int(frame1_anno.find('size/height').text)]
                # One list of <object> elements per frame, in frame order.
                objects = [ET.ElementTree(file=os.path.join(base_vid_anno_path, set, vid, f)).findall('object')
                           for f in anno_files]
                tracklets = {}
                # Find all tracklets along with start frame
                for f_id, all_targets in enumerate(objects):
                    for target in all_targets:
                        tracklet_id = target.find('trackid').text
                        if tracklet_id not in tracklets:
                            tracklets[tracklet_id] = f_id
                for tracklet_id, tracklet_start in tracklets.items():
                    tracklet_anno = []
                    target_visible = []
                    class_name_id = None
                    # Follow the tracklet frame-by-frame; it ends at the first
                    # frame where its trackid is missing.
                    for f_id in range(tracklet_start, len(objects)):
                        found = False
                        for target in objects[f_id]:
                            if target.find('trackid').text == tracklet_id:
                                if not class_name_id:
                                    class_name_id = target.find('name').text
                                x1 = int(target.find('bndbox/xmin').text)
                                y1 = int(target.find('bndbox/ymin').text)
                                x2 = int(target.find('bndbox/xmax').text)
                                y2 = int(target.find('bndbox/ymax').text)
                                # Store as [x, y, w, h].
                                tracklet_anno.append([x1, y1, x2 - x1, y2 - y1])
                                target_visible.append(target.find('occluded').text == '0')
                                found = True
                                break
                        if not found:
                            break
                    new_sequence = {'set_id': set_id, 'vid_id': vid_id, 'class_name': class_name_id,
                                    'start_frame': tracklet_start, 'anno': tracklet_anno,
                                    'target_visible': target_visible, 'image_size': image_size}
                    all_sequences.append(new_sequence)
        return all_sequences
from .base_video_dataset import BaseVideoDataset
from lib.train.data import jpeg4py_loader
import xml.etree.ElementTree as ET
import json
import torch
from collections import OrderedDict
from lib.train.admin import env_settings
def get_target_to_image_ratio(seq):
anno = torch.Tensor(seq['anno'])
img_sz = torch.Tensor(seq['image_size'])
return (anno[0, 2:4].prod() / (img_sz.prod())).sqrt()
class ImagenetVID(BaseVideoDataset):
""" Imagenet VID dataset.
Publication:
ImageNet Large Scale Visual Recognition Challenge
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME> and <NAME>
IJCV, 2015
https://arxiv.org/pdf/1409.0575.pdf
Download the dataset from http://image-net.org/
"""
def __init__(self, root=None, image_loader=jpeg4py_loader, min_length=0, max_target_area=1):
"""
args:
root - path to the imagenet vid dataset.
image_loader (default_image_loader) - The function to read the images. If installed,
jpeg4py (https://github.com/ajkxyz/jpeg4py) is used by default. Else,
opencv's imread is used.
min_length - Minimum allowed sequence length.
max_target_area - max allowed ratio between target area and image area. Can be used to filter out targets
which cover complete image.
"""
root = env_settings().imagenet_dir if root is None else root
super().__init__("imagenetvid", root, image_loader)
cache_file = os.path.join(root, 'cache.json')
if os.path.isfile(cache_file):
# If available, load the pre-processed cache file containing meta-info for each sequence
with open(cache_file, 'r') as f:
sequence_list_dict = json.load(f)
self.sequence_list = sequence_list_dict
else:
# Else process the imagenet annotations and generate the cache file
self.sequence_list = self._process_anno(root)
with open(cache_file, 'w') as f:
json.dump(self.sequence_list, f)
# Filter the sequences based on min_length and max_target_area in the first frame
self.sequence_list = [x for x in self.sequence_list if len(x['anno']) >= min_length and
get_target_to_image_ratio(x) < max_target_area]
def get_name(self):
return 'imagenetvid'
def get_num_sequences(self):
return len(self.sequence_list)
def get_sequence_info(self, seq_id):
bb_anno = torch.Tensor(self.sequence_list[seq_id]['anno'])
valid = (bb_anno[:, 2] > 0) & (bb_anno[:, 3] > 0)
visible = torch.ByteTensor(self.sequence_list[seq_id]['target_visible']) & valid.byte()
return {'bbox': bb_anno, 'valid': valid, 'visible': visible}
def _get_frame(self, sequence, frame_id):
set_name = 'ILSVRC2015_VID_train_{:04d}'.format(sequence['set_id'])
vid_name = 'ILSVRC2015_train_{:08d}'.format(sequence['vid_id'])
frame_number = frame_id + sequence['start_frame']
frame_path = os.path.join(self.root, 'Data', 'VID', 'train', set_name, vid_name,
'{:06d}.JPEG'.format(frame_number))
return self.image_loader(frame_path)
def get_frames(self, seq_id, frame_ids, anno=None):
sequence = self.sequence_list[seq_id]
frame_list = [self._get_frame(sequence, f) for f in frame_ids]
if anno is None:
anno = self.get_sequence_info(seq_id)
# Create anno dict
anno_frames = {}
for key, value in anno.items():
anno_frames[key] = [value[f_id, ...].clone() for f_id in frame_ids]
# added the class info to the meta info
object_meta = OrderedDict({'object_class': sequence['class_name'],
'motion_class': None,
'major_class': None,
'root_class': None,
'motion_adverb': None})
return frame_list, anno_frames, object_meta
def _process_anno(self, root):
# Builds individual tracklets
base_vid_anno_path = os.path.join(root, 'Annotations', 'VID', 'train')
all_sequences = []
for set in sorted(os.listdir(base_vid_anno_path)):
set_id = int(set.split('_')[-1])
for vid in sorted(os.listdir(os.path.join(base_vid_anno_path, set))):
vid_id = int(vid.split('_')[-1])
anno_files = sorted(os.listdir(os.path.join(base_vid_anno_path, set, vid)))
frame1_anno = ET.parse(os.path.join(base_vid_anno_path, set, vid, anno_files[0]))
image_size = [int(frame1_anno.find('size/width').text), int(frame1_anno.find('size/height').text)]
objects = [ET.ElementTree(file=os.path.join(base_vid_anno_path, set, vid, f)).findall('object')
for f in anno_files]
tracklets = {}
# Find all tracklets along with start frame
for f_id, all_targets in enumerate(objects):
for target in all_targets:
tracklet_id = target.find('trackid').text
if tracklet_id not in tracklets:
tracklets[tracklet_id] = f_id
for tracklet_id, tracklet_start in tracklets.items():
tracklet_anno = []
target_visible = []
class_name_id = None
for f_id in range(tracklet_start, len(objects)):
found = False
for target in objects[f_id]:
if target.find('trackid').text == tracklet_id:
if not class_name_id:
class_name_id = target.find('name').text
x1 = int(target.find('bndbox/xmin').text)
y1 = int(target.find('bndbox/ymin').text)
x2 = int(target.find('bndbox/xmax').text)
y2 = int(target.find('bndbox/ymax').text)
tracklet_anno.append([x1, y1, x2 - x1, y2 - y1])
target_visible.append(target.find('occluded').text == '0')
found = True
break
if not found:
break
new_sequence = {'set_id': set_id, 'vid_id': vid_id, 'class_name': class_name_id,
'start_frame': tracklet_start, 'anno': tracklet_anno,
'target_visible': target_visible, 'image_size': image_size}
all_sequences.append(new_sequence)
return all_sequences | 0.622 | 0.302926 |
import random
import uuid
import unicodedata
import ctypes
import math
import fdb
import fdb.tuple
from bindingtester import util
from bindingtester import FDB_API_VERSION
from bindingtester.known_testers import COMMON_TYPES
class RandomGenerator(object):
    def __init__(self, max_int_bits=64, api_version=FDB_API_VERSION, types=COMMON_TYPES):
        # max_int_bits bounds the magnitude of generated integers;
        # api_version gates version-dependent encodings (e.g. booleans);
        # types is the pool of tuple element type names drawn from.
        self.max_int_bits = max_int_bits
        self.api_version = api_version
        self.types = list(types)
    def random_unicode_str(self, length):
        """Return a unicode string of *length* random characters.

        Delegates per-character choice to random_unicode_char -- defined
        elsewhere in this class (outside this view).
        """
        return ''.join(self.random_unicode_char() for i in range(0, length))
def random_int(self):
num_bits = random.randint(0, self.max_int_bits) # This way, we test small numbers with higher probability
max_value = (1 << num_bits) - 1
min_value = -max_value - 1
num = random.randint(min_value, max_value)
# util.get_logger().debug('generating int (%d): %d - %s' % (num_bits, num, repr(fdb.tuple.pack((num,)))))
return num
def random_float(self, exp_bits):
if random.random() < 0.05:
# Choose a special value.
return random.choice([float('-nan'), float('-inf'), -0.0, 0.0, float('inf'), float('nan')])
else:
# Choose a value from all over the range of acceptable floats for this precision.
sign = -1 if random.random() < 0.5 else 1
exponent = random.randint(-(1 << (exp_bits - 1)) - 10, (1 << (exp_bits - 1) - 1))
mantissa = random.random()
result = sign * math.pow(2, exponent) * mantissa
if random.random() < 0.05:
result = float(int(result))
return result
def random_tuple(self, max_size, incomplete_versionstamps=False):
size = random.randint(1, max_size)
tup = []
for i in range(size):
choice = random.choice(self.types)
if choice == 'int':
tup.append(self.random_int())
elif choice == 'null':
tup.append(None)
elif choice == 'bytes':
tup.append(self.random_string(random.randint(0, 100)))
elif choice == 'string':
tup.append(self.random_unicode_str(random.randint(0, 100)))
elif choice == 'uuid':
tup.append(uuid.uuid4())
elif choice == 'bool':
b = random.random() < 0.5
if self.api_version < 500:
tup.append(int(b))
else:
tup.append(b)
elif choice == 'float':
tup.append(fdb.tuple.SingleFloat(self.random_float(8)))
elif choice == 'double':
tup.append(self.random_float(11))
elif choice == 'tuple':
length = random.randint(0, max_size - size)
if length == 0:
tup.append(())
else:
tup.append(self.random_tuple(length))
elif choice == 'versionstamp':
if incomplete_versionstamps and random.random() < 0.5:
tr_version = fdb.tuple.Versionstamp._UNSET_TR_VERSION
else:
tr_version = self.random_string(10)
user_version = random.randint(0, 0xffff)
tup.append(fdb.tuple.Versionstamp(tr_version, user_version))
else:
assert false
return tuple(tup)
def random_tuple_list(self, max_size, max_list_size):
size = random.randint(1, max_list_size)
tuples = []
for i in range(size):
to_add = self.random_tuple(max_size)
tuples.append(to_add)
if len(to_add) > 1 and random.random() < 0.25:
# Add a smaller one to test prefixes.
smaller_size = random.randint(1, len(to_add))
tuples.append(to_add[:smaller_size])
else:
non_empty = [x for x in enumerate(to_add) if (isinstance(x[1], list) or isinstance(x[1], tuple)) and len(x[1]) > 0]
if len(non_empty) > 0 and random.random() < 0.25:
# Add a smaller list to test prefixes of nested structures.
idx, choice = random.choice(non_empty)
smaller_size = random.randint(0, len(to_add[idx]))
tuples.append(to_add[:idx] + (choice[:smaller_size],) + to_add[idx + 1:])
random.shuffle(tuples)
return tuples
def random_range_params(self):
if random.random() < 0.75:
limit = random.randint(1, 1e3)
elif random.random() < 0.75:
limit = 0
else:
limit = random.randint(1e8, (1 << 31) - 1)
return (limit, random.randint(0, 1), random.randint(-2, 4))
def random_selector_params(self):
if random.random() < 0.9:
offset = random.randint(-20, 20)
else:
offset = random.randint(-1000, 1000)
return (random.randint(0, 1), offset)
def random_string(self, length):
if length == 0:
return b''
return bytes([random.randint(0, 254)] + [random.randint(0, 255) for i in range(0, length - 1)])
def random_unicode_char(self):
while True:
if random.random() < 0.05:
# Choose one of these special character sequences.
specials = ['\U0001f4a9', '\U0001f63c', '\U0001f3f3\ufe0f\u200d\U0001f308', '\U0001f1f5\U0001f1f2', '\uf8ff',
'\U0002a2b2', '\u05e9\u05dc\u05d5\u05dd']
return random.choice(specials)
c = random.randint(0, 0xffff)
if unicodedata.category(chr(c))[0] in 'LMNPSZ':
return chr(c)
def error_string(error_code):
return fdb.tuple.pack((b'ERROR', bytes(str(error_code), 'utf-8')))
def blocking_commit(instructions):
instructions.append('COMMIT')
instructions.append('WAIT_FUTURE')
instructions.append('RESET')
def to_front(instructions, index):
if index == 0:
pass
elif index == 1:
instructions.push_args(1)
instructions.append('SWAP')
elif index == 2:
instructions.push_args(index - 1)
instructions.append('SWAP')
instructions.push_args(index)
instructions.append('SWAP')
else:
instructions.push_args(index - 1)
instructions.append('SWAP')
instructions.push_args(index)
instructions.append('SWAP')
instructions.push_args(index - 1)
instructions.append('SWAP')
to_front(instructions, index - 1)
def with_length(tup):
return (len(tup),) + tup | bindings/bindingtester/tests/test_util.py |
import random
import uuid
import unicodedata
import ctypes
import math
import fdb
import fdb.tuple
from bindingtester import util
from bindingtester import FDB_API_VERSION
from bindingtester.known_testers import COMMON_TYPES
class RandomGenerator(object):
def __init__(self, max_int_bits=64, api_version=FDB_API_VERSION, types=COMMON_TYPES):
self.max_int_bits = max_int_bits
self.api_version = api_version
self.types = list(types)
def random_unicode_str(self, length):
return ''.join(self.random_unicode_char() for i in range(0, length))
def random_int(self):
num_bits = random.randint(0, self.max_int_bits) # This way, we test small numbers with higher probability
max_value = (1 << num_bits) - 1
min_value = -max_value - 1
num = random.randint(min_value, max_value)
# util.get_logger().debug('generating int (%d): %d - %s' % (num_bits, num, repr(fdb.tuple.pack((num,)))))
return num
def random_float(self, exp_bits):
if random.random() < 0.05:
# Choose a special value.
return random.choice([float('-nan'), float('-inf'), -0.0, 0.0, float('inf'), float('nan')])
else:
# Choose a value from all over the range of acceptable floats for this precision.
sign = -1 if random.random() < 0.5 else 1
exponent = random.randint(-(1 << (exp_bits - 1)) - 10, (1 << (exp_bits - 1) - 1))
mantissa = random.random()
result = sign * math.pow(2, exponent) * mantissa
if random.random() < 0.05:
result = float(int(result))
return result
def random_tuple(self, max_size, incomplete_versionstamps=False):
size = random.randint(1, max_size)
tup = []
for i in range(size):
choice = random.choice(self.types)
if choice == 'int':
tup.append(self.random_int())
elif choice == 'null':
tup.append(None)
elif choice == 'bytes':
tup.append(self.random_string(random.randint(0, 100)))
elif choice == 'string':
tup.append(self.random_unicode_str(random.randint(0, 100)))
elif choice == 'uuid':
tup.append(uuid.uuid4())
elif choice == 'bool':
b = random.random() < 0.5
if self.api_version < 500:
tup.append(int(b))
else:
tup.append(b)
elif choice == 'float':
tup.append(fdb.tuple.SingleFloat(self.random_float(8)))
elif choice == 'double':
tup.append(self.random_float(11))
elif choice == 'tuple':
length = random.randint(0, max_size - size)
if length == 0:
tup.append(())
else:
tup.append(self.random_tuple(length))
elif choice == 'versionstamp':
if incomplete_versionstamps and random.random() < 0.5:
tr_version = fdb.tuple.Versionstamp._UNSET_TR_VERSION
else:
tr_version = self.random_string(10)
user_version = random.randint(0, 0xffff)
tup.append(fdb.tuple.Versionstamp(tr_version, user_version))
else:
assert false
return tuple(tup)
def random_tuple_list(self, max_size, max_list_size):
size = random.randint(1, max_list_size)
tuples = []
for i in range(size):
to_add = self.random_tuple(max_size)
tuples.append(to_add)
if len(to_add) > 1 and random.random() < 0.25:
# Add a smaller one to test prefixes.
smaller_size = random.randint(1, len(to_add))
tuples.append(to_add[:smaller_size])
else:
non_empty = [x for x in enumerate(to_add) if (isinstance(x[1], list) or isinstance(x[1], tuple)) and len(x[1]) > 0]
if len(non_empty) > 0 and random.random() < 0.25:
# Add a smaller list to test prefixes of nested structures.
idx, choice = random.choice(non_empty)
smaller_size = random.randint(0, len(to_add[idx]))
tuples.append(to_add[:idx] + (choice[:smaller_size],) + to_add[idx + 1:])
random.shuffle(tuples)
return tuples
def random_range_params(self):
if random.random() < 0.75:
limit = random.randint(1, 1e3)
elif random.random() < 0.75:
limit = 0
else:
limit = random.randint(1e8, (1 << 31) - 1)
return (limit, random.randint(0, 1), random.randint(-2, 4))
def random_selector_params(self):
if random.random() < 0.9:
offset = random.randint(-20, 20)
else:
offset = random.randint(-1000, 1000)
return (random.randint(0, 1), offset)
def random_string(self, length):
if length == 0:
return b''
return bytes([random.randint(0, 254)] + [random.randint(0, 255) for i in range(0, length - 1)])
def random_unicode_char(self):
while True:
if random.random() < 0.05:
# Choose one of these special character sequences.
specials = ['\U0001f4a9', '\U0001f63c', '\U0001f3f3\ufe0f\u200d\U0001f308', '\U0001f1f5\U0001f1f2', '\uf8ff',
'\U0002a2b2', '\u05e9\u05dc\u05d5\u05dd']
return random.choice(specials)
c = random.randint(0, 0xffff)
if unicodedata.category(chr(c))[0] in 'LMNPSZ':
return chr(c)
def error_string(error_code):
return fdb.tuple.pack((b'ERROR', bytes(str(error_code), 'utf-8')))
def blocking_commit(instructions):
instructions.append('COMMIT')
instructions.append('WAIT_FUTURE')
instructions.append('RESET')
def to_front(instructions, index):
if index == 0:
pass
elif index == 1:
instructions.push_args(1)
instructions.append('SWAP')
elif index == 2:
instructions.push_args(index - 1)
instructions.append('SWAP')
instructions.push_args(index)
instructions.append('SWAP')
else:
instructions.push_args(index - 1)
instructions.append('SWAP')
instructions.push_args(index)
instructions.append('SWAP')
instructions.push_args(index - 1)
instructions.append('SWAP')
to_front(instructions, index - 1)
def with_length(tup):
return (len(tup),) + tup | 0.41941 | 0.207014 |
from functools import partial
import numpy as np
from unittest import TestCase
from datumaro.components.project import Dataset
from datumaro.components.extractor import (
DatasetItem,
AnnotationType,
Label,
Mask,
Points,
Polygon,
PolyLine,
Bbox,
Caption,
LabelCategories,
MaskCategories,
PointsCategories,
)
from datumaro.plugins.datumaro_format.extractor import DatumaroImporter
from datumaro.plugins.datumaro_format.converter import DatumaroConverter
from datumaro.util.mask_tools import generate_colormap
from datumaro.util.image import Image
from datumaro.util.test_utils import (
TestDir,
compare_datasets_strict,
test_save_and_load,
)
class DatumaroConverterTest(TestCase):
def _test_save_and_load(
self,
source_dataset,
converter,
test_dir,
target_dataset=None,
importer_args=None,
):
return test_save_and_load(
self,
source_dataset,
converter,
test_dir,
importer="datumaro",
target_dataset=target_dataset,
importer_args=importer_args,
compare=compare_datasets_strict,
)
@property
def test_dataset(self):
label_categories = LabelCategories()
for i in range(5):
label_categories.add("cat" + str(i))
mask_categories = MaskCategories(generate_colormap(len(label_categories.items)))
points_categories = PointsCategories()
for index, _ in enumerate(label_categories.items):
points_categories.add(index, ["cat1", "cat2"], joints=[[0, 1]])
return Dataset.from_iterable(
[
DatasetItem(
id=100,
subset="train",
image=np.ones((10, 6, 3)),
annotations=[
Caption("hello", id=1),
Caption("world", id=2, group=5),
Label(
2,
id=3,
attributes={
"x": 1,
"y": "2",
},
),
Bbox(
1,
2,
3,
4,
label=4,
id=4,
z_order=1,
attributes={
"score": 1.0,
},
),
Bbox(5, 6, 7, 8, id=5, group=5),
Points([1, 2, 2, 0, 1, 1], label=0, id=5, z_order=4),
Mask(label=3, id=5, z_order=2, image=np.ones((2, 3))),
],
),
DatasetItem(
id=21,
subset="train",
annotations=[
Caption("test"),
Label(2),
Bbox(1, 2, 3, 4, label=5, id=42, group=42),
],
),
DatasetItem(
id=2,
subset="val",
annotations=[
PolyLine([1, 2, 3, 4, 5, 6, 7, 8], id=11, z_order=1),
Polygon([1, 2, 3, 4, 5, 6, 7, 8], id=12, z_order=4),
],
),
DatasetItem(id=42, subset="test", attributes={"a1": 5, "a2": "42"}),
DatasetItem(id=42),
DatasetItem(id=43, image=Image(path="1/b/c.qq", size=(2, 4))),
],
categories={
AnnotationType.label: label_categories,
AnnotationType.mask: mask_categories,
AnnotationType.points: points_categories,
},
)
def test_can_save_and_load(self):
with TestDir() as test_dir:
self._test_save_and_load(
self.test_dataset,
partial(DatumaroConverter.convert, save_images=True),
test_dir,
)
def test_can_detect(self):
with TestDir() as test_dir:
DatumaroConverter.convert(self.test_dataset, save_dir=test_dir)
self.assertTrue(DatumaroImporter.detect(test_dir))
def test_relative_paths(self):
test_dataset = Dataset.from_iterable(
[
DatasetItem(id="1", image=np.ones((4, 2, 3))),
DatasetItem(id="subdir1/1", image=np.ones((2, 6, 3))),
DatasetItem(id="subdir2/1", image=np.ones((5, 4, 3))),
]
)
with TestDir() as test_dir:
self._test_save_and_load(
test_dataset,
partial(DatumaroConverter.convert, save_images=True),
test_dir,
) | tests/test_datumaro_format.py | from functools import partial
import numpy as np
from unittest import TestCase
from datumaro.components.project import Dataset
from datumaro.components.extractor import (
DatasetItem,
AnnotationType,
Label,
Mask,
Points,
Polygon,
PolyLine,
Bbox,
Caption,
LabelCategories,
MaskCategories,
PointsCategories,
)
from datumaro.plugins.datumaro_format.extractor import DatumaroImporter
from datumaro.plugins.datumaro_format.converter import DatumaroConverter
from datumaro.util.mask_tools import generate_colormap
from datumaro.util.image import Image
from datumaro.util.test_utils import (
TestDir,
compare_datasets_strict,
test_save_and_load,
)
class DatumaroConverterTest(TestCase):
def _test_save_and_load(
self,
source_dataset,
converter,
test_dir,
target_dataset=None,
importer_args=None,
):
return test_save_and_load(
self,
source_dataset,
converter,
test_dir,
importer="datumaro",
target_dataset=target_dataset,
importer_args=importer_args,
compare=compare_datasets_strict,
)
@property
def test_dataset(self):
label_categories = LabelCategories()
for i in range(5):
label_categories.add("cat" + str(i))
mask_categories = MaskCategories(generate_colormap(len(label_categories.items)))
points_categories = PointsCategories()
for index, _ in enumerate(label_categories.items):
points_categories.add(index, ["cat1", "cat2"], joints=[[0, 1]])
return Dataset.from_iterable(
[
DatasetItem(
id=100,
subset="train",
image=np.ones((10, 6, 3)),
annotations=[
Caption("hello", id=1),
Caption("world", id=2, group=5),
Label(
2,
id=3,
attributes={
"x": 1,
"y": "2",
},
),
Bbox(
1,
2,
3,
4,
label=4,
id=4,
z_order=1,
attributes={
"score": 1.0,
},
),
Bbox(5, 6, 7, 8, id=5, group=5),
Points([1, 2, 2, 0, 1, 1], label=0, id=5, z_order=4),
Mask(label=3, id=5, z_order=2, image=np.ones((2, 3))),
],
),
DatasetItem(
id=21,
subset="train",
annotations=[
Caption("test"),
Label(2),
Bbox(1, 2, 3, 4, label=5, id=42, group=42),
],
),
DatasetItem(
id=2,
subset="val",
annotations=[
PolyLine([1, 2, 3, 4, 5, 6, 7, 8], id=11, z_order=1),
Polygon([1, 2, 3, 4, 5, 6, 7, 8], id=12, z_order=4),
],
),
DatasetItem(id=42, subset="test", attributes={"a1": 5, "a2": "42"}),
DatasetItem(id=42),
DatasetItem(id=43, image=Image(path="1/b/c.qq", size=(2, 4))),
],
categories={
AnnotationType.label: label_categories,
AnnotationType.mask: mask_categories,
AnnotationType.points: points_categories,
},
)
def test_can_save_and_load(self):
with TestDir() as test_dir:
self._test_save_and_load(
self.test_dataset,
partial(DatumaroConverter.convert, save_images=True),
test_dir,
)
def test_can_detect(self):
with TestDir() as test_dir:
DatumaroConverter.convert(self.test_dataset, save_dir=test_dir)
self.assertTrue(DatumaroImporter.detect(test_dir))
def test_relative_paths(self):
test_dataset = Dataset.from_iterable(
[
DatasetItem(id="1", image=np.ones((4, 2, 3))),
DatasetItem(id="subdir1/1", image=np.ones((2, 6, 3))),
DatasetItem(id="subdir2/1", image=np.ones((5, 4, 3))),
]
)
with TestDir() as test_dir:
self._test_save_and_load(
test_dataset,
partial(DatumaroConverter.convert, save_images=True),
test_dir,
) | 0.669529 | 0.3341 |
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.model.document import Document
from frappe.model.rename_doc import rename_doc
class Medication(Document):
def validate(self):
self.enable_disable_item()
def after_insert(self):
create_item_from_medication(self)
def on_update(self):
if self.change_in_item:
self.update_item_and_item_price()
def enable_disable_item(self):
if self.is_billable:
if self.disabled:
frappe.db.set_value('Item', self.item, 'disabled', 1)
else:
frappe.db.set_value('Item', self.item, 'disabled', 0)
def update_item_and_item_price(self):
if self.is_billable and self.item:
item_doc = frappe.get_doc('Item', {'item_code': self.item})
item_doc.item_name = self.medication_name
item_doc.item_group = self.item_group
item_doc.description = self.description
item_doc.stock_uom = self.stock_uom
item_doc.disabled = 0
item_doc.save(ignore_permissions=True)
if self.rate:
item_price = frappe.get_doc('Item Price', {'item_code': self.item})
item_price.item_name = self.medication_name
item_price.price_list_rate = self.rate
item_price.save()
elif not self.is_billable and self.item:
frappe.db.set_value('Item', self.item, 'disabled', 1)
self.db_set('change_in_item', 0)
def create_item_from_medication(doc):
disabled = doc.disabled
if doc.is_billable and not doc.disabled:
disabled = 0
uom = doc.stock_uom or frappe.db.get_single_value('Stock Settings', 'stock_uom')
item = frappe.get_doc({
'doctype': 'Item',
'item_code': doc.medication_name,
'item_name':doc.medication_name,
'item_group': doc.item_group,
'description':doc.description,
'is_sales_item': 1,
'is_service_item': 1,
'is_purchase_item': 0,
'is_stock_item': 1,
'show_in_website': 0,
'is_pro_applicable': 0,
'disabled': disabled,
'stock_uom': uom
}).insert(ignore_permissions=True, ignore_mandatory=True)
make_item_price(item.name, doc.rate)
doc.db_set('item', item.name)
def make_item_price(item, item_price):
price_list_name = frappe.db.get_value('Price List', {'selling': 1})
frappe.get_doc({
'doctype': 'Item Price',
'price_list': price_list_name,
'item_code': item,
'price_list_rate': item_price
}).insert(ignore_permissions=True, ignore_mandatory=True)
@frappe.whitelist()
def change_item_code_from_medication(item_code, doc):
doc = frappe._dict(json.loads(doc))
if frappe.db.exists('Item', {'item_code': item_code}):
frappe.throw(_('Item with Item Code {0} already exists').format(item_code))
else:
rename_doc('Item', doc.item_code, item_code)
frappe.db.set_value('Medication', doc.name, 'item_code', item_code)
return | hms_tz/hms_tz/doctype/medication/medication.py |
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.model.document import Document
from frappe.model.rename_doc import rename_doc
class Medication(Document):
def validate(self):
self.enable_disable_item()
def after_insert(self):
create_item_from_medication(self)
def on_update(self):
if self.change_in_item:
self.update_item_and_item_price()
def enable_disable_item(self):
if self.is_billable:
if self.disabled:
frappe.db.set_value('Item', self.item, 'disabled', 1)
else:
frappe.db.set_value('Item', self.item, 'disabled', 0)
def update_item_and_item_price(self):
if self.is_billable and self.item:
item_doc = frappe.get_doc('Item', {'item_code': self.item})
item_doc.item_name = self.medication_name
item_doc.item_group = self.item_group
item_doc.description = self.description
item_doc.stock_uom = self.stock_uom
item_doc.disabled = 0
item_doc.save(ignore_permissions=True)
if self.rate:
item_price = frappe.get_doc('Item Price', {'item_code': self.item})
item_price.item_name = self.medication_name
item_price.price_list_rate = self.rate
item_price.save()
elif not self.is_billable and self.item:
frappe.db.set_value('Item', self.item, 'disabled', 1)
self.db_set('change_in_item', 0)
def create_item_from_medication(doc):
disabled = doc.disabled
if doc.is_billable and not doc.disabled:
disabled = 0
uom = doc.stock_uom or frappe.db.get_single_value('Stock Settings', 'stock_uom')
item = frappe.get_doc({
'doctype': 'Item',
'item_code': doc.medication_name,
'item_name':doc.medication_name,
'item_group': doc.item_group,
'description':doc.description,
'is_sales_item': 1,
'is_service_item': 1,
'is_purchase_item': 0,
'is_stock_item': 1,
'show_in_website': 0,
'is_pro_applicable': 0,
'disabled': disabled,
'stock_uom': uom
}).insert(ignore_permissions=True, ignore_mandatory=True)
make_item_price(item.name, doc.rate)
doc.db_set('item', item.name)
def make_item_price(item, item_price):
price_list_name = frappe.db.get_value('Price List', {'selling': 1})
frappe.get_doc({
'doctype': 'Item Price',
'price_list': price_list_name,
'item_code': item,
'price_list_rate': item_price
}).insert(ignore_permissions=True, ignore_mandatory=True)
@frappe.whitelist()
def change_item_code_from_medication(item_code, doc):
doc = frappe._dict(json.loads(doc))
if frappe.db.exists('Item', {'item_code': item_code}):
frappe.throw(_('Item with Item Code {0} already exists').format(item_code))
else:
rename_doc('Item', doc.item_code, item_code)
frappe.db.set_value('Medication', doc.name, 'item_code', item_code)
return | 0.329392 | 0.126893 |
import mysql.connector
from django.shortcuts import render
from plotly.offline import plot
import plotly.graph_objects as go
from .models import Ping, Report
# Create your views here.
def home(request):
return render(request, 'home.html',
{'section': 'home'})
def report(request):
reports = Report.objects.all()
return render(request, 'report.html',
{'section': 'report',
'reports': reports})
def chart(request):
# FIRST GRAPH VARIABLES
connection = mysql.connector.connect(host='localhost',
database='switches',
user='madjango',
port='3306',
password='<PASSWORD>')
ping_status_row, unix_timestamp_row, alert_datetime_row = [], [], []
cursor = connection.cursor()
cursor.execute('USE switches;')
cursor.execute('SELECT * FROM switches.switches_switch1;')
records = cursor.fetchall()
# SECOND GRAPH VARIABLES
connection2 = mysql.connector.connect(host='localhost',
database='switches',
user='madjango',
port='3306',
password='<PASSWORD>')
ping_status_row2, unix_timestamp_row2, alert_datetime_row2 = [], [], []
cursor2 = connection2.cursor()
cursor2.execute('USE switches;')
cursor2.execute('SELECT * FROM switches.switches_switch2;')
records2 = cursor2.fetchall()
# THIRD GRAPH VARIABLES
connection3 = mysql.connector.connect(host='localhost',
database='switches',
user='madjango',
port='3306',
password='<PASSWORD>')
ping_status_row3, unix_timestamp_row3, alert_datetime_row3 = [], [], []
cursor3 = connection3.cursor()
cursor3.execute('USE switches;')
cursor3.execute('SELECT * FROM switches.switches_switch3;')
records3 = cursor3.fetchall()
try:
# SWITCH 1 GRAPH
for row in records:
# row = cursor.fetchone()
ping_status_row.append(row[1])
unix_timestamp_row.append(row[2])
alert_datetime_row.append(row[3])
sumg1 = sum(ping_status_row)
leng1 = len(ping_status_row)
last1 = ping_status_row[-1]
ming1 = min(ping_status_row)
maxg1 = max(ping_status_row)
avgg1 = sumg1 / leng1
annotation1 = f'last: {last1}<br>min: {ming1}<br>avg: {avgg1:.4f}<br>max: {maxg1}'
s1graph = go.Scatter(
x=alert_datetime_row,
y=ping_status_row,
mode='lines',
name='ICMP Ping',
line=dict(color='green'),
)
s1layout = go.Layout(
xaxis=dict(title='Time'),
yaxis=dict(title='Ping Status'),
title='SW-S1 Ping Availability',
showlegend=True,
annotations=[
go.layout.Annotation(
text=annotation1,
align='left',
showarrow=False,
xref='paper',
yref='paper',
x=1.1,
y=0.8,
bordercolor='black',
borderwidth=1
)
],
)
s1fig = go.Figure(data=[s1graph], layout=s1layout)
plot_div = plot(s1fig, output_type='div')
# SWITCH 2 GRAPH
for row in records2:
# row = cursor.fetchone()
ping_status_row2.append(row[1])
unix_timestamp_row2.append(row[2])
alert_datetime_row2.append(row[3])
sumg2 = sum(ping_status_row2)
leng2 = len(ping_status_row2)
last2 = ping_status_row2[-1]
ming2 = min(ping_status_row2)
maxg2 = max(ping_status_row2)
avgg2 = sumg2 / leng2
annotation2 = f'last: {last2}<br>min: {ming2}<br>avg: {avgg2:.4f}<br>max: {maxg2}'
s2graph = go.Scatter(
x=alert_datetime_row2,
y=ping_status_row2,
mode='lines',
name='ICMP Ping',
line=dict(color='green'),
)
s2layout = go.Layout(
xaxis=dict(title='Time'),
yaxis=dict(title='Ping Status'),
title='SW-S2 Ping Availability',
showlegend=True,
annotations=[
go.layout.Annotation(
text=annotation2,
align='left',
showarrow=False,
xref='paper',
yref='paper',
x=1.1,
y=0.8,
bordercolor='black',
borderwidth=1
)
],
)
s2fig = go.Figure(data=[s2graph], layout=s2layout)
plot2_div = plot(s2fig, output_type='div')
# SWITCH 3 GRAPH
for row in records3:
# row = cursor.fetchone()
ping_status_row3.append(row[1])
unix_timestamp_row3.append(row[2])
alert_datetime_row3.append(row[3])
sumg3 = sum(ping_status_row3)
leng3 = len(ping_status_row3)
last3 = ping_status_row3[-1]
ming3 = min(ping_status_row3)
maxg3 = max(ping_status_row3)
avgg3 = sumg3 / leng3
annotation3 = f'last: {last3}<br>min: {ming3}<br>avg: {avgg3:.4f}<br>max: {maxg3}'
s3graph = go.Scatter(
x=alert_datetime_row3,
y=ping_status_row3,
mode='lines',
name='ICMP Ping',
line=dict(color='green'),
)
s3layout = go.Layout(
xaxis=dict(title='Time'),
yaxis=dict(title='Ping Status'),
title='SW-S3 Ping Availability',
showlegend=True,
annotations=[
go.layout.Annotation(
text=annotation3,
align='left',
showarrow=False,
xref='paper',
yref='paper',
x=1.1,
y=0.8,
bordercolor='black',
borderwidth=1
)
],
)
s3fig = go.Figure(data=[s3graph], layout=s3layout)
plot3_div = plot(s3fig, output_type='div')
# RETURN RENDER RESPONSE OF THREE GRAPHS
return render(request, "chart.html", context={'section': 'chart',
'plot_div': plot_div,
'plot2_div': plot2_div,
'plot3_div': plot3_div})
except mysql.connector.Error as error:
print("Failed to connect to MySQL: {}".format(error))
finally:
if connection.is_connected():
cursor.close()
connection.close()
print("MySQL connection to SW-1 database is closed")
if connection2.is_connected():
cursor2.close()
connection2.close()
print("MySQL connection to SW-2 database is closed")
if connection3.is_connected():
cursor3.close()
connection3.close()
print("MySQL connection to SW-3 database is closed") | nosairis/nosairis/switches/views.py | import mysql.connector
from django.shortcuts import render
from plotly.offline import plot
import plotly.graph_objects as go
from .models import Ping, Report
# Create your views here.
def home(request):
return render(request, 'home.html',
{'section': 'home'})
def report(request):
reports = Report.objects.all()
return render(request, 'report.html',
{'section': 'report',
'reports': reports})
def chart(request):
# FIRST GRAPH VARIABLES
connection = mysql.connector.connect(host='localhost',
database='switches',
user='madjango',
port='3306',
password='<PASSWORD>')
ping_status_row, unix_timestamp_row, alert_datetime_row = [], [], []
cursor = connection.cursor()
cursor.execute('USE switches;')
cursor.execute('SELECT * FROM switches.switches_switch1;')
records = cursor.fetchall()
# SECOND GRAPH VARIABLES
connection2 = mysql.connector.connect(host='localhost',
database='switches',
user='madjango',
port='3306',
password='<PASSWORD>')
ping_status_row2, unix_timestamp_row2, alert_datetime_row2 = [], [], []
cursor2 = connection2.cursor()
cursor2.execute('USE switches;')
cursor2.execute('SELECT * FROM switches.switches_switch2;')
records2 = cursor2.fetchall()
# THIRD GRAPH VARIABLES
connection3 = mysql.connector.connect(host='localhost',
database='switches',
user='madjango',
port='3306',
password='<PASSWORD>')
ping_status_row3, unix_timestamp_row3, alert_datetime_row3 = [], [], []
cursor3 = connection3.cursor()
cursor3.execute('USE switches;')
cursor3.execute('SELECT * FROM switches.switches_switch3;')
records3 = cursor3.fetchall()
try:
# SWITCH 1 GRAPH
for row in records:
# row = cursor.fetchone()
ping_status_row.append(row[1])
unix_timestamp_row.append(row[2])
alert_datetime_row.append(row[3])
sumg1 = sum(ping_status_row)
leng1 = len(ping_status_row)
last1 = ping_status_row[-1]
ming1 = min(ping_status_row)
maxg1 = max(ping_status_row)
avgg1 = sumg1 / leng1
annotation1 = f'last: {last1}<br>min: {ming1}<br>avg: {avgg1:.4f}<br>max: {maxg1}'
s1graph = go.Scatter(
x=alert_datetime_row,
y=ping_status_row,
mode='lines',
name='ICMP Ping',
line=dict(color='green'),
)
s1layout = go.Layout(
xaxis=dict(title='Time'),
yaxis=dict(title='Ping Status'),
title='SW-S1 Ping Availability',
showlegend=True,
annotations=[
go.layout.Annotation(
text=annotation1,
align='left',
showarrow=False,
xref='paper',
yref='paper',
x=1.1,
y=0.8,
bordercolor='black',
borderwidth=1
)
],
)
s1fig = go.Figure(data=[s1graph], layout=s1layout)
plot_div = plot(s1fig, output_type='div')
# SWITCH 2 GRAPH
for row in records2:
# row = cursor.fetchone()
ping_status_row2.append(row[1])
unix_timestamp_row2.append(row[2])
alert_datetime_row2.append(row[3])
sumg2 = sum(ping_status_row2)
leng2 = len(ping_status_row2)
last2 = ping_status_row2[-1]
ming2 = min(ping_status_row2)
maxg2 = max(ping_status_row2)
avgg2 = sumg2 / leng2
annotation2 = f'last: {last2}<br>min: {ming2}<br>avg: {avgg2:.4f}<br>max: {maxg2}'
s2graph = go.Scatter(
x=alert_datetime_row2,
y=ping_status_row2,
mode='lines',
name='ICMP Ping',
line=dict(color='green'),
)
s2layout = go.Layout(
xaxis=dict(title='Time'),
yaxis=dict(title='Ping Status'),
title='SW-S2 Ping Availability',
showlegend=True,
annotations=[
go.layout.Annotation(
text=annotation2,
align='left',
showarrow=False,
xref='paper',
yref='paper',
x=1.1,
y=0.8,
bordercolor='black',
borderwidth=1
)
],
)
s2fig = go.Figure(data=[s2graph], layout=s2layout)
plot2_div = plot(s2fig, output_type='div')
# SWITCH 3 GRAPH
for row in records3:
# row = cursor.fetchone()
ping_status_row3.append(row[1])
unix_timestamp_row3.append(row[2])
alert_datetime_row3.append(row[3])
sumg3 = sum(ping_status_row3)
leng3 = len(ping_status_row3)
last3 = ping_status_row3[-1]
ming3 = min(ping_status_row3)
maxg3 = max(ping_status_row3)
avgg3 = sumg3 / leng3
annotation3 = f'last: {last3}<br>min: {ming3}<br>avg: {avgg3:.4f}<br>max: {maxg3}'
s3graph = go.Scatter(
x=alert_datetime_row3,
y=ping_status_row3,
mode='lines',
name='ICMP Ping',
line=dict(color='green'),
)
s3layout = go.Layout(
xaxis=dict(title='Time'),
yaxis=dict(title='Ping Status'),
title='SW-S3 Ping Availability',
showlegend=True,
annotations=[
go.layout.Annotation(
text=annotation3,
align='left',
showarrow=False,
xref='paper',
yref='paper',
x=1.1,
y=0.8,
bordercolor='black',
borderwidth=1
)
],
)
s3fig = go.Figure(data=[s3graph], layout=s3layout)
plot3_div = plot(s3fig, output_type='div')
# RETURN RENDER RESPONSE OF THREE GRAPHS
return render(request, "chart.html", context={'section': 'chart',
'plot_div': plot_div,
'plot2_div': plot2_div,
'plot3_div': plot3_div})
except mysql.connector.Error as error:
print("Failed to connect to MySQL: {}".format(error))
finally:
if connection.is_connected():
cursor.close()
connection.close()
print("MySQL connection to SW-1 database is closed")
if connection2.is_connected():
cursor2.close()
connection2.close()
print("MySQL connection to SW-2 database is closed")
if connection3.is_connected():
cursor3.close()
connection3.close()
print("MySQL connection to SW-3 database is closed") | 0.417746 | 0.118513 |
import SimulateTrade
from utils import ensure_dir_exist, archive_dir_folders
import pandas as pd
import time
import os
import zipfile
import datetime
import sys
DEST_DIR = ".\\FilteredCSVs"
if __name__ == '__main__':
    # CLI: FilterCSVs.py [src_dir] [archive_results]
    #   src_dir         - folder containing the daily data .zip archives
    #   archive_results - "true"/"1"/"yes" to archive the per-zip output folders
    src_dir = SimulateTrade.SOURCE_DIR
    archive_results = False
    if len(sys.argv) > 1:
        src_dir = sys.argv[1]
    if len(sys.argv) > 2:
        # BUG FIX: the original used bool(sys.argv[2].lower()), which is True
        # for ANY non-empty string -- including "false" and "0".  Parse the
        # flag explicitly instead.
        archive_results = sys.argv[2].lower() in ('1', 'true', 'yes', 'y')

    start_time = time.time()
    snp_500_symbols = SimulateTrade.get_snp_symbols(SimulateTrade.SNP_SYMBOLS_FILE_PATH)
    zip_files = SimulateTrade.get_zip_files_in_folder(src_dir)

    for curr_file in zip_files:
        # The original built a single-entry dict and looped over it; iterate
        # the archive directly instead -- behavior is identical.
        zip_path = os.path.join(src_dir, curr_file)
        files_by_date = SimulateTrade.get_files_from_zip_by_date(zip_path)
        print(f'Filtering {zip_path}')
        # When archiving, each zip gets its own output subfolder; otherwise
        # everything goes into the flat DEST_DIR.
        dir_path = (DEST_DIR if not archive_results
                    else os.path.join(DEST_DIR, os.path.basename(os.path.splitext(zip_path)[0])))
        ensure_dir_exist(dir_path)
        # Context manager releases the archive handle even on error
        # (the original never closed the ZipFile).
        with zipfile.ZipFile(zip_path) as zip_file_obj:
            for curr_date in files_by_date:
                file_time = time.time()
                date_info = files_by_date[curr_date]
                day = date_info['day']
                month = date_info['month']
                year = date_info['year']

                # Filter the equity quotes CSV down to S&P 500 symbols.
                stock_quotes_file = date_info['stockquotes']
                stock_quotes_data = pd.read_csv(zip_file_obj.open(stock_quotes_file))
                snp_quotes = SimulateTrade.filter_equity_snp_symbols(stock_quotes_data, snp_500_symbols)
                snp_quotes.to_csv(os.path.join(dir_path, f'stockquotes_{year}{month:02}{day:02}.csv'),
                                  index=False)
                print(f'Filtering {zip_path}\\{stock_quotes_file} took {time.time() - file_time} seconds')

                # Filter the options chain to tradable S&P 500 contracts
                # (filter_tradable_options args 0, 8, 4 mirror the original call;
                # their semantics live in SimulateTrade -- TODO confirm).
                options_file = date_info['options']
                options_data = pd.read_csv(zip_file_obj.open(options_file))
                snp_options = SimulateTrade.filter_snp_symbols(options_data, snp_500_symbols)
                snp_options['Expiration'] = pd.to_datetime(snp_options['Expiration'], format='%m/%d/%Y')
                zip_date = datetime.datetime(year=year, month=month, day=day)
                snp_options = SimulateTrade.filter_tradable_options(snp_options, zip_date, 0, 8, 4)
                snp_options.to_csv(os.path.join(dir_path, f'options_{year}{month:02}{day:02}.csv'),
                                   index=False)
                print(f'Filtering {zip_path}\\{options_file} took {time.time() - file_time} seconds')

    if archive_results:
        print('archiving output...')
        archive_dir_folders(DEST_DIR)

    end_time = time.time()
    print("Processing took", end_time - start_time, "seconds")
from utils import ensure_dir_exist, archive_dir_folders
import pandas as pd
import time
import os
import zipfile
import datetime
import sys
DEST_DIR = ".\\FilteredCSVs"
if __name__ == '__main__':
    # Filter the daily stock-quote and option CSVs inside each zip archive in
    # the source directory down to S&P 500 symbols, writing results to DEST_DIR.
    src_dir = SimulateTrade.SOURCE_DIR
    archive_results = False
    if len(sys.argv) > 1:
        src_dir = sys.argv[1]
    if len(sys.argv) > 2:
        # BUG FIX: bool(sys.argv[2].lower()) was True for ANY non-empty
        # argument (including "false"); parse the flag explicitly instead.
        archive_results = sys.argv[2].lower() in ('1', 'true', 'yes')
    start_time = time.time()
    snp_500_symbols = SimulateTrade.get_snp_symbols(SimulateTrade.SNP_SYMBOLS_FILE_PATH)
    zip_files = SimulateTrade.get_zip_files_in_folder(src_dir)
    for curr_file in zip_files:
        files_by_zip = {}
        file_path = os.path.join(src_dir, curr_file)
        files_by_zip[file_path] = SimulateTrade.get_files_from_zip_by_date(file_path)
        for zip_file in files_by_zip:
            print(f'Filtering {zip_file}')
            # When archiving, each zip gets its own output sub-folder named
            # after the archive (extension stripped).
            dir_path = DEST_DIR if not archive_results else os.path.join(DEST_DIR, os.path.basename(os.path.splitext(zip_file)[0]))
            ensure_dir_exist(dir_path)
            zip_file_obj = zipfile.ZipFile(zip_file)
            for curr_date in files_by_zip[zip_file]:
                file_time = time.time()
                date_info = files_by_zip[zip_file][curr_date]
                day = date_info['day']
                month = date_info['month']
                year = date_info['year']
                # --- stock quotes: keep only S&P 500 equities ---
                stock_quotes_file = date_info['stockquotes']
                stock_quotes_data = pd.read_csv(zip_file_obj.open(stock_quotes_file))
                snp_quotes = SimulateTrade.filter_equity_snp_symbols(stock_quotes_data, snp_500_symbols)
                snp_quotes.to_csv(os.path.join(dir_path, f'stockquotes_{year}{month:02}{day:02}.csv'),
                                  index=False)
                print(f'Filtering {zip_file}\\{stock_quotes_file} took {time.time() - file_time} seconds')
                # --- options: keep S&P 500 symbols that are tradable ---
                options_file = date_info['options']
                options_data = pd.read_csv(zip_file_obj.open(options_file))
                snp_options = SimulateTrade.filter_snp_symbols(options_data, snp_500_symbols)
                snp_options['Expiration'] = pd.to_datetime(snp_options['Expiration'], format='%m/%d/%Y')
                zip_date = datetime.datetime(year=year, month=month, day=day)
                snp_options = SimulateTrade.filter_tradable_options(snp_options, zip_date, 0, 8, 4)
                snp_options.to_csv(os.path.join(dir_path, f'options_{year}{month:02}{day:02}.csv'),
                                   index=False)
                print(f'Filtering {zip_file}\\{options_file} took {time.time() - file_time} seconds')
    if archive_results:
        print('archiving output...')
        archive_dir_folders(DEST_DIR)
    end_time = time.time()
print("Processing took", end_time - start_time, "seconds") | 0.149469 | 0.09645 |
from constants import CMD_CHAT, CMD_LIST_USERS, CMD_QUIT, SERVER_NAME, SERVER_PORT
import sys
import socket
import threading
import select as s
from datetime import datetime
import pickle
from message import Message
HOST = ''
# Inputs watched by select(): server stdin now, listening socket added in main()
entry_points = [sys.stdin]
# Map of client socket -> address for connections to the server
connections = {}
# Lock guarding the shared dictionaries against concurrent handler threads
lock = threading.Lock()
# Map of username -> socket
usernames = dict()
def initServer():
    """Create the listening socket: IPv4 + TCP bound to (HOST, SERVER_PORT)."""
    # socket.socket() defaults to (AF_INET, SOCK_STREAM).
    listener = socket.socket()
    listener.bind((HOST, SERVER_PORT))
    listener.listen(10)
    # Non-blocking guards against a client vanishing between s.select()
    # reporting readiness and sckt.accept() being called.
    listener.setblocking(False)
    return listener
def acceptConnection(sckt):
    """Accept a client connection and negotiate a unique username.

    Loops until the client sends a username that is free and not the server's
    own name, registers it in `usernames`, then sends a welcome message.
    Returns the new socket and the client address.
    """
    global usernames
    newSckt, address = sckt.accept()
    while True:
        # NOTE(review): assumes one pickled Message fits in a single 1024-byte
        # recv (no framing) -- confirm message sizes stay under that.
        data = newSckt.recv(1024)
        message: Message = pickle.loads(data)
        new_username = message.content
        if new_username not in usernames.keys() and new_username != SERVER_NAME:
            lock.acquire()
            usernames[message.content] = newSckt
            lock.release()
            # Ack with content=True: username accepted.
            response = Message(SERVER_NAME, new_username, True, datetime.now())
            newSckt.send(pickle.dumps(response))
            break
        else:
            # Name taken (or reserved): content=False asks the client to retry.
            response = Message(SERVER_NAME, None, False, datetime.now())
            newSckt.send(pickle.dumps(response))
    welcome_msg = Message(SERVER_NAME, message.content,
                          f'Bem vindo {message.content}! Aqui está a lista dos usuários disponíveis: {list(usernames.keys())}\n'
                          f'Para iniciar um chat basta digitar "{CMD_CHAT} <USER_NAME>"', datetime.now())
    newSckt.send(pickle.dumps(welcome_msg))
    print(f'Conectado com: {str(address)}, username: {message.content}') # Log connection from <address>
    return newSckt, address
def internalCommandHandler(cmd: str, sckt, clients: list):
    """Handle a command typed on the server's own stdin."""
    if cmd == CMD_QUIT:
        # Announce shutdown, close the listening socket and exit the process.
        print('!-----AVISO-----!')
        print('Servidor está fechado para novas conexões. Aguardando clientes desconectarem...')
        sckt.close()
        sys.exit()
    if cmd in CMD_LIST_USERS:
        # Listing users from the server console is not implemented yet.
        pass  # user_list = listActiveUsers()
def requestHandler(cliSckt, address):
    """Receive and dispatch requests from one connected client (thread body).

    If the message's receiver is 'SERVER' it is a command (list users / quit);
    otherwise it is relayed to the destination user's socket via `usernames`.
    """
    # NOTE(review): pickle.loads on network data executes arbitrary callables
    # during unpickling -- only safe among trusted peers.
    while True:
        data = cliSckt.recv(1024)
        # Empty read: the client dropped the connection without CMD_QUIT.
        if not data:
            # Reverse-lookup the username owning this socket.
            sender = list(usernames.keys())[list(usernames.values()).index(cliSckt)]
            print(f'O usuário {sender} encerrou de forma inesperada.')
            lock.acquire()
            usernames.pop(sender)
            lock.release()
            cliSckt.close()
            break
        message: Message = pickle.loads(data)
        if message.receiver == 'SERVER':
            if message.content in CMD_LIST_USERS:
                # Send back the list of currently registered usernames.
                response = Message('SERVER', message.sender, list(usernames.keys()), datetime.now())
                usernames[message.sender].send(pickle.dumps(response))
                print(f'Lista de usuários enviada para {message.sender}')
            elif message.content == CMD_QUIT:
                # Grab the socket first so the ack can still be sent after the
                # client's registry entry is deleted.
                sender = message.sender
                sender_sock = usernames[sender]
                # Acknowledge so the client may disconnect: 200 = OK, 500 = error
                lock.acquire()
                if usernames.pop(sender, False):
                    print(f'O usuário {message.sender} encerrou com sucesso.')
                    lock.release()
                    response = Message('SERVER', sender, '200', datetime.now())
                    sender_sock.send(pickle.dumps(response))
                    cliSckt.close()
                    break
                else:
                    # User was not registered (already removed): report failure.
                    lock.release()
                    response = Message('SERVER', sender, '500', datetime.now())
                    sender_sock.send(pickle.dumps(response))
        else:
            if message.receiver not in usernames.keys():
                # Unknown destination: bounce an error back to the sender.
                response = Message(SERVER_NAME, message.sender,
                                   f'O usuário {message.receiver} não existe ou está inativo.', datetime.now())
                cliSckt.send(pickle.dumps(response))
            else:
                # Forward the raw pickled bytes untouched to the addressee.
                addressee_sock = usernames[message.receiver]
                addressee_sock.send(data)
def main():
    """Run the chat server: accept connections and spawn one handler thread each."""
    sckt = None
    try:
        sckt = initServer()
        print('Pronto para receber conexoes...')
        print(f'Para encerrar o servico, digite "{CMD_QUIT}".')
        entry_points.append(sckt)
        # List of active handler threads
        client_threads = []
        while True:
            # Block until either the listening socket or stdin is readable.
            r, w, x = s.select(entry_points, [], [])
            for ready in r:
                if ready == sckt:
                    # Accept the connection (includes the username handshake)
                    client_sckt, client_addr = acceptConnection(sckt)
                    # Spawn the thread that will serve this connection
                    client = threading.Thread(target=requestHandler, args=(client_sckt, client_addr))
                    client.start()
                    # Track the new thread in the active-thread list
                    client_threads.append(client)
                    # Protect the shared map from concurrent modification
                    lock.acquire()
                    connections[client_sckt] = client_addr # register the new connection
                    lock.release()
                elif ready == sys.stdin:
                    # Allow operator interaction with the server console
                    cmd = input()
                    internalCommandHandler(cmd, sckt, client_threads)
    except socket.error as e:
        print('Erro: %s' % e)
        sys.exit()
    finally:
        # Always release the listening socket, even on error paths.
        if sckt:
            sckt.close()
            pass
if __name__ == "__main__":
main() | server.py | from constants import CMD_CHAT, CMD_LIST_USERS, CMD_QUIT, SERVER_NAME, SERVER_PORT
import sys
import socket
import threading
import select as s
from datetime import datetime
import pickle
from message import Message
HOST = ''
# Entradas para escuta do select
entry_points = [sys.stdin]
# Mapa de conexoes com o servidor
connections = {}
# Lock para acessar o dicionario de conexoes
lock = threading.Lock()
# Map de username para socket
usernames = dict()
def initServer():
"""Inicia o socket: internet IPv4 + TCP"""
# Default: socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sckt = socket.socket() # Descritor socket
sckt.bind((HOST, SERVER_PORT))
sckt.listen(10)
# Medida preventiva contra falhas entre a chamada de s.select() e sckt.accept()
sckt.setblocking(False)
return sckt
def acceptConnection(sckt):
"""Aceita a conexao com o cliente"""
global usernames
newSckt, address = sckt.accept()
while True:
data = newSckt.recv(1024)
message: Message = pickle.loads(data)
new_username = message.content
if new_username not in usernames.keys() and new_username != SERVER_NAME:
lock.acquire()
usernames[message.content] = newSckt
lock.release()
response = Message(SERVER_NAME, new_username, True, datetime.now())
newSckt.send(pickle.dumps(response))
break
else:
response = Message(SERVER_NAME, None, False, datetime.now())
newSckt.send(pickle.dumps(response))
welcome_msg = Message(SERVER_NAME, message.content,
f'Bem vindo {message.content}! Aqui está a lista dos usuários disponíveis: {list(usernames.keys())}\n'
f'Para iniciar um chat basta digitar "{CMD_CHAT} <USER_NAME>"', datetime.now())
newSckt.send(pickle.dumps(welcome_msg))
print(f'Conectado com: {str(address)}, username: {message.content}') # Log de conexao com endereco <address>
return newSckt, address
def internalCommandHandler(cmd: str, sckt, clients: list):
if cmd == CMD_QUIT:
print('!-----AVISO-----!')
print('Servidor está fechado para novas conexões. Aguardando clientes desconectarem...')
sckt.close()
sys.exit()
elif cmd in CMD_LIST_USERS:
pass #user_list = listActiveUsers()
def requestHandler(cliSckt, address):
"""Recebe requests dos clientes conectados"""
# Recebe uma mensagem
# se o receiver for SERVER, então é um comando. tem que tratar
# se o receiver for outro, então é uma mensagem pra alguém. acessa o map de user e redireciona
while True:
data = cliSckt.recv(1024)
# Se o usuário terminou de forma inesperada
if not data:
sender = list(usernames.keys())[list(usernames.values()).index(cliSckt)]
print(f'O usuário {sender} encerrou de forma inesperada.')
lock.acquire()
usernames.pop(sender)
lock.release()
cliSckt.close()
break
message: Message = pickle.loads(data)
if message.receiver == 'SERVER':
if message.content in CMD_LIST_USERS:
response = Message('SERVER', message.sender, list(usernames.keys()), datetime.now())
usernames[message.sender].send(pickle.dumps(response))
print(f'Lista de usuários enviada para {message.sender}')
elif message.content == CMD_QUIT:
# Garante que o server pode enviar o ack apos deletar registros do cliente
sender = message.sender
sender_sock = usernames[sender]
# Envia sinal de acknowladge para que cliente desconecte: 200 = OK, 500 = Erro
lock.acquire()
if usernames.pop(sender, False):
print(f'O usuário {message.sender} encerrou com sucesso.')
lock.release()
response = Message('SERVER', sender, '200', datetime.now())
sender_sock.send(pickle.dumps(response))
cliSckt.close()
break
else:
lock.release()
response = Message('SERVER', sender, '500', datetime.now())
sender_sock.send(pickle.dumps(response))
else:
if message.receiver not in usernames.keys():
response = Message(SERVER_NAME, message.sender,
f'O usuário {message.receiver} não existe ou está inativo.', datetime.now())
cliSckt.send(pickle.dumps(response))
else:
addressee_sock = usernames[message.receiver]
addressee_sock.send(data)
def main():
sckt = None
try:
sckt = initServer()
print('Pronto para receber conexoes...')
print(f'Para encerrar o servico, digite "{CMD_QUIT}".')
entry_points.append(sckt)
# Lista de threads ativas
client_threads = []
while True:
r, w, x = s.select(entry_points, [], [])
for ready in r:
if ready == sckt:
# Aceita a conexao
client_sckt, client_addr = acceptConnection(sckt)
# Cria a nova thread que ira lidar com a conexao
client = threading.Thread(target=requestHandler, args=(client_sckt, client_addr))
client.start()
# Adiciona a nova thread na lista de threads ativas
client_threads.append(client)
# Protecao contra alteracoes problematicas por multithreading
lock.acquire()
connections[client_sckt] = client_addr # Adiciona a conexao nova ao mapa de conexoes
lock.release()
elif ready == sys.stdin:
# Permite interacao com o servidor
cmd = input()
internalCommandHandler(cmd, sckt, client_threads)
except socket.error as e:
print('Erro: %s' % e)
sys.exit()
finally:
if sckt:
sckt.close()
pass
if __name__ == "__main__":
main() | 0.220175 | 0.095814 |
import argparse
import glob
import multiprocessing
import re
from functools import partial
from pathlib import Path
import librosa
import numpy
from become_yukarin import SuperResolution
from become_yukarin.config.sr_config import create_from_json as create_config
from become_yukarin.dataset.dataset import AcousticFeatureProcess
from become_yukarin.dataset.dataset import WaveFileLoadProcess
# CLI: one or more model names to evaluate, plus optional model/wave
# directories, checkpoint iteration and GPU id.
parser = argparse.ArgumentParser()
parser.add_argument('model_names', nargs='+')
parser.add_argument('-md', '--model_directory', type=Path, default=Path('/mnt/dwango/hiroshiba/become-yukarin/'))
parser.add_argument('-iwd', '--input_wave_directory', type=Path,
                    default=Path('/mnt/dwango/hiroshiba/become-yukarin/dataset/yukari-wave/yukari-news/'))
parser.add_argument('-it', '--iteration', type=int)
parser.add_argument('-g', '--gpu', type=int)
args = parser.parse_args()
model_directory = args.model_directory  # type: Path
input_wave_directory = args.input_wave_directory  # type: Path
it = args.iteration
gpu = args.gpu
# Evaluation inputs: every wav found under ./test_data_sr/.
paths_test = list(Path('./test_data_sr/').glob('*.wav'))
def extract_number(f):
    """Return the last run of digits in *f* as an int, or -1 if there is none."""
    digit_runs = re.findall(r"\d+", str(f))
    if not digit_runs:
        return -1
    # The trailing number is the checkpoint iteration in 'predictor_<N>.npz'.
    return int(digit_runs[-1])
def process(p: Path, super_resolution: SuperResolution):
    """Run super-resolution on one input file and write the result as a wav.

    *p* may be a wav path or a .npy/.npz feature file; in the latter case the
    matching wave file is looked up in ``input_wave_directory``. Relies on the
    module-level ``config`` and ``output`` globals set in the main loop.
    """
    param = config.dataset.param
    wave_process = WaveFileLoadProcess(
        sample_rate=param.voice_param.sample_rate,
        top_db=None,
    )
    acoustic_feature_process = AcousticFeatureProcess(
        frame_period=param.acoustic_feature_param.frame_period,
        order=param.acoustic_feature_param.order,
        alpha=param.acoustic_feature_param.alpha,
        f0_estimating_method=param.acoustic_feature_param.f0_estimating_method,
    )
    try:
        if p.suffix in ['.npy', '.npz']:
            # Resolve a feature file back to its source wave file.
            p = glob.glob(str(input_wave_directory / p.stem) + '.*')[0]
            p = Path(p)
        # Renamed from `input`, which shadowed the builtin.
        features = acoustic_feature_process(wave_process(str(p)))
        wave = super_resolution(features.spectrogram, acoustic_feature=features,
                                sampling_rate=param.voice_param.sample_rate)
        librosa.output.write_wav(str(output / p.stem) + '.wav', wave.wave, wave.sampling_rate, norm=True)
    except Exception:
        # BUG FIX: traceback.format_exc() returns a string that was being
        # discarded; print it so failures are actually diagnosable. The bare
        # `except:` also swallowed KeyboardInterrupt; Exception does not.
        import traceback
        print('error!', str(p))
        print(traceback.format_exc())
# One pass per model: load the requested (or newest) predictor snapshot and
# run super-resolution over every wav in ./test_data_sr/.
for model_name in args.model_names:
    base_model = model_directory / model_name
    config = create_config(base_model / 'config.json')
    #input_paths = list(sorted([Path(p) for p in glob.glob(str(config.dataset.input_glob))]))
    #numpy.random.RandomState(config.dataset.seed).shuffle(input_paths)
    #path_train = input_paths[0]
    #path_test = input_paths[-1]
    if it is not None:
        model_path = base_model / 'predictor_{}.npz'.format(it)
    else:
        # No iteration requested: pick the snapshot with the highest number.
        model_paths = base_model.glob('predictor_*.npz')
        model_path = list(sorted(model_paths, key=extract_number))[-1]
    print(model_path)
    super_resolution = SuperResolution(config, model_path, gpu=gpu)
    # `output` and `config` are read as globals by process().
    output = Path('./output').absolute() / base_model.name
    output.mkdir(exist_ok=True)
    #paths = [path_train, path_test] + paths_test
    paths = paths_test
    process_partial = partial(process, super_resolution=super_resolution)
    if gpu is None:
        # CPU mode: fan work out across worker processes.
        pool = multiprocessing.Pool()
        pool.map(process_partial, paths)
    else:
        # presumably a GPU context cannot be shared across processes, so run
        # serially in this process -- TODO confirm
        list(map(process_partial, paths))
import glob
import multiprocessing
import re
from functools import partial
from pathlib import Path
import librosa
import numpy
from become_yukarin import SuperResolution
from become_yukarin.config.sr_config import create_from_json as create_config
from become_yukarin.dataset.dataset import AcousticFeatureProcess
from become_yukarin.dataset.dataset import WaveFileLoadProcess
parser = argparse.ArgumentParser()
parser.add_argument('model_names', nargs='+')
parser.add_argument('-md', '--model_directory', type=Path, default=Path('/mnt/dwango/hiroshiba/become-yukarin/'))
parser.add_argument('-iwd', '--input_wave_directory', type=Path,
default=Path('/mnt/dwango/hiroshiba/become-yukarin/dataset/yukari-wave/yukari-news/'))
parser.add_argument('-it', '--iteration', type=int)
parser.add_argument('-g', '--gpu', type=int)
args = parser.parse_args()
model_directory = args.model_directory # type: Path
input_wave_directory = args.input_wave_directory # type: Path
it = args.iteration
gpu = args.gpu
paths_test = list(Path('./test_data_sr/').glob('*.wav'))
def extract_number(f):
s = re.findall("\d+", str(f))
return int(s[-1]) if s else -1
def process(p: Path, super_resolution: SuperResolution):
param = config.dataset.param
wave_process = WaveFileLoadProcess(
sample_rate=param.voice_param.sample_rate,
top_db=None,
)
acoustic_feature_process = AcousticFeatureProcess(
frame_period=param.acoustic_feature_param.frame_period,
order=param.acoustic_feature_param.order,
alpha=param.acoustic_feature_param.alpha,
f0_estimating_method=param.acoustic_feature_param.f0_estimating_method,
)
try:
if p.suffix in ['.npy', '.npz']:
p = glob.glob(str(input_wave_directory / p.stem) + '.*')[0]
p = Path(p)
input = acoustic_feature_process(wave_process(str(p)))
wave = super_resolution(input.spectrogram, acoustic_feature=input, sampling_rate=param.voice_param.sample_rate)
librosa.output.write_wav(str(output / p.stem) + '.wav', wave.wave, wave.sampling_rate, norm=True)
except:
import traceback
print('error!', str(p))
traceback.format_exc()
for model_name in args.model_names:
base_model = model_directory / model_name
config = create_config(base_model / 'config.json')
#input_paths = list(sorted([Path(p) for p in glob.glob(str(config.dataset.input_glob))]))
#numpy.random.RandomState(config.dataset.seed).shuffle(input_paths)
#path_train = input_paths[0]
#path_test = input_paths[-1]
if it is not None:
model_path = base_model / 'predictor_{}.npz'.format(it)
else:
model_paths = base_model.glob('predictor_*.npz')
model_path = list(sorted(model_paths, key=extract_number))[-1]
print(model_path)
super_resolution = SuperResolution(config, model_path, gpu=gpu)
output = Path('./output').absolute() / base_model.name
output.mkdir(exist_ok=True)
#paths = [path_train, path_test] + paths_test
paths = paths_test
process_partial = partial(process, super_resolution=super_resolution)
if gpu is None:
pool = multiprocessing.Pool()
pool.map(process_partial, paths)
else:
list(map(process_partial, paths)) | 0.331877 | 0.09645 |
import numpy as np
import os
from soundfile import SoundFile
from scipy.interpolate import interp1d
import partitura
TWO_PI = 2 * np.pi
SAMPLE_RATE = 44100
A4 = 440
DTYPE = float
def midinote2freq(midi_pitch, a4=A4):
    """Convert a MIDI pitch number to a frequency in Hz (equal temperament)."""
    # Reference: pitch 9 in the lowest octave maps to a4/32; each semitone
    # scales the frequency by 2**(1/12).
    semitone_ratio = 2 ** ((midi_pitch - 9) / 12)
    return (a4 / 32) * semitone_ratio
def freq2midinote(freq, a4=A4):
    """Convert a frequency in Hz to the nearest MIDI pitch number."""
    # Inverse of midinote2freq: semitones above the a4/32 reference, offset 9.
    semitones = 12 * np.log2(32 * freq / a4)
    return int(np.round(semitones + 9))
def exp_in_exp_out(num_frames, attack_frames=None, decay_frames=None):
    """
    Sound envelope with exponential attack and decay.

    Parameters
    ----------
    num_frames : int
        Total envelope length in frames.
    attack_frames : int, optional
        Attack length; defaults to min(num_frames // 100, 1000).
    decay_frames : int, optional
        Decay length; defaults to min(num_frames // 10, 1000).
    """
    # BUG FIX: the old signature *required* attack/decay arguments and then
    # unconditionally overwrote them, while additive_synthesis calls envelope
    # functions as envelope_fun(num_frames) -- a TypeError. The lengths are
    # now optional and honored when given.
    envelope = np.ones(num_frames, dtype=DTYPE)
    if decay_frames is None:
        decay_frames = np.minimum(num_frames // 10, 1000)
    if attack_frames is None:
        attack_frames = np.minimum(num_frames // 100, 1000)
    # Exponential ramps: down from e^0 to e^-100 for decay, up for attack.
    envelope[-decay_frames:] = np.exp(-np.linspace(0, 100, decay_frames)).astype(DTYPE)
    envelope[:attack_frames] = np.exp(np.linspace(-100, 0, attack_frames)).astype(DTYPE)
    return envelope
def lin_in_lin_out(num_frames):
    """Build a flat envelope with a linear fade-in and linear fade-out."""
    # Ramp lengths are capped at 1000 frames: attack at 1% and decay at 10%
    # of the total length, whichever is smaller.
    fade_out = np.minimum(num_frames // 10, 1000)
    fade_in = np.minimum(num_frames // 100, 1000)
    envelope = np.ones(num_frames, dtype=DTYPE)
    envelope[-fade_out:] = np.linspace(1, 0, fade_out, dtype=DTYPE)
    envelope[:fade_in] = np.linspace(0, 1, fade_in, dtype=DTYPE)
    return envelope
def additive_synthesis(freqs, duration, samplerate=SAMPLE_RATE,
                       weights='equal', envelope_fun='linear'):
    """
    Additive synthesis: a sum of enveloped sine partials.

    Parameters
    ----------
    freqs : float or sequence of float
        Partial frequencies in Hz.
    duration : float
        Tone length in seconds.
    samplerate : int
        Sampling rate in Hz.
    weights : 'equal', float or sequence of float
        Per-partial amplitudes; 'equal' gives every partial 1/len(freqs).
    envelope_fun : 'linear', 'exp' or callable
        Amplitude envelope applied to the summed signal.

    Returns
    -------
    numpy.ndarray
        Mono signal of length round(duration * samplerate).
    """
    if isinstance(freqs, (int, float)):
        freqs = [freqs]
    if isinstance(weights, (int, float)):
        weights = [weights]
    elif weights == 'equal':
        weights = np.ones(len(freqs), dtype=DTYPE) / len(freqs)
    # Column vectors so partials broadcast against the time axis.
    freqs = np.array(freqs).reshape(-1, 1)
    weights = np.array(weights).reshape(-1, 1)
    if envelope_fun == 'linear':
        envelope_fun = lin_in_lin_out
    elif envelope_fun == 'exp':
        envelope_fun = exp_in_exp_out
    elif not callable(envelope_fun):
        raise ValueError('`envelope_fun` must be "linear", "exp" or a callable')
    # BUG FIX: the frame count previously used the module constant
    # SAMPLE_RATE, silently ignoring the `samplerate` argument.
    num_frames = int(np.round(duration * samplerate))
    envelope = envelope_fun(num_frames)
    x = np.linspace(0, duration, num=num_frames)
    output = weights * np.sin(TWO_PI * freqs * x)
    return output.sum(0) * envelope
class DistributedHarmonics(object):
    """Maps a fundamental frequency to its overtone series plus weights."""

    def __init__(self, n_harmonics, weights='equal'):
        self.n_harmonics = n_harmonics
        if weights == 'equal':
            # Uniform weight over the fundamental + n_harmonics overtones.
            n_partials = n_harmonics + 1
            self.weights = np.full(n_partials, 1.0 / n_partials)
        else:
            self.weights = weights
        # Integer multipliers 1..n_harmonics+1 applied to the fundamental.
        self._overtones = np.arange(1, self.n_harmonics + 2)

    def __call__(self, freq):
        """Return (partial frequencies, weights) for fundamental *freq*."""
        return freq * self._overtones, self.weights
class ShepardTones(object):
    """
    Generate Shepard Tones
    """

    def __init__(self, min_freq=77.8, max_freq=2349):
        self.min_freq = min_freq
        self.max_freq = max_freq
        # Bell-shaped loudness profile across the band; the +0.001 floor keeps
        # every weight strictly positive before normalization.
        grid = np.linspace(self.min_freq, self.max_freq, 1000)
        profile = np.hanning(len(grid) + 2) + 0.001
        profile /= max(profile)
        # Out-of-range frequencies get the minimum weight.
        self.shepard_weights_fun = interp1d(grid, profile[1:-1],
                                            bounds_error=False,
                                            fill_value=profile.min())

    def __call__(self, freq):
        """Return five octave-spaced frequencies for *freq* and their weights."""
        base = self.min_f(freq)
        freqs = base * (2 ** np.arange(5))
        return freqs, self.shepard_weights_fun(freqs)

    def min_f(self, freq):
        """Fold *freq* down by whole octaves toward min_freq."""
        octaves = np.floor(np.log2(freq) - np.log2(self.min_freq))
        return freq / (2 ** octaves)

    def max_f(self, freq):
        """Fold *freq* up by whole octaves toward max_freq."""
        octaves = np.floor(np.log2(self.max_freq) - np.log2(freq))
        return freq * (2 ** octaves)
def check_instance(fn):
    """Return True for partitura score objects, False for numpy note arrays.

    Raises TypeError for any other input type.
    """
    if isinstance(fn, (partitura.score.Part, partitura.score.PartGroup)):
        return True
    if isinstance(fn, list) and isinstance(fn[0], partitura.score.Part):
        return True
    if isinstance(fn, np.ndarray):
        return False
    raise TypeError("The file type is not supported.")
def synthesize_data(in_fn, out_fn,
                    samplerate=SAMPLE_RATE,
                    envelope_fun='linear',
                    harmonic_dist=None,
                    bpm=60):
    """
    Synthesize audio from a part or note array and write it as a wav file.

    Parameters
    ----------
    in_fn : Part object or structured array
        A partitura Part Object (or group part or part list) or a Note array.
    out_fn : str
        The directory and name of the file to be created, i.e.
        Path/To/Directory/example ('.wav' is appended).
    samplerate : int
        Output sampling rate in Hz.
    envelope_fun : str
        The type of envelope to apply to the individual sines.
    harmonic_dist : int, callable or None
        Harmonic distribution; an int builds DistributedHarmonics, None uses
        a pure tone (e.g. a ShepardTones instance also works).
    bpm : int
        The bpm for playback.
    """
    if check_instance(in_fn):
        note_array = partitura.utils.ensure_notearray(in_fn)
    else:
        note_array = in_fn
    # BUG FIX: shift onsets so the piece starts at beat 0. The original code
    # *added* the minimum when it was <= 0, pushing negative onsets even
    # further from zero; subtracting the minimum is correct in every case.
    note_array["onset_beat"] = note_array["onset_beat"] - np.min(note_array["onset_beat"])
    beat2sec = 60 / bpm
    onsets = note_array["onset_beat"] * beat2sec
    offsets = (note_array["onset_beat"] + note_array["duration_beat"]) * beat2sec
    duration = note_array["duration_beat"] * beat2sec
    pitch = note_array["pitch"]
    piece_duration = offsets.max()
    # Number of frames
    num_frames = int(np.round(piece_duration * samplerate))
    # Initialize array containing audio
    audio = np.zeros(num_frames, dtype='float')
    # Initialize the time axis
    x = np.linspace(0, piece_duration, num=num_frames)
    # onsets in frames (i.e., indices of the `audio` array)
    onsets_in_frames = np.digitize(onsets, x)
    # frequency of the note in hertz
    freq_in_hz = midinote2freq(pitch)
    if harmonic_dist is None:
        # Pure tone: just the fundamental with unit weight.
        def harmonic_dist(x): return x, 1
    elif isinstance(harmonic_dist, int):
        harmonic_dist = DistributedHarmonics(harmonic_dist)
    for (f, oif, dur) in zip(freq_in_hz, onsets_in_frames, duration):
        freqs, weights = harmonic_dist(f)
        note = additive_synthesis(freqs=freqs,
                                  duration=dur,
                                  samplerate=samplerate,
                                  weights=weights,
                                  envelope_fun=envelope_fun)
        idx = slice(oif, oif + len(note))
        audio[idx] += note
    # normalization term
    # TODO: Non-linear normalization?
    norm_term = max(audio.max(), abs(audio.min()))
    # normalize audio (guard against an all-silent piece dividing by zero)
    if norm_term > 0:
        audio /= norm_term
    wav_fn = out_fn + '.wav'
    # BUG FIX: the file header now uses the requested `samplerate` instead of
    # the module constant, so non-default rates play back at correct speed.
    with SoundFile(file=wav_fn,
                   mode='w',
                   samplerate=samplerate,
                   channels=1,
                   subtype='PCM_24') as f:
        f.write(audio)
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser('Synthesize a MIDI file')
    # parser.add_argument('out_fn')
    parser.add_argument('--harmonic-dist', default=None)
    parser.add_argument('--samplerate', type=int,
                        default=SAMPLE_RATE)
    parser.add_argument('--envelope', default='linear')
    args = parser.parse_args()
    # Input score and output path are fixed relative to this script.
    in_fn = partitura.load_musicxml(os.path.join(os.path.dirname(__file__), "../xml_score.musicxml"))
    out_fn = os.path.join(os.path.dirname(__file__), "../out.wav")
    # Choose the harmonic distribution: pure tone (0 harmonics), Shepard
    # tones, or a fixed number of harmonics.
    if args.harmonic_dist is None:
        harmonic_dist = DistributedHarmonics(n_harmonics=0)
    elif args.harmonic_dist == 'shepard':
        harmonic_dist = ShepardTones()
        print('Using Shepard tones')
    else:
        harmonic_dist = DistributedHarmonics(int(args.harmonic_dist))
        print('N harmonics', harmonic_dist.n_harmonics)
    # NOTE(review): out_fn already ends in ".wav" and synthesize_data appends
    # another ".wav", so output lands at "../out.wav.wav" -- confirm intent.
    synthesize_data(in_fn, out_fn,
                    samplerate=args.samplerate,
                    envelope_fun=args.envelope,
                    harmonic_dist=harmonic_dist)
import os
from soundfile import SoundFile
from scipy.interpolate import interp1d
import partitura
TWO_PI = 2 * np.pi
SAMPLE_RATE = 44100
A4 = 440
DTYPE = float
def midinote2freq(midi_pitch, a4=A4):
return (a4 / 32) * (2 ** ((midi_pitch - 9) / 12))
def freq2midinote(freq, a4=A4):
pitch = 12 * np.log2(32 * freq / a4) + 9
return int(np.round(pitch))
def exp_in_exp_out(num_frames, attack_frames, decay_frames):
"""
Sound envelope with exponential attack and decay
"""
# Initialize envelope
envelope = np.ones(num_frames, dtype=DTYPE)
# number of frames for decay
decay_frames = np.minimum(num_frames // 10, 1000)
# number of frames for attack
attack_frames = np.minimum(num_frames // 100, 1000)
# Compute envelope
envelope[-decay_frames:] = np.exp(-np.linspace(0, 100, decay_frames)).astype(DTYPE)
envelope[:attack_frames] = np.exp(np.linspace(-100, 0, attack_frames)).astype(DTYPE)
return envelope
def lin_in_lin_out(num_frames):
"""
Sound envelope with linear attack and decay
"""
# Initialize envelope
envelope = np.ones(num_frames, dtype=DTYPE)
# Number of frames for decay
decay_frames = np.minimum(num_frames // 10, 1000)
# number of frames for attack
attack_frames = np.minimum(num_frames // 100, 1000)
# Compute envelope
envelope[-decay_frames:] = np.linspace(1, 0, decay_frames, dtype=DTYPE)
envelope[:attack_frames] = np.linspace(0, 1, attack_frames, dtype=DTYPE)
return envelope
def additive_synthesis(freqs, duration, samplerate=SAMPLE_RATE,
weights='equal', envelope_fun='linear'):
"""
Additive synthesis
"""
if isinstance(freqs, (int, float)):
freqs = [freqs]
if isinstance(weights, (int, float)):
weights = [weights]
elif weights == 'equal':
weights = np.ones(len(freqs), dtype=DTYPE) / len(freqs)
freqs = np.array(freqs).reshape(-1, 1)
weights = np.array(weights).reshape(-1, 1)
if envelope_fun == 'linear':
envelope_fun = lin_in_lin_out
elif envelope_fun == 'exp':
envelope_fun = exp_in_exp_out
else:
if not callable(envelope_fun):
raise ValueError('`envelope_fun` must be "linear", "exp" or a callable')
num_frames = int(np.round(duration * SAMPLE_RATE))
envelope = envelope_fun(num_frames)
x = np.linspace(0, duration, num=num_frames)
output = weights * np.sin(TWO_PI * freqs * x)
return output.sum(0) * envelope
class DistributedHarmonics(object):
def __init__(self, n_harmonics, weights='equal'):
self.n_harmonics = n_harmonics
self.weights = weights
if self.weights == 'equal':
self.weights = 1.0 / (self.n_harmonics + 1) * np.ones(self.n_harmonics + 1)
self._overtones = np.arange(1, self.n_harmonics + 2)
def __call__(self, freq):
return self._overtones * freq, self.weights
class ShepardTones(object):
"""
Generate Shepard Tones
"""
def __init__(self, min_freq=77.8, max_freq=2349):
self.min_freq = min_freq
self.max_freq = max_freq
x_freq = np.linspace(self.min_freq, self.max_freq, 1000)
weights = np.hanning(len(x_freq) + 2) + 0.001
weights /= max(weights)
self.shepard_weights_fun = interp1d(x_freq, weights[1:-1],
bounds_error=False,
fill_value=weights.min())
def __call__(self, freq):
min_freq = self.min_f(freq)
freqs = 2 ** np.arange(5) * min_freq
return freqs, self.shepard_weights_fun(freqs)
def min_f(self, freq):
n = np.floor(np.log2(freq) - np.log2(self.min_freq))
return freq / (2 ** n)
def max_f(self, freq):
n = np.floor(np.log2(self.max_freq) - np.log2(freq))
return freq * (2 ** n)
def check_instance(fn):
"""
Checks if input is Partitura part object or structured array
"""
if isinstance(fn, partitura.score.Part):
return True
elif isinstance(fn, partitura.score.PartGroup):
return True
elif isinstance(fn, list) and isinstance(fn[0], partitura.score.Part):
return True
elif isinstance(fn, np.ndarray):
return False
else:
raise TypeError("The file type is not supported.")
def synthesize_data(in_fn, out_fn,
samplerate=SAMPLE_RATE,
envelope_fun='linear',
harmonic_dist=None,
bpm=60):
"""
Synthesize_data from part or note array.
Parameters
----------
in_fn : Part object or structured array
A partitura Part Object (or group part or part list) or a Note array.
out_fn : str
The directory and name of the file to be created, i.e. Path/To/Directory/example.wav
envelope_fun: str
The type of envelop to apply to the individual sines
harmonic_dist : int or str
Default is None. Option is shepard.
bpm : int
The bpm for playback.
"""
if check_instance(in_fn):
note_array = partitura.utils.ensure_notearray(in_fn)
else:
note_array = in_fn
if np.min(note_array["onset_beat"]) <= 0:
note_array["onset_beat"] = note_array["onset_beat"] + np.min(note_array["onset_beat"])
else:
note_array["onset_beat"] = note_array["onset_beat"] - np.min(note_array["onset_beat"])
print(note_array)
beat2sec = 60/bpm
onsets = note_array["onset_beat"] * beat2sec
offsets = (note_array["onset_beat"] + note_array["duration_beat"]) * beat2sec
duration = note_array["duration_beat"] * beat2sec
pitch = note_array["pitch"]
piece_duration = offsets.max()
# Number of frames
num_frames = int(np.round(piece_duration * samplerate))
# Initialize array containing audio
audio = np.zeros(num_frames, dtype='float')
# Initialize the time axis
x = np.linspace(0, piece_duration, num=num_frames)
# onsets in frames (i.e., indices of the `audio` array)
onsets_in_frames = np.digitize(onsets, x)
# frequency of the note in herz
freq_in_hz = midinote2freq(pitch)
if harmonic_dist is None:
def harmonic_dist(x): return x, 1
elif isinstance(harmonic_dist, int):
harmonic_dist = DistributedHarmonics(harmonic_dist)
for (f, oif, dur) in zip(freq_in_hz, onsets_in_frames, duration):
freqs, weights = harmonic_dist(f)
note = additive_synthesis(freqs=freqs,
duration=dur,
samplerate=samplerate,
weights=weights,
envelope_fun=envelope_fun)
idx = slice(oif, oif + len(note))
audio[idx] += note
# normalization term
# TODO: Non-linear normalization?
norm_term = max(audio.max(), abs(audio.min()))
# normalize audio
audio /= norm_term
wav_fn = out_fn + '.wav'
with SoundFile(file=wav_fn,
mode='w',
samplerate=SAMPLE_RATE,
channels=1,
subtype='PCM_24') as f:
f.write(audio)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser('Synthesize a MIDI file')
# parser.add_argument('out_fn')
parser.add_argument('--harmonic-dist', default=None)
parser.add_argument('--samplerate', type=int,
default=SAMPLE_RATE)
parser.add_argument('--envelope', default='linear')
args = parser.parse_args()
in_fn = partitura.load_musicxml(os.path.join(os.path.dirname(__file__), "../xml_score.musicxml"))
out_fn = os.path.join(os.path.dirname(__file__), "../out.wav")
if args.harmonic_dist is None:
harmonic_dist = DistributedHarmonics(n_harmonics=0)
elif args.harmonic_dist == 'shepard':
harmonic_dist = ShepardTones()
print('Using Shepard tones')
else:
harmonic_dist = DistributedHarmonics(int(args.harmonic_dist))
print('N harmonics', harmonic_dist.n_harmonics)
synthesize_data(in_fn, out_fn,
samplerate=args.samplerate,
envelope_fun=args.envelope,
harmonic_dist=harmonic_dist) | 0.803714 | 0.474327 |
from __future__ import print_function, unicode_literals, absolute_import
from collections import OrderedDict
from atomic_reactor.plugin import PreBuildPluginsRunner, PluginFailedException
from atomic_reactor.plugins.pre_add_labels_in_df import AddLabelsPlugin
from atomic_reactor.plugins.pre_reactor_config import (ReactorConfigPlugin,
WORKSPACE_CONF_KEY,
ReactorConfig)
from atomic_reactor.util import ImageName, df_parser
from atomic_reactor.source import VcsInfo
from atomic_reactor.constants import INSPECT_CONFIG, SCRATCH_FROM
from atomic_reactor import start_time as atomic_reactor_start_time
import datetime
import re
import json
import logging
import pytest
from copy import deepcopy
from flexmock import flexmock
from tests.constants import DOCKERFILE_GIT, DOCKERFILE_SHA1, MOCK
if MOCK:
from tests.docker_mock import mock_docker
class MockSource(object):
dockerfile_path = None
path = None
def __init__(self, release_env=None):
self.config = flexmock()
setattr(self.config, 'release_env_var', release_env)
def get_vcs_info(self):
return VcsInfo(vcs_type="git", vcs_url=DOCKERFILE_GIT, vcs_ref=DOCKERFILE_SHA1)
class X(object):
def __init__(self, release_env=None):
self.image_id = "xxx"
self.source = MockSource(release_env)
self.base_image = ImageName(repo="qwe", tag="asd")
self.base_from_scratch = False
DF_CONTENT = """\
FROM fedora
RUN yum install -y python-django
CMD blabla"""
DF_CONTENT_WITH_LABELS = '''\
FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "build-date" = "docker value"
LABEL "architecture" = "docker value"
LABEL "vcs-type" = "docker value"
LABEL "vcs-url" = "docker value"
LABEL "vcs-ref" = "docker value"
LABEL "com.redhat.build-host" = "docker value"
LABEL "Build_Host" = "docker value"'''
DF_CONTENT_SINGLE_LINE = """\
FROM fedora"""
DF_CONTENT_LABEL = '''\
FROM fedora
LABEL "label2"="df value"'''
DF_CONTENT_LABELS = '''\
FROM fedora
LABEL "label1"="label1_value"
LABEL "label2"="label2_value"
LABEL "Authoritative_Registry"="authoritative-source-url_value"
LABEL "BZComponent"="com.redhat.component_value"
LABEL "Build_Host"="com.redhat.build-host_value"
LABEL "Version"="version_value"'''
LABELS_CONF_WITH_LABELS = {INSPECT_CONFIG: {"Labels": {
"build-date": "base value",
"architecture": "base value",
"vcs-type": "base value",
"vcs-url": "base value",
"vcs-ref": "base value",
"com.redhat.build-host": "base value",
"Build_Host": "base value"}}}
LABELS_CONF_BASE = {INSPECT_CONFIG: {"Labels": {"label1": "base value"}}}
LABELS_CONF_BASE_EXPLICIT = {INSPECT_CONFIG: {"Labels": {"version": "x", "release": "1"}}}
LABELS_CONF_BASE_NONE = {INSPECT_CONFIG: {"Labels": None}}
LABELS_CONF = OrderedDict({'label1': 'value 1', 'label2': 'long value'})
LABELS_CONF_ONE = {'label2': 'long value'}
LABELS_CONF_WRONG = [('label1', 'value1'), ('label2', 'value2')]
LABELS_CONF_EXPLICIT = {"version": "x", "release": "1"}
LABELS_BLANK = {}
# Can't be sure of the order of the labels, expect either
EXPECTED_OUTPUT = ["""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "label1"="value 1" "label2"="long value"
""", """\
FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "label2"="long value" "label1"="value 1"
"""]
EXPECTED_OUTPUT2 = [r"""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "label2"="long value"
"""]
EXPECTED_OUTPUT3 = [DF_CONTENT]
EXPECTED_OUTPUT4 = [r"""FROM fedora
LABEL "label2"="long value"
"""]
EXPECTED_OUTPUT5 = [r"""FROM fedora
LABEL "labelnew"="base value"
"""]
EXPECTED_OUTPUT6 = [r"""FROM fedora
LABEL "labelnew"="long value" "label2"="long value"
""", r"""FROM fedora
LABEL "label2"="long value" "labelnew"="long value"
"""]
EXPECTED_OUTPUT7 = [r"""FROM fedora
LABEL "label2"="df value"
LABEL "labelnew"="df value"
"""]
EXPECTED_OUTPUT8 = [r"""FROM fedora
LABEL "label1"="df value"
LABEL "label2"="df value"
""", r"""FROM fedora
LABEL "label2"="df value"
LABEL "label1"="df value"
"""]
# Label order seems to be set randomly, so both possible options are added
EXPECTED_OUTPUT9 = [r"""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "release"="1" "version"="x"
""", r"""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "version"="x" "release"="1"
"""]
def make_and_store_reactor_config_map(workflow, additional=None):
workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
reactor_map = {
'version': 1
}
if additional:
reactor_map.update(additional)
workflow.plugin_workspace[ReactorConfigPlugin.key] = {
WORKSPACE_CONF_KEY: ReactorConfig(reactor_map)
}
@pytest.mark.parametrize('df_content, labels_conf_base, labels_conf, eq_conf, dont_overwrite,' # noqa
'aliases, expected_output', [ # noqa
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, [], [], {}, EXPECTED_OUTPUT),
(DF_CONTENT, LABELS_CONF_BASE, json.dumps(LABELS_CONF), [], [], {}, EXPECTED_OUTPUT),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF_WRONG, [], [], {}, RuntimeError()),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, {'key': 'val'}, [], {}, RuntimeError()),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, [], ["label1", ], {}, EXPECTED_OUTPUT2),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_BLANK, [], ["label1", ], {}, EXPECTED_OUTPUT3),
(DF_CONTENT_SINGLE_LINE, LABELS_CONF_BASE, LABELS_CONF, [], ["label1", ], {},
EXPECTED_OUTPUT4),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, [], [], {"not": "present"}, EXPECTED_OUTPUT),
(DF_CONTENT_SINGLE_LINE, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label1": "labelnew"},
EXPECTED_OUTPUT5),
(DF_CONTENT_SINGLE_LINE, LABELS_CONF_BASE, LABELS_CONF_ONE, [], [], {"label2": "labelnew"},
EXPECTED_OUTPUT6),
(DF_CONTENT_LABEL, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label2": "labelnew"},
EXPECTED_OUTPUT7),
(DF_CONTENT_LABEL, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label2": "labelnew", "x": "y"},
EXPECTED_OUTPUT7),
(DF_CONTENT_LABEL, LABELS_CONF_BASE_NONE, LABELS_BLANK, [], [], {"label2": "labelnew"},
EXPECTED_OUTPUT7),
(DF_CONTENT_LABEL, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label2": "label1"},
EXPECTED_OUTPUT8),
(DF_CONTENT, LABELS_CONF_BASE_EXPLICIT, LABELS_CONF_EXPLICIT, [], [], {}, EXPECTED_OUTPUT9),
])
def test_add_labels_plugin(tmpdir, docker_tasker, workflow,
df_content, labels_conf_base, labels_conf, eq_conf,
dont_overwrite, aliases, expected_output, caplog,
reactor_config_map):
df = df_parser(str(tmpdir))
df.content = df_content
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', labels_conf_base)
flexmock(workflow, source=MockSource())
if reactor_config_map:
# reactor_config should not return json
if isinstance(labels_conf, str):
image_labels = json.loads(labels_conf)
else:
image_labels = deepcopy(labels_conf)
make_and_store_reactor_config_map(workflow, {
'image_labels': image_labels,
'image_equal_labels': eq_conf,
})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': labels_conf,
'dont_overwrite': dont_overwrite,
'auto_labels': [],
'aliases': aliases,
'equal_labels': eq_conf,
}
}]
)
if isinstance(expected_output, RuntimeError):
with pytest.raises(PluginFailedException):
runner.run()
assert "plugin 'add_labels_in_dockerfile' raised an exception: RuntimeError" \
in caplog.text
else:
runner.run()
assert AddLabelsPlugin.key is not None
assert df.content in expected_output
@pytest.mark.parametrize('use_reactor', [True, False]) # noqa
@pytest.mark.parametrize('release', [None, 'test'])
def test_add_labels_arrangement6(tmpdir, docker_tasker, workflow, release, use_reactor):
# explicitly test arrangement 6's handling of reactor config
df = df_parser(str(tmpdir))
df.content = DF_CONTENT
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'base_image_inspect', LABELS_CONF_BASE)
setattr(workflow.builder, 'df_path', df.dockerfile_path)
flexmock(workflow, source=MockSource())
if use_reactor:
make_and_store_reactor_config_map(workflow, {'image_labels': LABELS_CONF})
if release is not None:
labels = {'release': release}
else:
labels = None
else:
labels = LABELS_CONF
if release is not None:
labels.update({'release': release})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': [],
'equal_labels': [],
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
assert 'label1' in df.content
if release:
assert 'release' in df.content
assert release in df.content
else:
assert 'release' not in df.content
@pytest.mark.parametrize('auto_label, value_re_part', [ # noqa
('build-date', r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?'),
('architecture', 'x86_64'),
('vcs-type', 'git'),
('vcs-url', DOCKERFILE_GIT),
('vcs-ref', DOCKERFILE_SHA1),
('com.redhat.build-host', 'the-build-host'),
('wrong_label', None)
])
def test_add_labels_plugin_generated(tmpdir, docker_tasker, workflow,
auto_label, value_re_part,
reactor_config_map):
df = df_parser(str(tmpdir))
df.content = DF_CONTENT
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', LABELS_CONF_BASE)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': {}})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {'labels': {}, "dont_overwrite": [], "auto_labels": [auto_label],
'aliases': {'Build_Host': 'com.redhat.build-host'}}
}]
)
runner.run()
if value_re_part:
assert re.match(value_re_part, df.labels[auto_label])
if auto_label == "build-date":
utc_dt = datetime.datetime.utcfromtimestamp(atomic_reactor_start_time).isoformat()
assert df.labels[auto_label] == utc_dt
@pytest.mark.parametrize('df_old_as_plugin_arg', [True, False]) # noqa
@pytest.mark.parametrize('df_new_as_plugin_arg', [True, False])
@pytest.mark.parametrize('base_old, base_new, df_old, df_new, exp_old, exp_new, exp_log', [
(None, None, None, None, None, None, None),
(None, None, None, 'A', None, 'A', None),
(None, None, 'A', None, 'A', 'A', 'as an alias for label'),
(None, None, 'A', 'A', 'A', 'A', 'already exists'),
(None, None, 'A', 'B', 'B', 'B', 'as an alias for label'),
(None, 'A', None, None, None, 'A', None),
(None, 'A', None, 'A', None, 'A', None),
(None, 'A', None, 'B', None, 'B', None),
(None, 'A', 'A', None, 'A', 'A', 'already exists'),
(None, 'A', 'B', None, 'B', 'B', 'as an alias for label'),
(None, 'A', 'A', 'A', 'A', 'A', 'already exists'),
(None, 'A', 'A', 'B', 'B', 'B', 'as an alias for label'),
(None, 'A', 'B', 'A', 'A', 'A', 'as an alias for label'),
(None, 'A', 'B', 'B', 'B', 'B', 'already exists'),
(None, 'A', 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', None, None, None, 'A', 'A', 'as an alias for label'),
('A', None, None, 'A', 'A', 'A', 'already exists'),
('A', None, None, 'B', 'B', 'B', 'as an alias for label'),
('A', None, 'A', None, 'A', 'A', 'as an alias for label'),
('A', None, 'B', None, 'B', 'B', 'as an alias for label'),
('A', None, 'A', 'A', 'A', 'A', 'already exists'),
('A', None, 'A', 'B', 'B', 'B', 'as an alias for label'),
('A', None, 'B', 'A', 'A', 'A', 'as an alias for label'),
('A', None, 'B', 'B', 'B', 'B', 'already exists'),
('A', None, 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', 'A', None, None, 'A', 'A', 'already exists'),
('A', 'A', None, 'A', 'A', 'A', 'already exists'),
('A', 'A', None, 'B', 'B', 'B', 'as an alias for label'),
('A', 'A', 'A', None, 'A', 'A', 'already exists'),
('A', 'A', 'B', None, 'B', 'B', 'as an alias for label'),
('A', 'A', 'A', 'A', 'A', 'A', 'already exists'),
('A', 'A', 'A', 'B', 'B', 'B', 'as an alias for label'),
('A', 'A', 'B', 'A', 'A', 'A', 'as an alias for label'),
('A', 'A', 'B', 'B', 'B', 'B', 'already exists'),
('A', 'A', 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', None, None, 'B', 'B', 'as an alias for label'),
('A', 'B', None, 'A', 'A', 'A', 'already exists'),
('A', 'B', None, 'B', 'B', 'B', 'as an alias for label'),
('A', 'B', None, 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', 'A', None, 'A', 'A', 'as an alias for label'),
('A', 'B', 'B', None, 'B', 'B', 'already exists'),
('A', 'B', 'C', None, 'C', 'C', 'as an alias for label'),
('A', 'B', 'A', 'A', 'A', 'A', 'already exists'),
('A', 'B', 'A', 'B', 'B', 'B', 'as an alias for label'),
('A', 'B', 'A', 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', 'B', 'A', 'A', 'A', 'as an alias for label'),
('A', 'B', 'B', 'B', 'B', 'B', 'already exists'),
('A', 'B', 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', 'C', 'A', 'A', 'A', 'as an alias for label'),
('A', 'B', 'C', 'B', 'B', 'B', 'as an alias for label'),
('A', 'B', 'C', 'C', 'C', 'C', 'already exists'),
('A', 'B', 'C', 'D', 'D', 'D', 'as an alias for label'),
])
def test_add_labels_aliases(tmpdir, docker_tasker, workflow, caplog,
df_old_as_plugin_arg, df_new_as_plugin_arg,
base_old, base_new, df_old, df_new, exp_old, exp_new, exp_log,
reactor_config_map):
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_old:
if df_old_as_plugin_arg:
plugin_labels["label_old"] = df_old
else:
df_content += 'LABEL label_old="{0}"\n'.format(df_old)
if df_new:
if df_new_as_plugin_arg:
plugin_labels["label_new"] = df_new
else:
df_content += 'LABEL label_new="{0}"\n'.format(df_new)
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_old:
base_labels[INSPECT_CONFIG]["Labels"]["label_old"] = base_old
if base_new:
base_labels[INSPECT_CONFIG]["Labels"]["label_new"] = base_new
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': plugin_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {"label_old": "label_new"},
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
result_old = df.labels.get("label_old") or \
base_labels[INSPECT_CONFIG]["Labels"].get("label_old")
result_new = df.labels.get("label_new") or \
base_labels[INSPECT_CONFIG]["Labels"].get("label_new")
assert result_old == exp_old
assert result_new == exp_new
if exp_log:
assert exp_log in caplog.text
@pytest.mark.parametrize('base_l, df_l, expected, expected_log', [ # noqa
((None, None), (None, None), (None, None), None),
((None, None), (None, 'A'), ('A', 'A'), 'adding equal label'),
((None, None), ('A', None), ('A', 'A'), 'adding equal label'),
(('A', None), (None, None), ('A', 'A'), 'adding equal label'),
((None, 'A'), (None, None), ('A', 'A'), 'adding equal label'),
(('A', 'B'), (None, None), ('A', 'B'), None),
((None, None), ('A', 'B'), ('A', 'B'), None),
(('A', 'A'), (None, None), ('A', 'A'), None),
(('A', None), ('A', None), ('A', 'A'), 'adding equal label'),
((None, 'A'), (None, 'A'), ('A', 'A'), 'adding equal label'),
(('A', None), ('B', None), ('B', 'B'), 'adding equal label'),
((None, 'A'), (None, 'B'), ('B', 'B'), 'adding equal label'),
(('A', 'C'), ('B', None), ('B', 'B'), 'adding equal label'),
(('A', 'C'), (None, 'B'), ('B', 'B'), 'adding equal label'),
(('A', 'C'), ('B', 'B'), ('B', 'B'), None),
((None, 'A'), ('B', 'B'), ('B', 'B'), None),
(('A', None), ('B', 'B'), ('B', 'B'), None),
(('A', 'A'), (None, None), ('A', 'A'), None),
(('A', None), (None, 'A'), ('A', 'A'), 'skipping label'),
((None, 'A'), ('A', None), ('A', 'A'), 'skipping label'),
])
def test_add_labels_equal_aliases(tmpdir, docker_tasker, workflow, caplog,
base_l, df_l, expected, expected_log,
reactor_config_map):
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_l[0]:
df_content += 'LABEL description="{0}"\n'.format(df_l[0])
if df_l[1]:
df_content += 'LABEL io.k8s.description="{0}"\n'.format(df_l[1])
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_l[0]:
base_labels[INSPECT_CONFIG]["Labels"]["description"] = base_l[0]
if base_l[1]:
base_labels[INSPECT_CONFIG]["Labels"]["io.k8s.description"] = base_l[1]
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(
workflow,
{
'image_labels': plugin_labels,
'image_equal_labels': [['description', 'io.k8s.description']]})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {},
'equal_labels': [['description', 'io.k8s.description']]
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
result_fst = df.labels.get("description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("description")
result_snd = df.labels.get("io.k8s.description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("io.k8s.description")
assert result_fst == expected[0]
assert result_snd == expected[1]
if expected_log:
assert expected_log in caplog.text
@pytest.mark.parametrize('base_l, df_l, expected, expected_log', [ # noqa
((None, None, None), (None, None, None), (None, None, None), None),
((None, None, None), (None, None, 'A'), ('A', 'A', 'A'), 'adding equal label'),
(('A', 'B', 'B'), (None, None, None), ('A', 'B', 'B'), None),
((None, None, None), ('A', 'B', 'B'), ('A', 'B', 'B'), None),
(('A', 'A', 'A'), (None, None, None), ('A', 'A', 'A'), None),
(('A', None, 'A'), ('A', None, 'A'), ('A', 'A', 'A'), 'adding equal label'),
(('A', None, None), (None, 'A', 'A'), ('A', 'A', 'A'), 'skipping label'),
((None, 'A', 'A'), ('A', 'A', None), ('A', 'A', 'A'), 'skipping label'),
(('A', 'A', 'A'), ('B', 'C', None), ('B', 'C', 'B'), 'adding equal label'),
(('A', 'A', 'A'), (None, 'C', 'D'), ('C', 'C', 'D'), 'adding equal label'),
(('A', 'A', 'A'), ('B', None, 'D'), ('B', 'B', 'D'), 'adding equal label'),
])
def test_add_labels_equal_aliases2(tmpdir, docker_tasker, workflow, caplog, base_l,
df_l, expected, expected_log, reactor_config_map):
"""
test with 3 equal labels
"""
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_l[0]:
df_content += 'LABEL description="{0}"\n'.format(df_l[0])
if df_l[1]:
df_content += 'LABEL io.k8s.description="{0}"\n'.format(df_l[1])
if df_l[2]:
df_content += 'LABEL description_third="{0}"\n'.format(df_l[2])
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_l[0]:
base_labels[INSPECT_CONFIG]["Labels"]["description"] = base_l[0]
if base_l[1]:
base_labels[INSPECT_CONFIG]["Labels"]["io.k8s.description"] = base_l[1]
if base_l[2]:
base_labels[INSPECT_CONFIG]["Labels"]["description_third"] = base_l[2]
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(
workflow,
{
'image_labels': plugin_labels,
'image_equal_labels': [['description',
'io.k8s.description',
'description_third']]})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {},
'equal_labels': [['description', 'io.k8s.description', 'description_third']]
}
}]
)
if isinstance(expected_log, RuntimeError):
with pytest.raises(PluginFailedException):
runner.run()
else:
runner.run()
assert AddLabelsPlugin.key is not None
result_fst = df.labels.get("description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("description")
result_snd = df.labels.get("io.k8s.description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("io.k8s.description")
result_trd = df.labels.get("description_third") or \
base_labels[INSPECT_CONFIG]["Labels"].get("description_third")
assert result_fst == expected[0]
assert result_snd == expected[1]
assert result_trd == expected[2]
if expected_log:
assert expected_log in caplog.text
@pytest.mark.parametrize("label_names", [ # noqa
("distribution-scope", ),
("com.redhat.license_terms", ),
("distribution-scope", "com.redhat.license_terms"),
])
@pytest.mark.parametrize("dont_overwrite", [True, False])
@pytest.mark.parametrize("parent_val, docker_val, result_val", [
(None, None, "default_value"),
("parent_value", "docker_value", "docker_value"),
("parent_value", None, "default_value"),
(None, "docker_value", "docker_value"),
("parent_value", "parent_value", "parent_value"),
])
def test_dont_overwrite_if_in_dockerfile(tmpdir, docker_tasker, workflow,
label_names, dont_overwrite,
parent_val, docker_val, result_val, reactor_config_map):
default_value = 'default_value'
df_content = "FROM fedora\n"
if docker_val:
for label_name in label_names:
df_content += 'LABEL {0}="{1}"\n'.format(label_name, docker_val)
if parent_val:
labels_conf_base = {INSPECT_CONFIG: {"Labels": {}}}
for label_name in label_names:
labels_conf_base[INSPECT_CONFIG]["Labels"][label_name] = parent_val
else:
labels_conf_base = {INSPECT_CONFIG: {"Labels": {}}}
df = df_parser(str(tmpdir))
df.content = df_content
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', labels_conf_base)
flexmock(workflow, source=MockSource())
image_labels = {}
for label_name in label_names:
image_labels[label_name] = default_value
wf_args = {
'labels': image_labels,
'auto_labels': [],
'aliases': {},
}
if dont_overwrite:
wf_args["dont_overwrite_if_in_dockerfile"] = label_names
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': image_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': wf_args
}]
)
runner.run()
for label_name in label_names:
result = df.labels.get(label_name)
assert result == result_val
@pytest.mark.parametrize('url_format, info_url', [ # noqa
('url_pre {label1} {label2} url_post', 'url_pre label1_value label2_value url_post'),
('url_pre url_post', 'url_pre url_post'),
('url_pre {label1} {label2} {label3_non_existent} url_post', None),
('url_pre {label1} {label2} {version} url_post', 'url_pre label1_value label2_value '
'version_value url_post'),
('url_pre {authoritative-source-url} {com.redhat.component} {com.redhat.build-host} url_post',
'url_pre authoritative-source-url_value com.redhat.component_value '
'com.redhat.build-host_value url_post'),
])
def test_url_label(tmpdir, docker_tasker, workflow, url_format, info_url, reactor_config_map):
if MOCK:
mock_docker()
plugin_labels = {}
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
df = df_parser(str(tmpdir))
df.content = DF_CONTENT_LABELS
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {
'image_labels': plugin_labels,
'image_label_info_url_format': url_format,
})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'info_url_format': url_format
}
}]
)
if info_url is not None:
runner.run()
assert df.labels.get("url") == info_url
else:
with pytest.raises(PluginFailedException):
runner.run()
assert AddLabelsPlugin.key is not None
@pytest.mark.parametrize('auto_label', [ # noqa
'build-date',
'architecture',
'vcs-type',
'vcs-url',
'vcs-ref',
'com.redhat.build-host',
])
@pytest.mark.parametrize('labels_docker', [
DF_CONTENT,
DF_CONTENT_WITH_LABELS,
])
@pytest.mark.parametrize('labels_base', [
LABELS_CONF_BASE_NONE,
LABELS_CONF_WITH_LABELS,
])
def test_add_labels_plugin_explicit(tmpdir, docker_tasker, workflow,
auto_label, labels_docker,
labels_base, reactor_config_map):
df = df_parser(str(tmpdir))
df.content = labels_docker
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', labels_base)
flexmock(workflow, source=MockSource())
prov_labels = {}
prov_labels[auto_label] = 'explicit_value'
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': prov_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {'labels': prov_labels, "dont_overwrite": [], "auto_labels": [auto_label],
'aliases': {'Build_Host': 'com.redhat.build-host'}}
}]
)
runner.run()
assert df.labels[auto_label] != 'explicit_value'
@pytest.mark.parametrize('parent, should_fail', [ # noqa
('koji/image-build', False),
('scratch', False),
('fedora', True),
])
def test_add_labels_base_image(tmpdir, docker_tasker, workflow,
parent, should_fail,
caplog, reactor_config_map):
df = df_parser(str(tmpdir))
df.content = "FROM {}\n".format(parent)
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'tasker', docker_tasker)
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', {})
flexmock(workflow, source=MockSource())
if parent == SCRATCH_FROM:
workflow.builder.base_from_scratch = True
# When a 'release' label is provided by parameter and used to
# configure the plugin, it should be set in the Dockerfile even
# when processing base images.
prov_labels = {'release': '5'}
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': prov_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {'labels': prov_labels, "dont_overwrite": [],
'aliases': {'Build_Host': 'com.redhat.build-host'}}
}]
)
if should_fail:
with caplog.at_level(logging.ERROR):
with pytest.raises(PluginFailedException):
runner.run()
msg = "base image was not inspected"
assert msg in [x.message for x in caplog.records]
else:
runner.run()
assert df.labels['release'] == '5'
@pytest.mark.parametrize('base_new, df_new, plugin_new, expected_in_df, expected_log', [ # noqa
(None, None, None, None, None),
(None, 'A', 'A', 'A', None),
(None, 'B', 'A', 'A', 'setting label'),
(None, 'A', 'B', 'B', None),
(None, None, 'A', 'A', 'setting label'),
(None, 'A', None, 'A', None),
('A', None, 'A', 'A', 'setting label'),
('B', None, 'A', 'A', 'setting label'),
('A', None, 'B', 'B', 'setting label'),
('A', 'A', None, 'A', None),
('A', 'B', None, 'B', None),
('B', 'A', None, 'A', None),
('A', 'A', 'A', 'A', None),
('A', 'B', 'A', 'A', 'setting label'),
('B', 'A', 'A', 'A', None),
('A', 'A', 'B', 'B', 'setting label'),
('A', 'B', 'B', 'B', None),
('B', 'B', 'A', 'A', 'setting label'),
('B', 'A', 'B', 'B', 'setting label'),
('A', 'B', 'C', 'C', 'setting label'),
])
@pytest.mark.parametrize('release_env', ['TEST_RELEASE_VAR', None])
def test_release_label(tmpdir, docker_tasker, workflow, caplog,
base_new, df_new, plugin_new,
expected_in_df, expected_log, release_env, reactor_config_map):
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_new:
df_content += 'LABEL release="{0}"\n'.format(df_new)
if plugin_new:
plugin_labels["release"] = plugin_new
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_new:
base_labels[INSPECT_CONFIG]["Labels"]["release"] = base_new
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X(release_env))
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource(release_env))
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': plugin_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {}
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
result_new = df.labels.get("release")
assert result_new == expected_in_df
if release_env and expected_in_df:
expected = "ENV {}={}\n".format(release_env, expected_in_df)
assert expected in df.lines
if expected_log:
assert expected_log in caplog.text | tests/plugins/test_add_labels.py | from __future__ import print_function, unicode_literals, absolute_import
from collections import OrderedDict
from atomic_reactor.plugin import PreBuildPluginsRunner, PluginFailedException
from atomic_reactor.plugins.pre_add_labels_in_df import AddLabelsPlugin
from atomic_reactor.plugins.pre_reactor_config import (ReactorConfigPlugin,
WORKSPACE_CONF_KEY,
ReactorConfig)
from atomic_reactor.util import ImageName, df_parser
from atomic_reactor.source import VcsInfo
from atomic_reactor.constants import INSPECT_CONFIG, SCRATCH_FROM
from atomic_reactor import start_time as atomic_reactor_start_time
import datetime
import re
import json
import logging
import pytest
from copy import deepcopy
from flexmock import flexmock
from tests.constants import DOCKERFILE_GIT, DOCKERFILE_SHA1, MOCK
if MOCK:
from tests.docker_mock import mock_docker
class MockSource(object):
dockerfile_path = None
path = None
def __init__(self, release_env=None):
self.config = flexmock()
setattr(self.config, 'release_env_var', release_env)
def get_vcs_info(self):
return VcsInfo(vcs_type="git", vcs_url=DOCKERFILE_GIT, vcs_ref=DOCKERFILE_SHA1)
class X(object):
def __init__(self, release_env=None):
self.image_id = "xxx"
self.source = MockSource(release_env)
self.base_image = ImageName(repo="qwe", tag="asd")
self.base_from_scratch = False
DF_CONTENT = """\
FROM fedora
RUN yum install -y python-django
CMD blabla"""
DF_CONTENT_WITH_LABELS = '''\
FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "build-date" = "docker value"
LABEL "architecture" = "docker value"
LABEL "vcs-type" = "docker value"
LABEL "vcs-url" = "docker value"
LABEL "vcs-ref" = "docker value"
LABEL "com.redhat.build-host" = "docker value"
LABEL "Build_Host" = "docker value"'''
DF_CONTENT_SINGLE_LINE = """\
FROM fedora"""
DF_CONTENT_LABEL = '''\
FROM fedora
LABEL "label2"="df value"'''
DF_CONTENT_LABELS = '''\
FROM fedora
LABEL "label1"="label1_value"
LABEL "label2"="label2_value"
LABEL "Authoritative_Registry"="authoritative-source-url_value"
LABEL "BZComponent"="com.redhat.component_value"
LABEL "Build_Host"="com.redhat.build-host_value"
LABEL "Version"="version_value"'''
LABELS_CONF_WITH_LABELS = {INSPECT_CONFIG: {"Labels": {
"build-date": "base value",
"architecture": "base value",
"vcs-type": "base value",
"vcs-url": "base value",
"vcs-ref": "base value",
"com.redhat.build-host": "base value",
"Build_Host": "base value"}}}
LABELS_CONF_BASE = {INSPECT_CONFIG: {"Labels": {"label1": "base value"}}}
LABELS_CONF_BASE_EXPLICIT = {INSPECT_CONFIG: {"Labels": {"version": "x", "release": "1"}}}
LABELS_CONF_BASE_NONE = {INSPECT_CONFIG: {"Labels": None}}
LABELS_CONF = OrderedDict({'label1': 'value 1', 'label2': 'long value'})
LABELS_CONF_ONE = {'label2': 'long value'}
LABELS_CONF_WRONG = [('label1', 'value1'), ('label2', 'value2')]
LABELS_CONF_EXPLICIT = {"version": "x", "release": "1"}
LABELS_BLANK = {}
# Can't be sure of the order of the labels, expect either
EXPECTED_OUTPUT = ["""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "label1"="value 1" "label2"="long value"
""", """\
FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "label2"="long value" "label1"="value 1"
"""]
EXPECTED_OUTPUT2 = [r"""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "label2"="long value"
"""]
EXPECTED_OUTPUT3 = [DF_CONTENT]
EXPECTED_OUTPUT4 = [r"""FROM fedora
LABEL "label2"="long value"
"""]
EXPECTED_OUTPUT5 = [r"""FROM fedora
LABEL "labelnew"="base value"
"""]
EXPECTED_OUTPUT6 = [r"""FROM fedora
LABEL "labelnew"="long value" "label2"="long value"
""", r"""FROM fedora
LABEL "label2"="long value" "labelnew"="long value"
"""]
EXPECTED_OUTPUT7 = [r"""FROM fedora
LABEL "label2"="df value"
LABEL "labelnew"="df value"
"""]
EXPECTED_OUTPUT8 = [r"""FROM fedora
LABEL "label1"="df value"
LABEL "label2"="df value"
""", r"""FROM fedora
LABEL "label2"="df value"
LABEL "label1"="df value"
"""]
# Label order seems to be set randomly, so both possible options are added
EXPECTED_OUTPUT9 = [r"""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "release"="1" "version"="x"
""", r"""FROM fedora
RUN yum install -y python-django
CMD blabla
LABEL "version"="x" "release"="1"
"""]
def make_and_store_reactor_config_map(workflow, additional=None):
workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
reactor_map = {
'version': 1
}
if additional:
reactor_map.update(additional)
workflow.plugin_workspace[ReactorConfigPlugin.key] = {
WORKSPACE_CONF_KEY: ReactorConfig(reactor_map)
}
@pytest.mark.parametrize('df_content, labels_conf_base, labels_conf, eq_conf, dont_overwrite,' # noqa
'aliases, expected_output', [ # noqa
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, [], [], {}, EXPECTED_OUTPUT),
(DF_CONTENT, LABELS_CONF_BASE, json.dumps(LABELS_CONF), [], [], {}, EXPECTED_OUTPUT),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF_WRONG, [], [], {}, RuntimeError()),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, {'key': 'val'}, [], {}, RuntimeError()),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, [], ["label1", ], {}, EXPECTED_OUTPUT2),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_BLANK, [], ["label1", ], {}, EXPECTED_OUTPUT3),
(DF_CONTENT_SINGLE_LINE, LABELS_CONF_BASE, LABELS_CONF, [], ["label1", ], {},
EXPECTED_OUTPUT4),
(DF_CONTENT, LABELS_CONF_BASE, LABELS_CONF, [], [], {"not": "present"}, EXPECTED_OUTPUT),
(DF_CONTENT_SINGLE_LINE, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label1": "labelnew"},
EXPECTED_OUTPUT5),
(DF_CONTENT_SINGLE_LINE, LABELS_CONF_BASE, LABELS_CONF_ONE, [], [], {"label2": "labelnew"},
EXPECTED_OUTPUT6),
(DF_CONTENT_LABEL, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label2": "labelnew"},
EXPECTED_OUTPUT7),
(DF_CONTENT_LABEL, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label2": "labelnew", "x": "y"},
EXPECTED_OUTPUT7),
(DF_CONTENT_LABEL, LABELS_CONF_BASE_NONE, LABELS_BLANK, [], [], {"label2": "labelnew"},
EXPECTED_OUTPUT7),
(DF_CONTENT_LABEL, LABELS_CONF_BASE, LABELS_BLANK, [], [], {"label2": "label1"},
EXPECTED_OUTPUT8),
(DF_CONTENT, LABELS_CONF_BASE_EXPLICIT, LABELS_CONF_EXPLICIT, [], [], {}, EXPECTED_OUTPUT9),
])
def test_add_labels_plugin(tmpdir, docker_tasker, workflow,
df_content, labels_conf_base, labels_conf, eq_conf,
dont_overwrite, aliases, expected_output, caplog,
reactor_config_map):
df = df_parser(str(tmpdir))
df.content = df_content
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', labels_conf_base)
flexmock(workflow, source=MockSource())
if reactor_config_map:
# reactor_config should not return json
if isinstance(labels_conf, str):
image_labels = json.loads(labels_conf)
else:
image_labels = deepcopy(labels_conf)
make_and_store_reactor_config_map(workflow, {
'image_labels': image_labels,
'image_equal_labels': eq_conf,
})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': labels_conf,
'dont_overwrite': dont_overwrite,
'auto_labels': [],
'aliases': aliases,
'equal_labels': eq_conf,
}
}]
)
if isinstance(expected_output, RuntimeError):
with pytest.raises(PluginFailedException):
runner.run()
assert "plugin 'add_labels_in_dockerfile' raised an exception: RuntimeError" \
in caplog.text
else:
runner.run()
assert AddLabelsPlugin.key is not None
assert df.content in expected_output
@pytest.mark.parametrize('use_reactor', [True, False]) # noqa
@pytest.mark.parametrize('release', [None, 'test'])
def test_add_labels_arrangement6(tmpdir, docker_tasker, workflow, release, use_reactor):
# explicitly test arrangement 6's handling of reactor config
df = df_parser(str(tmpdir))
df.content = DF_CONTENT
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'base_image_inspect', LABELS_CONF_BASE)
setattr(workflow.builder, 'df_path', df.dockerfile_path)
flexmock(workflow, source=MockSource())
if use_reactor:
make_and_store_reactor_config_map(workflow, {'image_labels': LABELS_CONF})
if release is not None:
labels = {'release': release}
else:
labels = None
else:
labels = LABELS_CONF
if release is not None:
labels.update({'release': release})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': [],
'equal_labels': [],
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
assert 'label1' in df.content
if release:
assert 'release' in df.content
assert release in df.content
else:
assert 'release' not in df.content
@pytest.mark.parametrize('auto_label, value_re_part', [ # noqa
('build-date', r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?'),
('architecture', 'x86_64'),
('vcs-type', 'git'),
('vcs-url', DOCKERFILE_GIT),
('vcs-ref', DOCKERFILE_SHA1),
('com.redhat.build-host', 'the-build-host'),
('wrong_label', None)
])
def test_add_labels_plugin_generated(tmpdir, docker_tasker, workflow,
auto_label, value_re_part,
reactor_config_map):
df = df_parser(str(tmpdir))
df.content = DF_CONTENT
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', LABELS_CONF_BASE)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': {}})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {'labels': {}, "dont_overwrite": [], "auto_labels": [auto_label],
'aliases': {'Build_Host': 'com.redhat.build-host'}}
}]
)
runner.run()
if value_re_part:
assert re.match(value_re_part, df.labels[auto_label])
if auto_label == "build-date":
utc_dt = datetime.datetime.utcfromtimestamp(atomic_reactor_start_time).isoformat()
assert df.labels[auto_label] == utc_dt
@pytest.mark.parametrize('df_old_as_plugin_arg', [True, False]) # noqa
@pytest.mark.parametrize('df_new_as_plugin_arg', [True, False])
@pytest.mark.parametrize('base_old, base_new, df_old, df_new, exp_old, exp_new, exp_log', [
(None, None, None, None, None, None, None),
(None, None, None, 'A', None, 'A', None),
(None, None, 'A', None, 'A', 'A', 'as an alias for label'),
(None, None, 'A', 'A', 'A', 'A', 'already exists'),
(None, None, 'A', 'B', 'B', 'B', 'as an alias for label'),
(None, 'A', None, None, None, 'A', None),
(None, 'A', None, 'A', None, 'A', None),
(None, 'A', None, 'B', None, 'B', None),
(None, 'A', 'A', None, 'A', 'A', 'already exists'),
(None, 'A', 'B', None, 'B', 'B', 'as an alias for label'),
(None, 'A', 'A', 'A', 'A', 'A', 'already exists'),
(None, 'A', 'A', 'B', 'B', 'B', 'as an alias for label'),
(None, 'A', 'B', 'A', 'A', 'A', 'as an alias for label'),
(None, 'A', 'B', 'B', 'B', 'B', 'already exists'),
(None, 'A', 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', None, None, None, 'A', 'A', 'as an alias for label'),
('A', None, None, 'A', 'A', 'A', 'already exists'),
('A', None, None, 'B', 'B', 'B', 'as an alias for label'),
('A', None, 'A', None, 'A', 'A', 'as an alias for label'),
('A', None, 'B', None, 'B', 'B', 'as an alias for label'),
('A', None, 'A', 'A', 'A', 'A', 'already exists'),
('A', None, 'A', 'B', 'B', 'B', 'as an alias for label'),
('A', None, 'B', 'A', 'A', 'A', 'as an alias for label'),
('A', None, 'B', 'B', 'B', 'B', 'already exists'),
('A', None, 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', 'A', None, None, 'A', 'A', 'already exists'),
('A', 'A', None, 'A', 'A', 'A', 'already exists'),
('A', 'A', None, 'B', 'B', 'B', 'as an alias for label'),
('A', 'A', 'A', None, 'A', 'A', 'already exists'),
('A', 'A', 'B', None, 'B', 'B', 'as an alias for label'),
('A', 'A', 'A', 'A', 'A', 'A', 'already exists'),
('A', 'A', 'A', 'B', 'B', 'B', 'as an alias for label'),
('A', 'A', 'B', 'A', 'A', 'A', 'as an alias for label'),
('A', 'A', 'B', 'B', 'B', 'B', 'already exists'),
('A', 'A', 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', None, None, 'B', 'B', 'as an alias for label'),
('A', 'B', None, 'A', 'A', 'A', 'already exists'),
('A', 'B', None, 'B', 'B', 'B', 'as an alias for label'),
('A', 'B', None, 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', 'A', None, 'A', 'A', 'as an alias for label'),
('A', 'B', 'B', None, 'B', 'B', 'already exists'),
('A', 'B', 'C', None, 'C', 'C', 'as an alias for label'),
('A', 'B', 'A', 'A', 'A', 'A', 'already exists'),
('A', 'B', 'A', 'B', 'B', 'B', 'as an alias for label'),
('A', 'B', 'A', 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', 'B', 'A', 'A', 'A', 'as an alias for label'),
('A', 'B', 'B', 'B', 'B', 'B', 'already exists'),
('A', 'B', 'B', 'C', 'C', 'C', 'as an alias for label'),
('A', 'B', 'C', 'A', 'A', 'A', 'as an alias for label'),
('A', 'B', 'C', 'B', 'B', 'B', 'as an alias for label'),
('A', 'B', 'C', 'C', 'C', 'C', 'already exists'),
('A', 'B', 'C', 'D', 'D', 'D', 'as an alias for label'),
])
def test_add_labels_aliases(tmpdir, docker_tasker, workflow, caplog,
df_old_as_plugin_arg, df_new_as_plugin_arg,
base_old, base_new, df_old, df_new, exp_old, exp_new, exp_log,
reactor_config_map):
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_old:
if df_old_as_plugin_arg:
plugin_labels["label_old"] = df_old
else:
df_content += 'LABEL label_old="{0}"\n'.format(df_old)
if df_new:
if df_new_as_plugin_arg:
plugin_labels["label_new"] = df_new
else:
df_content += 'LABEL label_new="{0}"\n'.format(df_new)
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_old:
base_labels[INSPECT_CONFIG]["Labels"]["label_old"] = base_old
if base_new:
base_labels[INSPECT_CONFIG]["Labels"]["label_new"] = base_new
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': plugin_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {"label_old": "label_new"},
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
result_old = df.labels.get("label_old") or \
base_labels[INSPECT_CONFIG]["Labels"].get("label_old")
result_new = df.labels.get("label_new") or \
base_labels[INSPECT_CONFIG]["Labels"].get("label_new")
assert result_old == exp_old
assert result_new == exp_new
if exp_log:
assert exp_log in caplog.text
@pytest.mark.parametrize('base_l, df_l, expected, expected_log', [ # noqa
((None, None), (None, None), (None, None), None),
((None, None), (None, 'A'), ('A', 'A'), 'adding equal label'),
((None, None), ('A', None), ('A', 'A'), 'adding equal label'),
(('A', None), (None, None), ('A', 'A'), 'adding equal label'),
((None, 'A'), (None, None), ('A', 'A'), 'adding equal label'),
(('A', 'B'), (None, None), ('A', 'B'), None),
((None, None), ('A', 'B'), ('A', 'B'), None),
(('A', 'A'), (None, None), ('A', 'A'), None),
(('A', None), ('A', None), ('A', 'A'), 'adding equal label'),
((None, 'A'), (None, 'A'), ('A', 'A'), 'adding equal label'),
(('A', None), ('B', None), ('B', 'B'), 'adding equal label'),
((None, 'A'), (None, 'B'), ('B', 'B'), 'adding equal label'),
(('A', 'C'), ('B', None), ('B', 'B'), 'adding equal label'),
(('A', 'C'), (None, 'B'), ('B', 'B'), 'adding equal label'),
(('A', 'C'), ('B', 'B'), ('B', 'B'), None),
((None, 'A'), ('B', 'B'), ('B', 'B'), None),
(('A', None), ('B', 'B'), ('B', 'B'), None),
(('A', 'A'), (None, None), ('A', 'A'), None),
(('A', None), (None, 'A'), ('A', 'A'), 'skipping label'),
((None, 'A'), ('A', None), ('A', 'A'), 'skipping label'),
])
def test_add_labels_equal_aliases(tmpdir, docker_tasker, workflow, caplog,
base_l, df_l, expected, expected_log,
reactor_config_map):
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_l[0]:
df_content += 'LABEL description="{0}"\n'.format(df_l[0])
if df_l[1]:
df_content += 'LABEL io.k8s.description="{0}"\n'.format(df_l[1])
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_l[0]:
base_labels[INSPECT_CONFIG]["Labels"]["description"] = base_l[0]
if base_l[1]:
base_labels[INSPECT_CONFIG]["Labels"]["io.k8s.description"] = base_l[1]
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(
workflow,
{
'image_labels': plugin_labels,
'image_equal_labels': [['description', 'io.k8s.description']]})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {},
'equal_labels': [['description', 'io.k8s.description']]
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
result_fst = df.labels.get("description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("description")
result_snd = df.labels.get("io.k8s.description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("io.k8s.description")
assert result_fst == expected[0]
assert result_snd == expected[1]
if expected_log:
assert expected_log in caplog.text
@pytest.mark.parametrize('base_l, df_l, expected, expected_log', [ # noqa
((None, None, None), (None, None, None), (None, None, None), None),
((None, None, None), (None, None, 'A'), ('A', 'A', 'A'), 'adding equal label'),
(('A', 'B', 'B'), (None, None, None), ('A', 'B', 'B'), None),
((None, None, None), ('A', 'B', 'B'), ('A', 'B', 'B'), None),
(('A', 'A', 'A'), (None, None, None), ('A', 'A', 'A'), None),
(('A', None, 'A'), ('A', None, 'A'), ('A', 'A', 'A'), 'adding equal label'),
(('A', None, None), (None, 'A', 'A'), ('A', 'A', 'A'), 'skipping label'),
((None, 'A', 'A'), ('A', 'A', None), ('A', 'A', 'A'), 'skipping label'),
(('A', 'A', 'A'), ('B', 'C', None), ('B', 'C', 'B'), 'adding equal label'),
(('A', 'A', 'A'), (None, 'C', 'D'), ('C', 'C', 'D'), 'adding equal label'),
(('A', 'A', 'A'), ('B', None, 'D'), ('B', 'B', 'D'), 'adding equal label'),
])
def test_add_labels_equal_aliases2(tmpdir, docker_tasker, workflow, caplog, base_l,
df_l, expected, expected_log, reactor_config_map):
"""
test with 3 equal labels
"""
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_l[0]:
df_content += 'LABEL description="{0}"\n'.format(df_l[0])
if df_l[1]:
df_content += 'LABEL io.k8s.description="{0}"\n'.format(df_l[1])
if df_l[2]:
df_content += 'LABEL description_third="{0}"\n'.format(df_l[2])
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_l[0]:
base_labels[INSPECT_CONFIG]["Labels"]["description"] = base_l[0]
if base_l[1]:
base_labels[INSPECT_CONFIG]["Labels"]["io.k8s.description"] = base_l[1]
if base_l[2]:
base_labels[INSPECT_CONFIG]["Labels"]["description_third"] = base_l[2]
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(
workflow,
{
'image_labels': plugin_labels,
'image_equal_labels': [['description',
'io.k8s.description',
'description_third']]})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {},
'equal_labels': [['description', 'io.k8s.description', 'description_third']]
}
}]
)
if isinstance(expected_log, RuntimeError):
with pytest.raises(PluginFailedException):
runner.run()
else:
runner.run()
assert AddLabelsPlugin.key is not None
result_fst = df.labels.get("description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("description")
result_snd = df.labels.get("io.k8s.description") or \
base_labels[INSPECT_CONFIG]["Labels"].get("io.k8s.description")
result_trd = df.labels.get("description_third") or \
base_labels[INSPECT_CONFIG]["Labels"].get("description_third")
assert result_fst == expected[0]
assert result_snd == expected[1]
assert result_trd == expected[2]
if expected_log:
assert expected_log in caplog.text
@pytest.mark.parametrize("label_names", [ # noqa
("distribution-scope", ),
("com.redhat.license_terms", ),
("distribution-scope", "com.redhat.license_terms"),
])
@pytest.mark.parametrize("dont_overwrite", [True, False])
@pytest.mark.parametrize("parent_val, docker_val, result_val", [
(None, None, "default_value"),
("parent_value", "docker_value", "docker_value"),
("parent_value", None, "default_value"),
(None, "docker_value", "docker_value"),
("parent_value", "parent_value", "parent_value"),
])
def test_dont_overwrite_if_in_dockerfile(tmpdir, docker_tasker, workflow,
label_names, dont_overwrite,
parent_val, docker_val, result_val, reactor_config_map):
default_value = 'default_value'
df_content = "FROM fedora\n"
if docker_val:
for label_name in label_names:
df_content += 'LABEL {0}="{1}"\n'.format(label_name, docker_val)
if parent_val:
labels_conf_base = {INSPECT_CONFIG: {"Labels": {}}}
for label_name in label_names:
labels_conf_base[INSPECT_CONFIG]["Labels"][label_name] = parent_val
else:
labels_conf_base = {INSPECT_CONFIG: {"Labels": {}}}
df = df_parser(str(tmpdir))
df.content = df_content
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', labels_conf_base)
flexmock(workflow, source=MockSource())
image_labels = {}
for label_name in label_names:
image_labels[label_name] = default_value
wf_args = {
'labels': image_labels,
'auto_labels': [],
'aliases': {},
}
if dont_overwrite:
wf_args["dont_overwrite_if_in_dockerfile"] = label_names
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': image_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': wf_args
}]
)
runner.run()
for label_name in label_names:
result = df.labels.get(label_name)
assert result == result_val
@pytest.mark.parametrize('url_format, info_url', [ # noqa
('url_pre {label1} {label2} url_post', 'url_pre label1_value label2_value url_post'),
('url_pre url_post', 'url_pre url_post'),
('url_pre {label1} {label2} {label3_non_existent} url_post', None),
('url_pre {label1} {label2} {version} url_post', 'url_pre label1_value label2_value '
'version_value url_post'),
('url_pre {authoritative-source-url} {com.redhat.component} {com.redhat.build-host} url_post',
'url_pre authoritative-source-url_value com.redhat.component_value '
'com.redhat.build-host_value url_post'),
])
def test_url_label(tmpdir, docker_tasker, workflow, url_format, info_url, reactor_config_map):
if MOCK:
mock_docker()
plugin_labels = {}
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
df = df_parser(str(tmpdir))
df.content = DF_CONTENT_LABELS
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource())
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {
'image_labels': plugin_labels,
'image_label_info_url_format': url_format,
})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'info_url_format': url_format
}
}]
)
if info_url is not None:
runner.run()
assert df.labels.get("url") == info_url
else:
with pytest.raises(PluginFailedException):
runner.run()
assert AddLabelsPlugin.key is not None
@pytest.mark.parametrize('auto_label', [ # noqa
'build-date',
'architecture',
'vcs-type',
'vcs-url',
'vcs-ref',
'com.redhat.build-host',
])
@pytest.mark.parametrize('labels_docker', [
DF_CONTENT,
DF_CONTENT_WITH_LABELS,
])
@pytest.mark.parametrize('labels_base', [
LABELS_CONF_BASE_NONE,
LABELS_CONF_WITH_LABELS,
])
def test_add_labels_plugin_explicit(tmpdir, docker_tasker, workflow,
auto_label, labels_docker,
labels_base, reactor_config_map):
df = df_parser(str(tmpdir))
df.content = labels_docker
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', labels_base)
flexmock(workflow, source=MockSource())
prov_labels = {}
prov_labels[auto_label] = 'explicit_value'
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': prov_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {'labels': prov_labels, "dont_overwrite": [], "auto_labels": [auto_label],
'aliases': {'Build_Host': 'com.redhat.build-host'}}
}]
)
runner.run()
assert df.labels[auto_label] != 'explicit_value'
@pytest.mark.parametrize('parent, should_fail', [ # noqa
('koji/image-build', False),
('scratch', False),
('fedora', True),
])
def test_add_labels_base_image(tmpdir, docker_tasker, workflow,
parent, should_fail,
caplog, reactor_config_map):
df = df_parser(str(tmpdir))
df.content = "FROM {}\n".format(parent)
if MOCK:
mock_docker()
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'tasker', docker_tasker)
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', {})
flexmock(workflow, source=MockSource())
if parent == SCRATCH_FROM:
workflow.builder.base_from_scratch = True
# When a 'release' label is provided by parameter and used to
# configure the plugin, it should be set in the Dockerfile even
# when processing base images.
prov_labels = {'release': '5'}
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': prov_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {'labels': prov_labels, "dont_overwrite": [],
'aliases': {'Build_Host': 'com.redhat.build-host'}}
}]
)
if should_fail:
with caplog.at_level(logging.ERROR):
with pytest.raises(PluginFailedException):
runner.run()
msg = "base image was not inspected"
assert msg in [x.message for x in caplog.records]
else:
runner.run()
assert df.labels['release'] == '5'
@pytest.mark.parametrize('base_new, df_new, plugin_new, expected_in_df, expected_log', [ # noqa
(None, None, None, None, None),
(None, 'A', 'A', 'A', None),
(None, 'B', 'A', 'A', 'setting label'),
(None, 'A', 'B', 'B', None),
(None, None, 'A', 'A', 'setting label'),
(None, 'A', None, 'A', None),
('A', None, 'A', 'A', 'setting label'),
('B', None, 'A', 'A', 'setting label'),
('A', None, 'B', 'B', 'setting label'),
('A', 'A', None, 'A', None),
('A', 'B', None, 'B', None),
('B', 'A', None, 'A', None),
('A', 'A', 'A', 'A', None),
('A', 'B', 'A', 'A', 'setting label'),
('B', 'A', 'A', 'A', None),
('A', 'A', 'B', 'B', 'setting label'),
('A', 'B', 'B', 'B', None),
('B', 'B', 'A', 'A', 'setting label'),
('B', 'A', 'B', 'B', 'setting label'),
('A', 'B', 'C', 'C', 'setting label'),
])
@pytest.mark.parametrize('release_env', ['TEST_RELEASE_VAR', None])
def test_release_label(tmpdir, docker_tasker, workflow, caplog,
base_new, df_new, plugin_new,
expected_in_df, expected_log, release_env, reactor_config_map):
if MOCK:
mock_docker()
df_content = "FROM fedora\n"
plugin_labels = {}
if df_new:
df_content += 'LABEL release="{0}"\n'.format(df_new)
if plugin_new:
plugin_labels["release"] = plugin_new
base_labels = {INSPECT_CONFIG: {"Labels": {}}}
if base_new:
base_labels[INSPECT_CONFIG]["Labels"]["release"] = base_new
df = df_parser(str(tmpdir))
df.content = df_content
setattr(workflow, 'builder', X(release_env))
setattr(workflow.builder, 'df_path', df.dockerfile_path)
setattr(workflow.builder, 'base_image_inspect', base_labels)
flexmock(workflow, source=MockSource(release_env))
if reactor_config_map:
make_and_store_reactor_config_map(workflow, {'image_labels': plugin_labels})
runner = PreBuildPluginsRunner(
docker_tasker,
workflow,
[{
'name': AddLabelsPlugin.key,
'args': {
'labels': plugin_labels,
'dont_overwrite': [],
'auto_labels': [],
'aliases': {}
}
}]
)
runner.run()
assert AddLabelsPlugin.key is not None
result_new = df.labels.get("release")
assert result_new == expected_in_df
if release_env and expected_in_df:
expected = "ENV {}={}\n".format(release_env, expected_in_df)
assert expected in df.lines
if expected_log:
assert expected_log in caplog.text | 0.606265 | 0.136033 |
import time
import numpy as np
import numba
from sklearn.utils.validation import check_is_fitted
import scipy.sparse
@numba.njit(parallel=True)
def fast_knn_indices(X, n_neighbors):
    """A fast computation of knn indices.

    Parameters
    ----------
    X: array of shape (n_samples, n_features)
        The input data to compute the k-neighbor indices of.

        NOTE(review): each row of ``X`` is argsorted by value and the
        positions of the smallest ``n_neighbors`` entries are kept, so
        ``X`` looks like a precomputed pairwise-distance matrix rather
        than raw feature data -- confirm against callers.
    n_neighbors: int
        The number of nearest neighbors to compute for each sample in ``X``.

    Returns
    -------
    knn_indices: array of shape (n_samples, n_neighbors)
        The indices on the ``n_neighbors`` closest points in the dataset.
    """
    # int32 output: assumes column indices fit in 32 bits.
    knn_indices = np.empty((X.shape[0], n_neighbors), dtype=np.int32)
    # Rows are independent, so numba parallelizes this loop (prange).
    for row in numba.prange(X.shape[0]):
        # v = np.argsort(X[row]) # Need to call argsort this way for numba
        v = X[row].argsort(kind="quicksort")
        # Keep the positions of the n_neighbors smallest values in the row.
        v = v[:n_neighbors]
        knn_indices[row] = v
    return knn_indices
@numba.njit("i4(i8[:])")
def tau_rand_int(state):
    """A fast (pseudo)-random number generator.

    Combines three shift-register components and XORs their outputs (a
    Tausworthe-style generator, per the function name). Every update is
    masked with 0xFFFFFFFF to emulate 32-bit unsigned wraparound on the
    int64 state words.

    Parameters
    ----------
    state: array of int64, shape (3,)
        The internal state of the rng. Mutated in place.

    Returns
    -------
    A (pseudo)-random int32 value
    """
    # Each component: clear low bits, shift left, mask to 32 bits, then
    # XOR with a right-shifted mix of the old state word.
    state[0] = (((state[0] & 4294967294) << 12) & 0xFFFFFFFF) ^ (
        (((state[0] << 13) & 0xFFFFFFFF) ^ state[0]) >> 19
    )
    state[1] = (((state[1] & 4294967288) << 4) & 0xFFFFFFFF) ^ (
        (((state[1] << 2) & 0xFFFFFFFF) ^ state[1]) >> 25
    )
    state[2] = (((state[2] & 4294967280) << 17) & 0xFFFFFFFF) ^ (
        (((state[2] << 3) & 0xFFFFFFFF) ^ state[2]) >> 11
    )
    # The output is the XOR of the three component states.
    return state[0] ^ state[1] ^ state[2]
@numba.njit("f4(i8[:])")
def tau_rand(state):
    """A fast (pseudo)-random number generator for floats in the range [0,1]

    Parameters
    ----------
    state: array of int64, shape (3,)
        The internal state of the rng. Mutated in place.

    Returns
    -------
    A (pseudo)-random float32 in the interval [0, 1]
    """
    integer = tau_rand_int(state)
    # Scale the 32-bit value by 2**31 - 1; abs() folds negative results
    # into the positive range. NOTE(review): for the single value
    # INT32_MIN this yields marginally more than 1.0 -- presumably an
    # accepted approximation, confirm if exact [0, 1] bounds matter.
    return abs(float(integer) / 0x7FFFFFFF)
@numba.njit()
def norm(vec):
    """Compute the (standard l2) norm of a vector.

    Parameters
    ----------
    vec: array of shape (dim,)

    Returns
    -------
    The l2 norm of vec.
    """
    # Explicit accumulation loop (instead of np.linalg.norm) keeps this
    # trivially compilable under numba's nopython mode.
    result = 0.0
    for i in range(vec.shape[0]):
        result += vec[i] ** 2
    return np.sqrt(result)
@numba.njit(parallel=True)
def submatrix(dmat, indices_col, n_neighbors):
    """Return a submatrix given an original matrix and the indices to keep.

    Parameters
    ----------
    dmat: array, shape (n_samples, n_samples)
        Original matrix.
    indices_col: array, shape (n_samples, n_neighbors)
        Indices to keep. Each row consists of the indices of the columns.
    n_neighbors: int
        Number of neighbors.

    Returns
    -------
    submat: array, shape (n_samples, n_neighbors)
        The corresponding submatrix.
    """
    n_samples_transform, n_samples_fit = dmat.shape
    submat = np.zeros((n_samples_transform, n_neighbors), dtype=dmat.dtype)
    # Gather dmat[i, indices_col[i, j]] element by element.
    # NOTE(review): nested prange -- numba normally parallelizes only the
    # outermost loop, so the inner one presumably runs serially; confirm.
    for i in numba.prange(n_samples_transform):
        for j in numba.prange(n_neighbors):
            submat[i, j] = dmat[i, indices_col[i, j]]
    return submat
# Generates a timestamp for use in logging messages when verbose=True
def ts():
    """Return the current wall-clock time as a human-readable string."""
    # time.ctime() with no argument defaults to the current time.
    return time.ctime()
# I'm not enough of a numba ninja to numba this successfully.
# np.arrays of lists, which are objects...
def csr_unique(matrix, return_index=True, return_inverse=True, return_counts=True):
    """Find the unique elements of a sparse csr matrix.

    We don't explicitly construct the unique matrix leaving that to the user
    who may not want to duplicate a massive array in memory.

    Returns the indices of the input array that give the unique values.
    Returns the indices of the unique array that reconstructs the input array.
    Returns the number of times each unique row appears in the input matrix.

    Parameters
    ----------
    matrix: a csr matrix
    return_index: bool, optional
        If true, return the row indices of 'matrix'
    return_inverse: bool, optional
        If true, return the the indices of the unique array that can be
        used to reconstruct 'matrix'.
    return_counts: bool, optional
        If true, returns the number of times each unique item appears in 'matrix'

    The unique matrix can computed via
    unique_matrix = matrix[index]
    and the original matrix reconstructed via
    unique_matrix[inverse]
    """
    # LIL format exposes each row's column indices (.rows) and values
    # (.data) as plain Python lists.
    lil_matrix = matrix.tolil()
    # Per-row signature: column-index list concatenated with the value
    # list, so two rows compare equal iff they share sparsity pattern and
    # values. NOTE(review): np.unique over variable-length lists relies
    # on numpy's object-array handling -- confirm it behaves on the numpy
    # versions supported.
    rows = [x + y for x, y in zip(lil_matrix.rows, lil_matrix.data)]
    # Booleans sum to the number of optional arrays np.unique returns.
    return_values = return_counts + return_inverse + return_index
    # Drop element 0 (the unique values themselves); keep only the
    # requested index/inverse/counts arrays, in np.unique's fixed order.
    return np.unique(
        rows,
        return_index=return_index,
        return_inverse=return_inverse,
        return_counts=return_counts,
    )[1 : (return_values + 1)]
def disconnected_vertices(model):
    """Return a boolean vector marking vertices disconnected from the umap graph.

    Disconnected vertices tend to be scattered across the embedding and
    distract from the main manifold. Callers can filter them and re-run
    UMAP, or exclude them from interactive plots via the subset_points
    parameter. Use ~disconnected_vertices(model) to keep only connected
    points.

    Parameters
    ----------
    model: a trained UMAP model

    Returns
    -------
    A boolean vector indicating which points are disconnected
    """
    check_is_fitted(model, "graph_")
    graph = model.graph_
    if model.unique:
        # Expand the deduplicated graph back to the original row order.
        graph = graph[model._unique_inverse_]
    # A vertex with zero total edge weight has no connections at all.
    row_sums = np.array(graph.sum(axis=1)).flatten()
    return row_sums == 0
import time
import numpy as np
import numba
from sklearn.utils.validation import check_is_fitted
import scipy.sparse
@numba.njit(parallel=True)
def fast_knn_indices(X, n_neighbors):
    """A fast computation of knn indices.

    Parameters
    ----------
    X: array of shape (n_samples, n_features)
        The input data to compute the k-neighbor indices of.

        NOTE(review): each row of ``X`` is argsorted by value and the
        positions of the smallest ``n_neighbors`` entries are kept, so
        ``X`` looks like a precomputed pairwise-distance matrix rather
        than raw feature data -- confirm against callers.
    n_neighbors: int
        The number of nearest neighbors to compute for each sample in ``X``.

    Returns
    -------
    knn_indices: array of shape (n_samples, n_neighbors)
        The indices on the ``n_neighbors`` closest points in the dataset.
    """
    # int32 output: assumes column indices fit in 32 bits.
    knn_indices = np.empty((X.shape[0], n_neighbors), dtype=np.int32)
    # Rows are independent, so numba parallelizes this loop (prange).
    for row in numba.prange(X.shape[0]):
        # v = np.argsort(X[row]) # Need to call argsort this way for numba
        v = X[row].argsort(kind="quicksort")
        # Keep the positions of the n_neighbors smallest values in the row.
        v = v[:n_neighbors]
        knn_indices[row] = v
    return knn_indices
@numba.njit("i4(i8[:])")
def tau_rand_int(state):
    """A fast (pseudo)-random number generator.

    Combines three shift-register components and XORs their outputs (a
    Tausworthe-style generator, per the function name). Every update is
    masked with 0xFFFFFFFF to emulate 32-bit unsigned wraparound on the
    int64 state words.

    Parameters
    ----------
    state: array of int64, shape (3,)
        The internal state of the rng. Mutated in place.

    Returns
    -------
    A (pseudo)-random int32 value
    """
    # Each component: clear low bits, shift left, mask to 32 bits, then
    # XOR with a right-shifted mix of the old state word.
    state[0] = (((state[0] & 4294967294) << 12) & 0xFFFFFFFF) ^ (
        (((state[0] << 13) & 0xFFFFFFFF) ^ state[0]) >> 19
    )
    state[1] = (((state[1] & 4294967288) << 4) & 0xFFFFFFFF) ^ (
        (((state[1] << 2) & 0xFFFFFFFF) ^ state[1]) >> 25
    )
    state[2] = (((state[2] & 4294967280) << 17) & 0xFFFFFFFF) ^ (
        (((state[2] << 3) & 0xFFFFFFFF) ^ state[2]) >> 11
    )
    # The output is the XOR of the three component states.
    return state[0] ^ state[1] ^ state[2]
@numba.njit("f4(i8[:])")
def tau_rand(state):
    """A fast (pseudo)-random number generator for floats in the range [0,1]

    Parameters
    ----------
    state: array of int64, shape (3,)
        The internal state of the rng. Mutated in place.

    Returns
    -------
    A (pseudo)-random float32 in the interval [0, 1]
    """
    integer = tau_rand_int(state)
    # Scale the 32-bit value by 2**31 - 1; abs() folds negative results
    # into the positive range. NOTE(review): for the single value
    # INT32_MIN this yields marginally more than 1.0 -- presumably an
    # accepted approximation, confirm if exact [0, 1] bounds matter.
    return abs(float(integer) / 0x7FFFFFFF)
@numba.njit()
def norm(vec):
    """Compute the (standard l2) norm of a vector.

    Parameters
    ----------
    vec: array of shape (dim,)

    Returns
    -------
    The l2 norm of vec.
    """
    # Explicit accumulation loop (instead of np.linalg.norm) keeps this
    # trivially compilable under numba's nopython mode.
    result = 0.0
    for i in range(vec.shape[0]):
        result += vec[i] ** 2
    return np.sqrt(result)
@numba.njit(parallel=True)
def submatrix(dmat, indices_col, n_neighbors):
    """Return a submatrix given an original matrix and the indices to keep.

    Parameters
    ----------
    dmat: array, shape (n_samples, n_samples)
        Original matrix.
    indices_col: array, shape (n_samples, n_neighbors)
        Indices to keep. Each row consists of the indices of the columns.
    n_neighbors: int
        Number of neighbors.

    Returns
    -------
    submat: array, shape (n_samples, n_neighbors)
        The corresponding submatrix.
    """
    n_samples_transform, n_samples_fit = dmat.shape
    submat = np.zeros((n_samples_transform, n_neighbors), dtype=dmat.dtype)
    # Gather dmat[i, indices_col[i, j]] element by element.
    # NOTE(review): nested prange -- numba normally parallelizes only the
    # outermost loop, so the inner one presumably runs serially; confirm.
    for i in numba.prange(n_samples_transform):
        for j in numba.prange(n_neighbors):
            submat[i, j] = dmat[i, indices_col[i, j]]
    return submat
# Generates a timestamp for use in logging messages when verbose=True
def ts():
    """Return the current wall-clock time as a human-readable string."""
    # time.ctime() with no argument defaults to the current time.
    return time.ctime()
# I'm not enough of a numba ninja to numba this successfully.
# np.arrays of lists, which are objects...
def csr_unique(matrix, return_index=True, return_inverse=True, return_counts=True):
    """Find the unique elements of a sparse csr matrix.
    We don't explicitly construct the unique matrix leaving that to the user
    who may not want to duplicate a massive array in memory.
    Returns the indices of the input array that give the unique values.
    Returns the indices of the unique array that reconstructs the input array.
    Returns the number of times each unique row appears in the input matrix.
    matrix: a csr matrix
    return_index = bool, optional
        If true, return the row indices of 'matrix'
    return_inverse: bool, optional
        If true, return the the indices of the unique array that can be
        used to reconstruct 'matrix'.
    return_counts = bool, optional
        If true, returns the number of times each unique item appears in 'matrix'
    The unique matrix can computed via
    unique_matrix = matrix[index]
    and the original matrix reconstructed via
    unique_matrix[inverse]
    """
    # Represent each row as its column-index list concatenated with its
    # value list, so two rows compare equal exactly when their sparse
    # contents are identical.
    lil_matrix = matrix.tolil()
    rows = [x + y for x, y in zip(lil_matrix.rows, lil_matrix.data)]
    # np.unique returns (unique, index, inverse, counts); slice off the
    # leading "unique" array and keep only the extras that were requested
    # (bools sum to the number of extra arrays returned).
    # NOTE(review): this relies on np.unique accepting a list of per-row
    # lists; rows of differing lengths form a ragged object array, which
    # newer numpy versions reject -- confirm against the pinned numpy.
    return_values = return_counts + return_inverse + return_index
    return np.unique(
        rows,
        return_index=return_index,
        return_inverse=return_inverse,
        return_counts=return_counts,
    )[1 : (return_values + 1)]
def disconnected_vertices(model):
    """
    Returns a boolean vector indicating which vertices are disconnected from the umap graph.
    These vertices will often be scattered across the space and make it difficult to focus on the main
    manifold. They can either be filtered and have UMAP re-run or simply filtered from the interactive plotting tool
    via the subset_points parameter.
    Use ~disconnected_vertices(model) to only plot the connected points.
    Parameters
    ----------
    model: a trained UMAP model
    Returns
    -------
    A boolean vector indicating which points are disconnected
    """
    check_is_fitted(model, "graph_")
    # A vertex is disconnected when its row of the graph has no edges,
    # i.e. the row sum is zero.
    graph = model.graph_
    if model.unique:
        # Expand the deduplicated graph back to the original point order.
        graph = graph[model._unique_inverse_]
    row_sums = np.array(graph.sum(axis=1)).flatten()
    return row_sums == 0
import math
from typing import List, Tuple, Optional
from rgtk.constraint_solver import ConstraintSolver, Evaluator, Move
from rgtk.Interval import Interval
from rgtk.BitSet import BitSet
class IntervalsToBitSetsEvaluator(Evaluator):
    """Evaluator that places source Intervals into destination BitSets.

    Scores every way the source interval could be set into each destination
    bit set, preferring placements that minimize fragmentation, so the
    constraint solver can pick the globally best moves.
    """

    # Static Vars
    # Larger intervals will get a better score.
    SCORE_ADJUST_PER_INTERVAL_ITEM = -100

    # Those with the fewest possible destinations get a better score
    # so that they are prioritized. Put another way: we penalize
    # intervals that have a lot of possible destinations.
    # This is different than how we use moves. Example:
    # Let's say we have interval 111 that we're working into BitSet 000001.
    # 000001
    # 111    <- 1st possible destination
    #  111   <- 2nd
    #   111  <- 3rd
    SCORE_PER_POSSIBLE_DESTINATION = 100000

    # Those leaving larger fragmentation blocks are prioritized so
    # that larger intervals can be fitted in later, rather than have
    # a bunch of tiny fragments.
    SCORE_PER_FRAGMENT_SIZE = -1

    class PotentialMove:
        """A candidate Move plus the fragment sizes it would leave behind."""

        def __init__(self, move: Move, base_score: int, smallest_fragment: int, largest_fragment: int):
            self.move = move
            self.base_score = base_score
            # The Potential Move holds the fragment details for scoring.
            # These aren't germane to the moves themselves, so we keep them here.
            self.smallest_fragment = smallest_fragment
            self.largest_fragment = largest_fragment

    class ChangeList:
        """Pairs the free interval found with the sub-interval chosen within it."""

        def __init__(self, possible_interval: Interval, chosen_interval: Interval):
            self.possible_interval = possible_interval
            self.chosen_interval = chosen_interval

    @classmethod
    def factory_constructor(cls, source_index: int, source: Interval) -> 'IntervalsToBitSetsEvaluator':
        """Factory hook used by the constraint solver to create evaluators."""
        return IntervalsToBitSetsEvaluator(source_index, source)

    def __init__(self, source_index: int, source: Interval):
        super().__init__(source_index, source)
        # Create a map of destinations to possible moves.
        # This isn't 1:1, as there can be multiple ways for
        # an interval to be moved into a given palette.
        self._destination_to_potential_move_list = {}

    def get_list_of_best_moves(self) -> Tuple[int, List[Move]]:
        """Return (best score, all candidate moves tying for that score)."""
        best_score = math.inf
        best_moves = []
        # Iterate through all potential moves.
        for potential_move_list in self._destination_to_potential_move_list.values():
            if potential_move_list is not None:
                for potential_move in potential_move_list:
                    score = potential_move.base_score
                    # Those leaving larger fragments are prioritized over those that leave smaller ones.
                    # This is mostly to pick a winner within a given interval's choices.
                    largest_fragment = potential_move.largest_fragment
                    score += largest_fragment * IntervalsToBitSetsEvaluator.SCORE_PER_FRAGMENT_SIZE
                    if score < best_score:
                        best_score = score
                        best_moves.clear()
                        best_moves.append(potential_move.move)
                    elif score == best_score:
                        best_moves.append(potential_move.move)
        return (best_score, best_moves)

    def update_moves_for_destination(self, destination_index: int, destination: BitSet):
        """Recompute the potential moves of this source into one destination."""
        # If we have a "None" move list for this destination, that's because
        # we've already determined that we can't make a move into it.
        # We are operating under the assertion that "if I couldn't move into
        # it before, I can't move into it now."
        if (destination_index in self._destination_to_potential_move_list) and (self._destination_to_potential_move_list[destination_index] is None):
            return
        # Otherwise, we either haven't seen the move before or we're about to update our existing one.
        # In either event, start by assuming we won't get this to fit.
        self._destination_to_potential_move_list[destination_index] = None
        change_lists_fragment_infos = self._get_changes_to_fit(destination_index, destination)
        change_lists = change_lists_fragment_infos[0]
        fragment_infos = change_lists_fragment_infos[1]
        if (change_lists is not None) and (len(change_lists) > 0):
            # We can make moves!
            potential_move_list = []
            for change_list_idx in range(len(change_lists)):
                change_list = change_lists[change_list_idx]
                fragment_info = fragment_infos[change_list_idx]
                move = Move(self.source_index, destination_index, change_list)
                score = IntervalsToBitSetsEvaluator._get_score_for_changes(self.source, change_list)
                smallest_fragment = fragment_info[0]
                largest_fragment = fragment_info[1]
                potential_move = IntervalsToBitSetsEvaluator.PotentialMove(move, score, smallest_fragment, largest_fragment)
                potential_move_list.append(potential_move)
            self._destination_to_potential_move_list[destination_index] = potential_move_list

    @staticmethod
    def apply_changes(source: Interval, destination: BitSet, change_list: 'IntervalsToBitSetsEvaluator.ChangeList'):
        """Commit a chosen placement: set the chosen run of bits in the destination."""
        # Apply our changes, which is a run of bits to set.
        for bit_idx in range(change_list.chosen_interval.begin, change_list.chosen_interval.end + 1):
            destination.set_bit(bit_idx)

    @staticmethod
    def is_destination_empty(destination: BitSet) -> bool:
        """A destination is empty when no bits are set."""
        return destination.are_all_clear()

    def _get_changes_to_fit(self, destination_index: int, destination: BitSet) -> Tuple[List['IntervalsToBitSetsEvaluator.ChangeList'], List[Tuple[int, int]]]:
        """Find every free run in the destination that can hold the source interval."""
        change_lists = []
        fragment_infos = []
        # Find the intervals where our source Interval can fit.
        # e.g., if our Interval were a length of 3, and our BitSet looked like this:
        # 00100001000 <- BitSet
        # ABCDEFGHIJK <- Bit Pos
        # ChangeLists = [(D->G), (I->K)]
        range_start_idx = self.source.begin
        range_end_idx = self.source.end
        source_len = self.source.length
        # We'll start at the range's beginning, and stop when we either go off the BitSet or hit the end range.
        curr_clear_bit_idx = destination.get_next_unset_bit_index(range_start_idx)
        while (curr_clear_bit_idx is not None) and (curr_clear_bit_idx <= range_end_idx):
            # We are on a zero within the begin..end range of our source's interval.
            # Find the next one value, which will bound our search.
            next_set_bit_idx = destination.get_next_set_bit_index(curr_clear_bit_idx)
            if next_set_bit_idx is None:
                # If we ran off the edge, set the bound at the last value.
                next_set_bit_idx = destination.get_num_bits()
            if next_set_bit_idx > range_end_idx:
                # Make it bound to our top end of the range.
                next_set_bit_idx = range_end_idx + 1
            # How big is this new interval?
            possible_interval = Interval.create_from_fixed_range(curr_clear_bit_idx, next_set_bit_idx - 1)
            if possible_interval.length >= source_len:
                # Our interval will fit within this one. Now pick an interval *within* the possible
                # that fits our source and introduces the least fragmentation.
                change_list_fragment_info = self._get_best_change_list_for_possible_interval(possible_interval, destination)
                change_list = change_list_fragment_info[0]
                fragment_info = change_list_fragment_info[1]
                change_lists.append(change_list)
                fragment_infos.append(fragment_info)
            # Find the next zero AFTER our one.
            curr_clear_bit_idx = destination.get_next_unset_bit_index(next_set_bit_idx)
        return (change_lists, fragment_infos)

    def _get_best_change_list_for_possible_interval(self, possible_interval: Interval, destination: BitSet) -> Tuple['IntervalsToBitSetsEvaluator.ChangeList', Tuple[int, int]]:
        """Pick the placement within a free run that minimizes fragmentation."""
        # Figure out where the best place within the possible interval
        # to assign ourselves. We want the source block to be
        # positioned as close as possible to another block to
        # minimize fragmentation.
        # Example 1:
        # Our block consists of BBB
        # We have the following BitSet:
        # 0011000000000
        #     ^^^^^^ <- Possible interval
        # BAD:
        # 00110BBB00000
        # 001100BBB0000
        # 0011000BBB000
        #     ^^^^^^
        # BEST:
        # 0011BBB000000 <- No fragmentation introduced
        #
        # Example 2:
        # Our block consists of BBB
        # We have the following BitSet:
        # 1100000000000
        #   ^^^^^ <- Possible interval
        #
        # No perfect choice here. Default to minimizing known
        # fragmentation:
        # 1100BBB000000
        #   ^^^^^ <- Only introduced a 2-spot fragment
        # 10000001
        # 01234567
        #   ^^^ Potential Interval (2->4)
        # Bits to Left: 1, Bits to Right: 2
        # Look to the left of the BEGINNING of our interval.
        left_set_bit_idx = destination.get_previous_set_bit_index(possible_interval.begin)
        num_bits_to_left = 0
        if left_set_bit_idx is not None:
            num_bits_to_left = possible_interval.begin - left_set_bit_idx - 1
        # Look to the right of the END of our interval.
        right_set_bit_idx = destination.get_next_set_bit_index(possible_interval.end)
        num_bits_to_right = destination.get_num_bits() - possible_interval.end - 1
        if right_set_bit_idx is not None:
            num_bits_to_right = right_set_bit_idx - possible_interval.end - 1
        if num_bits_to_left <= num_bits_to_right:
            # We choose to the left.
            chosen_interval = Interval.create_fixed_length_at_start_point(possible_interval.begin, self.source.length)
            # Smallest is the distance from our left edge to the nearest 1.
            smallest_fragment = num_bits_to_left
            # Largest is the distance from the right edge of the possible interval
            # PLUS the difference between our possible and source lengths.
            largest_fragment = num_bits_to_right + (possible_interval.length - self.source.length)
            # Return a tuple of (change list, (smallest, largest))
            # We do this because we don't want the change list to hold fragment details
            # (since those will change after subsequent moves), but we don't want to have
            # to recalculate the fragments separately.
            return (IntervalsToBitSetsEvaluator.ChangeList(possible_interval, chosen_interval), (smallest_fragment, largest_fragment))
        else:
            # Go to the right edge.
            chosen_interval = Interval.create_fixed_length_from_end_point(possible_interval.end, self.source.length)
            # Smallest is the distance from our right edge to the nearest 1.
            smallest_fragment = num_bits_to_right
            # Largest is the distance from the left edge of the possible interval
            # PLUS the difference between our possible and source lengths.
            largest_fragment = num_bits_to_left + (possible_interval.length - self.source.length)
            # Return a tuple of (change list, (smallest, largest))
            # We do this because we don't want the change list to hold fragment details
            # (since those will change after subsequent moves), but we don't want to have
            # to recalculate the fragments separately.
            return (IntervalsToBitSetsEvaluator.ChangeList(possible_interval, chosen_interval), (smallest_fragment, largest_fragment))

    @staticmethod
    def _get_score_for_changes(source: Interval, change_list: 'IntervalsToBitSetsEvaluator.ChangeList') -> int:
        """Base score for a placement; lower scores are better."""
        score = 0
        # Larger intervals will get a better score so that they get prioritized.
        score += source.length * IntervalsToBitSetsEvaluator.SCORE_ADJUST_PER_INTERVAL_ITEM
        # Those with the fewest possible destinations get a better score
        # so that they are prioritized.
        # This is different than how we use moves. Example:
        # Let's say we have interval 111 that we're working into BitSet 000001.
        # 000001
        # 111    <- 1st possible destination
        #  111   <- 2nd
        #   111  <- 3rd
        if change_list is not None:
            interval_len = source.length
            num_destinations = change_list.possible_interval.length - interval_len
            score += num_destinations * IntervalsToBitSetsEvaluator.SCORE_PER_POSSIBLE_DESTINATION
        return score
from typing import List, Tuple, Optional
from rgtk.constraint_solver import ConstraintSolver, Evaluator, Move
from rgtk.Interval import Interval
from rgtk.BitSet import BitSet
class IntervalsToBitSetsEvaluator(Evaluator):
# Static Vars
# Larger intervals will get a better score.
SCORE_ADJUST_PER_INTERVAL_ITEM = -100
# Those with the fewest possible destinations get a better score
# so that they are prioritized. Put another way: we penalize
# intervals that have a lot of possible destinations.
# This is different than how we use moves. Example:
# Let's say we have interval 111 that we're workig into BitSet 000001.
# 000001
# 111 <- 1st possible destination
# 111 <- 2nd
# 111 <- 3rd
SCORE_PER_POSSIBLE_DESTINATION = 100000
# Those leaving larger fragmentation blocks are prioritized so
# that larger intervals can be fitted in later, rather than have
# a bunch of tiny fragments.
SCORE_PER_FRAGMENT_SIZE = -1
class PotentialMove:
def __init__(self, move: Move, base_score: int, smallest_fragment: int, largest_fragment: int):
self.move = move
self.base_score = base_score
# The Potential Move holds the fragment details for scoring.
# These aren't germane to the moves themselves, so we keep them here.
self.smallest_fragment = smallest_fragment
self.largest_fragment = largest_fragment
class ChangeList:
def __init__(self, possible_interval: Interval, chosen_interval: Interval):
self.possible_interval = possible_interval
self.chosen_interval = chosen_interval
@classmethod
def factory_constructor(cls, source_index: int, source: Interval) -> 'IntervalsToBitSetsEvaluator':
return IntervalsToBitSetsEvaluator(source_index, source)
def __init__(self, source_index: int, source: Interval):
super().__init__(source_index, source)
# Create a map of destinations to possible moves.
# This isn't 1:1, as there can be multiple ways for
# an interval to be moved into a given palette.
self._destination_to_potential_move_list = {}
def get_list_of_best_moves(self) -> Tuple[int, List[Move]]:
best_score = math.inf
best_moves = []
# Iterate through all potential moves.
for potential_move_list in self._destination_to_potential_move_list.values():
if potential_move_list is not None:
for potential_move in potential_move_list:
score = potential_move.base_score
# Those leaving larger fragments are prioritized over those that leave smaller ones.
# This is mostly to pick a winner within a given interval's choices.
largest_fragment = potential_move.largest_fragment
score += largest_fragment * IntervalsToBitSetsEvaluator.SCORE_PER_FRAGMENT_SIZE
if score < best_score:
best_score = score
best_moves.clear()
best_moves.append(potential_move.move)
elif score == best_score:
best_moves.append(potential_move.move)
return (best_score, best_moves)
def update_moves_for_destination(self, destination_index: int, destination: BitSet):
# If we have a "None" move list for this destination, that's because
# we've already determined that we can't make a move into it.
# We are operating under the assertion that "if I couldn't move into
# it before, I can't move into it now."
if (destination_index in self._destination_to_potential_move_list) and (self._destination_to_potential_move_list[destination_index] is None):
return
# Otherwise, we either haven't seen the move before or we're about to update our existing one.
# In either event, start by assuming we won't get this to fit.
self._destination_to_potential_move_list[destination_index] = None
change_lists_fragment_infos = self._get_changes_to_fit(destination_index, destination)
change_lists = change_lists_fragment_infos[0]
fragment_infos = change_lists_fragment_infos[1]
if (change_lists is not None) and (len(change_lists) > 0):
# We can make moves!
potential_move_list = []
for change_list_idx in range(len(change_lists)):
change_list = change_lists[change_list_idx]
fragment_info = fragment_infos[change_list_idx]
move = Move(self.source_index, destination_index, change_list)
score = IntervalsToBitSetsEvaluator._get_score_for_changes(self.source, change_list)
smallest_fragment = fragment_info[0]
largest_fragment = fragment_info[1]
potential_move = IntervalsToBitSetsEvaluator.PotentialMove(move, score, smallest_fragment, largest_fragment)
potential_move_list.append(potential_move)
self._destination_to_potential_move_list[destination_index] = potential_move_list
@staticmethod
def apply_changes(source: Interval, destination: BitSet, change_list: 'IntervalsToBitSetsEvaluator.ChangeList'):
# Apply our changes, which is a run of bits to set.
for bit_idx in range(change_list.chosen_interval.begin, change_list.chosen_interval.end + 1):
destination.set_bit(bit_idx)
@staticmethod
def is_destination_empty(destination: BitSet) -> bool:
return destination.are_all_clear()
def _get_changes_to_fit(self, destination_index: int, destination: BitSet) -> Tuple[List['IntervalsToBitSetsEvaluator.ChangeList'], List[Tuple[int, int]]]:
change_lists = []
fragment_infos = []
# Find the intervals where our source Interval can fit.
# e.g., if our Interval were a length of 3, and our BitSet looked like this:
# 00100001000 <- BitSet
# ABCDEFGHIJK <- Bit Pos
# ChangeLists = [(D->G), (I->K)]
range_start_idx = self.source.begin
range_end_idx = self.source.end
source_len = self.source.length
# We'll start at the range's beginning, and stop when we either go off the BitSet or hit the end range.
curr_clear_bit_idx = destination.get_next_unset_bit_index(range_start_idx)
while (curr_clear_bit_idx is not None) and (curr_clear_bit_idx <= range_end_idx):
# We are on a zero within the begin..end range of our source's interval.
# Find the next one value, which will bound our search.
next_set_bit_idx = destination.get_next_set_bit_index(curr_clear_bit_idx)
if next_set_bit_idx is None:
# If we ran off the edge, set the bound at the last value.
next_set_bit_idx = destination.get_num_bits()
if next_set_bit_idx > range_end_idx:
# Make it bound to our top end of the range.
next_set_bit_idx = range_end_idx + 1
# How big is this new interval?
possible_interval = Interval.create_from_fixed_range(curr_clear_bit_idx, next_set_bit_idx - 1)
if possible_interval.length >= source_len:
# Our interval will fit within this one. Now pick an interval *within* the possible
# that fits our source and introduces the least fragmentation.
change_list_fragment_info = self._get_best_change_list_for_possible_interval(possible_interval, destination)
change_list = change_list_fragment_info[0]
fragment_info = change_list_fragment_info[1]
change_lists.append(change_list)
fragment_infos.append(fragment_info)
# Find the next zero AFTER our one.
curr_clear_bit_idx = destination.get_next_unset_bit_index(next_set_bit_idx)
return (change_lists, fragment_infos)
def _get_best_change_list_for_possible_interval(self, possible_interval: Interval, destination: BitSet) -> Tuple['IntervalsToBitSetsEvaluator.ChangeList', Tuple[int, int]]:
# Figure out where the best place within the possible interval
# to assign ourselves. We want the source block to be
# positioned as close as possible to another block to
# minimize fragmentation.
# Example 1:
# Our block consists of BBB
# We have the following BitSet:
# 0011000000000
# ^^^^^^ <- Possible interval
# BAD:
# 00110BBB00000
# 001100BBB0000
# 0011000BBB000
# ^^^^^^
# BEST:
# 0011BBB000000 <- No fragmentation introduced
#
# Example 2:
# Our block constists of BBB
# We have the following BitSet:
# 1100000000000
# ^^^^^ <- Possible interval
#
# No perfect choice here. Default to minimizing known
# fragmentation:
# 1100BBB000000
# ^^^^^ <- Only introduced a 2-spot fragment
# 10000001
# 01234567
# ^^^ Potential Interval (2->4)
# Bits to Left: 1, Bits to Right: 2
# Look to the left of the BEGINNING of our interval.
left_set_bit_idx = destination.get_previous_set_bit_index(possible_interval.begin)
num_bits_to_left = 0
if left_set_bit_idx is not None:
num_bits_to_left = possible_interval.begin - left_set_bit_idx - 1
# Look to the right of the END of our interval.
right_set_bit_idx = destination.get_next_set_bit_index(possible_interval.end)
num_bits_to_right = destination.get_num_bits() - possible_interval.end - 1
if right_set_bit_idx is not None:
num_bits_to_right = right_set_bit_idx - possible_interval.end - 1
if num_bits_to_left <= num_bits_to_right:
# We choose to the left.
chosen_interval = Interval.create_fixed_length_at_start_point(possible_interval.begin, self.source.length)
# Smallest is the distance from our left edge to the nearest 1.
smallest_fragment = num_bits_to_left
# Largest is the distance from the right edge of the possible interval
# PLUS the difference between our possible and source lengths.
largest_fragment = num_bits_to_right + (possible_interval.length - self.source.length)
# Return a tuple of (change list, (smallest, largest))
# We do this because we don't want the change list to hold fragment details
# (since those will change after subsequent moves), but we don't want to have
# to recalculate the fragments separately.
return (IntervalsToBitSetsEvaluator.ChangeList(possible_interval, chosen_interval), (smallest_fragment, largest_fragment))
else:
# Go to the right edge.
chosen_interval = Interval.create_fixed_length_from_end_point(possible_interval.end, self.source.length)
# Smallest is the distance from our right edge to the nearest 1.
smallest_fragment = num_bits_to_right
# Largest is the distance from the left edge of the possible interval
# PLUS the difference between our possible and source lengths.
largest_fragment = num_bits_to_left + (possible_interval.length - self.source.length)
# Return a tuple of (change list, (smallest, largest))
# We do this because we don't want the change list to hold fragment details
# (since those will change after subsequent moves), but we don't want to have
# to recalculate the fragments separately.
return (IntervalsToBitSetsEvaluator.ChangeList(possible_interval, chosen_interval), (smallest_fragment, largest_fragment))
@staticmethod
def _get_score_for_changes(source: Interval, change_list: 'IntervalsToBitSetsEvaluator.ChangeList') -> int:
score = 0
# Larger intervals will get a better score so that they get prioritized.
score += source.length * IntervalsToBitSetsEvaluator.SCORE_ADJUST_PER_INTERVAL_ITEM
# Those with the fewest possible destinations get a better score
# so that they are prioritized.
# This is different than how we use moves. Example:
# Let's say we have interval 111 that we're workig into BitSet 000001.
# 000001
# 111 <- 1st possible destination
# 111 <- 2nd
# 111 <- 3rd
if change_list is not None:
interval_len = source.length
num_destinations = change_list.possible_interval.length - interval_len
score += num_destinations * IntervalsToBitSetsEvaluator.SCORE_PER_POSSIBLE_DESTINATION
return score | 0.809088 | 0.355327 |
import re
import uuid
from ast import literal_eval
from collections import defaultdict
from typing import (
Any, Dict, Generator, Iterator, List, NamedTuple, Optional, Pattern, Set, Tuple, Union,
)
from urllib.parse import urlencode, urlunsplit
from sortedcontainers import SortedListWithKey
from .exceptions import MethodNotAllowed, NotFound, RedirectRequired
ROUTE_VAR_RE = re.compile(r''' # noqa
(?P<static>[^<]*) # static rule data
<
(?:
(?P<converter>[a-zA-Z_][a-zA-Z0-9_]*) # converter name
(?:\((?P<args>.*?)\))? # converter arguments
\: # variable delimiter
)?
(?P<variable>[a-zA-Z][a-zA-Z0-9_]*) # variable name
>
''', re.VERBOSE) # noqa
CONVERTER_ARGS_RE = re.compile(r''' # noqa
((?P<name>\w+)\s*=\s*)?
(?P<value>
True|False|
\d+.\d+|
\d+.|
\d+|
\w+|
[urUR]?(?P<str_value>"[^"]*?"|'[^']*')
)\s*,
''', re.VERBOSE | re.UNICODE) # noqa
VariablePart = NamedTuple(
'VariablePart',
[('converter', Optional[str]), ('arguments', Tuple[List[Any], Dict[str, Any]]), ('name', str)],
)
WeightedPart = NamedTuple('Weight', [('converter', bool), ('weight', int)])
class ValidationError(Exception):
    """Raised by a converter when a textually-matched value fails conversion."""
    pass
class BuildError(Exception):
    """Raised when no rule can build a URL for an endpoint.

    Carries the endpoint name, the candidate rules that were considered,
    and the values/method that failed to match, and renders them into a
    helpful diagnostic message.
    """

    def __init__(
        self,
        endpoint: str,
        rules: List['Rule'],
        values: Optional[Dict]=None,
        method: Optional[str]=None,
    ) -> None:
        self.endpoint = endpoint
        self.rules = rules
        self.values = values
        self.method = method

    def __str__(self) -> str:
        """Explain, per candidate rule, why it could not be built."""
        message = [f"Could not build rule for endpoint '{self.endpoint}'."]
        # Idiomatic truthiness check (was `if len(self.rules):`).
        if self.rules:
            for rule in self.rules:
                message.append(f"{rule.rule} Cannot be built")
                if self.method is not None and self.method not in rule.methods:
                    message.append(f"as {self.method} is not one of {rule.methods}.")
                elif self.values is not None:
                    message.append(
                        f"as {self.values.keys()} do not match {rule._converters.keys()}.",
                    )
        else:
            message.append('No endpoint found.')
        return ' '.join(message)
class BaseConverter:
    """Base URL variable converter: passes values through unchanged.

    Subclasses override ``regex`` (what text a variable matches),
    ``weight`` (sorting priority), and the two conversion hooks.
    """

    regex = r'[^/]+'
    weight = 100

    def to_python(self, value: str) -> Any:
        """Convert the matched path fragment into a Python value."""
        return value

    def to_url(self, value: Any) -> str:
        """Convert a Python value back into a path fragment."""
        return value
class StringConverter(BaseConverter):
def __init__(
self, minlength: int=1, maxlength: Optional[int]=None, length: Optional[int]=None,
) -> None:
if length is not None:
re_length = '{%d}' % length
else:
maxlength = '' if maxlength is None else int(maxlength) # type: ignore
re_length = '{%d,%s}' % (minlength, maxlength)
self.regex = f"[^/]{re_length}"
class AnyConverter(BaseConverter):
    """Matches exactly one of a fixed set of literal strings."""

    def __init__(self, *items: str) -> None:
        # Escape each literal so regex metacharacters match themselves.
        escaped = (re.escape(item) for item in items)
        self.regex = '(?:%s)' % '|'.join(escaped)
class PathConverter(BaseConverter):
    """Like the default converter but the match may contain '/' characters."""
    regex = r'[^/].*?'
    weight = 200
class IntegerConverter(BaseConverter):
    """Matches unsigned integers, with optional digit-count and range bounds."""

    regex = r'\d+'
    weight = 50

    def __init__(
        self, fixed_digits: Optional[int]=None, min: Optional[int]=None,
        max: Optional[int]=None,
    ) -> None:
        self.fixed_digits = fixed_digits
        self.min = min
        self.max = max

    def to_python(self, value: str) -> int:
        """Convert the matched digits to int, enforcing the constraints."""
        # NOTE(review): only values *longer* than fixed_digits are rejected;
        # shorter values still pass even though to_url zero-pads to exactly
        # fixed_digits -- confirm this asymmetry is intended.
        if self.fixed_digits is not None and len(value) > self.fixed_digits:
            raise ValidationError()
        result = int(value)
        below_min = self.min is not None and result < self.min
        above_max = self.max is not None and result > self.max
        if below_min or above_max:
            raise ValidationError()
        return result

    def to_url(self, value: int) -> str:
        """Render the int, zero-padded when a fixed digit count is set."""
        if self.fixed_digits is None:
            return str(value)
        return f"{value:0{self.fixed_digits}d}"
class FloatConverter(BaseConverter):
    """Matches unsigned float literals, with optional range bounds."""

    regex = r'\d+\.\d+'
    weight = 50

    def __init__(self, min: Optional[float]=None, max: Optional[float]=None) -> None:
        self.min = min
        self.max = max

    def to_python(self, value: str) -> float:
        """Convert the matched text to float, enforcing the range bounds."""
        result = float(value)
        below_min = self.min is not None and result < self.min
        above_max = self.max is not None and result > self.max
        if below_min or above_max:
            raise ValidationError()
        return result

    def to_url(self, value: float) -> str:
        """Render the float using its default string form."""
        return str(value)
class UUIDConverter(BaseConverter):
    """Matches canonical 8-4-4-4-12 hexadecimal UUIDs."""

    regex = r'[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}' # noqa

    def to_python(self, value: str) -> uuid.UUID:
        """Parse the matched text into a uuid.UUID instance."""
        return uuid.UUID(value)

    def to_url(self, value: uuid.UUID) -> str:
        """Render the UUID in its canonical string form."""
        return str(value)
class Map:
    """The collection of routing Rules, kept sorted for matching and building."""

    # Converter registry keyed by the name used in "<name:variable>" rules.
    default_converters = {
        'any': AnyConverter,
        'default': StringConverter,
        'float': FloatConverter,
        'int': IntegerConverter,
        'path': PathConverter,
        'string': StringConverter,
        'uuid': UUIDConverter,
    }

    def __init__(self, host_matching: bool=False) -> None:
        # rules is sorted for matching (by match_key); endpoints maps each
        # endpoint name to its rules sorted for building (by build_key).
        # NOTE(review): SortedListWithKey comes from sortedcontainers; newer
        # releases rename it SortedKeyList -- confirm the pinned version.
        self.rules = SortedListWithKey(key=lambda rule: rule.match_key)
        self.endpoints: Dict[str, SortedListWithKey] = defaultdict(lambda: SortedListWithKey(key=lambda rule: rule.build_key)) # noqa
        self.converters = self.default_converters.copy()
        self.host_matching = host_matching

    def add(self, rule: 'Rule') -> None:
        """Bind a rule to this map and register it for matching and building."""
        rule.bind(self)
        self.endpoints[rule.endpoint].add(rule)
        self.rules.add(rule)

    def bind_to_request(
        self,
        scheme: str,
        server_name: str,
        method: str,
        path: str,
        query_string: bytes,
    ) -> 'MapAdapter':
        """Create an adapter bound to a concrete incoming request."""
        return MapAdapter(self, scheme, server_name, method, path, query_string)

    def bind(self, scheme: str, server_name: str) -> 'MapAdapter':
        """Create an adapter for URL building outside of a request."""
        return MapAdapter(self, scheme, server_name)

    def iter_rules(self, endpoint: Optional[str]=None) -> Iterator['Rule']:
        """Iterate all rules, or only those registered for `endpoint`."""
        if endpoint is not None:
            return iter(self.endpoints[endpoint])
        return iter(self.rules)
class MapAdapter:
    """A Map bound to a scheme/host (and optionally a request) so URLs can
    be matched and built against concrete values."""

    def __init__(
        self,
        map: Map,
        scheme: str,
        server_name: str,
        method: Optional[str]=None,
        path: Optional[str]=None,
        query_string: Optional[bytes]=None,
    ) -> None:
        self.map = map
        self.scheme = scheme
        self.server_name = server_name
        # Normalize to exactly one leading slash; a None path stays None
        # (build-only adapters have no request path).
        self.path = f"/{path.lstrip('/')}" if path is not None else path
        self.method = method
        self.query_string = query_string

    def build(
        self,
        endpoint: str,
        values: Optional[dict]=None,
        method: Optional[str]=None,
        scheme: Optional[str]=None,
        external: bool=False,
    ) -> str:
        """Build a path (or full URL when `external`) for the endpoint.

        Raises BuildError when no registered rule accepts the values/method.
        """
        values = values or {}
        rules = self.map.endpoints[endpoint]
        # First buildable rule wins; `endpoints` is kept sorted by build_key.
        for rule in rules:
            if rule.buildable(values, method=method):
                path = rule.build(**values)
                if external:
                    scheme = scheme or self.scheme
                    host = rule.host or self.server_name
                    return f"{scheme}://{host}{path}"
                else:
                    return path
        raise BuildError(endpoint, rules, values, method)

    def match(self) -> Tuple['Rule', Dict[str, Any]]:
        """Match the bound request path; return (rule, variables).

        Raises RedirectRequired for trailing-slash or default-value
        redirects, MethodNotAllowed when a rule matches the path but not
        the method, and NotFound otherwise.
        """
        allowed_methods: Set[str] = set()
        for rule, variables, needs_slash in self._matches():
            if self.method in rule.methods:
                if needs_slash:
                    raise RedirectRequired(self._make_redirect_url(rule, variables))
                # Check if there is a default rule that can be used instead
                for potential_rule in self.map.endpoints[rule.endpoint]:
                    if potential_rule.provides_defaults_for(rule, **variables):
                        raise RedirectRequired(self._make_redirect_url(potential_rule, variables))
                return rule, variables
            else:
                # Path matched but method didn't; remember for the 405.
                allowed_methods.update(rule.methods)
        if allowed_methods:
            raise MethodNotAllowed(allowed_methods=allowed_methods)
        raise NotFound()

    def _make_redirect_url(self, rule: 'Rule', variables: Dict[str, Any]) -> str:
        # Rebuild the URL for a redirect, preserving the query string.
        # NOTE(review): assumes query_string is not None whenever a redirect
        # is required -- confirm callers always bind it before matching.
        path = rule.build(**variables)
        suffix = self.query_string.decode('ascii')
        return urlunsplit((self.scheme, self.server_name, path, suffix, ''))

    def allowed_methods(self) -> Set[str]:
        """Return the union of methods accepted by every matching rule."""
        allowed_methods: Set[str] = set()
        for rule, *_ in self._matches():
            allowed_methods.update(rule.methods)
        return allowed_methods

    def _matches(self) -> Generator[Tuple['Rule', Dict[str, Any], bool], None, None]:
        # Rule patterns are compiled against the "host|path" form; prepend
        # the host only when host matching is enabled.
        if self.map.host_matching:
            full_path = f"{self.server_name}|{self.path}"
        else:
            full_path = f"|{self.path}"
        for rule in self.map.rules:
            variables, needs_slash = rule.match(full_path)
            if variables is not None:
                yield rule, variables, needs_slash
class Rule:
    def __init__(
        self,
        rule: str,
        methods: Set[str],
        endpoint: str,
        strict_slashes: bool=True,
        defaults: Optional[dict]=None,
        host: Optional[str]=None,
        *,
        provide_automatic_options: bool=True,
        is_websocket: bool=False,
    ) -> None:
        """Create a routing rule.

        Arguments:
            rule: The path template; must start with '/'.
            methods: Accepted HTTP methods (HEAD is implied by GET).
            endpoint: Name used to build URLs back to this rule.
            strict_slashes: Redirect to add a trailing slash on branch rules.
            defaults: Values this rule supplies without matching them.
            host: Optional host to match (host-matching mode).
            provide_automatic_options: Auto-answer OPTIONS requests.
            is_websocket: Whether this rule serves a websocket (GET only).

        Raises:
            ValueError: If the rule lacks a leading slash, or a websocket
                rule declares methods other than GET.
        """
        if not rule.startswith('/'):
            raise ValueError(f"Rule '{rule}' does not start with a slash")
        self.rule = rule
        # A "leaf" has no trailing slash; branch rules may redirect to add one.
        self.is_leaf = not rule.endswith('/')
        self.is_websocket = is_websocket
        # HEAD is implied by GET for plain HTTP routes.
        if 'GET' in methods and 'HEAD' not in methods and not self.is_websocket:
            methods.add('HEAD')
        self.methods = frozenset(method.upper() for method in methods)
        if self.is_websocket and self.methods != {'GET'}:  # type: ignore
            raise ValueError(f"{methods} must only be GET for a websocket route")
        self.endpoint = endpoint
        self.strict_slashes = strict_slashes
        self.defaults = defaults or {}
        self.host = host
        # The fields below are populated when the rule is bound to a Map
        # and its pattern is compiled.
        self.map: Optional[Map] = None
        self._pattern: Optional[Pattern] = None
        self._builder: Optional[str] = None
        self._converters: Dict[str, BaseConverter] = {}
        self._weights: List[WeightedPart] = []
        self.provide_automatic_options = provide_automatic_options
def __repr__(self) -> str:
return f"Rule({self.rule}, {self.methods}, {self.endpoint}, {self.strict_slashes})"
def match(self, path: str) -> Tuple[Optional[Dict[str, Any]], bool]:
"""Check if the path matches this Rule.
If it does it returns a dict of matched and converted values,
otherwise None is returned.
"""
match = self._pattern.match(path)
if match is not None:
# If the route is a branch (not leaf) and the path is
# missing a trailing slash then it needs one to be
# considered a match in the strict slashes mode.
needs_slash = (
self.strict_slashes and not self.is_leaf and match.groupdict()['__slash__'] != '/'
)
try:
converted_varaibles = {
name: self._converters[name].to_python(value)
for name, value in match.groupdict().items()
if name != '__slash__'
}
except ValidationError: # Doesn't meet conversion rules, no match
return None, False
else:
return {**self.defaults, **converted_varaibles}, needs_slash
else:
return None, False
def provides_defaults_for(self, rule: 'Rule', **values: Any) -> bool:
"""Returns true if this rule provides defaults for the argument and values."""
defaults_match = all(
values[key] == self.defaults[key] for key in self.defaults if key in values # noqa: S101, E501
)
return self != rule and bool(self.defaults) and defaults_match
def build(self, **values: Any) -> str:
"""Build this rule into a path using the values given."""
converted_values = {
key: self._converters[key].to_url(value)
for key, value in values.items()
if key in self._converters
}
result = self._builder.format(**converted_values).split('|', 1)[1]
query_string = urlencode(
{
key: value
for key, value in values.items()
if key not in self._converters and key not in self.defaults
},
doseq=True,
)
if query_string:
result = "{}?{}".format(result, query_string)
return result
def buildable(self, values: Optional[dict]=None, method: Optional[str]=None) -> bool:
"""Return True if this rule can build with the values and method."""
if method is not None and method not in self.methods:
return False
defaults_match = all(
values[key] == self.defaults[key] for key in self.defaults if key in values # noqa: S101, E501
)
return defaults_match and set(values.keys()) >= set(self._converters.keys())
def bind(self, map: Map) -> None:
"""Bind the Rule to a Map and compile it."""
if self.map is not None:
raise RuntimeError(f"{self!r} is already bound to {self.map!r}")
self.map = map
pattern = ''
builder = ''
full_rule = "{}\\|{}".format(self.host or '', self.rule)
for part in _parse_rule(full_rule):
if isinstance(part, VariablePart):
converter = self.map.converters[part.converter](
*part.arguments[0], **part.arguments[1],
)
pattern += f"(?P<{part.name}>{converter.regex})"
self._converters[part.name] = converter
builder += '{' + part.name + '}'
self._weights.append(WeightedPart(True, converter.weight))
else:
builder += part
pattern += part
self._weights.append(WeightedPart(False, -len(part)))
if not self.is_leaf or not self.strict_slashes:
# Pattern should match with or without a trailing slash
pattern = f"{pattern.rstrip('/')}(?<!/)(?P<__slash__>/?)$"
else:
pattern = f"{pattern}$"
self._pattern = re.compile(pattern)
self._builder = builder
@property
def match_key(self) -> Tuple[bool, bool, int, List[WeightedPart]]:
"""A Key to sort the rules by weight for matching.
The key leads to ordering:
- By first order by defaults as they are simple rules without
conversions.
- Then on the complexity of the rule, i.e. does it have any
converted parts. This is as simple rules are quick to match
or reject.
- Then by the number of parts, with more complex (more parts)
first.
- Finally by the weights themselves. Note that weights are also
sub keyed by converter first then weight second.
"""
if self.map is None:
raise RuntimeError(f"{self!r} is not bound to a Map")
complex_rule = any(weight.converter for weight in self._weights)
return (not bool(self.defaults), complex_rule, -len(self._weights), self._weights)
@property
def build_key(self) -> Tuple[bool, int]:
"""A Key to sort the rules by weight for building.
The key leads to ordering:
- By routes with defaults first, as these must be evaulated
for building before ones without.
- Then the more complex routes (most converted parts).
"""
if self.map is None:
raise RuntimeError(f"{self!r} is not bound to a Map")
return (not bool(self.defaults), -sum(1 for weight in self._weights if weight.converter))
def _parse_rule(rule: str) -> Generator[Union[str, VariablePart], None, None]:
variable_names: Set[str] = set()
final_match = 0
for match in ROUTE_VAR_RE.finditer(rule):
named_groups = match.groupdict()
if named_groups['static'] is not None:
yield named_groups['static']
variable = named_groups['variable']
if variable in variable_names:
raise ValueError(f"Variable name {variable} used more than once")
else:
variable_names.add(variable)
arguments = _parse_converter_args(named_groups['args'] or '')
yield VariablePart(named_groups['converter'] or 'default', arguments, variable)
final_match = match.span()[-1]
yield rule[final_match:]
def _parse_converter_args(raw: str) -> Tuple[List[Any], Dict[str, Any]]:
raw += ',' # Simplifies matching regex if each argument has a trailing comma
args = []
kwargs = {}
for match in CONVERTER_ARGS_RE.finditer(raw):
value = match.group('str_value') or match.group('value')
try:
value = literal_eval(value)
except ValueError:
value = str(value)
name = match.group('name')
if not name:
args.append(value)
else:
kwargs[name] = value
return args, kwargs | quart/routing.py | import re
import uuid
from ast import literal_eval
from collections import defaultdict
from typing import (
Any, Dict, Generator, Iterator, List, NamedTuple, Optional, Pattern, Set, Tuple, Union,
)
from urllib.parse import urlencode, urlunsplit
from sortedcontainers import SortedListWithKey
from .exceptions import MethodNotAllowed, NotFound, RedirectRequired
ROUTE_VAR_RE = re.compile(r''' # noqa
(?P<static>[^<]*) # static rule data
<
(?:
(?P<converter>[a-zA-Z_][a-zA-Z0-9_]*) # converter name
(?:\((?P<args>.*?)\))? # converter arguments
\: # variable delimiter
)?
(?P<variable>[a-zA-Z][a-zA-Z0-9_]*) # variable name
>
''', re.VERBOSE) # noqa
CONVERTER_ARGS_RE = re.compile(r''' # noqa
((?P<name>\w+)\s*=\s*)?
(?P<value>
True|False|
\d+.\d+|
\d+.|
\d+|
\w+|
[urUR]?(?P<str_value>"[^"]*?"|'[^']*')
)\s*,
''', re.VERBOSE | re.UNICODE) # noqa
VariablePart = NamedTuple(
'VariablePart',
[('converter', Optional[str]), ('arguments', Tuple[List[Any], Dict[str, Any]]), ('name', str)],
)
WeightedPart = NamedTuple('Weight', [('converter', bool), ('weight', int)])
class ValidationError(Exception):
pass
class BuildError(Exception):
def __init__(
self,
endpoint: str,
rules: List['Rule'],
values: Optional[Dict]=None,
method: Optional[str]=None,
) -> None:
self.endpoint = endpoint
self.rules = rules
self.values = values
self.method = method
def __str__(self) -> str:
message = [f"Could not build rule for endpoint '{self.endpoint}'."]
if len(self.rules):
for rule in self.rules:
message.append(f"{rule.rule} Cannot be built")
if self.method is not None and self.method not in rule.methods:
message.append(f"as {self.method} is not one of {rule.methods}.")
elif self.values is not None:
message.append(
f"as {self.values.keys()} do not match {rule._converters.keys()}.",
)
else:
message.append('No endpoint found.')
return ' '.join(message)
class BaseConverter:
regex = r'[^/]+'
weight = 100
def to_python(self, value: str) -> Any:
return value
def to_url(self, value: Any) -> str:
return value
class StringConverter(BaseConverter):
def __init__(
self, minlength: int=1, maxlength: Optional[int]=None, length: Optional[int]=None,
) -> None:
if length is not None:
re_length = '{%d}' % length
else:
maxlength = '' if maxlength is None else int(maxlength) # type: ignore
re_length = '{%d,%s}' % (minlength, maxlength)
self.regex = f"[^/]{re_length}"
class AnyConverter(BaseConverter):
def __init__(self, *items: str) -> None:
self.regex = '(?:%s)' % '|'.join((re.escape(x) for x in items))
class PathConverter(BaseConverter):
regex = r'[^/].*?'
weight = 200
class IntegerConverter(BaseConverter):
regex = r'\d+'
weight = 50
def __init__(
self, fixed_digits: Optional[int]=None, min: Optional[int]=None,
max: Optional[int]=None,
) -> None:
self.fixed_digits = fixed_digits
self.min = min
self.max = max
def to_python(self, value: str) -> int:
if self.fixed_digits is not None and len(value) > self.fixed_digits:
raise ValidationError()
converted_value = int(value)
if (
self.min is not None and self.min > converted_value or
self.max is not None and self.max < converted_value
):
raise ValidationError()
return converted_value
def to_url(self, value: int) -> str:
if self.fixed_digits is not None:
return f"{value:0{self.fixed_digits}d}"
else:
return str(value)
class FloatConverter(BaseConverter):
regex = r'\d+\.\d+'
weight = 50
def __init__(self, min: Optional[float]=None, max: Optional[float]=None) -> None:
self.min = min
self.max = max
def to_python(self, value: str) -> float:
converted_value = float(value)
if (
self.min is not None and self.min > converted_value or
self.max is not None and self.max < converted_value
):
raise ValidationError()
return converted_value
def to_url(self, value: float) -> str:
return str(value)
class UUIDConverter(BaseConverter):
regex = r'[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}' # noqa
def to_python(self, value: str) -> uuid.UUID:
return uuid.UUID(value)
def to_url(self, value: uuid.UUID) -> str:
return str(value)
class Map:
default_converters = {
'any': AnyConverter,
'default': StringConverter,
'float': FloatConverter,
'int': IntegerConverter,
'path': PathConverter,
'string': StringConverter,
'uuid': UUIDConverter,
}
def __init__(self, host_matching: bool=False) -> None:
self.rules = SortedListWithKey(key=lambda rule: rule.match_key)
self.endpoints: Dict[str, SortedListWithKey] = defaultdict(lambda: SortedListWithKey(key=lambda rule: rule.build_key)) # noqa
self.converters = self.default_converters.copy()
self.host_matching = host_matching
def add(self, rule: 'Rule') -> None:
rule.bind(self)
self.endpoints[rule.endpoint].add(rule)
self.rules.add(rule)
def bind_to_request(
self,
scheme: str,
server_name: str,
method: str,
path: str,
query_string: bytes,
) -> 'MapAdapter':
return MapAdapter(self, scheme, server_name, method, path, query_string)
def bind(self, scheme: str, server_name: str) -> 'MapAdapter':
return MapAdapter(self, scheme, server_name)
def iter_rules(self, endpoint: Optional[str]=None) -> Iterator['Rule']:
if endpoint is not None:
return iter(self.endpoints[endpoint])
return iter(self.rules)
class MapAdapter:
def __init__(
self,
map: Map,
scheme: str,
server_name: str,
method: Optional[str]=None,
path: Optional[str]=None,
query_string: Optional[bytes]=None,
) -> None:
self.map = map
self.scheme = scheme
self.server_name = server_name
self.path = f"/{path.lstrip('/')}" if path is not None else path
self.method = method
self.query_string = query_string
def build(
self,
endpoint: str,
values: Optional[dict]=None,
method: Optional[str]=None,
scheme: Optional[str]=None,
external: bool=False,
) -> str:
values = values or {}
rules = self.map.endpoints[endpoint]
for rule in rules:
if rule.buildable(values, method=method):
path = rule.build(**values)
if external:
scheme = scheme or self.scheme
host = rule.host or self.server_name
return f"{scheme}://{host}{path}"
else:
return path
raise BuildError(endpoint, rules, values, method)
def match(self) -> Tuple['Rule', Dict[str, Any]]:
allowed_methods: Set[str] = set()
for rule, variables, needs_slash in self._matches():
if self.method in rule.methods:
if needs_slash:
raise RedirectRequired(self._make_redirect_url(rule, variables))
# Check if there is a default rule that can be used instead
for potential_rule in self.map.endpoints[rule.endpoint]:
if potential_rule.provides_defaults_for(rule, **variables):
raise RedirectRequired(self._make_redirect_url(potential_rule, variables))
return rule, variables
else:
allowed_methods.update(rule.methods)
if allowed_methods:
raise MethodNotAllowed(allowed_methods=allowed_methods)
raise NotFound()
def _make_redirect_url(self, rule: 'Rule', variables: Dict[str, Any]) -> str:
path = rule.build(**variables)
suffix = self.query_string.decode('ascii')
return urlunsplit((self.scheme, self.server_name, path, suffix, ''))
def allowed_methods(self) -> Set[str]:
allowed_methods: Set[str] = set()
for rule, *_ in self._matches():
allowed_methods.update(rule.methods)
return allowed_methods
def _matches(self) -> Generator[Tuple['Rule', Dict[str, Any], bool], None, None]:
if self.map.host_matching:
full_path = f"{self.server_name}|{self.path}"
else:
full_path = f"|{self.path}"
for rule in self.map.rules:
variables, needs_slash = rule.match(full_path)
if variables is not None:
yield rule, variables, needs_slash
class Rule:
def __init__(
self,
rule: str,
methods: Set[str],
endpoint: str,
strict_slashes: bool=True,
defaults: Optional[dict]=None,
host: Optional[str]=None,
*,
provide_automatic_options: bool=True,
is_websocket: bool=False,
) -> None:
if not rule.startswith('/'):
raise ValueError(f"Rule '{rule}' does not start with a slash")
self.rule = rule
self.is_leaf = not rule.endswith('/')
self.is_websocket = is_websocket
if 'GET' in methods and 'HEAD' not in methods and not self.is_websocket:
methods.add('HEAD')
self.methods = frozenset(method.upper() for method in methods)
if self.is_websocket and self.methods != {'GET'}: # type: ignore
raise ValueError(f"{methods} must only be GET for a websocket route")
self.endpoint = endpoint
self.strict_slashes = strict_slashes
self.defaults = defaults or {}
self.host = host
self.map: Optional[Map] = None
self._pattern: Optional[Pattern] = None
self._builder: Optional[str] = None
self._converters: Dict[str, BaseConverter] = {}
self._weights: List[WeightedPart] = []
self.provide_automatic_options = provide_automatic_options
def __repr__(self) -> str:
return f"Rule({self.rule}, {self.methods}, {self.endpoint}, {self.strict_slashes})"
def match(self, path: str) -> Tuple[Optional[Dict[str, Any]], bool]:
"""Check if the path matches this Rule.
If it does it returns a dict of matched and converted values,
otherwise None is returned.
"""
match = self._pattern.match(path)
if match is not None:
# If the route is a branch (not leaf) and the path is
# missing a trailing slash then it needs one to be
# considered a match in the strict slashes mode.
needs_slash = (
self.strict_slashes and not self.is_leaf and match.groupdict()['__slash__'] != '/'
)
try:
converted_varaibles = {
name: self._converters[name].to_python(value)
for name, value in match.groupdict().items()
if name != '__slash__'
}
except ValidationError: # Doesn't meet conversion rules, no match
return None, False
else:
return {**self.defaults, **converted_varaibles}, needs_slash
else:
return None, False
def provides_defaults_for(self, rule: 'Rule', **values: Any) -> bool:
"""Returns true if this rule provides defaults for the argument and values."""
defaults_match = all(
values[key] == self.defaults[key] for key in self.defaults if key in values # noqa: S101, E501
)
return self != rule and bool(self.defaults) and defaults_match
def build(self, **values: Any) -> str:
"""Build this rule into a path using the values given."""
converted_values = {
key: self._converters[key].to_url(value)
for key, value in values.items()
if key in self._converters
}
result = self._builder.format(**converted_values).split('|', 1)[1]
query_string = urlencode(
{
key: value
for key, value in values.items()
if key not in self._converters and key not in self.defaults
},
doseq=True,
)
if query_string:
result = "{}?{}".format(result, query_string)
return result
def buildable(self, values: Optional[dict]=None, method: Optional[str]=None) -> bool:
"""Return True if this rule can build with the values and method."""
if method is not None and method not in self.methods:
return False
defaults_match = all(
values[key] == self.defaults[key] for key in self.defaults if key in values # noqa: S101, E501
)
return defaults_match and set(values.keys()) >= set(self._converters.keys())
def bind(self, map: Map) -> None:
"""Bind the Rule to a Map and compile it."""
if self.map is not None:
raise RuntimeError(f"{self!r} is already bound to {self.map!r}")
self.map = map
pattern = ''
builder = ''
full_rule = "{}\\|{}".format(self.host or '', self.rule)
for part in _parse_rule(full_rule):
if isinstance(part, VariablePart):
converter = self.map.converters[part.converter](
*part.arguments[0], **part.arguments[1],
)
pattern += f"(?P<{part.name}>{converter.regex})"
self._converters[part.name] = converter
builder += '{' + part.name + '}'
self._weights.append(WeightedPart(True, converter.weight))
else:
builder += part
pattern += part
self._weights.append(WeightedPart(False, -len(part)))
if not self.is_leaf or not self.strict_slashes:
# Pattern should match with or without a trailing slash
pattern = f"{pattern.rstrip('/')}(?<!/)(?P<__slash__>/?)$"
else:
pattern = f"{pattern}$"
self._pattern = re.compile(pattern)
self._builder = builder
@property
def match_key(self) -> Tuple[bool, bool, int, List[WeightedPart]]:
"""A Key to sort the rules by weight for matching.
The key leads to ordering:
- By first order by defaults as they are simple rules without
conversions.
- Then on the complexity of the rule, i.e. does it have any
converted parts. This is as simple rules are quick to match
or reject.
- Then by the number of parts, with more complex (more parts)
first.
- Finally by the weights themselves. Note that weights are also
sub keyed by converter first then weight second.
"""
if self.map is None:
raise RuntimeError(f"{self!r} is not bound to a Map")
complex_rule = any(weight.converter for weight in self._weights)
return (not bool(self.defaults), complex_rule, -len(self._weights), self._weights)
@property
def build_key(self) -> Tuple[bool, int]:
"""A Key to sort the rules by weight for building.
The key leads to ordering:
- By routes with defaults first, as these must be evaulated
for building before ones without.
- Then the more complex routes (most converted parts).
"""
if self.map is None:
raise RuntimeError(f"{self!r} is not bound to a Map")
return (not bool(self.defaults), -sum(1 for weight in self._weights if weight.converter))
def _parse_rule(rule: str) -> Generator[Union[str, VariablePart], None, None]:
variable_names: Set[str] = set()
final_match = 0
for match in ROUTE_VAR_RE.finditer(rule):
named_groups = match.groupdict()
if named_groups['static'] is not None:
yield named_groups['static']
variable = named_groups['variable']
if variable in variable_names:
raise ValueError(f"Variable name {variable} used more than once")
else:
variable_names.add(variable)
arguments = _parse_converter_args(named_groups['args'] or '')
yield VariablePart(named_groups['converter'] or 'default', arguments, variable)
final_match = match.span()[-1]
yield rule[final_match:]
def _parse_converter_args(raw: str) -> Tuple[List[Any], Dict[str, Any]]:
raw += ',' # Simplifies matching regex if each argument has a trailing comma
args = []
kwargs = {}
for match in CONVERTER_ARGS_RE.finditer(raw):
value = match.group('str_value') or match.group('value')
try:
value = literal_eval(value)
except ValueError:
value = str(value)
name = match.group('name')
if not name:
args.append(value)
else:
kwargs[name] = value
return args, kwargs | 0.770637 | 0.155751 |
from django.contrib.auth import views as auth_views
from django.urls import include, path, reverse_lazy
from .views import (
AccountView,
ActivationView,
LoginView,
become,
create_password,
oauth,
)
app_name = 'users'
public_urlpatterns = [
path(
'login/',
LoginView.as_view(
template_name='users/login.html',
redirect_authenticated_user=True
),
name='login'
),
# Log out
path('logout/', auth_views.LogoutView.as_view(next_page='/'), name='logout'),
]
urlpatterns = [
path('account/', include([
path('', AccountView.as_view(), name='account'),
path('become/', become, name='become'),
path('password/', include([
path(
'change/',
auth_views.PasswordChangeView.as_view(
template_name="users/change_password.html",
success_url=reverse_lazy('users:account')
),
name='password_change',
),
path(
'reset/',
auth_views.PasswordResetView.as_view(
template_name='users/password_reset/form.html',
email_template_name='users/password_reset/email.txt',
success_url=reverse_lazy('users:password_reset_done')
),
name='password_reset',
),
path(
'reset/done/',
auth_views.PasswordResetDoneView.as_view(template_name='users/password_reset/done.html'),
name='password_reset_done'
),
path(
'reset/confirm/<uidb64>/<token>/',
auth_views.PasswordResetConfirmView.as_view(
template_name='users/password_reset/confirm.html',
post_reset_login=True,
post_reset_login_backend='django.contrib.auth.backends.ModelBackend',
success_url=reverse_lazy('users:account')
),
name='password_reset_confirm'
),
path(
'reset/complete/',
auth_views.PasswordResetCompleteView.as_view(template_name='users/password_reset/complete.html'),
name='password_reset_complete'
),
])),
path(
'activate/<uidb64>/<token>/',
ActivationView.as_view(),
name='activate'
),
path('activate/', create_password, name="activate_password"),
path('oauth', oauth, name='oauth'),
])),
] | hypha/apply/users/urls.py | from django.contrib.auth import views as auth_views
from django.urls import include, path, reverse_lazy
from .views import (
AccountView,
ActivationView,
LoginView,
become,
create_password,
oauth,
)
app_name = 'users'
public_urlpatterns = [
path(
'login/',
LoginView.as_view(
template_name='users/login.html',
redirect_authenticated_user=True
),
name='login'
),
# Log out
path('logout/', auth_views.LogoutView.as_view(next_page='/'), name='logout'),
]
urlpatterns = [
path('account/', include([
path('', AccountView.as_view(), name='account'),
path('become/', become, name='become'),
path('password/', include([
path(
'change/',
auth_views.PasswordChangeView.as_view(
template_name="users/change_password.html",
success_url=reverse_lazy('users:account')
),
name='password_change',
),
path(
'reset/',
auth_views.PasswordResetView.as_view(
template_name='users/password_reset/form.html',
email_template_name='users/password_reset/email.txt',
success_url=reverse_lazy('users:password_reset_done')
),
name='password_reset',
),
path(
'reset/done/',
auth_views.PasswordResetDoneView.as_view(template_name='users/password_reset/done.html'),
name='password_reset_done'
),
path(
'reset/confirm/<uidb64>/<token>/',
auth_views.PasswordResetConfirmView.as_view(
template_name='users/password_reset/confirm.html',
post_reset_login=True,
post_reset_login_backend='django.contrib.auth.backends.ModelBackend',
success_url=reverse_lazy('users:account')
),
name='password_reset_confirm'
),
path(
'reset/complete/',
auth_views.PasswordResetCompleteView.as_view(template_name='users/password_reset/complete.html'),
name='password_reset_complete'
),
])),
path(
'activate/<uidb64>/<token>/',
ActivationView.as_view(),
name='activate'
),
path('activate/', create_password, name="activate_password"),
path('oauth', oauth, name='oauth'),
])),
] | 0.334372 | 0.094218 |
import asyncio
import logging
from typing import Any, Dict, List, Optional, Tuple, Type, Union
from uuid import UUID
import aiohttp
from aiohttp import ClientTimeout, web
from models_library.projects import ProjectID
from models_library.projects_pipeline import ComputationTask
from models_library.settings.services_common import ServicesCommonSettings
from models_library.users import UserID
from pydantic.types import PositiveInt
from servicelib.logging_utils import log_decorator
from servicelib.utils import logged_gather
from tenacity._asyncio import AsyncRetrying
from tenacity.before_sleep import before_sleep_log
from tenacity.stop import stop_after_attempt
from tenacity.wait import wait_random
from yarl import URL
from .director_v2_abc import AbstractProjectRunPolicy
from .director_v2_settings import Directorv2Settings, get_client_session, get_settings
log = logging.getLogger(__file__)
_APP_DIRECTOR_V2_CLIENT_KEY = f"{__name__}.DirectorV2ApiClient"
SERVICE_HEALTH_CHECK_TIMEOUT = ClientTimeout(total=2, connect=1) # type:ignore
SERVICE_RETRIEVE_HTTP_TIMEOUT = ClientTimeout(
total=60 * 60, connect=None, sock_connect=5 # type:ignore
)
DEFAULT_RETRY_POLICY = dict(
wait=wait_random(0, 1),
stop=stop_after_attempt(2),
reraise=True,
before_sleep=before_sleep_log(log, logging.WARNING),
)
DataType = Dict[str, Any]
DataBody = Union[DataType, List[DataType], None]
# base/ERRORS ------------------------------------------------
class DirectorServiceError(Exception):
"""Basic exception for errors raised by director"""
def __init__(self, status: int, reason: str):
self.status = status
self.reason = reason
super().__init__(f"forwarded call failed with status {status}, reason {reason}")
# base/HELPERS ------------------------------------------------
class DirectorV2ApiClient:
def __init__(self, app: web.Application) -> None:
self._app = app
self._settings: Directorv2Settings = get_settings(app)
self._base_url = URL(self._settings.endpoint)
async def start(self, project_id: ProjectID, user_id: UserID, **options) -> str:
computation_task_out = await _request_director_v2(
self._app,
"POST",
self._base_url / "computations",
expected_status=web.HTTPCreated,
data={"user_id": user_id, "project_id": project_id, **options},
)
assert isinstance(computation_task_out, dict) # nosec
return computation_task_out["id"]
async def stop(self, project_id: ProjectID, user_id: UserID):
await _request_director_v2(
self._app,
"POST",
self._base_url / "computations" / f"{project_id}:stop",
expected_status=web.HTTPAccepted,
data={"user_id": user_id},
)
def get_client(app: web.Application) -> Optional[DirectorV2ApiClient]:
return app.get(_APP_DIRECTOR_V2_CLIENT_KEY)
def set_client(app: web.Application, obj: DirectorV2ApiClient):
app[_APP_DIRECTOR_V2_CLIENT_KEY] = obj
async def _request_director_v2(
app: web.Application,
method: str,
url: URL,
expected_status: Type[web.HTTPSuccessful] = web.HTTPOk,
headers: Optional[Dict[str, str]] = None,
data: Optional[Any] = None,
**kwargs,
) -> DataBody:
try:
async for attempt in AsyncRetrying(**DEFAULT_RETRY_POLICY):
with attempt:
session = get_client_session(app)
async with session.request(
method, url, headers=headers, json=data, **kwargs
) as response:
payload = (
await response.json()
if response.content_type == "application/json"
else await response.text()
)
# NOTE:
# - `sometimes director-v0` (via redirects) replies
# in plain text and this is considered an error
# - `director-v2` and `director-v0` can reply with 204 no content
if response.status != expected_status.status_code or isinstance(
payload, str
):
raise DirectorServiceError(response.status, reason=f"{payload}")
return payload
# TODO: enrich with https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions
except asyncio.TimeoutError as err:
raise DirectorServiceError(
web.HTTPServiceUnavailable.status_code,
reason=f"request to director-v2 timed-out: {err}",
) from err
except aiohttp.ClientError as err:
raise DirectorServiceError(
web.HTTPServiceUnavailable.status_code,
reason=f"request to director-v2 service unexpected error {err}",
) from err
raise DirectorServiceError(
web.HTTPClientError.status_code, reason="Unexpected client error"
)
# POLICY ------------------------------------------------
class DefaultProjectRunPolicy(AbstractProjectRunPolicy):
# pylint: disable=unused-argument
async def get_runnable_projects_ids(
self,
request: web.Request,
project_uuid: ProjectID,
) -> List[ProjectID]:
return [
project_uuid,
]
async def get_or_create_runnable_projects(
self,
request: web.Request,
project_uuid: ProjectID,
) -> Tuple[List[ProjectID], List[int]]:
"""
Returns ids and refid of projects that can run
If project_uuid is a std-project, then it returns itself
If project_uuid is a meta-project, then it returns iterations
"""
return (
[
project_uuid,
],
[],
)
# calls to director-v2 API ------------------------------------------------
async def is_healthy(app: web.Application) -> bool:
try:
session = get_client_session(app)
settings: Directorv2Settings = get_settings(app)
health_check_url = URL(settings.endpoint).parent
await session.get(
url=health_check_url,
ssl=False,
raise_for_status=True,
timeout=SERVICE_HEALTH_CHECK_TIMEOUT,
)
return True
except (aiohttp.ClientError, asyncio.TimeoutError) as err:
# SEE https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions
log.warning("Director is NOT healthy: %s", err)
return False
@log_decorator(logger=log)
async def create_or_update_pipeline(
app: web.Application, user_id: PositiveInt, project_id: UUID
) -> Optional[DataType]:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/computations")
body = {"user_id": user_id, "project_id": f"{project_id}"}
# request to director-v2
try:
computation_task_out = await _request_director_v2(
app, "POST", backend_url, expected_status=web.HTTPCreated, data=body
)
assert isinstance(computation_task_out, dict) # nosec
return computation_task_out
except DirectorServiceError as exc:
log.error("could not create pipeline from project %s: %s", project_id, exc)
@log_decorator(logger=log)
async def get_computation_task(
app: web.Application, user_id: PositiveInt, project_id: UUID
) -> Optional[ComputationTask]:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/computations/{project_id}").update_query(
user_id=user_id
)
# request to director-v2
try:
computation_task_out_dict = await _request_director_v2(
app, "GET", backend_url, expected_status=web.HTTPAccepted
)
task_out = ComputationTask.parse_obj(computation_task_out_dict)
log.debug("found computation task: %s", f"{task_out=}")
return task_out
except DirectorServiceError as exc:
if exc.status == web.HTTPNotFound.status_code:
# the pipeline might not exist and that is ok
return
log.warning(
"getting pipeline for project %s failed: %s.", f"{project_id=}", exc
)
@log_decorator(logger=log)
async def delete_pipeline(
app: web.Application, user_id: PositiveInt, project_id: UUID
) -> None:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/computations/{project_id}")
body = {"user_id": user_id, "force": True}
# request to director-v2
await _request_director_v2(
app, "DELETE", backend_url, expected_status=web.HTTPNoContent, data=body
)
@log_decorator(logger=log)
async def request_retrieve_dyn_service(
app: web.Application, service_uuid: str, port_keys: List[str]
) -> None:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/dynamic_services/{service_uuid}:retrieve")
body = {"port_keys": port_keys}
try:
await _request_director_v2(
app, "POST", backend_url, data=body, timeout=SERVICE_RETRIEVE_HTTP_TIMEOUT
)
except DirectorServiceError as exc:
log.warning(
"Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s:%s]",
service_uuid,
port_keys,
exc.status,
exc.reason,
)
@log_decorator(logger=log)
async def start_service(
app: web.Application,
user_id: PositiveInt,
project_id: str,
service_key: str,
service_version: str,
service_uuid: str,
request_dns: str,
request_scheme: str,
) -> DataType:
"""
Requests to start a service:
- legacy services request is redirected to `director-v0`
- dynamic-sidecar `director-v2` will handle the request
"""
data = {
"user_id": user_id,
"project_id": project_id,
"key": service_key,
"version": service_version,
"node_uuid": service_uuid,
"basepath": f"/x/{service_uuid}",
}
headers = {
"X-Dynamic-Sidecar-Request-DNS": request_dns,
"X-Dynamic-Sidecar-Request-Scheme": request_scheme,
}
settings: Directorv2Settings = get_settings(app)
backend_url = URL(settings.endpoint) / "dynamic_services"
started_service = await _request_director_v2(
app,
"POST",
backend_url,
data=data,
headers=headers,
expected_status=web.HTTPCreated,
)
assert isinstance(started_service, dict) # nosec
return started_service
@log_decorator(logger=log)
async def get_services(
app: web.Application,
user_id: Optional[PositiveInt] = None,
project_id: Optional[str] = None,
) -> List[DataType]:
params = {}
if user_id:
params["user_id"] = user_id
if project_id:
params["project_id"] = project_id
settings: Directorv2Settings = get_settings(app)
backend_url = URL(settings.endpoint) / "dynamic_services"
services = await _request_director_v2(
app, "GET", backend_url, params=params, expected_status=web.HTTPOk
)
assert isinstance(services, list) # nosec
return services
@log_decorator(logger=log)
async def stop_service(
app: web.Application, service_uuid: str, save_state: Optional[bool] = True
) -> None:
# stopping a service can take a lot of time
# bumping the stop command timeout to 1 hour
# this will allow to sava bigger datasets from the services
timeout = ServicesCommonSettings().webserver_director_stop_service_timeout
settings: Directorv2Settings = get_settings(app)
backend_url = (
URL(settings.endpoint) / "dynamic_services" / f"{service_uuid}"
).update_query(
save_state="true" if save_state else "false",
)
await _request_director_v2(
app, "DELETE", backend_url, expected_status=web.HTTPNoContent, timeout=timeout
)
@log_decorator(logger=log)
async def list_running_dynamic_services(
app: web.Application, user_id: PositiveInt, project_id: ProjectID
) -> List[DataType]:
"""
Retruns the running dynamic services from director-v0 and director-v2
"""
settings: Directorv2Settings = get_settings(app)
url = URL(settings.endpoint) / "dynamic_services"
backend_url = url.with_query(user_id=str(user_id), project_id=str(project_id))
services = await _request_director_v2(
app, "GET", backend_url, expected_status=web.HTTPOk
)
assert isinstance(services, list) # nosec
return services
@log_decorator(logger=log)
async def stop_services(
app: web.Application,
user_id: Optional[PositiveInt] = None,
project_id: Optional[str] = None,
save_state: Optional[bool] = True,
) -> None:
"""Stops all services in parallel"""
running_dynamic_services = await get_services(
app, user_id=user_id, project_id=project_id
)
services_to_stop = [
stop_service(
app=app, service_uuid=service["service_uuid"], save_state=save_state
)
for service in running_dynamic_services
]
await logged_gather(*services_to_stop)
@log_decorator(logger=log)
async def get_service_state(app: web.Application, node_uuid: str) -> DataType:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(settings.endpoint) / "dynamic_services" / f"{node_uuid}"
service_state = await _request_director_v2(
app, "GET", backend_url, expected_status=web.HTTPOk
)
assert isinstance(service_state, dict) # nosec
return service_state
@log_decorator(logger=log)
async def retrieve(
app: web.Application, node_uuid: str, port_keys: List[str]
) -> DataBody:
# when triggering retrieve endpoint
# this will allow to sava bigger datasets from the services
timeout = ServicesCommonSettings().storage_service_upload_download_timeout
director2_settings: Directorv2Settings = get_settings(app)
backend_url = (
URL(director2_settings.endpoint) / "dynamic_services" / f"{node_uuid}:retrieve"
)
body = dict(port_keys=port_keys)
retry_result = await _request_director_v2(
app,
"POST",
backend_url,
expected_status=web.HTTPOk,
data=body,
timeout=timeout,
)
assert isinstance(retry_result, dict) # nosec
return retry_result
@log_decorator(logger=log)
async def restart(app: web.Application, node_uuid: str) -> None:
# when triggering retrieve endpoint
# this will allow to sava bigger datasets from the services
timeout = ServicesCommonSettings().restart_containers_timeout
director2_settings: Directorv2Settings = get_settings(app)
backend_url = (
URL(director2_settings.endpoint) / "dynamic_services" / f"{node_uuid}:restart"
)
await _request_director_v2(
app,
"POST",
backend_url,
expected_status=web.HTTPOk,
timeout=timeout,
) | services/web/server/src/simcore_service_webserver/director_v2_core.py | import asyncio
import logging
from typing import Any, Dict, List, Optional, Tuple, Type, Union
from uuid import UUID
import aiohttp
from aiohttp import ClientTimeout, web
from models_library.projects import ProjectID
from models_library.projects_pipeline import ComputationTask
from models_library.settings.services_common import ServicesCommonSettings
from models_library.users import UserID
from pydantic.types import PositiveInt
from servicelib.logging_utils import log_decorator
from servicelib.utils import logged_gather
from tenacity._asyncio import AsyncRetrying
from tenacity.before_sleep import before_sleep_log
from tenacity.stop import stop_after_attempt
from tenacity.wait import wait_random
from yarl import URL
from .director_v2_abc import AbstractProjectRunPolicy
from .director_v2_settings import Directorv2Settings, get_client_session, get_settings
log = logging.getLogger(__file__)
_APP_DIRECTOR_V2_CLIENT_KEY = f"{__name__}.DirectorV2ApiClient"
SERVICE_HEALTH_CHECK_TIMEOUT = ClientTimeout(total=2, connect=1) # type:ignore
SERVICE_RETRIEVE_HTTP_TIMEOUT = ClientTimeout(
total=60 * 60, connect=None, sock_connect=5 # type:ignore
)
DEFAULT_RETRY_POLICY = dict(
wait=wait_random(0, 1),
stop=stop_after_attempt(2),
reraise=True,
before_sleep=before_sleep_log(log, logging.WARNING),
)
DataType = Dict[str, Any]
DataBody = Union[DataType, List[DataType], None]
# base/ERRORS ------------------------------------------------
class DirectorServiceError(Exception):
"""Basic exception for errors raised by director"""
def __init__(self, status: int, reason: str):
self.status = status
self.reason = reason
super().__init__(f"forwarded call failed with status {status}, reason {reason}")
# base/HELPERS ------------------------------------------------
class DirectorV2ApiClient:
def __init__(self, app: web.Application) -> None:
self._app = app
self._settings: Directorv2Settings = get_settings(app)
self._base_url = URL(self._settings.endpoint)
async def start(self, project_id: ProjectID, user_id: UserID, **options) -> str:
computation_task_out = await _request_director_v2(
self._app,
"POST",
self._base_url / "computations",
expected_status=web.HTTPCreated,
data={"user_id": user_id, "project_id": project_id, **options},
)
assert isinstance(computation_task_out, dict) # nosec
return computation_task_out["id"]
async def stop(self, project_id: ProjectID, user_id: UserID):
await _request_director_v2(
self._app,
"POST",
self._base_url / "computations" / f"{project_id}:stop",
expected_status=web.HTTPAccepted,
data={"user_id": user_id},
)
def get_client(app: web.Application) -> Optional[DirectorV2ApiClient]:
return app.get(_APP_DIRECTOR_V2_CLIENT_KEY)
def set_client(app: web.Application, obj: DirectorV2ApiClient):
app[_APP_DIRECTOR_V2_CLIENT_KEY] = obj
async def _request_director_v2(
app: web.Application,
method: str,
url: URL,
expected_status: Type[web.HTTPSuccessful] = web.HTTPOk,
headers: Optional[Dict[str, str]] = None,
data: Optional[Any] = None,
**kwargs,
) -> DataBody:
try:
async for attempt in AsyncRetrying(**DEFAULT_RETRY_POLICY):
with attempt:
session = get_client_session(app)
async with session.request(
method, url, headers=headers, json=data, **kwargs
) as response:
payload = (
await response.json()
if response.content_type == "application/json"
else await response.text()
)
# NOTE:
# - `sometimes director-v0` (via redirects) replies
# in plain text and this is considered an error
# - `director-v2` and `director-v0` can reply with 204 no content
if response.status != expected_status.status_code or isinstance(
payload, str
):
raise DirectorServiceError(response.status, reason=f"{payload}")
return payload
# TODO: enrich with https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions
except asyncio.TimeoutError as err:
raise DirectorServiceError(
web.HTTPServiceUnavailable.status_code,
reason=f"request to director-v2 timed-out: {err}",
) from err
except aiohttp.ClientError as err:
raise DirectorServiceError(
web.HTTPServiceUnavailable.status_code,
reason=f"request to director-v2 service unexpected error {err}",
) from err
raise DirectorServiceError(
web.HTTPClientError.status_code, reason="Unexpected client error"
)
# POLICY ------------------------------------------------
class DefaultProjectRunPolicy(AbstractProjectRunPolicy):
# pylint: disable=unused-argument
async def get_runnable_projects_ids(
self,
request: web.Request,
project_uuid: ProjectID,
) -> List[ProjectID]:
return [
project_uuid,
]
async def get_or_create_runnable_projects(
self,
request: web.Request,
project_uuid: ProjectID,
) -> Tuple[List[ProjectID], List[int]]:
"""
Returns ids and refid of projects that can run
If project_uuid is a std-project, then it returns itself
If project_uuid is a meta-project, then it returns iterations
"""
return (
[
project_uuid,
],
[],
)
# calls to director-v2 API ------------------------------------------------
async def is_healthy(app: web.Application) -> bool:
try:
session = get_client_session(app)
settings: Directorv2Settings = get_settings(app)
health_check_url = URL(settings.endpoint).parent
await session.get(
url=health_check_url,
ssl=False,
raise_for_status=True,
timeout=SERVICE_HEALTH_CHECK_TIMEOUT,
)
return True
except (aiohttp.ClientError, asyncio.TimeoutError) as err:
# SEE https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions
log.warning("Director is NOT healthy: %s", err)
return False
@log_decorator(logger=log)
async def create_or_update_pipeline(
app: web.Application, user_id: PositiveInt, project_id: UUID
) -> Optional[DataType]:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/computations")
body = {"user_id": user_id, "project_id": f"{project_id}"}
# request to director-v2
try:
computation_task_out = await _request_director_v2(
app, "POST", backend_url, expected_status=web.HTTPCreated, data=body
)
assert isinstance(computation_task_out, dict) # nosec
return computation_task_out
except DirectorServiceError as exc:
log.error("could not create pipeline from project %s: %s", project_id, exc)
@log_decorator(logger=log)
async def get_computation_task(
app: web.Application, user_id: PositiveInt, project_id: UUID
) -> Optional[ComputationTask]:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/computations/{project_id}").update_query(
user_id=user_id
)
# request to director-v2
try:
computation_task_out_dict = await _request_director_v2(
app, "GET", backend_url, expected_status=web.HTTPAccepted
)
task_out = ComputationTask.parse_obj(computation_task_out_dict)
log.debug("found computation task: %s", f"{task_out=}")
return task_out
except DirectorServiceError as exc:
if exc.status == web.HTTPNotFound.status_code:
# the pipeline might not exist and that is ok
return
log.warning(
"getting pipeline for project %s failed: %s.", f"{project_id=}", exc
)
@log_decorator(logger=log)
async def delete_pipeline(
app: web.Application, user_id: PositiveInt, project_id: UUID
) -> None:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/computations/{project_id}")
body = {"user_id": user_id, "force": True}
# request to director-v2
await _request_director_v2(
app, "DELETE", backend_url, expected_status=web.HTTPNoContent, data=body
)
@log_decorator(logger=log)
async def request_retrieve_dyn_service(
app: web.Application, service_uuid: str, port_keys: List[str]
) -> None:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(f"{settings.endpoint}/dynamic_services/{service_uuid}:retrieve")
body = {"port_keys": port_keys}
try:
await _request_director_v2(
app, "POST", backend_url, data=body, timeout=SERVICE_RETRIEVE_HTTP_TIMEOUT
)
except DirectorServiceError as exc:
log.warning(
"Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s:%s]",
service_uuid,
port_keys,
exc.status,
exc.reason,
)
@log_decorator(logger=log)
async def start_service(
app: web.Application,
user_id: PositiveInt,
project_id: str,
service_key: str,
service_version: str,
service_uuid: str,
request_dns: str,
request_scheme: str,
) -> DataType:
"""
Requests to start a service:
- legacy services request is redirected to `director-v0`
- dynamic-sidecar `director-v2` will handle the request
"""
data = {
"user_id": user_id,
"project_id": project_id,
"key": service_key,
"version": service_version,
"node_uuid": service_uuid,
"basepath": f"/x/{service_uuid}",
}
headers = {
"X-Dynamic-Sidecar-Request-DNS": request_dns,
"X-Dynamic-Sidecar-Request-Scheme": request_scheme,
}
settings: Directorv2Settings = get_settings(app)
backend_url = URL(settings.endpoint) / "dynamic_services"
started_service = await _request_director_v2(
app,
"POST",
backend_url,
data=data,
headers=headers,
expected_status=web.HTTPCreated,
)
assert isinstance(started_service, dict) # nosec
return started_service
@log_decorator(logger=log)
async def get_services(
app: web.Application,
user_id: Optional[PositiveInt] = None,
project_id: Optional[str] = None,
) -> List[DataType]:
params = {}
if user_id:
params["user_id"] = user_id
if project_id:
params["project_id"] = project_id
settings: Directorv2Settings = get_settings(app)
backend_url = URL(settings.endpoint) / "dynamic_services"
services = await _request_director_v2(
app, "GET", backend_url, params=params, expected_status=web.HTTPOk
)
assert isinstance(services, list) # nosec
return services
@log_decorator(logger=log)
async def stop_service(
app: web.Application, service_uuid: str, save_state: Optional[bool] = True
) -> None:
# stopping a service can take a lot of time
# bumping the stop command timeout to 1 hour
# this will allow to sava bigger datasets from the services
timeout = ServicesCommonSettings().webserver_director_stop_service_timeout
settings: Directorv2Settings = get_settings(app)
backend_url = (
URL(settings.endpoint) / "dynamic_services" / f"{service_uuid}"
).update_query(
save_state="true" if save_state else "false",
)
await _request_director_v2(
app, "DELETE", backend_url, expected_status=web.HTTPNoContent, timeout=timeout
)
@log_decorator(logger=log)
async def list_running_dynamic_services(
app: web.Application, user_id: PositiveInt, project_id: ProjectID
) -> List[DataType]:
"""
Retruns the running dynamic services from director-v0 and director-v2
"""
settings: Directorv2Settings = get_settings(app)
url = URL(settings.endpoint) / "dynamic_services"
backend_url = url.with_query(user_id=str(user_id), project_id=str(project_id))
services = await _request_director_v2(
app, "GET", backend_url, expected_status=web.HTTPOk
)
assert isinstance(services, list) # nosec
return services
@log_decorator(logger=log)
async def stop_services(
app: web.Application,
user_id: Optional[PositiveInt] = None,
project_id: Optional[str] = None,
save_state: Optional[bool] = True,
) -> None:
"""Stops all services in parallel"""
running_dynamic_services = await get_services(
app, user_id=user_id, project_id=project_id
)
services_to_stop = [
stop_service(
app=app, service_uuid=service["service_uuid"], save_state=save_state
)
for service in running_dynamic_services
]
await logged_gather(*services_to_stop)
@log_decorator(logger=log)
async def get_service_state(app: web.Application, node_uuid: str) -> DataType:
settings: Directorv2Settings = get_settings(app)
backend_url = URL(settings.endpoint) / "dynamic_services" / f"{node_uuid}"
service_state = await _request_director_v2(
app, "GET", backend_url, expected_status=web.HTTPOk
)
assert isinstance(service_state, dict) # nosec
return service_state
@log_decorator(logger=log)
async def retrieve(
app: web.Application, node_uuid: str, port_keys: List[str]
) -> DataBody:
# when triggering retrieve endpoint
# this will allow to sava bigger datasets from the services
timeout = ServicesCommonSettings().storage_service_upload_download_timeout
director2_settings: Directorv2Settings = get_settings(app)
backend_url = (
URL(director2_settings.endpoint) / "dynamic_services" / f"{node_uuid}:retrieve"
)
body = dict(port_keys=port_keys)
retry_result = await _request_director_v2(
app,
"POST",
backend_url,
expected_status=web.HTTPOk,
data=body,
timeout=timeout,
)
assert isinstance(retry_result, dict) # nosec
return retry_result
@log_decorator(logger=log)
async def restart(app: web.Application, node_uuid: str) -> None:
# when triggering retrieve endpoint
# this will allow to sava bigger datasets from the services
timeout = ServicesCommonSettings().restart_containers_timeout
director2_settings: Directorv2Settings = get_settings(app)
backend_url = (
URL(director2_settings.endpoint) / "dynamic_services" / f"{node_uuid}:restart"
)
await _request_director_v2(
app,
"POST",
backend_url,
expected_status=web.HTTPOk,
timeout=timeout,
) | 0.721743 | 0.119305 |
import re
import sys
import textwrap
import time
import xml.etree.ElementTree as ET
from urllib.parse import urlparse
import requests
from django.utils.translation import ugettext_lazy as _
from orchestra.contrib.orchestration import ServiceController
from orchestra.contrib.resources import ServiceMonitor
from . import ApacheTrafficByName
from .. import settings
class OwnClouwAPIMixin(object):
def validate_response(self, response):
request = response.request
context = (request.method, response.url, request.body, response.status_code)
sys.stderr.write("%s %s '%s' HTTP %s\n" % context)
if response.status_code != requests.codes.ok:
raise RuntimeError("%s %s '%s' HTTP %s" % context)
root = ET.fromstring(response.text)
statuscode = root.find("./meta/statuscode").text
if statuscode != '100':
message = root.find("./meta/status").text
request = response.request
context = (request.method, response.url, request.body, statuscode, message)
raise RuntimeError("%s %s '%s' ERROR %s, %s" % context)
def api_call(self, action, url_path, *args, **kwargs):
BASE_URL = settings.SAAS_OWNCLOUD_API_URL.rstrip('/')
url = '/'.join((BASE_URL, url_path))
response = action(url, *args, **kwargs)
self.validate_response(response)
return response
def api_get(self, url_path, *args, **kwargs):
return self.api_call(requests.get, url_path, *args, **kwargs)
def api_post(self, url_path, *args, **kwargs):
return self.api_call(requests.post, url_path, *args, **kwargs)
def api_put(self, url_path, *args, **kwargs):
return self.api_call(requests.put, url_path, *args, **kwargs)
def api_delete(self, url_path, *args, **kwargs):
return self.api_call(requests.delete, url_path, *args, **kwargs)
def create(self, saas):
data = {
'userid': saas.name,
'password': <PASSWORD>
}
self.api_post('users', data)
def update(self, saas):
"""
key: email|quota|display|password
value: el valor a modificar.
Si es un email, tornarà un error si la direcció no te la "@"
Si es una quota, sembla que algo per l'estil "5G", "100M", etc. funciona. Quota 0 = infinit
"display" es el display name, no crec que el fem servir, és cosmetic
"""
data = {
'key': 'password',
'value': saas.password,
}
self.api_put('users/%s' % saas.name, data)
def get_user(self, saas):
"""
{
'displayname'
'email'
'quota' =>
{
'free' (en Bytes)
'relative' (en tant per cent sense signe %, e.g. 68.17)
'total' (en Bytes)
'used' (en Bytes)
}
}
"""
response = self.api_get('users/%s' % saas.name)
root = ET.fromstring(response.text)
ret = {}
for data in root.find('./data'):
ret[data.tag] = data.text
ret['quota'] = {}
for data in root.find('.data/quota'):
ret['quota'][data.tag] = data.text
return ret
class OwnCloudController(OwnClouwAPIMixin, ServiceController):
"""
Creates a wordpress site on a WordPress MultiSite installation.
You should point it to the database server
"""
verbose_name = _("ownCloud SaaS")
model = 'saas.SaaS'
default_route_match = "saas.service == 'owncloud'"
doc_settings = (settings,
('SAAS_OWNCLOUD_API_URL',)
)
def update_or_create(self, saas, server):
try:
self.api_get('users/%s' % saas.name)
except RuntimeError:
if getattr(saas, 'password'):
self.create(saas)
else:
raise
else:
if getattr(saas, 'password'):
self.update(saas)
def remove(self, saas, server):
self.api_delete('users/%s' % saas.name)
def save(self, saas):
# TODO disable user https://github.com/owncloud/core/issues/12601
self.append(self.update_or_create, saas)
def delete(self, saas):
self.append(self.remove, saas)
class OwncloudTraffic(ApacheTrafficByName):
__doc__ = ApacheTrafficByName.__doc__
verbose_name = _("ownCloud SaaS Traffic")
default_route_match = "saas.service == 'owncloud'"
doc_settings = (settings,
('SAAS_TRAFFIC_IGNORE_HOSTS', 'SAAS_OWNCLOUD_LOG_PATH')
)
log_path = settings.SAAS_OWNCLOUD_LOG_PATH
class OwnCloudDiskQuota(OwnClouwAPIMixin, ServiceMonitor):
model = 'saas.SaaS'
verbose_name = _("ownCloud SaaS Disk Quota")
default_route_match = "saas.service == 'owncloud'"
resource = ServiceMonitor.DISK
delete_old_equal_values = True
def monitor(self, user):
context = self.get_context(user)
self.append("echo %(object_id)s $(monitor %(base_home)s)" % context)
def get_context(self, user):
context = {
'object_id': user.pk,
'base_home': user.get_base_home(),
}
return replace(context, "'", '"')
def get_quota(self, saas, server):
try:
user = self.get_user(saas)
except requests.exceptions.ConnectionError:
time.sleep(2)
user = self.get_user(saas)
context = {
'object_id': saas.pk,
'used': int(user['quota'].get('used', 0)),
}
sys.stdout.write('%(object_id)i %(used)i\n' % context)
def monitor(self, saas):
self.append(self.get_quota, saas) | orchestra/contrib/saas/backends/owncloud.py | import re
import sys
import textwrap
import time
import xml.etree.ElementTree as ET
from urllib.parse import urlparse
import requests
from django.utils.translation import ugettext_lazy as _
from orchestra.contrib.orchestration import ServiceController
from orchestra.contrib.resources import ServiceMonitor
from . import ApacheTrafficByName
from .. import settings
class OwnClouwAPIMixin(object):
def validate_response(self, response):
request = response.request
context = (request.method, response.url, request.body, response.status_code)
sys.stderr.write("%s %s '%s' HTTP %s\n" % context)
if response.status_code != requests.codes.ok:
raise RuntimeError("%s %s '%s' HTTP %s" % context)
root = ET.fromstring(response.text)
statuscode = root.find("./meta/statuscode").text
if statuscode != '100':
message = root.find("./meta/status").text
request = response.request
context = (request.method, response.url, request.body, statuscode, message)
raise RuntimeError("%s %s '%s' ERROR %s, %s" % context)
def api_call(self, action, url_path, *args, **kwargs):
BASE_URL = settings.SAAS_OWNCLOUD_API_URL.rstrip('/')
url = '/'.join((BASE_URL, url_path))
response = action(url, *args, **kwargs)
self.validate_response(response)
return response
def api_get(self, url_path, *args, **kwargs):
return self.api_call(requests.get, url_path, *args, **kwargs)
def api_post(self, url_path, *args, **kwargs):
return self.api_call(requests.post, url_path, *args, **kwargs)
def api_put(self, url_path, *args, **kwargs):
return self.api_call(requests.put, url_path, *args, **kwargs)
def api_delete(self, url_path, *args, **kwargs):
return self.api_call(requests.delete, url_path, *args, **kwargs)
def create(self, saas):
data = {
'userid': saas.name,
'password': <PASSWORD>
}
self.api_post('users', data)
def update(self, saas):
"""
key: email|quota|display|password
value: el valor a modificar.
Si es un email, tornarà un error si la direcció no te la "@"
Si es una quota, sembla que algo per l'estil "5G", "100M", etc. funciona. Quota 0 = infinit
"display" es el display name, no crec que el fem servir, és cosmetic
"""
data = {
'key': 'password',
'value': saas.password,
}
self.api_put('users/%s' % saas.name, data)
def get_user(self, saas):
"""
{
'displayname'
'email'
'quota' =>
{
'free' (en Bytes)
'relative' (en tant per cent sense signe %, e.g. 68.17)
'total' (en Bytes)
'used' (en Bytes)
}
}
"""
response = self.api_get('users/%s' % saas.name)
root = ET.fromstring(response.text)
ret = {}
for data in root.find('./data'):
ret[data.tag] = data.text
ret['quota'] = {}
for data in root.find('.data/quota'):
ret['quota'][data.tag] = data.text
return ret
class OwnCloudController(OwnClouwAPIMixin, ServiceController):
"""
Creates a wordpress site on a WordPress MultiSite installation.
You should point it to the database server
"""
verbose_name = _("ownCloud SaaS")
model = 'saas.SaaS'
default_route_match = "saas.service == 'owncloud'"
doc_settings = (settings,
('SAAS_OWNCLOUD_API_URL',)
)
def update_or_create(self, saas, server):
try:
self.api_get('users/%s' % saas.name)
except RuntimeError:
if getattr(saas, 'password'):
self.create(saas)
else:
raise
else:
if getattr(saas, 'password'):
self.update(saas)
def remove(self, saas, server):
self.api_delete('users/%s' % saas.name)
def save(self, saas):
# TODO disable user https://github.com/owncloud/core/issues/12601
self.append(self.update_or_create, saas)
def delete(self, saas):
self.append(self.remove, saas)
class OwncloudTraffic(ApacheTrafficByName):
__doc__ = ApacheTrafficByName.__doc__
verbose_name = _("ownCloud SaaS Traffic")
default_route_match = "saas.service == 'owncloud'"
doc_settings = (settings,
('SAAS_TRAFFIC_IGNORE_HOSTS', 'SAAS_OWNCLOUD_LOG_PATH')
)
log_path = settings.SAAS_OWNCLOUD_LOG_PATH
class OwnCloudDiskQuota(OwnClouwAPIMixin, ServiceMonitor):
model = 'saas.SaaS'
verbose_name = _("ownCloud SaaS Disk Quota")
default_route_match = "saas.service == 'owncloud'"
resource = ServiceMonitor.DISK
delete_old_equal_values = True
def monitor(self, user):
context = self.get_context(user)
self.append("echo %(object_id)s $(monitor %(base_home)s)" % context)
def get_context(self, user):
context = {
'object_id': user.pk,
'base_home': user.get_base_home(),
}
return replace(context, "'", '"')
def get_quota(self, saas, server):
try:
user = self.get_user(saas)
except requests.exceptions.ConnectionError:
time.sleep(2)
user = self.get_user(saas)
context = {
'object_id': saas.pk,
'used': int(user['quota'].get('used', 0)),
}
sys.stdout.write('%(object_id)i %(used)i\n' % context)
def monitor(self, saas):
self.append(self.get_quota, saas) | 0.162081 | 0.063657 |
from __future__ import annotations
from coredis._utils import CaseAndEncodingInsensitiveEnum
class PureToken(CaseAndEncodingInsensitiveEnum):
    """
    Enum for using pure-tokens with the redis api.

    A "pure token" is a flag-style command argument that carries no value of
    its own: the member's value is the exact byte string sent on the wire
    (e.g. ``NX``, ``WITHSCORES``). Values are ``bytes`` so they can be placed
    directly into a RESP command without encoding.

    The ``#:`` comments preceding each member are Sphinx doc-comments listing
    the Redis commands that accept that token; they appear to be generated
    from the Redis command specification, so edit with care.

    Note that a few member names intentionally differ from their wire value
    for readability, e.g. ``CHANGE`` -> ``CH``, ``SORTING`` -> ``ALPHA``,
    ``INCREMENT`` -> ``INCR``, ``APPROXIMATELY`` -> ``~``, ``AUTO_ID`` -> ``*``,
    ``NEW_ID`` -> ``$``, and ``EMPTY_STRING`` -> ``b""``.
    """

    #: Used by:
    #:
    #:  - ``ACL LOG``
    RESET = b"RESET"

    #: Used by:
    #:
    #:  - ``BGSAVE``
    SCHEDULE = b"SCHEDULE"

    #: Used by:
    #:
    #:  - ``BITCOUNT``
    #:  - ``BITPOS``
    BIT = b"BIT"

    #: Used by:
    #:
    #:  - ``BITCOUNT``
    #:  - ``BITPOS``
    BYTE = b"BYTE"

    #: Used by:
    #:
    #:  - ``BITFIELD``
    FAIL = b"FAIL"

    #: Used by:
    #:
    #:  - ``BITFIELD``
    SAT = b"SAT"

    #: Used by:
    #:
    #:  - ``BITFIELD``
    WRAP = b"WRAP"

    #: Used by:
    #:
    #:  - ``BLMOVE``
    #:  - ``BLMPOP``
    #:  - ``LMOVE``
    #:  - ``LMPOP``
    LEFT = b"LEFT"

    #: Used by:
    #:
    #:  - ``BLMOVE``
    #:  - ``BLMPOP``
    #:  - ``LMOVE``
    #:  - ``LMPOP``
    RIGHT = b"RIGHT"

    #: Used by:
    #:
    #:  - ``BZMPOP``
    #:  - ``ZINTER``
    #:  - ``ZINTERSTORE``
    #:  - ``ZMPOP``
    #:  - ``ZUNION``
    #:  - ``ZUNIONSTORE``
    MAX = b"MAX"

    #: Used by:
    #:
    #:  - ``BZMPOP``
    #:  - ``ZINTER``
    #:  - ``ZINTERSTORE``
    #:  - ``ZMPOP``
    #:  - ``ZUNION``
    #:  - ``ZUNIONSTORE``
    MIN = b"MIN"

    #: Used by:
    #:
    #:  - ``CLIENT CACHING``
    #:  - ``SCRIPT DEBUG``
    NO = b"NO"

    #: Used by:
    #:
    #:  - ``CLIENT CACHING``
    #:  - ``SCRIPT DEBUG``
    YES = b"YES"

    #: Used by:
    #:
    #:  - ``CLIENT KILL``
    #:  - ``CLIENT LIST``
    MASTER = b"MASTER"

    #: Used by:
    #:
    #:  - ``CLIENT KILL``
    #:  - ``CLIENT LIST``
    NORMAL = b"NORMAL"

    #: Used by:
    #:
    #:  - ``CLIENT KILL``
    #:  - ``CLIENT LIST``
    PUBSUB = b"PUBSUB"

    #: Used by:
    #:
    #:  - ``CLIENT KILL``
    #:  - ``CLIENT LIST``
    REPLICA = b"REPLICA"

    #: Used by:
    #:
    #:  - ``CLIENT KILL``
    SLAVE = b"SLAVE"

    #: Used by:
    #:
    #:  - ``CLIENT NO-EVICT``
    #:  - ``CLIENT REPLY``
    #:  - ``CLIENT TRACKING``
    OFF = b"OFF"

    #: Used by:
    #:
    #:  - ``CLIENT NO-EVICT``
    #:  - ``CLIENT REPLY``
    #:  - ``CLIENT TRACKING``
    ON = b"ON"

    #: Used by:
    #:
    #:  - ``CLIENT PAUSE``
    ALL = b"ALL"

    #: Used by:
    #:
    #:  - ``CLIENT PAUSE``
    WRITE = b"WRITE"

    #: Used by:
    #:
    #:  - ``CLIENT REPLY``
    SKIP = b"SKIP"

    #: Used by:
    #:
    #:  - ``CLIENT TRACKING``
    BCAST = b"BCAST"

    #: Used by:
    #:
    #:  - ``CLIENT TRACKING``
    NOLOOP = b"NOLOOP"

    #: Used by:
    #:
    #:  - ``CLIENT TRACKING``
    OPTIN = b"OPTIN"

    #: Used by:
    #:
    #:  - ``CLIENT TRACKING``
    OPTOUT = b"OPTOUT"

    #: Used by:
    #:
    #:  - ``CLIENT UNBLOCK``
    ERROR = b"ERROR"

    #: Used by:
    #:
    #:  - ``CLIENT UNBLOCK``
    TIMEOUT = b"TIMEOUT"

    #: Used by:
    #:
    #:  - ``CLUSTER FAILOVER``
    #:  - ``FAILOVER``
    #:  - ``SHUTDOWN``
    #:  - ``XCLAIM``
    FORCE = b"FORCE"

    #: Used by:
    #:
    #:  - ``CLUSTER FAILOVER``
    TAKEOVER = b"TAKEOVER"

    #: Used by:
    #:
    #:  - ``CLUSTER RESET``
    HARD = b"HARD"

    #: Used by:
    #:
    #:  - ``CLUSTER RESET``
    SOFT = b"SOFT"

    #: Used by:
    #:
    #:  - ``CLUSTER SETSLOT``
    STABLE = b"STABLE"

    #: Used by:
    #:
    #:  - ``COPY``
    #:  - ``FUNCTION LOAD``
    #:  - ``FUNCTION RESTORE``
    #:  - ``MIGRATE``
    #:  - ``RESTORE``
    REPLACE = b"REPLACE"

    #: Used by:
    #:
    #:  - ``EXPIRE``
    #:  - ``EXPIREAT``
    #:  - ``PEXPIRE``
    #:  - ``PEXPIREAT``
    #:  - ``ZADD``
    GT = b"GT"

    #: Used by:
    #:
    #:  - ``EXPIRE``
    #:  - ``EXPIREAT``
    #:  - ``PEXPIRE``
    #:  - ``PEXPIREAT``
    #:  - ``ZADD``
    LT = b"LT"

    #: Used by:
    #:
    #:  - ``EXPIRE``
    #:  - ``EXPIREAT``
    #:  - ``GEOADD``
    #:  - ``PEXPIRE``
    #:  - ``PEXPIREAT``
    #:  - ``SET``
    #:  - ``ZADD``
    NX = b"NX"

    #: Used by:
    #:
    #:  - ``EXPIRE``
    #:  - ``EXPIREAT``
    #:  - ``GEOADD``
    #:  - ``PEXPIRE``
    #:  - ``PEXPIREAT``
    #:  - ``SET``
    #:  - ``ZADD``
    XX = b"XX"

    #: Used by:
    #:
    #:  - ``FAILOVER``
    #:  - ``SHUTDOWN``
    ABORT = b"ABORT"

    #: Used by:
    #:
    #:  - ``FLUSHALL``
    #:  - ``FLUSHDB``
    #:  - ``FUNCTION FLUSH``
    #:  - ``SCRIPT FLUSH``
    ASYNC = b"ASYNC"

    #: Used by:
    #:
    #:  - ``FLUSHALL``
    #:  - ``FLUSHDB``
    #:  - ``FUNCTION FLUSH``
    #:  - ``SCRIPT DEBUG``
    #:  - ``SCRIPT FLUSH``
    SYNC = b"SYNC"

    #: Used by:
    #:
    #:  - ``FUNCTION LIST``
    WITHCODE = b"WITHCODE"

    #: Used by:
    #:
    #:  - ``FUNCTION RESTORE``
    APPEND = b"APPEND"

    #: Used by:
    #:
    #:  - ``FUNCTION RESTORE``
    FLUSH = b"FLUSH"

    #: Used by:
    #:
    #:  - ``GEOADD``
    #:  - ``ZADD``
    CHANGE = b"CH"

    #: Used by:
    #:
    #:  - ``GEODIST``
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    FT = b"FT"

    #: Used by:
    #:
    #:  - ``GEODIST``
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    KM = b"KM"

    #: Used by:
    #:
    #:  - ``GEODIST``
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    M = b"M"

    #: Used by:
    #:
    #:  - ``GEODIST``
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    MI = b"MI"

    #: Used by:
    #:
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    ANY = b"ANY"

    #: Used by:
    #:
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    #:  - ``SORT``
    #:  - ``SORT_RO``
    ASC = b"ASC"

    #: Used by:
    #:
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    #:  - ``GEOSEARCHSTORE``
    #:  - ``SORT``
    #:  - ``SORT_RO``
    DESC = b"DESC"

    #: Used by:
    #:
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    WITHCOORD = b"WITHCOORD"

    #: Used by:
    #:
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    WITHDIST = b"WITHDIST"

    #: Used by:
    #:
    #:  - ``GEORADIUS``
    #:  - ``GEORADIUSBYMEMBER``
    #:  - ``GEORADIUSBYMEMBER_RO``
    #:  - ``GEORADIUS_RO``
    #:  - ``GEOSEARCH``
    WITHHASH = b"WITHHASH"

    #: Used by:
    #:
    #:  - ``GEOSEARCHSTORE``
    STOREDIST = b"STOREDIST"

    #: Used by:
    #:
    #:  - ``GETEX``
    PERSIST = b"PERSIST"

    #: Used by:
    #:
    #:  - ``HRANDFIELD``
    WITHVALUES = b"WITHVALUES"

    #: Used by:
    #:
    #:  - ``LCS``
    IDX = b"IDX"

    #: Used by:
    #:
    #:  - ``LCS``
    LEN = b"LEN"

    #: Used by:
    #:
    #:  - ``LCS``
    WITHMATCHLEN = b"WITHMATCHLEN"

    #: Used by:
    #:
    #:  - ``LINSERT``
    AFTER = b"AFTER"

    #: Used by:
    #:
    #:  - ``LINSERT``
    BEFORE = b"BEFORE"

    #: Used by:
    #:
    #:  - ``MIGRATE``
    COPY = b"COPY"

    #: Used by:
    #:
    #:  - ``MIGRATE``
    EMPTY_STRING = b""

    #: Used by:
    #:
    #:  - ``RESTORE``
    ABSTTL = b"ABSTTL"

    #: Used by:
    #:
    #:  - ``SET``
    GET = b"GET"

    #: Used by:
    #:
    #:  - ``SET``
    KEEPTTL = b"KEEPTTL"

    #: Used by:
    #:
    #:  - ``SHUTDOWN``
    NOSAVE = b"NOSAVE"

    #: Used by:
    #:
    #:  - ``SHUTDOWN``
    NOW = b"NOW"

    #: Used by:
    #:
    #:  - ``SHUTDOWN``
    SAVE = b"SAVE"

    #: Used by:
    #:
    #:  - ``SORT``
    #:  - ``SORT_RO``
    SORTING = b"ALPHA"

    #: Used by:
    #:
    #:  - ``XADD``
    #:  - ``XTRIM``
    APPROXIMATELY = b"~"

    #: Used by:
    #:
    #:  - ``XADD``
    AUTO_ID = b"*"

    #: Used by:
    #:
    #:  - ``XADD``
    #:  - ``XTRIM``
    EQUAL = b"="

    #: Used by:
    #:
    #:  - ``XADD``
    #:  - ``XTRIM``
    MAXLEN = b"MAXLEN"

    #: Used by:
    #:
    #:  - ``XADD``
    #:  - ``XTRIM``
    MINID = b"MINID"

    #: Used by:
    #:
    #:  - ``XADD``
    NOMKSTREAM = b"NOMKSTREAM"

    #: Used by:
    #:
    #:  - ``XAUTOCLAIM``
    #:  - ``XCLAIM``
    JUSTID = b"JUSTID"

    #: Used by:
    #:
    #:  - ``XGROUP CREATE``
    MKSTREAM = b"MKSTREAM"

    #: Used by:
    #:
    #:  - ``XGROUP CREATE``
    #:  - ``XGROUP SETID``
    NEW_ID = b"$"

    #: Used by:
    #:
    #:  - ``XREADGROUP``
    NOACK = b"NOACK"

    #: Used by:
    #:
    #:  - ``ZADD``
    INCREMENT = b"INCR"

    #: Used by:
    #:
    #:  - ``ZDIFF``
    #:  - ``ZINTER``
    #:  - ``ZRANDMEMBER``
    #:  - ``ZRANGE``
    #:  - ``ZRANGEBYSCORE``
    #:  - ``ZREVRANGE``
    #:  - ``ZREVRANGEBYSCORE``
    #:  - ``ZUNION``
    WITHSCORES = b"WITHSCORES"

    #: Used by:
    #:
    #:  - ``ZINTER``
    #:  - ``ZINTERSTORE``
    #:  - ``ZUNION``
    #:  - ``ZUNIONSTORE``
    SUM = b"SUM"

    #: Used by:
    #:
    #:  - ``ZRANGE``
    #:  - ``ZRANGESTORE``
    BYLEX = b"BYLEX"

    #: Used by:
    #:
    #:  - ``ZRANGE``
    #:  - ``ZRANGESTORE``
    BYSCORE = b"BYSCORE"

    #: Used by:
    #:
    #:  - ``ZRANGE``
    #:  - ``ZRANGESTORE``
    REV = b"REV"
class PrefixToken(CaseAndEncodingInsensitiveEnum):
"""
Enum for internal use when adding prefixes to arguments
"""
#: Used by:
#:
#: - ``BITFIELD``
#: - ``BITFIELD_RO``
#: - ``SORT``
#: - ``SORT_RO``
GET = b"GET"
#: Used by:
#:
#: - ``BITFIELD``
INCRBY = b"INCRBY"
#: Used by:
#:
#: - ``BITFIELD``
OVERFLOW = b"OVERFLOW"
#: Used by:
#:
#: - ``BITFIELD``
SET = b"SET"
#: Used by:
#:
#: - ``BLMPOP``
#: - ``BZMPOP``
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
#: - ``HSCAN``
#: - ``LMPOP``
#: - ``LPOS``
#: - ``SCAN``
#: - ``SSCAN``
#: - ``XAUTOCLAIM``
#: - ``XINFO STREAM``
#: - ``XRANGE``
#: - ``XREAD``
#: - ``XREADGROUP``
#: - ``XREVRANGE``
#: - ``ZMPOP``
#: - ``ZSCAN``
COUNT = b"COUNT"
#: Used by:
#:
#: - ``CLIENT KILL``
ADDR = b"ADDR"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
ID = b"ID"
#: Used by:
#:
#: - ``CLIENT KILL``
LADDR = b"LADDR"
#: Used by:
#:
#: - ``CLIENT KILL``
SKIPME = b"SKIPME"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
#: - ``SCAN``
TYPE = b"TYPE"
#: Used by:
#:
#: - ``CLIENT KILL``
USER = b"USER"
#: Used by:
#:
#: - ``CLIENT TRACKING``
PREFIX = b"PREFIX"
#: Used by:
#:
#: - ``CLIENT TRACKING``
REDIRECT = b"REDIRECT"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
IMPORTING = b"IMPORTING"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
MIGRATING = b"MIGRATING"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
NODE = b"NODE"
#: Used by:
#:
#: - ``COMMAND LIST``
ACLCAT = b"ACLCAT"
#: Used by:
#:
#: - ``COMMAND LIST``
FILTERBY = b"FILTERBY"
#: Used by:
#:
#: - ``COMMAND LIST``
MODULE = b"MODULE"
#: Used by:
#:
#: - ``COMMAND LIST``
PATTERN = b"PATTERN"
#: Used by:
#:
#: - ``COPY``
DB = b"DB"
#: Used by:
#:
#: - ``FAILOVER``
TIMEOUT = b"TIMEOUT"
#: Used by:
#:
#: - ``FAILOVER``
TO = b"TO"
#: Used by:
#:
#: - ``FUNCTION LIST``
LIBRARYNAME = b"LIBRARYNAME"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``SORT``
STORE = b"STORE"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
STOREDIST = b"STOREDIST"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
BYBOX = b"BYBOX"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
BYRADIUS = b"BYRADIUS"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
FROMLONLAT = b"FROMLONLAT"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
FROMMEMBER = b"FROMMEMBER"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
EX = b"EX"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
EXAT = b"EXAT"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
PX = b"PX"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
PXAT = b"PXAT"
#: Used by:
#:
#: - ``HELLO``
#: - ``MIGRATE``
AUTH = b"AUTH"
#: Used by:
#:
#: - ``HELLO``
SETNAME = b"SETNAME"
#: Used by:
#:
#: - ``HSCAN``
#: - ``SCAN``
#: - ``SSCAN``
#: - ``ZSCAN``
MATCH = b"MATCH"
#: Used by:
#:
#: - ``LCS``
MINMATCHLEN = b"MINMATCHLEN"
#: Used by:
#:
#: - ``LOLWUT``
VERSION = b"VERSION"
#: Used by:
#:
#: - ``LPOS``
MAXLEN = b"MAXLEN"
#: Used by:
#:
#: - ``LPOS``
RANK = b"RANK"
#: Used by:
#:
#: - ``MEMORY USAGE``
SAMPLES = b"SAMPLES"
#: Used by:
#:
#: - ``MIGRATE``
AUTH2 = b"AUTH2"
#: Used by:
#:
#: - ``MIGRATE``
KEYS = b"KEYS"
#: Used by:
#:
#: - ``MODULE LOADEX``
ARGS = b"ARGS"
#: Used by:
#:
#: - ``MODULE LOADEX``
CONFIG = b"CONFIG"
#: Used by:
#:
#: - ``RESTORE``
FREQ = b"FREQ"
#: Used by:
#:
#: - ``RESTORE``
IDLETIME = b"IDLETIME"
#: Used by:
#:
#: - ``SINTERCARD``
#: - ``SORT``
#: - ``SORT_RO``
#: - ``XADD``
#: - ``XTRIM``
#: - ``ZINTERCARD``
#: - ``ZRANGE``
#: - ``ZRANGEBYLEX``
#: - ``ZRANGEBYSCORE``
#: - ``ZRANGESTORE``
#: - ``ZREVRANGEBYLEX``
#: - ``ZREVRANGEBYSCORE``
LIMIT = b"LIMIT"
#: Used by:
#:
#: - ``SORT``
#: - ``SORT_RO``
BY = b"BY"
#: Used by:
#:
#: - ``XCLAIM``
#: - ``XPENDING``
IDLE = b"IDLE"
#: Used by:
#:
#: - ``XCLAIM``
RETRYCOUNT = b"RETRYCOUNT"
#: Used by:
#:
#: - ``XCLAIM``
TIME = b"TIME"
#: Used by:
#:
#: - ``XGROUP CREATE``
#: - ``XGROUP SETID``
ENTRIESREAD = b"ENTRIESREAD"
#: Used by:
#:
#: - ``XINFO STREAM``
FULL = b"FULL"
#: Used by:
#:
#: - ``XREAD``
#: - ``XREADGROUP``
BLOCK = b"BLOCK"
#: Used by:
#:
#: - ``XREAD``
#: - ``XREADGROUP``
STREAMS = b"STREAMS"
#: Used by:
#:
#: - ``XREADGROUP``
GROUP = b"GROUP"
#: Used by:
#:
#: - ``XSETID``
ENTRIESADDED = b"ENTRIESADDED"
#: Used by:
#:
#: - ``XSETID``
MAXDELETEDID = b"MAXDELETEDID"
#: Used by:
#:
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
AGGREGATE = b"AGGREGATE"
#: Used by:
#:
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
WEIGHTS = b"WEIGHTS" | coredis/tokens.py | from __future__ import annotations
from coredis._utils import CaseAndEncodingInsensitiveEnum
class PureToken(CaseAndEncodingInsensitiveEnum):
"""
Enum for using pure-tokens with the redis api.
"""
#: Used by:
#:
#: - ``ACL LOG``
RESET = b"RESET"
#: Used by:
#:
#: - ``BGSAVE``
SCHEDULE = b"SCHEDULE"
#: Used by:
#:
#: - ``BITCOUNT``
#: - ``BITPOS``
BIT = b"BIT"
#: Used by:
#:
#: - ``BITCOUNT``
#: - ``BITPOS``
BYTE = b"BYTE"
#: Used by:
#:
#: - ``BITFIELD``
FAIL = b"FAIL"
#: Used by:
#:
#: - ``BITFIELD``
SAT = b"SAT"
#: Used by:
#:
#: - ``BITFIELD``
WRAP = b"WRAP"
#: Used by:
#:
#: - ``BLMOVE``
#: - ``BLMPOP``
#: - ``LMOVE``
#: - ``LMPOP``
LEFT = b"LEFT"
#: Used by:
#:
#: - ``BLMOVE``
#: - ``BLMPOP``
#: - ``LMOVE``
#: - ``LMPOP``
RIGHT = b"RIGHT"
#: Used by:
#:
#: - ``BZMPOP``
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZMPOP``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
MAX = b"MAX"
#: Used by:
#:
#: - ``BZMPOP``
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZMPOP``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
MIN = b"MIN"
#: Used by:
#:
#: - ``CLIENT CACHING``
#: - ``SCRIPT DEBUG``
NO = b"NO"
#: Used by:
#:
#: - ``CLIENT CACHING``
#: - ``SCRIPT DEBUG``
YES = b"YES"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
MASTER = b"MASTER"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
NORMAL = b"NORMAL"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
PUBSUB = b"PUBSUB"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
REPLICA = b"REPLICA"
#: Used by:
#:
#: - ``CLIENT KILL``
SLAVE = b"SLAVE"
#: Used by:
#:
#: - ``CLIENT NO-EVICT``
#: - ``CLIENT REPLY``
#: - ``CLIENT TRACKING``
OFF = b"OFF"
#: Used by:
#:
#: - ``CLIENT NO-EVICT``
#: - ``CLIENT REPLY``
#: - ``CLIENT TRACKING``
ON = b"ON"
#: Used by:
#:
#: - ``CLIENT PAUSE``
ALL = b"ALL"
#: Used by:
#:
#: - ``CLIENT PAUSE``
WRITE = b"WRITE"
#: Used by:
#:
#: - ``CLIENT REPLY``
SKIP = b"SKIP"
#: Used by:
#:
#: - ``CLIENT TRACKING``
BCAST = b"BCAST"
#: Used by:
#:
#: - ``CLIENT TRACKING``
NOLOOP = b"NOLOOP"
#: Used by:
#:
#: - ``CLIENT TRACKING``
OPTIN = b"OPTIN"
#: Used by:
#:
#: - ``CLIENT TRACKING``
OPTOUT = b"OPTOUT"
#: Used by:
#:
#: - ``CLIENT UNBLOCK``
ERROR = b"ERROR"
#: Used by:
#:
#: - ``CLIENT UNBLOCK``
TIMEOUT = b"TIMEOUT"
#: Used by:
#:
#: - ``CLUSTER FAILOVER``
#: - ``FAILOVER``
#: - ``SHUTDOWN``
#: - ``XCLAIM``
FORCE = b"FORCE"
#: Used by:
#:
#: - ``CLUSTER FAILOVER``
TAKEOVER = b"TAKEOVER"
#: Used by:
#:
#: - ``CLUSTER RESET``
HARD = b"HARD"
#: Used by:
#:
#: - ``CLUSTER RESET``
SOFT = b"SOFT"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
STABLE = b"STABLE"
#: Used by:
#:
#: - ``COPY``
#: - ``FUNCTION LOAD``
#: - ``FUNCTION RESTORE``
#: - ``MIGRATE``
#: - ``RESTORE``
REPLACE = b"REPLACE"
#: Used by:
#:
#: - ``EXPIRE``
#: - ``EXPIREAT``
#: - ``PEXPIRE``
#: - ``PEXPIREAT``
#: - ``ZADD``
GT = b"GT"
#: Used by:
#:
#: - ``EXPIRE``
#: - ``EXPIREAT``
#: - ``PEXPIRE``
#: - ``PEXPIREAT``
#: - ``ZADD``
LT = b"LT"
#: Used by:
#:
#: - ``EXPIRE``
#: - ``EXPIREAT``
#: - ``GEOADD``
#: - ``PEXPIRE``
#: - ``PEXPIREAT``
#: - ``SET``
#: - ``ZADD``
NX = b"NX"
#: Used by:
#:
#: - ``EXPIRE``
#: - ``EXPIREAT``
#: - ``GEOADD``
#: - ``PEXPIRE``
#: - ``PEXPIREAT``
#: - ``SET``
#: - ``ZADD``
XX = b"XX"
#: Used by:
#:
#: - ``FAILOVER``
#: - ``SHUTDOWN``
ABORT = b"ABORT"
#: Used by:
#:
#: - ``FLUSHALL``
#: - ``FLUSHDB``
#: - ``FUNCTION FLUSH``
#: - ``SCRIPT FLUSH``
ASYNC = b"ASYNC"
#: Used by:
#:
#: - ``FLUSHALL``
#: - ``FLUSHDB``
#: - ``FUNCTION FLUSH``
#: - ``SCRIPT DEBUG``
#: - ``SCRIPT FLUSH``
SYNC = b"SYNC"
#: Used by:
#:
#: - ``FUNCTION LIST``
WITHCODE = b"WITHCODE"
#: Used by:
#:
#: - ``FUNCTION RESTORE``
APPEND = b"APPEND"
#: Used by:
#:
#: - ``FUNCTION RESTORE``
FLUSH = b"FLUSH"
#: Used by:
#:
#: - ``GEOADD``
#: - ``ZADD``
CHANGE = b"CH"
#: Used by:
#:
#: - ``GEODIST``
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
FT = b"FT"
#: Used by:
#:
#: - ``GEODIST``
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
KM = b"KM"
#: Used by:
#:
#: - ``GEODIST``
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
M = b"M"
#: Used by:
#:
#: - ``GEODIST``
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
MI = b"MI"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
ANY = b"ANY"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
#: - ``SORT``
#: - ``SORT_RO``
ASC = b"ASC"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
#: - ``SORT``
#: - ``SORT_RO``
DESC = b"DESC"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
WITHCOORD = b"WITHCOORD"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
WITHDIST = b"WITHDIST"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
WITHHASH = b"WITHHASH"
#: Used by:
#:
#: - ``GEOSEARCHSTORE``
STOREDIST = b"STOREDIST"
#: Used by:
#:
#: - ``GETEX``
PERSIST = b"PERSIST"
#: Used by:
#:
#: - ``HRANDFIELD``
WITHVALUES = b"WITHVALUES"
#: Used by:
#:
#: - ``LCS``
IDX = b"IDX"
#: Used by:
#:
#: - ``LCS``
LEN = b"LEN"
#: Used by:
#:
#: - ``LCS``
WITHMATCHLEN = b"WITHMATCHLEN"
#: Used by:
#:
#: - ``LINSERT``
AFTER = b"AFTER"
#: Used by:
#:
#: - ``LINSERT``
BEFORE = b"BEFORE"
#: Used by:
#:
#: - ``MIGRATE``
COPY = b"COPY"
#: Used by:
#:
#: - ``MIGRATE``
EMPTY_STRING = b""
#: Used by:
#:
#: - ``RESTORE``
ABSTTL = b"ABSTTL"
#: Used by:
#:
#: - ``SET``
GET = b"GET"
#: Used by:
#:
#: - ``SET``
KEEPTTL = b"KEEPTTL"
#: Used by:
#:
#: - ``SHUTDOWN``
NOSAVE = b"NOSAVE"
#: Used by:
#:
#: - ``SHUTDOWN``
NOW = b"NOW"
#: Used by:
#:
#: - ``SHUTDOWN``
SAVE = b"SAVE"
#: Used by:
#:
#: - ``SORT``
#: - ``SORT_RO``
SORTING = b"ALPHA"
#: Used by:
#:
#: - ``XADD``
#: - ``XTRIM``
APPROXIMATELY = b"~"
#: Used by:
#:
#: - ``XADD``
AUTO_ID = b"*"
#: Used by:
#:
#: - ``XADD``
#: - ``XTRIM``
EQUAL = b"="
#: Used by:
#:
#: - ``XADD``
#: - ``XTRIM``
MAXLEN = b"MAXLEN"
#: Used by:
#:
#: - ``XADD``
#: - ``XTRIM``
MINID = b"MINID"
#: Used by:
#:
#: - ``XADD``
NOMKSTREAM = b"NOMKSTREAM"
#: Used by:
#:
#: - ``XAUTOCLAIM``
#: - ``XCLAIM``
JUSTID = b"JUSTID"
#: Used by:
#:
#: - ``XGROUP CREATE``
MKSTREAM = b"MKSTREAM"
#: Used by:
#:
#: - ``XGROUP CREATE``
#: - ``XGROUP SETID``
NEW_ID = b"$"
#: Used by:
#:
#: - ``XREADGROUP``
NOACK = b"NOACK"
#: Used by:
#:
#: - ``ZADD``
INCREMENT = b"INCR"
#: Used by:
#:
#: - ``ZDIFF``
#: - ``ZINTER``
#: - ``ZRANDMEMBER``
#: - ``ZRANGE``
#: - ``ZRANGEBYSCORE``
#: - ``ZREVRANGE``
#: - ``ZREVRANGEBYSCORE``
#: - ``ZUNION``
WITHSCORES = b"WITHSCORES"
#: Used by:
#:
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
SUM = b"SUM"
#: Used by:
#:
#: - ``ZRANGE``
#: - ``ZRANGESTORE``
BYLEX = b"BYLEX"
#: Used by:
#:
#: - ``ZRANGE``
#: - ``ZRANGESTORE``
BYSCORE = b"BYSCORE"
#: Used by:
#:
#: - ``ZRANGE``
#: - ``ZRANGESTORE``
REV = b"REV"
class PrefixToken(CaseAndEncodingInsensitiveEnum):
"""
Enum for internal use when adding prefixes to arguments
"""
#: Used by:
#:
#: - ``BITFIELD``
#: - ``BITFIELD_RO``
#: - ``SORT``
#: - ``SORT_RO``
GET = b"GET"
#: Used by:
#:
#: - ``BITFIELD``
INCRBY = b"INCRBY"
#: Used by:
#:
#: - ``BITFIELD``
OVERFLOW = b"OVERFLOW"
#: Used by:
#:
#: - ``BITFIELD``
SET = b"SET"
#: Used by:
#:
#: - ``BLMPOP``
#: - ``BZMPOP``
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``GEORADIUSBYMEMBER_RO``
#: - ``GEORADIUS_RO``
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
#: - ``HSCAN``
#: - ``LMPOP``
#: - ``LPOS``
#: - ``SCAN``
#: - ``SSCAN``
#: - ``XAUTOCLAIM``
#: - ``XINFO STREAM``
#: - ``XRANGE``
#: - ``XREAD``
#: - ``XREADGROUP``
#: - ``XREVRANGE``
#: - ``ZMPOP``
#: - ``ZSCAN``
COUNT = b"COUNT"
#: Used by:
#:
#: - ``CLIENT KILL``
ADDR = b"ADDR"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
ID = b"ID"
#: Used by:
#:
#: - ``CLIENT KILL``
LADDR = b"LADDR"
#: Used by:
#:
#: - ``CLIENT KILL``
SKIPME = b"SKIPME"
#: Used by:
#:
#: - ``CLIENT KILL``
#: - ``CLIENT LIST``
#: - ``SCAN``
TYPE = b"TYPE"
#: Used by:
#:
#: - ``CLIENT KILL``
USER = b"USER"
#: Used by:
#:
#: - ``CLIENT TRACKING``
PREFIX = b"PREFIX"
#: Used by:
#:
#: - ``CLIENT TRACKING``
REDIRECT = b"REDIRECT"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
IMPORTING = b"IMPORTING"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
MIGRATING = b"MIGRATING"
#: Used by:
#:
#: - ``CLUSTER SETSLOT``
NODE = b"NODE"
#: Used by:
#:
#: - ``COMMAND LIST``
ACLCAT = b"ACLCAT"
#: Used by:
#:
#: - ``COMMAND LIST``
FILTERBY = b"FILTERBY"
#: Used by:
#:
#: - ``COMMAND LIST``
MODULE = b"MODULE"
#: Used by:
#:
#: - ``COMMAND LIST``
PATTERN = b"PATTERN"
#: Used by:
#:
#: - ``COPY``
DB = b"DB"
#: Used by:
#:
#: - ``FAILOVER``
TIMEOUT = b"TIMEOUT"
#: Used by:
#:
#: - ``FAILOVER``
TO = b"TO"
#: Used by:
#:
#: - ``FUNCTION LIST``
LIBRARYNAME = b"LIBRARYNAME"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
#: - ``SORT``
STORE = b"STORE"
#: Used by:
#:
#: - ``GEORADIUS``
#: - ``GEORADIUSBYMEMBER``
STOREDIST = b"STOREDIST"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
BYBOX = b"BYBOX"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
BYRADIUS = b"BYRADIUS"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
FROMLONLAT = b"FROMLONLAT"
#: Used by:
#:
#: - ``GEOSEARCH``
#: - ``GEOSEARCHSTORE``
FROMMEMBER = b"FROMMEMBER"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
EX = b"EX"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
EXAT = b"EXAT"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
PX = b"PX"
#: Used by:
#:
#: - ``GETEX``
#: - ``SET``
PXAT = b"PXAT"
#: Used by:
#:
#: - ``HELLO``
#: - ``MIGRATE``
AUTH = b"AUTH"
#: Used by:
#:
#: - ``HELLO``
SETNAME = b"SETNAME"
#: Used by:
#:
#: - ``HSCAN``
#: - ``SCAN``
#: - ``SSCAN``
#: - ``ZSCAN``
MATCH = b"MATCH"
#: Used by:
#:
#: - ``LCS``
MINMATCHLEN = b"MINMATCHLEN"
#: Used by:
#:
#: - ``LOLWUT``
VERSION = b"VERSION"
#: Used by:
#:
#: - ``LPOS``
MAXLEN = b"MAXLEN"
#: Used by:
#:
#: - ``LPOS``
RANK = b"RANK"
#: Used by:
#:
#: - ``MEMORY USAGE``
SAMPLES = b"SAMPLES"
#: Used by:
#:
#: - ``MIGRATE``
AUTH2 = b"AUTH2"
#: Used by:
#:
#: - ``MIGRATE``
KEYS = b"KEYS"
#: Used by:
#:
#: - ``MODULE LOADEX``
ARGS = b"ARGS"
#: Used by:
#:
#: - ``MODULE LOADEX``
CONFIG = b"CONFIG"
#: Used by:
#:
#: - ``RESTORE``
FREQ = b"FREQ"
#: Used by:
#:
#: - ``RESTORE``
IDLETIME = b"IDLETIME"
#: Used by:
#:
#: - ``SINTERCARD``
#: - ``SORT``
#: - ``SORT_RO``
#: - ``XADD``
#: - ``XTRIM``
#: - ``ZINTERCARD``
#: - ``ZRANGE``
#: - ``ZRANGEBYLEX``
#: - ``ZRANGEBYSCORE``
#: - ``ZRANGESTORE``
#: - ``ZREVRANGEBYLEX``
#: - ``ZREVRANGEBYSCORE``
LIMIT = b"LIMIT"
#: Used by:
#:
#: - ``SORT``
#: - ``SORT_RO``
BY = b"BY"
#: Used by:
#:
#: - ``XCLAIM``
#: - ``XPENDING``
IDLE = b"IDLE"
#: Used by:
#:
#: - ``XCLAIM``
RETRYCOUNT = b"RETRYCOUNT"
#: Used by:
#:
#: - ``XCLAIM``
TIME = b"TIME"
#: Used by:
#:
#: - ``XGROUP CREATE``
#: - ``XGROUP SETID``
ENTRIESREAD = b"ENTRIESREAD"
#: Used by:
#:
#: - ``XINFO STREAM``
FULL = b"FULL"
#: Used by:
#:
#: - ``XREAD``
#: - ``XREADGROUP``
BLOCK = b"BLOCK"
#: Used by:
#:
#: - ``XREAD``
#: - ``XREADGROUP``
STREAMS = b"STREAMS"
#: Used by:
#:
#: - ``XREADGROUP``
GROUP = b"GROUP"
#: Used by:
#:
#: - ``XSETID``
ENTRIESADDED = b"ENTRIESADDED"
#: Used by:
#:
#: - ``XSETID``
MAXDELETEDID = b"MAXDELETEDID"
#: Used by:
#:
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
AGGREGATE = b"AGGREGATE"
#: Used by:
#:
#: - ``ZINTER``
#: - ``ZINTERSTORE``
#: - ``ZUNION``
#: - ``ZUNIONSTORE``
WEIGHTS = b"WEIGHTS" | 0.629775 | 0.057467 |
import sys
import os
import vtk
import numpy
from siconos.io.mechanics_io import Hdf5
## the best way to dump all data
# $ h5dump toto.hdf5 > toto.txt
import h5py
import getopt
def usage():
"""
{0} <hdf5 file>
""".format(sys.argv[0])
print '{0}: Usage'.format(sys.argv[0])
print """
{0} [--help] [--output_frequency=n] [--output_filename=] <hdf5 file>
"""
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], '',
['output_frequency=',
'output_filename=',
'cf-scale='])
except getopt.GetoptError, err:
sys.stderr.write('{0}\n'.format(str(err)))
usage()
exit(2)
min_time = None
max_time = None
scale_factor = 1
output_frequency=10
output_filename=None
for o, a in opts:
if o == '--help':
usage()
exit(0)
elif o == '--output_frequency':
output_frequency = float(a)
elif o == '--output_filename':
out_filename=a
elif o == '--cf-scale':
scale_factor = float(a)
if len(args) > 0:
io_filename = args[0]
if output_filename == None:
out_filename=''.join(io_filename.rsplit('.')[:-1])+'-filtered.hdf5'
else:
usage()
exit(1)
with Hdf5(io_filename=io_filename, mode='r') as io:
with Hdf5(io_filename=out_filename, mode='w') as out:
hdf1 = io._out
hdf2 = out._out
# copy /data/input
hdf2.__delitem__('data/input')
h5py.h5o.copy(hdf1.id, "data/input", hdf2.id, "data/input")
# copy /data/nslaws
hdf2.__delitem__('data/nslaws')
h5py.h5o.copy(hdf1.id, "data/nslaws", hdf2.id, "data/nslaws")
# copy /data/ref
hdf2.__delitem__('data/ref')
h5py.h5o.copy(hdf1.id, "data/ref", hdf2.id, "data/ref")
print('***************************************************** ')
print('************ Parsing simulation data ****************')
print('***************************************************** ')
def load():
ispos_data = io.static_data()
idpos_data = io.dynamic_data()
icf_data = io.contact_forces_data()[:]
isolv_data = io.solver_data()
return ispos_data, idpos_data, icf_data, isolv_data
spos_data, dpos_data, cf_data, solv_data = load()
#print('io._data',io._data)
#print('static position data : spos_data',spos_data)
#print('spos_data.value',spos_data.value)
#print('dynamic position data : dpos_data',dpos_data)
print('dpos_data.value',dpos_data.value)
print('cf_data',cf_data)
#print('solv_data',solv_data)
times = list(set(dpos_data[:, 0]))
times.sort()
print('len(times)',len(times))
if (len(times) ==0 ):
print('no results in the hdf5 file')
else:
print('Results for ',len(times),' steps in the hdf5 file')
#ndyna = len(numpy.where(dpos_data[:, 0] == times[0])) does not work
ndyna = len(dpos_data[:, 0])/len(times)
print('ndyna =', ndyna)
if len(spos_data) > 0:
nstatic = len(numpy.where(spos_data[:, 0] == times[0]))
nstatic = spos_data.shape[0]
else:
nstatic = 0
print('nstatic =', nstatic)
# instances = set(dpos_data[:, 1])
# filtering
p=0
current_line=0
for k in range(len(times)):
#print(times[k])
if (k+1 < len(times) ):
time_step=times[k+1]-times[k]
#print('time_step',time_step)
if (k%output_frequency==0):
if k==0 :
print('filter for k',k,'at times', times[k], 'p', p)
out._dynamic_data.resize((p+1)*ndyna,0)
out._dynamic_data[p*ndyna:(p+1)*ndyna,:] = numpy.array(dpos_data[k*ndyna:(k+1)*ndyna,:])
#print('times',dpos_data[k*ndyna:(k+1)*ndyna,0])
out._static_data.resize((p+1)*nstatic,0)
out._static_data[p*nstatic:(p+1)*nstatic,:] = numpy.array(spos_data[0:nstatic,:])
out._solv_data.resize((p+1),0)
out._solv_data[p:(p+1),:] = numpy.array(solv_data[0:1,:])
id_f = numpy.where(abs(cf_data[:, 0] - times[k]) < time_step*1e-5)[0]
if len(id_f) == 0:
print('no contact data at time',times[k])
else:
#print('index of contact :', min(id_f), max(id_f))
out._cf_data.resize(max(id_f)+1,0)
out._cf_data[min(id_f):max(id_f),:] = numpy.array(cf_data[min(id_f):max(id_f),:])
current_line = max(id_f)
else:
print('filter for k',k,'at times', times[k], 'p', p)
out._dynamic_data.resize((p+1)*ndyna,0)
#print( dpos_data[k*ndyna:(k+1)*ndyna,:])
#print('times',dpos_data[(k+1)*ndyna:(k+2)*ndyna,0])
out._dynamic_data[p*ndyna:(p+1)*ndyna,:] = dpos_data[(k+1)*ndyna:(k+2)*ndyna,:]
# out._static_data.resize((p+1)*nstatic,0)
# #print( dpos_data[k*nstatic:(k+1)*nstatic,:])
# out._static_data[p*nstatic:(p+1)*nstatic,:] = spos_data[k*nstatic:(k+1)*nstatic,:]
out._solv_data.resize((p+1),0)
out._solv_data[p:(p+1),:] = numpy.array(solv_data[k:k+1,:])
id_f = numpy.where(abs(cf_data[:, 0] - times[k]) < time_step*1e-5)[0]
if len(id_f) == 0:
print('no contact data at time',times[k])
else:
#print('index of contact :', min(id_f), max(id_f))
new_line = current_line+max(id_f)-min(id_f)+1
#print('new_line',new_line)
#print('current_line',current_line)
#print('size of contact data', max(id_f)-min(id_f)+1)
out._cf_data.resize(new_line,0)
#print('fill out._cf_data indices', current_line, new_line-1)
out._cf_data[current_line:new_line,:] = numpy.array(cf_data[min(id_f):max(id_f)+1,:])
current_line=new_line
#print('current_line',current_line)
p = p+1
#print(dpos_data)
print(out._dynamic_data.shape)
print(out._static_data.shape)
print(out.static_data().value)
print(out._cf_data.shape)
print(out._solv_data.shape) | examples/Mechanics/ContactDetection/BulletIO/filter_output_frequency.py | import sys
import os
import vtk
import numpy
from siconos.io.mechanics_io import Hdf5
## the best way to dump all data
# $ h5dump toto.hdf5 > toto.txt
import h5py
import getopt
def usage():
"""
{0} <hdf5 file>
""".format(sys.argv[0])
print '{0}: Usage'.format(sys.argv[0])
print """
{0} [--help] [--output_frequency=n] [--output_filename=] <hdf5 file>
"""
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], '',
['output_frequency=',
'output_filename=',
'cf-scale='])
except getopt.GetoptError, err:
sys.stderr.write('{0}\n'.format(str(err)))
usage()
exit(2)
min_time = None
max_time = None
scale_factor = 1
output_frequency=10
output_filename=None
for o, a in opts:
if o == '--help':
usage()
exit(0)
elif o == '--output_frequency':
output_frequency = float(a)
elif o == '--output_filename':
out_filename=a
elif o == '--cf-scale':
scale_factor = float(a)
if len(args) > 0:
io_filename = args[0]
if output_filename == None:
out_filename=''.join(io_filename.rsplit('.')[:-1])+'-filtered.hdf5'
else:
usage()
exit(1)
with Hdf5(io_filename=io_filename, mode='r') as io:
with Hdf5(io_filename=out_filename, mode='w') as out:
hdf1 = io._out
hdf2 = out._out
# copy /data/input
hdf2.__delitem__('data/input')
h5py.h5o.copy(hdf1.id, "data/input", hdf2.id, "data/input")
# copy /data/nslaws
hdf2.__delitem__('data/nslaws')
h5py.h5o.copy(hdf1.id, "data/nslaws", hdf2.id, "data/nslaws")
# copy /data/ref
hdf2.__delitem__('data/ref')
h5py.h5o.copy(hdf1.id, "data/ref", hdf2.id, "data/ref")
print('***************************************************** ')
print('************ Parsing simulation data ****************')
print('***************************************************** ')
def load():
ispos_data = io.static_data()
idpos_data = io.dynamic_data()
icf_data = io.contact_forces_data()[:]
isolv_data = io.solver_data()
return ispos_data, idpos_data, icf_data, isolv_data
spos_data, dpos_data, cf_data, solv_data = load()
#print('io._data',io._data)
#print('static position data : spos_data',spos_data)
#print('spos_data.value',spos_data.value)
#print('dynamic position data : dpos_data',dpos_data)
print('dpos_data.value',dpos_data.value)
print('cf_data',cf_data)
#print('solv_data',solv_data)
times = list(set(dpos_data[:, 0]))
times.sort()
print('len(times)',len(times))
if (len(times) ==0 ):
print('no results in the hdf5 file')
else:
print('Results for ',len(times),' steps in the hdf5 file')
#ndyna = len(numpy.where(dpos_data[:, 0] == times[0])) does not work
ndyna = len(dpos_data[:, 0])/len(times)
print('ndyna =', ndyna)
if len(spos_data) > 0:
nstatic = len(numpy.where(spos_data[:, 0] == times[0]))
nstatic = spos_data.shape[0]
else:
nstatic = 0
print('nstatic =', nstatic)
# instances = set(dpos_data[:, 1])
# filtering
p=0
current_line=0
for k in range(len(times)):
#print(times[k])
if (k+1 < len(times) ):
time_step=times[k+1]-times[k]
#print('time_step',time_step)
if (k%output_frequency==0):
if k==0 :
print('filter for k',k,'at times', times[k], 'p', p)
out._dynamic_data.resize((p+1)*ndyna,0)
out._dynamic_data[p*ndyna:(p+1)*ndyna,:] = numpy.array(dpos_data[k*ndyna:(k+1)*ndyna,:])
#print('times',dpos_data[k*ndyna:(k+1)*ndyna,0])
out._static_data.resize((p+1)*nstatic,0)
out._static_data[p*nstatic:(p+1)*nstatic,:] = numpy.array(spos_data[0:nstatic,:])
out._solv_data.resize((p+1),0)
out._solv_data[p:(p+1),:] = numpy.array(solv_data[0:1,:])
id_f = numpy.where(abs(cf_data[:, 0] - times[k]) < time_step*1e-5)[0]
if len(id_f) == 0:
print('no contact data at time',times[k])
else:
#print('index of contact :', min(id_f), max(id_f))
out._cf_data.resize(max(id_f)+1,0)
out._cf_data[min(id_f):max(id_f),:] = numpy.array(cf_data[min(id_f):max(id_f),:])
current_line = max(id_f)
else:
print('filter for k',k,'at times', times[k], 'p', p)
out._dynamic_data.resize((p+1)*ndyna,0)
#print( dpos_data[k*ndyna:(k+1)*ndyna,:])
#print('times',dpos_data[(k+1)*ndyna:(k+2)*ndyna,0])
out._dynamic_data[p*ndyna:(p+1)*ndyna,:] = dpos_data[(k+1)*ndyna:(k+2)*ndyna,:]
# out._static_data.resize((p+1)*nstatic,0)
# #print( dpos_data[k*nstatic:(k+1)*nstatic,:])
# out._static_data[p*nstatic:(p+1)*nstatic,:] = spos_data[k*nstatic:(k+1)*nstatic,:]
out._solv_data.resize((p+1),0)
out._solv_data[p:(p+1),:] = numpy.array(solv_data[k:k+1,:])
id_f = numpy.where(abs(cf_data[:, 0] - times[k]) < time_step*1e-5)[0]
if len(id_f) == 0:
print('no contact data at time',times[k])
else:
#print('index of contact :', min(id_f), max(id_f))
new_line = current_line+max(id_f)-min(id_f)+1
#print('new_line',new_line)
#print('current_line',current_line)
#print('size of contact data', max(id_f)-min(id_f)+1)
out._cf_data.resize(new_line,0)
#print('fill out._cf_data indices', current_line, new_line-1)
out._cf_data[current_line:new_line,:] = numpy.array(cf_data[min(id_f):max(id_f)+1,:])
current_line=new_line
#print('current_line',current_line)
p = p+1
#print(dpos_data)
print(out._dynamic_data.shape)
print(out._static_data.shape)
print(out.static_data().value)
print(out._cf_data.shape)
print(out._solv_data.shape) | 0.045131 | 0.167185 |
import numpy as np
import vispy.app
from vispy import gloo
from vispy import visuals
from vispy.visuals.transforms import (AffineTransform, STTransform,
arg_to_array, TransformSystem,
LogTransform, PolarTransform,
BaseTransform)
image = np.random.normal(size=(100, 100, 3))
image[20:80, 20:80] += 3.
image[50] += 3.
image[:, 50] += 3.
image = ((image-image.min()) *
(253. / (image.max()-image.min()))).astype(np.ubyte)
class Canvas(vispy.app.Canvas):
def __init__(self):
vispy.app.Canvas.__init__(self, keys='interactive', size=(800, 800))
self.images = [visuals.ImageVisual(image, method='impostor')
for i in range(4)]
self.images[0].transform = (STTransform(scale=(30, 30),
translate=(600, 600)) *
SineTransform() *
STTransform(scale=(0.1, 0.1),
translate=(-5, -5)))
tr = AffineTransform()
tr.rotate(30, (0, 0, 1))
tr.rotate(40, (0, 1, 0))
tr.scale((3, 3))
self.images[1].transform = (STTransform(translate=(200, 600)) *
tr *
STTransform(translate=(-50, -50)))
self.images[2].transform = (STTransform(scale=(3, -150),
translate=(200, 100)) *
LogTransform((0, 2, 0)) *
STTransform(scale=(1, -0.01),
translate=(-50, 1.3)))
self.images[3].transform = (STTransform(scale=(400, 400),
translate=(600, 300)) *
PolarTransform() *
STTransform(scale=(np.pi/200, 0.005),
translate=(-3*np.pi/4., 0.1)))
for img in self.images:
img.tr_sys = TransformSystem(self)
img.tr_sys.visual_to_document = img.transform
self.show()
def on_draw(self, ev):
gloo.clear(color='black', depth=True)
gloo.set_viewport(0, 0, *self.physical_size)
# Create a TransformSystem that will tell the visual how to draw
for img in self.images:
img.draw(img.tr_sys)
# A simple custom Transform
class SineTransform(BaseTransform):
"""
Add sine wave to y-value for wavy effect.
"""
glsl_map = """
vec4 sineTransform(vec4 pos) {
return vec4(pos.x, pos.y + sin(pos.x), pos.z, 1);
}"""
glsl_imap = """
vec4 sineTransform(vec4 pos) {
return vec4(pos.x, pos.y - sin(pos.x), pos.z, 1);
}"""
Linear = False
@arg_to_array
def map(self, coords):
ret = coords.copy()
ret[..., 1] += np.sin(ret[..., 0])
return ret
@arg_to_array
def imap(self, coords):
ret = coords.copy()
ret[..., 1] -= np.sin(ret[..., 0])
return ret
def inverse(self):
return InvSineTransform()
class InvSineTransform(BaseTransform):
glsl_map = SineTransform.glsl_imap
glsl_imap = SineTransform.glsl_map
Linear = False
map = SineTransform.imap
imap = SineTransform.map
def inverse(self):
return SineTransform()
if __name__ == '__main__':
win = Canvas()
import sys
if sys.flags.interactive != 1:
vispy.app.run() | examples/basics/visuals/image_transforms.py | import numpy as np
import vispy.app
from vispy import gloo
from vispy import visuals
from vispy.visuals.transforms import (AffineTransform, STTransform,
arg_to_array, TransformSystem,
LogTransform, PolarTransform,
BaseTransform)
image = np.random.normal(size=(100, 100, 3))
image[20:80, 20:80] += 3.
image[50] += 3.
image[:, 50] += 3.
image = ((image-image.min()) *
(253. / (image.max()-image.min()))).astype(np.ubyte)
class Canvas(vispy.app.Canvas):
def __init__(self):
vispy.app.Canvas.__init__(self, keys='interactive', size=(800, 800))
self.images = [visuals.ImageVisual(image, method='impostor')
for i in range(4)]
self.images[0].transform = (STTransform(scale=(30, 30),
translate=(600, 600)) *
SineTransform() *
STTransform(scale=(0.1, 0.1),
translate=(-5, -5)))
tr = AffineTransform()
tr.rotate(30, (0, 0, 1))
tr.rotate(40, (0, 1, 0))
tr.scale((3, 3))
self.images[1].transform = (STTransform(translate=(200, 600)) *
tr *
STTransform(translate=(-50, -50)))
self.images[2].transform = (STTransform(scale=(3, -150),
translate=(200, 100)) *
LogTransform((0, 2, 0)) *
STTransform(scale=(1, -0.01),
translate=(-50, 1.3)))
self.images[3].transform = (STTransform(scale=(400, 400),
translate=(600, 300)) *
PolarTransform() *
STTransform(scale=(np.pi/200, 0.005),
translate=(-3*np.pi/4., 0.1)))
for img in self.images:
img.tr_sys = TransformSystem(self)
img.tr_sys.visual_to_document = img.transform
self.show()
def on_draw(self, ev):
gloo.clear(color='black', depth=True)
gloo.set_viewport(0, 0, *self.physical_size)
# Create a TransformSystem that will tell the visual how to draw
for img in self.images:
img.draw(img.tr_sys)
# A simple custom Transform
class SineTransform(BaseTransform):
"""
Add sine wave to y-value for wavy effect.
"""
glsl_map = """
vec4 sineTransform(vec4 pos) {
return vec4(pos.x, pos.y + sin(pos.x), pos.z, 1);
}"""
glsl_imap = """
vec4 sineTransform(vec4 pos) {
return vec4(pos.x, pos.y - sin(pos.x), pos.z, 1);
}"""
Linear = False
@arg_to_array
def map(self, coords):
ret = coords.copy()
ret[..., 1] += np.sin(ret[..., 0])
return ret
@arg_to_array
def imap(self, coords):
ret = coords.copy()
ret[..., 1] -= np.sin(ret[..., 0])
return ret
def inverse(self):
return InvSineTransform()
class InvSineTransform(BaseTransform):
glsl_map = SineTransform.glsl_imap
glsl_imap = SineTransform.glsl_map
Linear = False
map = SineTransform.imap
imap = SineTransform.map
def inverse(self):
return SineTransform()
if __name__ == '__main__':
win = Canvas()
import sys
if sys.flags.interactive != 1:
vispy.app.run() | 0.603348 | 0.605507 |
# pylint: disable=import-error, no-member, too-many-arguments, no-self-use
import torch
import torch.nn as nn
import regular_nd_grid_interpolation as rgi
def query_local_implicit_grid(model, latent_grid, query_pts, xmin, xmax):
    """Query a local implicit grid of latent features at arbitrary points.

    Each query point is decoded from the latent codes at the 2**d corners
    of its surrounding grid cell, and the decoded outputs are blended with
    n-linear interpolation weights.

    Args:
        model: nn.Module mapping a [N, d+c] batch (relative coordinates
            concatenated with a corner latent) to [N, o] outputs.
        latent_grid: tensor of shape [b, n1, ..., nd, c] where b is the
            batch size, n1..nd the spatial resolution per dimension and c
            the number of latent channels.
        query_pts: tensor of shape [b, num_pts, d]; points outside
            [xmin, xmax] are clipped by the interpolation helper.
        xmin: float, tuple of floats, or tensor; lower corner of the grid
            (floats are broadcast to all dimensions).
        xmax: float, tuple of floats, or tensor; upper corner of the grid.

    Returns:
        Tensor of shape [b, num_pts, o], queried values at the query
        points, where o is the number of output channels of the model.
    """
    corner_values, weights, x_relative = rgi.regular_nd_grid_interpolation_coefficients(
        latent_grid, query_pts, xmin, xmax)
    # [b, num_pts, 2**d, d+c]: relative coords concatenated with corner codes.
    concat_features = torch.cat([x_relative, corner_values], dim=-1)
    b, num_pts, num_corners, feat_dim = concat_features.shape
    # Flatten so the decoder sees a plain [N, d+c] batch, then restore shape.
    output = model(concat_features.reshape([-1, feat_dim]))
    output = output.reshape([b, num_pts, num_corners, -1])  # [b, p, 2**d, o]
    # Blend the per-corner outputs with the interpolation weights.
    return torch.sum(output * weights.unsqueeze(-1), dim=-2)  # [b, p, o]
# pylint: disable=import-error, no-member, too-many-arguments, no-self-use
import torch
import torch.nn as nn
import regular_nd_grid_interpolation as rgi
def query_local_implicit_grid(model, latent_grid, query_pts, xmin, xmax):
"""Function for querying local implicit grid.
The latent feature grid can be of aribtrary physical dimensions. query_pts are query points
representing the coordinates at which the grid is queried. xmin and xmax are the bounds of
the grid. E.g. for a 2 dimensional grid,
*--------*--------* xmax
| | x |
| | |
| x | |
| | x |
*--------*--------*
| | |
| | |
| x | |
| | x |
xmin *--------*--------*
In the schematic above, for the latent grid, n1=n2=2, num_pts=5, etc.
Args:
model: nn.Module instance, model for decoding local latents. Must accept input of length
d+c.
latent_grid: tensor of shape [b, n1, n2, ..., nd, c] where b is the batch size, n1, ..., nd
are the spatial resolution in each dimension, c is the number of latent channels.
query_pts: tensor of shape [b, num_pts, d] where num_pts is the number of query points, d is
the dimension of the query points. The query points must fall within xmin and xmax, or else
will be clipped to xmin and xmax.
xmin: float or tuple of floats or tensor. If float, automatically broadcast to the
corresponding dimensions. Reference spatial coordinate of the lower left corner of the grid.
xmax:float or tuple of floats or tensor. If float, automatically broadcast to the
corresponding dimensions. Reference spatial coordinate of the upper right corner of the
grid.
Returns:
query_vals: tensor of shape [b, num_pts, o], queried values at query points, where o is the
number output channels from the model.
"""
corner_values, weights, x_relative = rgi.regular_nd_grid_interpolation_coefficients(
latent_grid, query_pts, xmin, xmax)
concat_features = torch.cat([x_relative, corner_values], axis=-1) # [b, num_points, 2**d, d+c]
input_shape = concat_features.shape
# flatten and feed through model
output = model(concat_features.reshape([-1, input_shape[-1]]))
# reshape output
output = output.reshape([input_shape[0], input_shape[1], input_shape[2], -1]) # [b, p, 2**d, o]
# interpolate the output values
output = torch.sum(output * weights.unsqueeze(-1), axis=-2) # [b, p, o]
return output | 0.941345 | 0.699627 |
from dataclasses import dataclass
from typing import Optional
from aioredis import Redis
from fastapi import Depends, Request
from httpx import AsyncClient
from safir.dependencies.http_client import http_client_dependency
from safir.dependencies.logger import logger_dependency
from sqlalchemy.ext.asyncio import AsyncSession
from structlog.stdlib import BoundLogger
from ..config import Config
from ..factory import ComponentFactory
from ..models.state import State
from .config import config_dependency
from .db_session import db_session_dependency
from .redis import redis_dependency
from .token_cache import TokenCache, token_cache_dependency
__all__ = ["RequestContext", "context_dependency"]
@dataclass
class RequestContext:
    """Holds the incoming request and its surrounding context.

    The primary reason for the existence of this class is to allow the
    functions involved in request processing to repeatedly rebind the
    request logger to include more information, without having to pass
    both the request and the logger separately to every function.
    """

    request: Request
    """The incoming request."""

    config: Config
    """Gafaelfawr's configuration."""

    logger: BoundLogger
    """The request logger, rebound with discovered context."""

    redis: Redis
    """Connection pool to use to talk to Redis."""

    session: AsyncSession
    """The database session."""

    http_client: AsyncClient
    """Shared HTTP client."""

    token_cache: TokenCache
    """Shared token cache."""

    @property
    def factory(self) -> ComponentFactory:
        """A factory for constructing Gafaelfawr components.

        This is constructed on the fly at each reference to ensure that we
        get the latest logger, which may have additional bound context.
        """
        return ComponentFactory(
            config=self.config,
            redis=self.redis,
            session=self.session,
            http_client=self.http_client,
            token_cache=self.token_cache,
            logger=self.logger,
        )

    @property
    def state(self) -> State:
        """Convenience property to access the cookie state."""
        return self.request.state.cookie

    @state.setter
    def state(self, state: State) -> None:
        """Convenience property to set the cookie state."""
        self.request.state.cookie = state

    def rebind_logger(self, **values: Optional[str]) -> None:
        """Add the given values to the logging context.

        Also updates the logging context stored in the request object in
        case the request context later needs to be recreated from the
        request.

        Parameters
        ----------
        **values : `str` or `None`
            Additional values that should be added to the logging context.
        """
        self.logger = self.logger.bind(**values)
async def context_dependency(
    request: Request,
    config: Config = Depends(config_dependency),
    logger: BoundLogger = Depends(logger_dependency),
    redis: Redis = Depends(redis_dependency),
    session: AsyncSession = Depends(db_session_dependency),
    http_client: AsyncClient = Depends(http_client_dependency),
    token_cache: TokenCache = Depends(token_cache_dependency),
) -> RequestContext:
    """Provide a RequestContext as a FastAPI dependency.

    FastAPI resolves each sub-dependency per request; they are bundled
    into a single RequestContext so handlers take one argument instead of
    seven.  (Dataset-table residue fused onto the closing parenthesis has
    been stripped; it made the definition unparsable.)
    """
    return RequestContext(
        request=request,
        config=config,
        logger=logger,
        redis=redis,
        session=session,
        http_client=http_client,
        token_cache=token_cache,
    )
from typing import Optional
from aioredis import Redis
from fastapi import Depends, Request
from httpx import AsyncClient
from safir.dependencies.http_client import http_client_dependency
from safir.dependencies.logger import logger_dependency
from sqlalchemy.ext.asyncio import AsyncSession
from structlog.stdlib import BoundLogger
from ..config import Config
from ..factory import ComponentFactory
from ..models.state import State
from .config import config_dependency
from .db_session import db_session_dependency
from .redis import redis_dependency
from .token_cache import TokenCache, token_cache_dependency
__all__ = ["RequestContext", "context_dependency"]
@dataclass
class RequestContext:
"""Holds the incoming request and its surrounding context.
The primary reason for the existence of this class is to allow the
functions involved in request processing to repeated rebind the request
logger to include more information, without having to pass both the
request and the logger separately to every function.
"""
request: Request
"""The incoming request."""
config: Config
"""Gafaelfawr's configuration."""
logger: BoundLogger
"""The request logger, rebound with discovered context."""
redis: Redis
"""Connection pool to use to talk to Redis."""
session: AsyncSession
"""The database session."""
http_client: AsyncClient
"""Shared HTTP client."""
token_cache: TokenCache
"""Shared token cache."""
@property
def factory(self) -> ComponentFactory:
"""A factory for constructing Gafaelfawr components.
This is constructed on the fly at each reference to ensure that we get
the latest logger, which may have additional bound context.
"""
return ComponentFactory(
config=self.config,
redis=self.redis,
session=self.session,
http_client=self.http_client,
token_cache=self.token_cache,
logger=self.logger,
)
@property
def state(self) -> State:
"""Convenience property to access the cookie state."""
return self.request.state.cookie
@state.setter
def state(self, state: State) -> None:
"""Convenience property to set the cookie state."""
self.request.state.cookie = state
def rebind_logger(self, **values: Optional[str]) -> None:
"""Add the given values to the logging context.
Also updates the logging context stored in the request object in case
the request context later needs to be recreated from the request.
Parameters
----------
**values : `str` or `None`
Additional values that should be added to the logging context.
"""
self.logger = self.logger.bind(**values)
async def context_dependency(
request: Request,
config: Config = Depends(config_dependency),
logger: BoundLogger = Depends(logger_dependency),
redis: Redis = Depends(redis_dependency),
session: AsyncSession = Depends(db_session_dependency),
http_client: AsyncClient = Depends(http_client_dependency),
token_cache: TokenCache = Depends(token_cache_dependency),
) -> RequestContext:
"""Provides a RequestContext as a dependency."""
return RequestContext(
request=request,
config=config,
logger=logger,
redis=redis,
session=session,
http_client=http_client,
token_cache=token_cache,
) | 0.913497 | 0.159971 |
from __util import util
class RequestPacket(util.struct):
    """Tars RPC request packet (wire-format struct).

    Fields are written/read in tag order using the tars stream helpers.
    """

    # Both maps are string -> string.
    mapcls_context = util.mapclass(util.string, util.string)
    mapcls_status = util.mapclass(util.string, util.string)

    def __init__(self):
        self.iVersion = 0          # protocol version
        self.cPacketType = 0
        self.iMessageType = 0
        self.iRequestId = 0
        self.sServantName = ''
        self.sFuncName = ''
        self.sBuffer = bytes()     # serialized call arguments
        self.iTimeout = 0
        self.context = RequestPacket.mapcls_context()
        self.status = RequestPacket.mapcls_status()

    @staticmethod
    def writeTo(oos, value):
        """Serialize `value` to output stream `oos`, tagged by field id."""
        oos.write(util.int16, 1, value.iVersion)
        oos.write(util.int8, 2, value.cPacketType)
        oos.write(util.int32, 3, value.iMessageType)
        oos.write(util.int32, 4, value.iRequestId)
        oos.write(util.string, 5, value.sServantName)
        oos.write(util.string, 6, value.sFuncName)
        oos.write(util.bytes, 7, value.sBuffer)
        oos.write(util.int32, 8, value.iTimeout)
        oos.write(RequestPacket.mapcls_context, 9, value.context)
        oos.write(RequestPacket.mapcls_status, 10, value.status)

    @staticmethod
    def readFrom(ios):
        """Deserialize a RequestPacket from input stream `ios`.

        Leftover debug ``print`` statements that ran on every decode have
        been removed.
        """
        value = RequestPacket()
        value.iVersion = ios.read(util.int16, 1, True, 0)
        value.cPacketType = ios.read(util.int8, 2, True, 0)
        value.iMessageType = ios.read(util.int32, 3, True, 0)
        value.iRequestId = ios.read(util.int32, 4, True, 0)
        # NOTE(review): '22222222' looks like a leftover debug default; the
        # field is marked required, so the default should never be used --
        # kept to preserve behavior, but confirm and change to ''.
        value.sServantName = ios.read(util.string, 5, True, '22222222')
        value.sFuncName = ios.read(util.string, 6, True, '')
        value.sBuffer = ios.read(util.bytes, 7, True, value.sBuffer)
        value.iTimeout = ios.read(util.int32, 8, True, 0)
        value.context = ios.read(
            RequestPacket.mapcls_context, 9, True, value.context)
        value.status = ios.read(
            RequestPacket.mapcls_status, 10, True, value.status)
        return value
class ResponsePacket(util.struct):
    """Tars RPC response packet (wire-format struct)."""

    __tars_class__ = "tars.RpcMessage.ResponsePacket"
    # string -> string status map.
    mapcls_status = util.mapclass(util.string, util.string)

    def __init__(self):
        self.iVersion = 0
        self.cPacketType = 0
        self.iRequestId = 0
        self.iMessageType = 0
        self.iRet = 0           # call return code
        self.sBuffer = bytes()  # serialized return value
        # Consistency fix: use this class's own map class rather than
        # RequestPacket's (both are string->string, so behavior is the
        # same, but readFrom/writeTo already use value.mapcls_status).
        self.status = ResponsePacket.mapcls_status()

    @staticmethod
    def writeTo(oos, value):
        """Serialize `value` to output stream `oos`, tagged by field id."""
        oos.write(util.int16, 1, value.iVersion)
        oos.write(util.int8, 2, value.cPacketType)
        oos.write(util.int32, 3, value.iRequestId)
        oos.write(util.int32, 4, value.iMessageType)
        oos.write(util.int32, 5, value.iRet)
        oos.write(util.bytes, 6, value.sBuffer)
        oos.write(value.mapcls_status, 7, value.status)

    @staticmethod
    def readFrom(ios):
        """Deserialize a ResponsePacket from input stream `ios`."""
        value = ResponsePacket()
        value.iVersion = ios.read(util.int16, 1, True)
        value.cPacketType = ios.read(util.int8, 2, True)
        value.iRequestId = ios.read(util.int32, 3, True)
        value.iMessageType = ios.read(util.int32, 4, True)
        value.iRet = ios.read(util.int32, 5, True)
        value.sBuffer = ios.read(util.bytes, 6, True)
        value.status = ios.read(value.mapcls_status, 7, True)
        return value
from __util import util
class RequestPacket(util.struct):
mapcls_context = util.mapclass(util.string, util.string)
mapcls_status = util.mapclass(util.string, util.string)
def __init__(self):
self.iVersion = 0
self.cPacketType = 0
self.iMessageType = 0
self.iRequestId = 0
self.sServantName = ''
self.sFuncName = ''
self.sBuffer = bytes()
self.iTimeout = 0
self.context = RequestPacket.mapcls_context()
self.status = RequestPacket.mapcls_status()
@staticmethod
def writeTo(oos, value):
oos.write(util.int16, 1, value.iVersion)
oos.write(util.int8, 2, value.cPacketType)
oos.write(util.int32, 3, value.iMessageType)
oos.write(util.int32, 4, value.iRequestId)
oos.write(util.string, 5, value.sServantName)
oos.write(util.string, 6, value.sFuncName)
oos.write(util.bytes, 7, value.sBuffer)
oos.write(util.int32, 8, value.iTimeout)
oos.write(RequestPacket.mapcls_context, 9, value.context)
oos.write(RequestPacket.mapcls_status, 10, value.status)
@staticmethod
def readFrom(ios):
value = RequestPacket()
value.iVersion = ios.read(util.int16, 1, True, 0)
print("iVersion = %d" % value.iVersion)
value.cPacketType = ios.read(util.int8, 2, True, 0)
print("cPackerType = %d" % value.cPacketType)
value.iMessageType = ios.read(util.int32, 3, True, 0)
print("iMessageType = %d" % value.iMessageType)
value.iRequestId = ios.read(util.int32, 4, True, 0)
print("iRequestId = %d" % value.iRequestId)
value.sServantName = ios.read(util.string, 5, True, '22222222')
value.sFuncName = ios.read(util.string, 6, True, '')
value.sBuffer = ios.read(util.bytes, 7, True, value.sBuffer)
value.iTimeout = ios.read(util.int32, 8, True, 0)
value.context = ios.read(
RequestPacket.mapcls_context, 9, True, value.context)
value.status = ios.read(
RequestPacket.mapcls_status, 10, True, value.status)
return value
class ResponsePacket(util.struct):
__tars_class__ = "tars.RpcMessage.ResponsePacket"
mapcls_status = util.mapclass(util.string, util.string)
def __init__(self):
self.iVersion = 0
self.cPacketType = 0
self.iRequestId = 0
self.iMessageType = 0
self.iRet = 0
self.sBuffer = bytes()
self.status = RequestPacket.mapcls_status()
@staticmethod
def writeTo(oos, value):
oos.write(util.int16, 1, value.iVersion)
oos.write(util.int8, 2, value.cPacketType)
oos.write(util.int32, 3, value.iRequestId)
oos.write(util.int32, 4, value.iMessageType)
oos.write(util.int32, 5, value.iRet)
oos.write(util.bytes, 6, value.sBuffer)
oos.write(value.mapcls_status, 7, value.status)
@staticmethod
def readFrom(ios):
value = ResponsePacket()
value.iVersion = ios.read(util.int16, 1, True)
value.cPacketType = ios.read(util.int8, 2, True)
value.iRequestId = ios.read(util.int32, 3, True)
value.iMessageType = ios.read(util.int32, 4, True)
value.iRet = ios.read(util.int32, 5, True)
value.sBuffer = ios.read(util.bytes, 6, True)
value.status = ios.read(value.mapcls_status, 7, True)
return value | 0.538983 | 0.1679 |
import os
import re
import subprocess
from setuptools import find_packages, setup
from setuptools.command.develop import develop
from setuptools.command.install import install
from setuptools.command.test import test
__author__ = '<NAME>'
__copyright__ = 'Copyright (c) 2018, Pawelzny'
with open('README.rst', 'r') as readme_file:
readme = readme_file.read()
def get_version(*file_paths):
    """Retrieve ``__version__`` from a file, e.g. project/__init__.py.

    Path components are joined relative to this file's directory (an
    absolute component overrides that, per os.path.join semantics).

    Raises:
        RuntimeError: if no ``__version__`` assignment is found.
    """
    filename = os.path.join(os.path.dirname(__file__), *file_paths)
    # Use a context manager so the handle is closed even on error
    # (the original leaked the open file object).
    with open(filename) as version_file:
        contents = version_file.read()
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
                              contents, re.M)
    if version_match:
        return version_match.group(1)
    raise RuntimeError('Unable to find version string.')
class PostDevelopCommand(develop):
    """Post-installation hook for development mode.

    Installs the pipenv-managed dev dependencies before running the
    standard ``develop`` command.
    """

    def run(self):
        # --deploy aborts if Pipfile.lock is out of date; --system installs
        # into the active interpreter instead of a pipenv virtualenv.
        subprocess.check_call(['pipenv', 'install', '--dev', '--deploy', '--system'])
        develop.run(self)
class TestCommand(test):
    """Run the pytest suite via ``python setup.py test``."""

    def run(self):
        # check_call raises CalledProcessError on test failure, aborting
        # the command with a nonzero exit status.
        subprocess.check_call(['pytest'])
        test.run(self)
# Package metadata and custom commands.  (Dataset-table residue fused onto
# the closing parenthesis has been stripped; it made the file unparsable.)
# NOTE(review): an explicit `version=` is passed alongside
# `use_scm_version=True`; setuptools_scm normally derives the version from
# SCM tags and will override the explicit value -- confirm which is intended.
setup(
    name='dotty_dict',
    version=get_version('dotty_dict', '__init__.py'),
    description="Dictionary wrapper for quick access to deeply nested keys.",
    long_description=readme,
    license="MIT license",
    author="<NAME> @pawelzny",
    author_email='<EMAIL>',
    url='https://github.com/pawelzny/dotty_dict',
    packages=find_packages(exclude=('tests', 'docs', 'bin', 'example')),
    package_dir={'dotty_dict': 'dotty_dict'},
    include_package_data=True,
    use_scm_version=True,
    setup_requires=['setuptools_scm'],
    zip_safe=False,
    keywords='dot notation dict wrapper helper utils lib',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'License :: OSI Approved :: MIT License',
    ],
    cmdclass={
        'develop': PostDevelopCommand,
        'test': TestCommand,
    },
)
import re
import subprocess
from setuptools import find_packages, setup
from setuptools.command.develop import develop
from setuptools.command.install import install
from setuptools.command.test import test
__author__ = '<NAME>'
__copyright__ = 'Copyright (c) 2018, Pawelzny'
with open('README.rst', 'r') as readme_file:
readme = readme_file.read()
def get_version(*file_paths):
"""Retrieves the version from project/__init__.py"""
filename = os.path.join(os.path.dirname(__file__), *file_paths)
version_file = open(filename).read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
class PostDevelopCommand(develop):
"""Post-installation for development mode."""
def run(self):
subprocess.check_call(['pipenv', 'install', '--dev', '--deploy', '--system'])
develop.run(self)
class TestCommand(test):
"""Run tests"""
def run(self):
subprocess.check_call(['pytest'])
test.run(self)
setup(
name='dotty_dict',
version=get_version('dotty_dict', '__init__.py'),
description="Dictionary wrapper for quick access to deeply nested keys.",
long_description=readme,
license="MIT license",
author="<NAME> @pawelzny",
author_email='<EMAIL>',
url='https://github.com/pawelzny/dotty_dict',
packages=find_packages(exclude=('tests', 'docs', 'bin', 'example')),
package_dir={'dotty_dict': 'dotty_dict'},
include_package_data=True,
use_scm_version=True,
setup_requires=['setuptools_scm'],
zip_safe=False,
keywords='dot notation dict wrapper helper utils lib',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'License :: OSI Approved :: MIT License',
],
cmdclass={
'develop': PostDevelopCommand,
'test': TestCommand,
},
) | 0.506103 | 0.104843 |
import os
import multiprocessing
import re
import datetime
import tempfile
import numpy as np
from collections import Counter
from tokenizers import BertWordPieceTokenizer
omitted_strings = ["<media omitted>", "messages to this chat and calls are now secured with end-to-end encryption. tap for more info.", "you deleted this message", "this message was deleted", "missed voice call"]
class Message:
    """A single chat message.

    ``time`` is stored as integer hours since the epoch (see
    WhatsappParser), possibly shifted by WhatsappParser.normalize_time.
    """

    def __init__(self, time, sender, content, i, chat_id):
        self.content = content
        self.time = time      # hours since epoch (may be normalized/shifted)
        self.sender = sender  # name, or numeric id after encode_senders()
        self.i = i            # message index within the parsed log
        self.chat_id = chat_id

    def __str__(self):
        # Heuristic: a raw hour count since 1970 is large (2<<16 hours is
        # roughly 15 years), while normalized times are small offsets and
        # are printed as plain numbers.
        if self.time > 2<<16:
            dt = datetime.datetime.fromtimestamp(self.time * 3600).strftime('%d/%m/%Y, %H:%M')
        else:
            dt = self.time
        return "{}. {} - {}: {}".format(self.i, dt, self.sender, self.content)
class WhatsappParser:
    """Parse exported WhatsApp chat logs into Message objects.

    Lines are parsed in parallel across ``processes`` worker processes;
    results are gathered through a multiprocessing.Manager dict.
    """

    def __init__(self, processes=None):
        # Default to one worker per CPU core.
        self.processes = os.cpu_count() if processes is None else processes
        self.messages = []
        self.people = {}     # sender name -> numeric id (see encode_senders)
        self.time_delta = 0  # cumulative shift applied by normalize_time
        self.manager = multiprocessing.Manager()

    def __parse_messages(self, lines, return_dict, return_id, start_id=0, chat_id=0):
        """Worker: parse a chunk of log lines into return_dict[return_id]."""
        messages = []
        for i, line in enumerate(lines):
            line = line.lower()
            # Skip system/service lines (media placeholders, deletions, ...).
            if any(marker in line for marker in omitted_strings):
                continue
            try:
                # Expected shape: "dd/mm/yyyy, hh:mm - sender: content".
                day, month, year = line.split("/", 3)
                year, hour = year.split(", ", 1)
                hour, minute = hour.split(":", 1)
                minute, sender = minute.split(" - ", 1)
                sender, content = sender.split(": ", 1)
                content = content[:-1]  # drop the trailing newline
                if minute[-1] == "m":
                    # 12-hour clock ("am"/"pm").  hour % 12 maps 12am -> 0
                    # and 12pm -> 12.  The original added 12 unconditionally
                    # for pm, turning 12pm into hour 24, which made datetime
                    # raise ValueError and silently dropped the message.
                    if minute[-2] == "p":
                        hour = int(hour) % 12 + 12
                    else:
                        hour = int(hour) % 12
                    minute = minute[:2]
                if int(year) < 2000:
                    year = int(year) + 2000
                # Store times as whole hours since the epoch.
                stamp = datetime.datetime(int(year), int(month), int(day),
                                          int(hour), int(minute)).timestamp()
                messages.append(Message(int(stamp / 3600), sender, content,
                                        i=i + start_id, chat_id=chat_id))
            except ValueError:
                # Continuation lines of multi-line messages don't match the
                # header format; ignore them.
                pass
        return_dict[return_id] = messages

    def parse_file(self, filename, chat_id=0):
        '''Parse messages from a Whatsapp chat log using multiprocessing'''
        # Only the last -(-2<<14) == 32768 lines are considered.  Use a
        # context manager so the handle is closed (the original leaked it).
        with open(filename, encoding="utf8") as f:
            lines = f.readlines()[-2<<14:]
        chunk_size = len(lines) // self.processes
        return_dict = self.manager.dict()
        jobs = []
        for j in range(self.processes):
            chunk = lines[j * chunk_size:(j + 1) * chunk_size]
            jobs.append(multiprocessing.Process(
                target=self.__parse_messages,
                args=(chunk, return_dict, j, j * chunk_size, chat_id)))
        for job in jobs:
            job.start()
        for job in jobs:
            job.join()
        # Gather worker results.  NOTE(review): integer chunking drops the
        # trailing len(lines) % processes lines, as in the original.
        for parsed in return_dict.values():
            self.messages = np.concatenate((self.messages, parsed))

    def encode_senders(self):
        '''Encode the sender names as numbers'''
        # Most frequent sender gets id 0, next gets 1, and so on.
        counts = Counter(m.sender for m in self.messages).most_common()
        for idx, (name, _count) in enumerate(counts):
            self.people[name] = idx
        for i, m in enumerate(self.messages):
            self.messages[i].sender = self.people[m.sender]

    def flatten(self):
        '''Concatenate the contents of all of the messages into a single string'''
        return ''.join(m.content for m in self.messages)

    def normalize_time(self, delta=None):
        '''Normalize the time and date on all the messages'''
        if delta is None:
            # Negated integer mean of the timestamps.
            delta = - sum(m.time for m in self.messages) // len(self.messages)
        self.time_delta += delta
        for i, m in enumerate(self.messages):
            self.messages[i].time += delta

    def gen_tokenizer(self, min_frequency=6, limit_alphabet=150):
        '''Create a WordPiece tokenizer from the parsed data'''
        # The trainer re-opens the file by name, so the data must be
        # flushed to disk first (the original never flushed, so the
        # trainer could see an empty or truncated file).
        f = tempfile.NamedTemporaryFile()
        f.write(self.flatten().encode("utf8"))
        f.flush()
        tokenizer = BertWordPieceTokenizer()
        tokenizer.train([f.name], min_frequency=min_frequency,
                        limit_alphabet=limit_alphabet)
        f.close()
        return tokenizer
import multiprocessing
import re
import datetime
import tempfile
import numpy as np
from collections import Counter
from tokenizers import BertWordPieceTokenizer
omitted_strings = ["<media omitted>", "messages to this chat and calls are now secured with end-to-end encryption. tap for more info.", "you deleted this message", "this message was deleted", "missed voice call"]
class Message:
def __init__(self, time, sender, content, i, chat_id):
self.content = content
self.time = time
self.sender = sender
self.i = i
self.chat_id = chat_id
def __str__(self):
if self.time > 2<<16:
dt = datetime.datetime.fromtimestamp(self.time * 3600).strftime('%d/%m/%Y, %H:%M')
else:
dt = self.time
return "{}. {} - {}: {}".format(self.i, dt, self.sender, self.content)
class WhatsappParser:
def __init__(self, processes=None):
if processes is None:
self.processes = os.cpu_count()
else:
self.processes = processes
self.messages = []
self.people = {} # Name to id
self.time_delta = 0
self.manager = multiprocessing.Manager()
def __parse_messages(self, lines, return_dict, return_id, start_id=0, chat_id=0):
messages = []
for i, l in enumerate(lines):
l = l.lower()
if all(_ not in l for _ in omitted_strings):
try:
day, month, year = l.split("/", 3)
year, hour = year.split(", ", 1)
hour, minute = hour.split(":", 1)
minute, sender = minute.split(" - ", 1)
sender, content = sender.split(": ", 1)
content = content[:-1]
if minute[-1] == "m":
if minute[-2] == "p":
hour = int(hour) + 12
minute = minute[:2]
if int(year) < 2000:
year = int(year) + 2000
time = int(datetime.datetime(int(year), int(month), int(day), int(hour), int(minute)).timestamp() / 3600)
m = Message(time, sender, content, i=i + start_id, chat_id=chat_id)
messages.append(m)
except ValueError:
pass
return_dict[return_id] = messages
def parse_file(self, filename, chat_id=0):
'''Parse messages from a Whatsapp chat log using multiprocessing'''
lines = open(filename, encoding="utf8").readlines()[-2<<14:]
# Multiprocessing
chunk_size = len(lines) // self.processes
return_dict = self.manager.dict()
jobs = []
for j in range(self.processes):
l = lines[j * chunk_size:(j + 1) * chunk_size]
jobs.append(multiprocessing.Process(target=self.__parse_messages, args=(l, return_dict, j, j * chunk_size, chat_id)))
for j in jobs:
j.start()
for j in jobs:
j.join()
# Flatten the return_dict
for _ in return_dict.values():
self.messages = np.concatenate((self.messages, _))
def encode_senders(self):
'''Encode the sender names as numbers'''
# Make a dictionary of all of the people
c = Counter(map(lambda m: m.sender, self.messages)).most_common()
c = [_[0] for _ in c]
for i, p in enumerate(c):
self.people[p] = i
for i, m in enumerate(self.messages):
self.messages[i].sender = self.people[m.sender]
def flatten(self):
'''Concatenate the contents of all of the messages into a single string'''
return ''.join([_.content for _ in self.messages])
def normalize_time(self, delta=None):
'''Normalize the time and date on all the messages'''
if delta is None:
delta = - sum([_.time for _ in self.messages]) // len(self.messages)
self.time_delta += delta
for i, m in enumerate(self.messages):
self.messages[i].time += delta
def gen_tokenizer(self, min_frequency=6, limit_alphabet=150):
'''Create a WordPiece tokenizer from the parsed data'''
# Store the flattened text in a temporary file
f = tempfile.NamedTemporaryFile()
text = self.flatten()
f.write(text.encode("utf8"))
# Create the tokenizer
tokenizer = BertWordPieceTokenizer()
tokenizer.train([f.name], min_frequency=min_frequency, limit_alphabet=limit_alphabet)
f.close()
return tokenizer | 0.264738 | 0.10942 |
import time
import smbus
from Adafruit_I2C import Adafruit_I2C
# ===========================================================================
# INA219 Class
# ===========================================================================
class INA219:
i2c = None
# ===========================================================================
# I2C ADDRESS/BITS
# ==========================================================================
__INA219_ADDRESS = 0x40 # 1000000 (A0+A1=GND)
__INA219_READ = 0x01
# ===========================================================================
# ===========================================================================
# CONFIG REGISTER (R/W)
# ===========================================================================
__INA219_REG_CONFIG = 0x00
# ===========================================================================
__INA219_CONFIG_RESET = 0x8000 # Reset Bit
__INA219_CONFIG_BVOLTAGERANGE_MASK = 0x2000 # Bus Voltage Range Mask
__INA219_CONFIG_BVOLTAGERANGE_16V = 0x0000 # 0-16V Range
__INA219_CONFIG_BVOLTAGERANGE_32V = 0x2000 # 0-32V Range
__INA219_CONFIG_GAIN_MASK = 0x1800 # Gain Mask
__INA219_CONFIG_GAIN_1_40MV = 0x0000 # Gain 1, 40mV Range
__INA219_CONFIG_GAIN_2_80MV = 0x0800 # Gain 2, 80mV Range
__INA219_CONFIG_GAIN_4_160MV = 0x1000 # Gain 4, 160mV Range
__INA219_CONFIG_GAIN_8_320MV = 0x1800 # Gain 8, 320mV Range
__INA219_CONFIG_BADCRES_MASK = 0x0780 # Bus ADC Resolution Mask
__INA219_CONFIG_BADCRES_9BIT = 0x0080 # 9-bit bus res = 0..511
__INA219_CONFIG_BADCRES_10BIT = 0x0100 # 10-bit bus res = 0..1023
__INA219_CONFIG_BADCRES_11BIT = 0x0200 # 11-bit bus res = 0..2047
__INA219_CONFIG_BADCRES_12BIT = 0x0400 # 12-bit bus res = 0..4097
__INA219_CONFIG_SADCRES_MASK = 0x0078 # Shunt ADC Resolution and Averaging Mask
__INA219_CONFIG_SADCRES_9BIT_1S_84US = 0x0000 # 1 x 9-bit shunt sample
__INA219_CONFIG_SADCRES_10BIT_1S_148US = 0x0008 # 1 x 10-bit shunt sample
__INA219_CONFIG_SADCRES_11BIT_1S_276US = 0x0010 # 1 x 11-bit shunt sample
__INA219_CONFIG_SADCRES_12BIT_1S_532US = 0x0018 # 1 x 12-bit shunt sample
__INA219_CONFIG_SADCRES_12BIT_2S_1060US = 0x0048 # 2 x 12-bit shunt samples averaged together
__INA219_CONFIG_SADCRES_12BIT_4S_2130US = 0x0050 # 4 x 12-bit shunt samples averaged together
__INA219_CONFIG_SADCRES_12BIT_8S_4260US = 0x0058 # 8 x 12-bit shunt samples averaged together
__INA219_CONFIG_SADCRES_12BIT_16S_8510US = 0x0060 # 16 x 12-bit shunt samples averaged together
__INA219_CONFIG_SADCRES_12BIT_32S_17MS = 0x0068 # 32 x 12-bit shunt samples averaged together
__INA219_CONFIG_SADCRES_12BIT_64S_34MS = 0x0070 # 64 x 12-bit shunt samples averaged together
__INA219_CONFIG_SADCRES_12BIT_128S_69MS = 0x0078 # 128 x 12-bit shunt samples averaged together
__INA219_CONFIG_MODE_MASK = 0x0007 # Operating Mode Mask
__INA219_CONFIG_MODE_POWERDOWN = 0x0000
__INA219_CONFIG_MODE_SVOLT_TRIGGERED = 0x0001
__INA219_CONFIG_MODE_BVOLT_TRIGGERED = 0x0002
__INA219_CONFIG_MODE_SANDBVOLT_TRIGGERED = 0x0003
__INA219_CONFIG_MODE_ADCOFF = 0x0004
__INA219_CONFIG_MODE_SVOLT_CONTINUOUS = 0x0005
__INA219_CONFIG_MODE_BVOLT_CONTINUOUS = 0x0006
__INA219_CONFIG_MODE_SANDBVOLT_CONTINUOUS = 0x0007
# ===========================================================================
# ===========================================================================
# SHUNT VOLTAGE REGISTER (R)
# ===========================================================================
__INA219_REG_SHUNTVOLTAGE = 0x01
# ===========================================================================
# ===========================================================================
# BUS VOLTAGE REGISTER (R)
# ===========================================================================
__INA219_REG_BUSVOLTAGE = 0x02
# ===========================================================================
# ===========================================================================
# POWER REGISTER (R)
# ===========================================================================
__INA219_REG_POWER = 0x03
# ===========================================================================
# ==========================================================================
# CURRENT REGISTER (R)
# ===========================================================================
__INA219_REG_CURRENT = 0x04
# ===========================================================================
# ===========================================================================
# CALIBRATION REGISTER (R/W)
# ===========================================================================
__INA219_REG_CALIBRATION = 0x05
# ===========================================================================
# Constructor
def __init__(self, address=__INA219_ADDRESS, debug=False):
    """Open the I2C connection and apply the default 32V / 2A calibration.

    address -- 7-bit I2C address of the INA219 (default 0x40, A0+A1=GND)
    debug   -- when True, also enable debug output on the I2C layer
    """
    # Fix: `debug` was previously hard-coded to False in the Adafruit_I2C
    # constructor, so INA219(debug=True) never reached the I2C layer.
    self.i2c = Adafruit_I2C(address, debug=debug)
    self.address = address
    self.debug = debug
    self.ina219SetCalibration_32V_2A()
def twosToInt(self, val, len):
    """Interpret `val` as a `len`-bit two's-complement integer.

    Values with the sign bit set map to their negative counterpart;
    everything else is returned unchanged.
    """
    sign_bit = 1 << (len - 1)
    if val & sign_bit:
        return val - (1 << len)
    return val
def ina219SetCalibration_32V_2A(self):
    """Configure the INA219 for the 32V / 2A range (default calibration).

    Writes the calibration register (Cal = 0x1000, current LSB = 100uA)
    and then the configuration register: 32V bus range, /8 gain (320mV
    shunt range), 12-bit ADCs, continuous shunt + bus conversions.
    """
    self.ina219_currentDivider_mA = 10  # Current LSB = 100uA per bit (1000/100 = 10)
    self.ina219_powerDivider_mW = 2  # Power LSB = 1mW per bit (2/1)
    # Set Calibration register to 'Cal' calculated above, MSB first.
    # (Local renamed from 'bytes' so the builtin is not shadowed.)
    payload = [(0x1000 >> 8) & 0xFF, 0x1000 & 0xFF]
    self.i2c.writeList(self.__INA219_REG_CALIBRATION, payload)
    # Set Config register to take into account the settings above
    config = self.__INA219_CONFIG_BVOLTAGERANGE_32V | \
             self.__INA219_CONFIG_GAIN_8_320MV | \
             self.__INA219_CONFIG_BADCRES_12BIT | \
             self.__INA219_CONFIG_SADCRES_12BIT_1S_532US | \
             self.__INA219_CONFIG_MODE_SANDBVOLT_CONTINUOUS
    payload = [(config >> 8) & 0xFF, config & 0xFF]
    self.i2c.writeList(self.__INA219_REG_CONFIG, payload)
def getBusVoltage_raw(self):
    """Read the bus-voltage register and return the voltage in millivolts."""
    result = self.i2c.readU16(self.__INA219_REG_BUSVOLTAGE)
    # Shift to the right 3 to drop CNVR and OVF and multiply by LSB (4mV).
    return (result >> 3) * 4
def getShuntVoltage_raw(self):
    """Read the raw signed 16-bit shunt-voltage register (MSB first).

    The register holds a two's-complement value; twosToInt() already
    returns the value unchanged when the sign bit is clear, so the
    previous positive/negative branch was redundant and is folded into a
    single call.
    """
    result = self.i2c.readList(self.__INA219_REG_SHUNTVOLTAGE, 2)
    return self.twosToInt((result[0] << 8) | result[1], 16)
def getCurrent_raw(self):
    """Read the raw signed current register value.

    The register content is a 16-bit two's-complement number (MSB first);
    scale with ina219_currentDivider_mA to obtain milliamps.
    """
    result = self.i2c.readList(self.__INA219_REG_CURRENT,2)
    # Sign-extend when the MSB's high bit is set (negative current).
    if (result[0] >> 7 == 1):
        testint = (result[0]*256 + result[1])
        othernew = self.twosToInt(testint, 16)
        return othernew
    else:
        # Positive value: plain big-endian combination of the two bytes.
        return (result[0] << 8) | (result[1])
def getPower_raw(self):
    """Read the raw signed power register value.

    The register content is a 16-bit two's-complement number (MSB first);
    scale with ina219_powerDivider_mW to obtain milliwatts.
    """
    result = self.i2c.readList(self.__INA219_REG_POWER,2)
    # Sign-extend when the MSB's high bit is set.
    if (result[0] >> 7 == 1):
        testint = (result[0]*256 + result[1])
        othernew = self.twosToInt(testint, 16)
        return othernew
    else:
        # Positive value: plain big-endian combination of the two bytes.
        return (result[0] << 8) | (result[1])
def getShuntVoltage_mV(self):
    """Return the shunt voltage in millivolts (raw LSB = 10uV = 0.01mV)."""
    return self.getShuntVoltage_raw() * 0.01
def getBusVoltage_V(self):
    """Return the bus voltage in volts (raw value is in millivolts)."""
    return self.getBusVoltage_raw() * 0.001
def getCurrent_mA(self):
    """Return the measured current in milliamps."""
    valueDec = self.getCurrent_raw()
    # NOTE(review): on Python 2 this `/=` between two ints floor-divides,
    # truncating sub-mA resolution — confirm whether that is intended.
    valueDec /= self.ina219_currentDivider_mA
    return valueDec
def getPower_mW(self):
    """Return the measured power in milliwatts."""
    valueDec = self.getPower_raw()
    valueDec /= self.ina219_powerDivider_mW
    # Fix: stray dataset text ("| Subfact_ina219.py") was fused onto this
    # return line, which would raise a NameError at runtime; return only
    # the scaled value.
    return valueDec
import time
import smbus
from Adafruit_I2C import Adafruit_I2C
# ===========================================================================
# INA219 Class
# ===========================================================================
class INA219:
    """Driver for the TI INA219 high-side DC current/power monitor (I2C).

    The constructor opens the I2C connection via Adafruit_I2C and applies
    the default 32V / 2A calibration.  get*_raw() methods return raw
    register values; the remaining getters return scaled mV / V / mA / mW.

    Fixes relative to the previous revision:
    - stray dataset text fused onto the last return line (runtime error);
    - the `debug` flag is now forwarded to the I2C layer;
    - the local `bytes` no longer shadows the builtin;
    - the duplicated sign-extension branches are folded into one helper.
    """
    i2c = None

    # ===========================================================================
    # I2C ADDRESS/BITS
    # ===========================================================================
    __INA219_ADDRESS = 0x40  # 1000000 (A0+A1=GND)
    __INA219_READ = 0x01

    # ===========================================================================
    # CONFIG REGISTER (R/W)
    # ===========================================================================
    __INA219_REG_CONFIG = 0x00
    __INA219_CONFIG_RESET = 0x8000  # Reset Bit
    __INA219_CONFIG_BVOLTAGERANGE_MASK = 0x2000  # Bus Voltage Range Mask
    __INA219_CONFIG_BVOLTAGERANGE_16V = 0x0000  # 0-16V Range
    __INA219_CONFIG_BVOLTAGERANGE_32V = 0x2000  # 0-32V Range
    __INA219_CONFIG_GAIN_MASK = 0x1800  # Gain Mask
    __INA219_CONFIG_GAIN_1_40MV = 0x0000  # Gain 1, 40mV Range
    __INA219_CONFIG_GAIN_2_80MV = 0x0800  # Gain 2, 80mV Range
    __INA219_CONFIG_GAIN_4_160MV = 0x1000  # Gain 4, 160mV Range
    __INA219_CONFIG_GAIN_8_320MV = 0x1800  # Gain 8, 320mV Range
    __INA219_CONFIG_BADCRES_MASK = 0x0780  # Bus ADC Resolution Mask
    __INA219_CONFIG_BADCRES_9BIT = 0x0080  # 9-bit bus res = 0..511
    __INA219_CONFIG_BADCRES_10BIT = 0x0100  # 10-bit bus res = 0..1023
    __INA219_CONFIG_BADCRES_11BIT = 0x0200  # 11-bit bus res = 0..2047
    __INA219_CONFIG_BADCRES_12BIT = 0x0400  # 12-bit bus res = 0..4097
    __INA219_CONFIG_SADCRES_MASK = 0x0078  # Shunt ADC Resolution and Averaging Mask
    __INA219_CONFIG_SADCRES_9BIT_1S_84US = 0x0000  # 1 x 9-bit shunt sample
    __INA219_CONFIG_SADCRES_10BIT_1S_148US = 0x0008  # 1 x 10-bit shunt sample
    __INA219_CONFIG_SADCRES_11BIT_1S_276US = 0x0010  # 1 x 11-bit shunt sample
    __INA219_CONFIG_SADCRES_12BIT_1S_532US = 0x0018  # 1 x 12-bit shunt sample
    __INA219_CONFIG_SADCRES_12BIT_2S_1060US = 0x0048  # 2 x 12-bit shunt samples averaged together
    __INA219_CONFIG_SADCRES_12BIT_4S_2130US = 0x0050  # 4 x 12-bit shunt samples averaged together
    __INA219_CONFIG_SADCRES_12BIT_8S_4260US = 0x0058  # 8 x 12-bit shunt samples averaged together
    __INA219_CONFIG_SADCRES_12BIT_16S_8510US = 0x0060  # 16 x 12-bit shunt samples averaged together
    __INA219_CONFIG_SADCRES_12BIT_32S_17MS = 0x0068  # 32 x 12-bit shunt samples averaged together
    __INA219_CONFIG_SADCRES_12BIT_64S_34MS = 0x0070  # 64 x 12-bit shunt samples averaged together
    __INA219_CONFIG_SADCRES_12BIT_128S_69MS = 0x0078  # 128 x 12-bit shunt samples averaged together
    __INA219_CONFIG_MODE_MASK = 0x0007  # Operating Mode Mask
    __INA219_CONFIG_MODE_POWERDOWN = 0x0000
    __INA219_CONFIG_MODE_SVOLT_TRIGGERED = 0x0001
    __INA219_CONFIG_MODE_BVOLT_TRIGGERED = 0x0002
    __INA219_CONFIG_MODE_SANDBVOLT_TRIGGERED = 0x0003
    __INA219_CONFIG_MODE_ADCOFF = 0x0004
    __INA219_CONFIG_MODE_SVOLT_CONTINUOUS = 0x0005
    __INA219_CONFIG_MODE_BVOLT_CONTINUOUS = 0x0006
    __INA219_CONFIG_MODE_SANDBVOLT_CONTINUOUS = 0x0007

    # ===========================================================================
    # SHUNT VOLTAGE REGISTER (R)
    # ===========================================================================
    __INA219_REG_SHUNTVOLTAGE = 0x01

    # ===========================================================================
    # BUS VOLTAGE REGISTER (R)
    # ===========================================================================
    __INA219_REG_BUSVOLTAGE = 0x02

    # ===========================================================================
    # POWER REGISTER (R)
    # ===========================================================================
    __INA219_REG_POWER = 0x03

    # ===========================================================================
    # CURRENT REGISTER (R)
    # ===========================================================================
    __INA219_REG_CURRENT = 0x04

    # ===========================================================================
    # CALIBRATION REGISTER (R/W)
    # ===========================================================================
    __INA219_REG_CALIBRATION = 0x05

    def __init__(self, address=__INA219_ADDRESS, debug=False):
        """Open the I2C connection and apply the 32V / 2A calibration."""
        # Fix: the debug flag used to be hard-coded to False here, so
        # INA219(debug=True) never reached the I2C layer.
        self.i2c = Adafruit_I2C(address, debug=debug)
        self.address = address
        self.debug = debug
        self.ina219SetCalibration_32V_2A()

    def twosToInt(self, val, len):
        """Interpret `val` as a `len`-bit two's-complement integer."""
        if val & (1 << (len - 1)):
            val -= 1 << len
        return val

    def ina219SetCalibration_32V_2A(self):
        """Calibrate for 32V bus range / 2A max current (Cal = 0x1000)."""
        self.ina219_currentDivider_mA = 10  # Current LSB = 100uA per bit (1000/100 = 10)
        self.ina219_powerDivider_mW = 2  # Power LSB = 1mW per bit (2/1)
        # Write the calibration register, MSB first.  (Local renamed from
        # 'bytes' so the builtin is not shadowed.)
        payload = [(0x1000 >> 8) & 0xFF, 0x1000 & 0xFF]
        self.i2c.writeList(self.__INA219_REG_CALIBRATION, payload)
        # Set Config register to take into account the settings above
        config = self.__INA219_CONFIG_BVOLTAGERANGE_32V | \
                 self.__INA219_CONFIG_GAIN_8_320MV | \
                 self.__INA219_CONFIG_BADCRES_12BIT | \
                 self.__INA219_CONFIG_SADCRES_12BIT_1S_532US | \
                 self.__INA219_CONFIG_MODE_SANDBVOLT_CONTINUOUS
        payload = [(config >> 8) & 0xFF, config & 0xFF]
        self.i2c.writeList(self.__INA219_REG_CONFIG, payload)

    def _readSigned16(self, register):
        """Read a big-endian 16-bit register and sign-extend the result.

        twosToInt() returns positive values unchanged, so the previous
        explicit positive/negative branching in each getter was redundant.
        """
        result = self.i2c.readList(register, 2)
        return self.twosToInt((result[0] << 8) | result[1], 16)

    def getBusVoltage_raw(self):
        """Return the bus voltage in millivolts (LSB = 4mV)."""
        result = self.i2c.readU16(self.__INA219_REG_BUSVOLTAGE)
        # Shift to the right 3 to drop CNVR and OVF and multiply by LSB
        return (result >> 3) * 4

    def getShuntVoltage_raw(self):
        """Return the raw signed shunt-voltage register (LSB = 10uV)."""
        return self._readSigned16(self.__INA219_REG_SHUNTVOLTAGE)

    def getCurrent_raw(self):
        """Return the raw signed current register value."""
        return self._readSigned16(self.__INA219_REG_CURRENT)

    def getPower_raw(self):
        """Return the raw signed power register value."""
        return self._readSigned16(self.__INA219_REG_POWER)

    def getShuntVoltage_mV(self):
        """Return the shunt voltage in millivolts."""
        return self.getShuntVoltage_raw() * 0.01

    def getBusVoltage_V(self):
        """Return the bus voltage in volts."""
        return self.getBusVoltage_raw() * 0.001

    def getCurrent_mA(self):
        """Return the current in milliamps."""
        valueDec = self.getCurrent_raw()
        valueDec /= self.ina219_currentDivider_mA
        return valueDec

    def getPower_mW(self):
        """Return the power in milliwatts.

        Fix: stray dataset text fused onto the original return line caused
        a runtime error; only the scaled value is returned now.
        """
        valueDec = self.getPower_raw()
        valueDec /= self.ina219_powerDivider_mW
        return valueDec
from __future__ import absolute_import
from time import strptime, mktime
from datetime import datetime
import fnmatch
import os
from astropy.units import Unit, nm, equivalencies
from sqlalchemy import Column, Integer, Float, String, DateTime, Boolean,\
Table, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sunpy.time import parse_time
from sunpy.io import fits, file_tools as sunpy_filetools
from sunpy.util import print_table
from sunpy.extern.six.moves import map as imap
from sunpy.extern import six
from sunpy import config
# Datetime display format, taken from the user's sunpy configuration.
TIME_FORMAT = config.get("general", "time_format")

# Public API of this module.
__all__ = [
    'WaveunitNotFoundError', 'WaveunitNotConvertibleError', 'JSONDump',
    'FitsHeaderEntry', 'FitsKeyComment', 'Tag', 'DatabaseEntry',
    'entries_from_query_result', 'entries_from_file', 'entries_from_dir',
    'display_entries']

# Declarative base shared by every ORM-mapped class below.
Base = declarative_base()

# required for the many-to-many relation on tags:entries
association_table = Table('association', Base.metadata,
    Column('tag_name', String, ForeignKey('tags.name')),
    Column('entry_id', Integer, ForeignKey('data.id'))
)
class WaveunitNotFoundError(Exception):
    """Raised when a wavelength unit is missing from a FITS header or a
    VSO query result block.
    """
    def __init__(self, obj):
        self.obj = obj

    def __str__(self):  # pragma: no cover
        message = (
            'the wavelength unit cannot be found in {0}'
            ' and default_waveunit not specified when opening the database')
        return message.format(self.obj)
class WaveunitNotConvertibleError(Exception):
    """Raised when a wavelength unit string cannot be turned into an
    astropy.units.Unit instance.
    """
    def __init__(self, waveunit):
        self.waveunit = waveunit

    def __str__(self):  # pragma: no cover
        template = (
            'the waveunit {0!r} cannot be converted to an '
            'astropy.units.Unit instance')
        return template.format(self.waveunit)
# TODO: move this function outside this package (sunpy.util? sunpy.time?)
def timestamp2datetime(format, string):
    """Parse *string* according to the strptime *format* and return a
    naive local-time datetime instance.
    """
    parsed = strptime(string, format)
    return datetime.fromtimestamp(mktime(parsed))
class JSONDump(Base):
    """One stored JSON dump; the dump text itself serves as primary key."""
    __tablename__ = 'jsondumps'

    dump = Column(String, nullable=False, primary_key=True)

    def __init__(self, dump):
        self.dump = dump

    def __eq__(self, other):
        # Two dumps are equal exactly when their text matches.
        return self.dump == other.dump

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        return self.dump

    def __repr__(self):  # pragma: no cover
        return '<{0}(dump {1!r})>'.format(type(self).__name__, self.dump)
class FitsHeaderEntry(Base):
    """A single key/value pair from a FITS header, linked to one entry."""
    __tablename__ = 'fitsheaderentries'

    dbentry_id = Column(Integer, ForeignKey('data.id'))
    id = Column(Integer, primary_key=True)
    key = Column(String, nullable=False)
    value = Column(String)

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __eq__(self, other):
        # Entries not yet committed (id is None) compare on key/value only.
        ids_match = self.id == other.id or self.id is None or other.id is None
        return ids_match and self.key == other.key and self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):  # pragma: no cover
        return '<{0}(id {1}, key {2!r}, value {3!r})>'.format(
            type(self).__name__, self.id, self.key, self.value)
class FitsKeyComment(Base):
    """A comment attached to a FITS header key, linked to one entry."""
    __tablename__ = 'fitskeycomments'

    dbentry_id = Column(Integer, ForeignKey('data.id'))
    id = Column(Integer, primary_key=True)
    key = Column(String, nullable=False)
    value = Column(String)

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __eq__(self, other):
        # Comments not yet committed (id is None) compare on key/value only.
        ids_match = self.id == other.id or self.id is None or other.id is None
        return ids_match and self.key == other.key and self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):  # pragma: no cover
        return '<{0}(id {1}, key {2!r}, value {3!r})>'.format(
            type(self).__name__, self.id, self.key, self.value)
class Tag(Base):
    """A user-defined tag; related to entries through association_table."""
    __tablename__ = 'tags'

    name = Column(String, nullable=False, primary_key=True)

    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        return self.name == other.name

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        return self.name

    def __repr__(self):  # pragma: no cover
        return '<{0}(name {1!r})>'.format(type(self).__name__, self.name)
class DatabaseEntry(Base):
    """
    DatabaseEntry()
    The class :class:`DatabaseEntry` represents the main table of the database
    and each instance represents one record that *can* be saved in the
    database.
    Parameters
    ----------
    id : int
        A unique ID number. By default it is None, but automatically set to the
        maximum number plus one when this entry is added to the database.
    source : string
        The source is the name of an observatory or the name of a network of
        observatories.
    provider : string
        The name of the server which provides the retrieved data.
    physobs : string
        A physical observable identifier used by VSO.
    fileid : string
        The file ID is a string defined by the data provider that should point
        to a specific data product. The association of fileid to the specific
        data may change sometime, if the fileid always points to the latest
        calibrated data.
    observation_time_start : datetime
        The date and time when the observation of the data started.
    observation_time_end : datetime
        The date and time when the observation of the data ended.
    instrument : string
        The instrument which was used to observe the data.
    size : float
        The size of the data in kilobytes.
    wavemin : float
        The value of the measured wave length.
    wavemax : float
        This is the same value as ``wavemin``. The value is stored twice,
        because each ``suds.sudsobject.QueryResponseBlock`` which is used by
        the vso package contains both these values.
    path : string
        A local file path where the according FITS file is saved.
    download_time : datetime
        The date and time when the files connected to a query have been
        downloaded. Note: this is not the date and time when this entry has
        been added to a database!
    starred : bool
        Entries can be starred to mark them. By default, this value is False.
    fits_header_entries : list
        A list of ``FitsHeaderEntry`` instances.
    tags : list
        A list of ``Tag`` instances. Use `sunpy.database.Database.tag` to
        add a new tag or multiple tags to a specific entry.
    """
    __tablename__ = 'data'

    # FIXME: primary key is data provider + file ID + download_time!
    id = Column(Integer, primary_key=True)
    source = Column(String)
    provider = Column(String)
    physobs = Column(String)
    fileid = Column(String)
    observation_time_start = Column(DateTime)
    observation_time_end = Column(DateTime)
    instrument = Column(String)
    size = Column(Float)
    wavemin = Column(Float)
    wavemax = Column(Float)
    path = Column(String)
    download_time = Column(DateTime)
    starred = Column(Boolean, default=False)
    fits_header_entries = relationship('FitsHeaderEntry')
    fits_key_comments = relationship('FitsKeyComment')
    # many-to-many relation via the module-level association table
    tags = relationship('Tag', secondary=association_table, backref='data')

    @classmethod
    def _from_query_result_block(cls, qr_block, default_waveunit=None):
        """Make a new :class:`DatabaseEntry` instance from a VSO query result
        block. The values of :attr:`wavemin` and :attr:`wavemax` are converted
        to nm (nanometres).
        Parameters
        ----------
        qr_block : suds.sudsobject.QueryResponseBlock
            A query result block is usually not created directly; instead,
            one gets instances of ``suds.sudsobject.QueryResponseBlock`` by
            iterating over a VSO query result.
        default_waveunit : str, optional
            The wavelength unit that is used if it cannot be found in the
            `qr_block`.
        Examples
        --------
        >>> from sunpy.net import vso
        >>> from sunpy.database.tables import DatabaseEntry
        >>> client = vso.VSOClient()
        >>> qr = client.query(
        ...     vso.attrs.Time('2001/1/1', '2001/1/2'),
        ...     vso.attrs.Instrument('eit'))
        >>> entry = DatabaseEntry._from_query_result_block(qr[0])
        >>> entry.source
        'SOHO'
        >>> entry.provider
        'SDAC'
        >>> entry.physobs
        'intensity'
        >>> entry.fileid
        '/archive/soho/private/data/processed/eit/lz/2001/01/efz20010101.010014'
        >>> entry.observation_time_start, entry.observation_time_end
        (datetime.datetime(2001, 1, 1, 1, 0, 14), datetime.datetime(2001, 1, 1, 1, 0, 21))
        >>> entry.instrument
        'EIT'
        >>> entry.size
        2059.0
        >>> entry.wavemin, entry.wavemax
        (17.1, 17.1)
        """
        time_start = timestamp2datetime('%Y%m%d%H%M%S', qr_block.time.start)
        time_end = timestamp2datetime('%Y%m%d%H%M%S', qr_block.time.end)
        wave = qr_block.wave
        unit = None
        if wave.waveunit is None:
            # No unit in the block: fall back to the caller-supplied default.
            if default_waveunit is not None:
                unit = Unit(default_waveunit)
        else:
            # some query response blocks store the unit "kev",
            # but AstroPy only understands "keV". See issue #766.
            waveunit = wave.waveunit
            if waveunit == "kev":
                waveunit = "keV"
            unit = Unit(waveunit)
        if wave.wavemin is None:
            wavemin = None
        else:
            if unit is None:
                raise WaveunitNotFoundError(qr_block)
            # Convert to nanometres via spectral equivalencies.
            wavemin = unit.to(nm, float(wave.wavemin), equivalencies.spectral())
        if wave.wavemax is None:
            wavemax = None
        else:
            if unit is None:
                raise WaveunitNotFoundError(qr_block)
            wavemax = unit.to(nm, float(wave.wavemax), equivalencies.spectral())
        source = str(qr_block.source) if qr_block.source is not None else None
        provider = str(qr_block.provider) if qr_block.provider is not None else None
        fileid = str(qr_block.fileid) if qr_block.fileid is not None else None
        instrument = str(qr_block.instrument) if qr_block.instrument is not None else None
        physobs = getattr(qr_block, 'physobs', None)
        if physobs is not None:
            physobs = str(physobs)
        return cls(
            source=source, provider=provider, physobs=physobs, fileid=fileid,
            observation_time_start=time_start, observation_time_end=time_end,
            instrument=instrument, size=qr_block.size,
            wavemin=wavemin, wavemax=wavemax)

    def __eq__(self, other):
        # Wavelengths are compared rounded to 10 decimal places to tolerate
        # floating-point noise from unit conversion.
        wavemins_equal = self.wavemin is None and other.wavemin is None or\
            self.wavemin is not None and other.wavemin is not None and\
            round(self.wavemin, 10) == round(other.wavemin, 10)
        wavemaxs_equal = self.wavemax is None and other.wavemax is None or\
            self.wavemax is not None and other.wavemax is not None and\
            round(self.wavemax, 10) == round(other.wavemax, 10)
        return (
            (self.id == other.id or self.id is None or other.id is None) and
            self.source == other.source and
            self.provider == other.provider and
            self.physobs == other.physobs and
            self.fileid == other.fileid and
            self.observation_time_start == other.observation_time_start and
            self.observation_time_end == other.observation_time_end and
            self.instrument == other.instrument and
            self.size == other.size and
            wavemins_equal and
            wavemaxs_equal and
            self.path == other.path and
            self.download_time == other.download_time and
            bool(self.starred) == bool(other.starred) and
            self.fits_header_entries == other.fits_header_entries and
            self.tags == other.tags)

    def __ne__(self, other):  # pragma: no cover
        return not (self == other)

    def __repr__(self):  # pragma: no cover
        attrs = [
            'id', 'source', 'provider', 'physobs', 'fileid',
            'observation_time_start', 'observation_time_end', 'instrument',
            'size', 'wavemin', 'wavemax', 'path', 'download_time', 'starred',
            'fits_header_entries', 'tags']
        ret = '<{0}('.format(self.__class__.__name__)
        # Only truthy attributes are included in the representation.
        for attr in attrs:
            value = getattr(self, attr, None)
            if value:
                ret += '{0} {1!r}, '.format(attr, value)
        ret = ret.rstrip(', ')
        ret += ')>'
        return ret
def entries_from_query_result(qr, default_waveunit=None):
    """Use a query response returned from :meth:`sunpy.net.vso.VSOClient.query`
    or :meth:`sunpy.net.vso.VSOClient.query_legacy` to generate instances of
    :class:`DatabaseEntry`. Return an iterator over those instances.
    Parameters
    ----------
    qr : sunpy.net.vso.vso.QueryResponse
        The query response from which to build the database entries.
    default_waveunit : str, optional
        See :meth:`sunpy.database.DatabaseEntry.from_query_result_block`.
    Examples
    --------
    >>> from sunpy.net import vso
    >>> from sunpy.database.tables import entries_from_query_result
    >>> client = vso.VSOClient()
    >>> qr = client.query(
    ...     vso.attrs.Time('2001/1/1', '2001/1/2'),
    ...     vso.attrs.Instrument('eit'))
    >>> entries = entries_from_query_result(qr)
    >>> entry = entries.next()
    >>> entry.source
    'SOHO'
    >>> entry.provider
    'SDAC'
    >>> entry.physobs
    'intensity'
    >>> entry.fileid
    '/archive/soho/private/data/processed/eit/lz/2001/01/efz20010101.010014'
    >>> entry.observation_time_start, entry.observation_time_end
    (datetime.datetime(2001, 1, 1, 1, 0, 14), datetime.datetime(2001, 1, 1, 1, 0, 21))
    >>> entry.instrument
    'EIT'
    >>> entry.size
    2059.0
    >>> entry.wavemin, entry.wavemax
    (17.1, 17.1)
    """
    # Lazily convert each VSO result block into one DatabaseEntry.
    for block in qr:
        yield DatabaseEntry._from_query_result_block(block, default_waveunit)
def entries_from_file(file, default_waveunit=None):
    """Use the headers of a FITS file to generate an iterator of
    :class:`sunpy.database.tables.DatabaseEntry` instances. Gathered
    information will be saved in the attribute `fits_header_entries`. If the
    key INSTRUME, WAVELNTH or DATE-OBS / DATE_OBS is available, the attribute
    `instrument`, `wavemin` and `wavemax` or `observation_time_start` is set,
    respectively. If the wavelength unit can be read, the values of `wavemin`
    and `wavemax` are converted to nm (nanometres). The value of the `file`
    parameter is used to set the attribute `path` of each generated database
    entry.
    Parameters
    ----------
    file : str or file-like object
        Either a path pointing to a FITS file or a an opened file-like object.
        If an opened file object, its mode must be one of the following rb,
        rb+, or ab+.
    default_waveunit : str, optional
        The wavelength unit that is used for a header if it cannot be
        found.
    Raises
    ------
    sunpy.database.WaveunitNotFoundError
        If `default_waveunit` is not given and the wavelength unit cannot
        be found in one of the FITS headers
    sunpy.WaveunitNotConvertibleError
        If a wavelength unit could be found but cannot be used to create an
        instance of the type ``astropy.units.Unit``. This can be the case
        for example if a FITS header has the key `WAVEUNIT` with the value
        `nonsense`.
    Examples
    --------
    >>> from sunpy.database.tables import entries_from_file
    >>> import sunpy.data
    >>> sunpy.data.download_sample_data(overwrite=False)  # doctest: +SKIP
    >>> import sunpy.data.sample
    >>> entries = list(entries_from_file(sunpy.data.sample.SWAP_LEVEL1_IMAGE))
    >>> len(entries)
    1
    >>> entry = entries.pop()
    >>> entry.instrument
    'SWAP'
    >>> entry.observation_time_start, entry.observation_time_end
    (datetime.datetime(2012, 1, 1, 0, 16, 7, 836000), None)
    >>> entry.wavemin, entry.wavemax
    (17.400000000000002, 17.400000000000002)
    >>> len(entry.fits_header_entries)
    111
    """
    headers = fits.get_header(file)
    # Record the originating path when one is available.
    if isinstance(file, (str, six.text_type)):
        filename = file
    else:
        filename = getattr(file, 'name', None)
    # One DatabaseEntry per HDU header in the file.
    for header in headers:
        entry = DatabaseEntry(path=filename)
        for key, value in six.iteritems(header):
            # Yes, it is possible to have an empty key in a FITS file.
            # Example: sunpy.data.sample.EIT_195_IMAGE
            # Don't ask me why this could be a good idea.
            if key == '':
                value = str(value)
            elif key == 'KEYCOMMENTS':
                # KEYCOMMENTS maps header keys to their comments; store each
                # as a FitsKeyComment instead of a header entry.
                for k, v in six.iteritems(value):
                    entry.fits_key_comments.append(FitsKeyComment(k, v))
                continue
            entry.fits_header_entries.append(FitsHeaderEntry(key, value))
        waveunit = fits.extract_waveunit(header)
        if waveunit is None:
            waveunit = default_waveunit
        unit = None
        if waveunit is not None:
            try:
                unit = Unit(waveunit)
            except ValueError:
                raise WaveunitNotConvertibleError(waveunit)
        # Second pass: derive typed attributes from well-known header keys.
        for header_entry in entry.fits_header_entries:
            key, value = header_entry.key, header_entry.value
            if key == 'INSTRUME':
                entry.instrument = value
            elif key == 'WAVELNTH':
                if unit is None:
                    raise WaveunitNotFoundError(file)
                # use the value of `unit` to convert the wavelength to nm
                entry.wavemin = entry.wavemax = unit.to(
                    nm, value, equivalencies.spectral())
            # NOTE: the key DATE-END or DATE_END is not part of the official
            # FITS standard, but many FITS files use it in their header
            elif key in ('DATE-END', 'DATE_END'):
                entry.observation_time_end = parse_time(value)
            elif key in ('DATE-OBS', 'DATE_OBS'):
                entry.observation_time_start = parse_time(value)
        yield entry
def entries_from_dir(fitsdir, recursive=False, pattern='*',
                     default_waveunit=None):
    """Search the given directory for FITS files and use the corresponding FITS
    headers to generate instances of :class:`DatabaseEntry`. FITS files are
    detected by reading the content of each file, the `pattern` argument may be
    used to avoid reading entire directories if one knows that all FITS files
    have the same filename extension.
    Parameters
    ----------
    fitsdir : string
        The directory where to look for FITS files.
    recursive : bool, optional
        If True, the given directory will be searched recursively. Otherwise,
        only the given directory and no subdirectories are searched. The
        default is `False`, i.e. the given directory is not searched
        recursively.
    pattern : string, optional
        The pattern can be used to filter the list of filenames before the
        files are attempted to be read. The default is to collect all files.
        This value is passed to the function :func:`fnmatch.filter`, see its
        documentation for more information on the supported syntax.
    default_waveunit : str, optional
        See
        :meth:`sunpy.database.tables.DatabaseEntry.add_fits_header_entries_from_file`.
    Returns
    -------
    generator of (DatabaseEntry, str) pairs
        A generator where each item is a tuple consisting of a
        :class:`DatabaseEntry` instance and the absolute path to the filename
        which was used to make the database entry.
    Examples
    --------
    >>> from sunpy.data.test import rootdir as fitsdir
    >>> from sunpy.database.tables import entries_from_dir
    >>> entries = list(entries_from_dir(fitsdir, default_waveunit='angstrom'))
    >>> len(entries)
    38
    >>> # and now search `fitsdir` recursive
    >>> entries = list(entries_from_dir(fitsdir, True, default_waveunit='angstrom'))
    >>> len(entries)
    59
    """
    for dirpath, dirnames, filenames in os.walk(fitsdir):
        filename_paths = (os.path.join(dirpath, name) for name in filenames)
        # NOTE(review): fnmatch is applied to the joined *path*, not the bare
        # filename; extension patterns like '*.fits' still work — confirm
        # name-anchored patterns are not expected.
        for path in fnmatch.filter(filename_paths, pattern):
            try:
                filetype = sunpy_filetools._detect_filetype(path)
            except (
                    sunpy_filetools.UnrecognizedFileTypeError,
                    sunpy_filetools.InvalidJPEG2000FileExtension):
                # Unreadable or unsupported file type: skip it silently.
                continue
            if filetype == 'fits':
                for entry in entries_from_file(path, default_waveunit):
                    yield entry, path
        # Without `recursive`, stop after the top-level directory.
        if not recursive:
            break
def display_entries(database_entries, columns, sort=False):
    """Generate a table to display the database entries.
    Parameters
    ----------
    database_entries : iterable of :class:`DatabaseEntry` instances
        The database entries will be the rows in the resulting table.
    columns : iterable of str
        The columns that will be displayed in the resulting table. Possible
        values for the strings are all attributes of :class:`DatabaseEntry`.
    sort : bool (optional)
        If True, sorts the entries before displaying them.
    Returns
    -------
    str
        A formatted table that can be printed on the console or written to a
        file.
    Raises
    ------
    TypeError
        If no columns are given or the given iterable of entries is empty.
    """
    header = [columns]
    rulers = [['-' * len(col) for col in columns]]
    data = []
    for entry in database_entries:
        row = []
        for col in columns:
            if col == 'starred':
                row.append('Yes' if entry.starred else 'No')
            elif col == 'tags':
                row.append(', '.join(imap(str, entry.tags)) or 'N/A')
            # do not display microseconds in datetime columns
            elif col in (
                    'observation_time_start',
                    'observation_time_end',
                    'download_time'):
                time = getattr(entry, col, None)
                if time is None:
                    formatted_time = 'N/A'
                else:
                    formatted_time = time.strftime(TIME_FORMAT)
                row.append(formatted_time)
            else:
                row.append(str(getattr(entry, col) or 'N/A'))
        if not row:
            raise TypeError('at least one column must be given')
        data.append(row)
    if not data:
        raise TypeError('given iterable is empty')
    if sort:
        data.sort()
    # Fix: stray dataset text ("| sunpy/database/tables.py") was fused onto
    # this return line, which would raise a NameError at runtime.
    return print_table(header + rulers + data)
from __future__ import absolute_import
from time import strptime, mktime
from datetime import datetime
import fnmatch
import os
from astropy.units import Unit, nm, equivalencies
from sqlalchemy import Column, Integer, Float, String, DateTime, Boolean,\
Table, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sunpy.time import parse_time
from sunpy.io import fits, file_tools as sunpy_filetools
from sunpy.util import print_table
from sunpy.extern.six.moves import map as imap
from sunpy.extern import six
from sunpy import config
# Datetime display format, taken from the user's sunpy configuration.
TIME_FORMAT = config.get("general", "time_format")

# Public API of this module.
__all__ = [
    'WaveunitNotFoundError', 'WaveunitNotConvertibleError', 'JSONDump',
    'FitsHeaderEntry', 'FitsKeyComment', 'Tag', 'DatabaseEntry',
    'entries_from_query_result', 'entries_from_file', 'entries_from_dir',
    'display_entries']

# Declarative base shared by every ORM-mapped class below.
Base = declarative_base()

# required for the many-to-many relation on tags:entries
association_table = Table('association', Base.metadata,
    Column('tag_name', String, ForeignKey('tags.name')),
    Column('entry_id', Integer, ForeignKey('data.id'))
)
class WaveunitNotFoundError(Exception):
    """Raised when a wavelength unit is missing from a FITS header or a
    VSO query result block.
    """
    def __init__(self, obj):
        self.obj = obj

    def __str__(self):  # pragma: no cover
        message = (
            'the wavelength unit cannot be found in {0}'
            ' and default_waveunit not specified when opening the database')
        return message.format(self.obj)
class WaveunitNotConvertibleError(Exception):
    """Raised when a wavelength unit string cannot be turned into an
    astropy.units.Unit instance.
    """
    def __init__(self, waveunit):
        self.waveunit = waveunit

    def __str__(self):  # pragma: no cover
        template = (
            'the waveunit {0!r} cannot be converted to an '
            'astropy.units.Unit instance')
        return template.format(self.waveunit)
# TODO: move this function outside this package (sunpy.util? sunpy.time?)
def timestamp2datetime(format, string):
    """Parse *string* according to the strptime *format* and return a
    naive local-time datetime instance.
    """
    parsed = strptime(string, format)
    return datetime.fromtimestamp(mktime(parsed))
class JSONDump(Base):
    """One stored JSON dump; the dump text itself serves as primary key."""
    __tablename__ = 'jsondumps'

    dump = Column(String, nullable=False, primary_key=True)

    def __init__(self, dump):
        self.dump = dump

    def __eq__(self, other):
        # Two dumps are equal exactly when their text matches.
        return self.dump == other.dump

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        return self.dump

    def __repr__(self):  # pragma: no cover
        return '<{0}(dump {1!r})>'.format(type(self).__name__, self.dump)
class FitsHeaderEntry(Base):
    """A single key/value pair from a FITS header, linked to one entry."""
    __tablename__ = 'fitsheaderentries'

    dbentry_id = Column(Integer, ForeignKey('data.id'))
    id = Column(Integer, primary_key=True)
    key = Column(String, nullable=False)
    value = Column(String)

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __eq__(self, other):
        # Entries not yet committed (id is None) compare on key/value only.
        ids_match = self.id == other.id or self.id is None or other.id is None
        return ids_match and self.key == other.key and self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):  # pragma: no cover
        return '<{0}(id {1}, key {2!r}, value {3!r})>'.format(
            type(self).__name__, self.id, self.key, self.value)
class FitsKeyComment(Base):
    """A comment attached to a FITS header key, linked to one entry."""
    __tablename__ = 'fitskeycomments'

    dbentry_id = Column(Integer, ForeignKey('data.id'))
    id = Column(Integer, primary_key=True)
    key = Column(String, nullable=False)
    value = Column(String)

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __eq__(self, other):
        # Comments not yet committed (id is None) compare on key/value only.
        ids_match = self.id == other.id or self.id is None or other.id is None
        return ids_match and self.key == other.key and self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):  # pragma: no cover
        return '<{0}(id {1}, key {2!r}, value {3!r})>'.format(
            type(self).__name__, self.id, self.key, self.value)
class Tag(Base):
    """A user-defined label that can be attached to database entries.

    The tag name is the primary key; tags are shared across entries via an
    association table.
    """
    __tablename__ = 'tags'

    name = Column(String, nullable=False, primary_key=True)

    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        # Tags compare by name only.
        return self.name == other.name

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        return self.name

    def __repr__(self):  # pragma: no cover
        return '<{0}(name {1!r})>'.format(type(self).__name__, self.name)
class DatabaseEntry(Base):
    """
    DatabaseEntry()
    The class :class:`DatabaseEntry` represents the main table of the database
    and each instance represents one record that *can* be saved in the
    database.
    Parameters
    ----------
    id : int
        A unique ID number. By default it is None, but automatically set to the
        maximum number plus one when this entry is added to the database.
    source : string
        The source is the name of an observatory or the name of a network of
        observatories.
    provider : string
        The name of the server which provides the retrieved data.
    physobs : string
        A physical observable identifier used by VSO.
    fileid : string
        The file ID is a string defined by the data provider that should point
        to a specific data product. The association of fileid to the specific
        data may change sometime, if the fileid always points to the latest
        calibrated data.
    observation_time_start : datetime
        The date and time when the observation of the data started.
    observation_time_end : datetime
        The date and time when the observation of the data ended.
    instrument : string
        The instrument which was used to observe the data.
    size : float
        The size of the data in kilobytes.
    wavemin : float
        The value of the measured wave length.
    wavemax : float
        This is the same value as ``wavemin``. The value is stored twice,
        because each ``suds.sudsobject.QueryResponseBlock`` which is used by
        the vso package contains both these values.
    path : string
        A local file path where the according FITS file is saved.
    download_time : datetime
        The date and time when the files connected to a query have been
        downloaded. Note: this is not the date and time when this entry has
        been added to a database!
    starred : bool
        Entries can be starred to mark them. By default, this value is False.
    fits_header_entries : list
        A list of ``FitsHeaderEntry`` instances.
    tags : list
        A list of ``Tag`` instances. Use `sunpy.database.Database.tag` to
        add a new tag or multiple tags to a specific entry.
    """
    __tablename__ = 'data'
    # FIXME: primary key is data provider + file ID + download_time!
    id = Column(Integer, primary_key=True)
    source = Column(String)
    provider = Column(String)
    physobs = Column(String)
    fileid = Column(String)
    observation_time_start = Column(DateTime)
    observation_time_end = Column(DateTime)
    instrument = Column(String)
    size = Column(Float)
    wavemin = Column(Float)
    wavemax = Column(Float)
    path = Column(String)
    download_time = Column(DateTime)
    starred = Column(Boolean, default=False)
    # One-to-many: header entries and key comments belong to one entry.
    fits_header_entries = relationship('FitsHeaderEntry')
    fits_key_comments = relationship('FitsKeyComment')
    # Many-to-many: tags are shared between entries via the association table.
    tags = relationship('Tag', secondary=association_table, backref='data')

    @classmethod
    def _from_query_result_block(cls, qr_block, default_waveunit=None):
        """Make a new :class:`DatabaseEntry` instance from a VSO query result
        block. The values of :attr:`wavemin` and :attr:`wavemax` are converted
        to nm (nanometres).
        Parameters
        ----------
        qr_block : suds.sudsobject.QueryResponseBlock
            A query result block is usually not created directly; instead,
            one gets instances of ``suds.sudsobject.QueryResponseBlock`` by
            iterating over a VSO query result.
        default_waveunit : str, optional
            The wavelength unit that is used if it cannot be found in the
            `qr_block`.
        Examples
        --------
        >>> from sunpy.net import vso
        >>> from sunpy.database.tables import DatabaseEntry
        >>> client = vso.VSOClient()
        >>> qr = client.query(
        ...     vso.attrs.Time('2001/1/1', '2001/1/2'),
        ...     vso.attrs.Instrument('eit'))
        >>> entry = DatabaseEntry._from_query_result_block(qr[0])
        >>> entry.source
        'SOHO'
        >>> entry.provider
        'SDAC'
        >>> entry.physobs
        'intensity'
        >>> entry.fileid
        '/archive/soho/private/data/processed/eit/lz/2001/01/efz20010101.010014'
        >>> entry.observation_time_start, entry.observation_time_end
        (datetime.datetime(2001, 1, 1, 1, 0, 14), datetime.datetime(2001, 1, 1, 1, 0, 21))
        >>> entry.instrument
        'EIT'
        >>> entry.size
        2059.0
        >>> entry.wavemin, entry.wavemax
        (17.1, 17.1)
        """
        # VSO time stamps arrive as compact %Y%m%d%H%M%S strings.
        time_start = timestamp2datetime('%Y%m%d%H%M%S', qr_block.time.start)
        time_end = timestamp2datetime('%Y%m%d%H%M%S', qr_block.time.end)
        wave = qr_block.wave
        unit = None
        if wave.waveunit is None:
            # No unit in the response; fall back to the caller-supplied one
            # (may be absent, in which case `unit` stays None).
            if default_waveunit is not None:
                unit = Unit(default_waveunit)
        else:
            # some query response blocks store the unit "kev",
            # but AstroPy only understands "keV". See issue #766.
            waveunit = wave.waveunit
            if waveunit == "kev":
                waveunit = "keV"
            unit = Unit(waveunit)
        if wave.wavemin is None:
            wavemin = None
        else:
            if unit is None:
                # A wavelength value without a resolvable unit cannot be
                # converted to nm.
                raise WaveunitNotFoundError(qr_block)
            wavemin = unit.to(nm, float(wave.wavemin), equivalencies.spectral())
        if wave.wavemax is None:
            wavemax = None
        else:
            if unit is None:
                raise WaveunitNotFoundError(qr_block)
            wavemax = unit.to(nm, float(wave.wavemax), equivalencies.spectral())
        # Normalize suds text objects to plain strings, keeping None as None.
        source = str(qr_block.source) if qr_block.source is not None else None
        provider = str(qr_block.provider) if qr_block.provider is not None else None
        fileid = str(qr_block.fileid) if qr_block.fileid is not None else None
        instrument = str(qr_block.instrument) if qr_block.instrument is not None else None
        physobs = getattr(qr_block, 'physobs', None)
        if physobs is not None:
            physobs = str(physobs)
        return cls(
            source=source, provider=provider, physobs=physobs, fileid=fileid,
            observation_time_start=time_start, observation_time_end=time_end,
            instrument=instrument, size=qr_block.size,
            wavemin=wavemin, wavemax=wavemax)

    def __eq__(self, other):
        # Wavelengths are compared rounded to 10 decimal places because they
        # are floats that went through a unit conversion.
        wavemins_equal = self.wavemin is None and other.wavemin is None or\
            self.wavemin is not None and other.wavemin is not None and\
            round(self.wavemin, 10) == round(other.wavemin, 10)
        wavemaxs_equal = self.wavemax is None and other.wavemax is None or\
            self.wavemax is not None and other.wavemax is not None and\
            round(self.wavemax, 10) == round(other.wavemax, 10)
        # Unsaved entries (id is None) compare by content only.
        return (
            (self.id == other.id or self.id is None or other.id is None) and
            self.source == other.source and
            self.provider == other.provider and
            self.physobs == other.physobs and
            self.fileid == other.fileid and
            self.observation_time_start == other.observation_time_start and
            self.observation_time_end == other.observation_time_end and
            self.instrument == other.instrument and
            self.size == other.size and
            wavemins_equal and
            wavemaxs_equal and
            self.path == other.path and
            self.download_time == other.download_time and
            bool(self.starred) == bool(other.starred) and
            self.fits_header_entries == other.fits_header_entries and
            self.tags == other.tags)

    def __ne__(self, other):  # pragma: no cover
        return not (self == other)

    def __repr__(self):  # pragma: no cover
        # Only attributes with truthy values appear in the repr.
        attrs = [
            'id', 'source', 'provider', 'physobs', 'fileid',
            'observation_time_start', 'observation_time_end', 'instrument',
            'size', 'wavemin', 'wavemax', 'path', 'download_time', 'starred',
            'fits_header_entries', 'tags']
        ret = '<{0}('.format(self.__class__.__name__)
        for attr in attrs:
            value = getattr(self, attr, None)
            if value:
                ret += '{0} {1!r}, '.format(attr, value)
        ret = ret.rstrip(', ')
        ret += ')>'
        return ret
def entries_from_query_result(qr, default_waveunit=None):
    """Turn a VSO query response into :class:`DatabaseEntry` instances.

    Iterates over *qr* (as returned by
    :meth:`sunpy.net.vso.VSOClient.query` or
    :meth:`sunpy.net.vso.VSOClient.query_legacy`) and lazily yields one
    :class:`DatabaseEntry` per response block.

    Parameters
    ----------
    qr : sunpy.net.vso.vso.QueryResponse
        The query response from which to build the database entries.
    default_waveunit : str, optional
        Fallback wavelength unit; see
        :meth:`sunpy.database.DatabaseEntry.from_query_result_block`.

    Examples
    --------
    >>> from sunpy.net import vso
    >>> from sunpy.database.tables import entries_from_query_result
    >>> client = vso.VSOClient()
    >>> qr = client.query(
    ...     vso.attrs.Time('2001/1/1', '2001/1/2'),
    ...     vso.attrs.Instrument('eit'))
    >>> entry = next(iter(entries_from_query_result(qr)))
    >>> entry.source
    'SOHO'
    >>> entry.instrument
    'EIT'
    >>> entry.wavemin, entry.wavemax
    (17.1, 17.1)
    """
    for response_block in qr:
        yield DatabaseEntry._from_query_result_block(
            response_block, default_waveunit)
def entries_from_file(file, default_waveunit=None):
    """Use the headers of a FITS file to generate an iterator of
    :class:`sunpy.database.tables.DatabaseEntry` instances. Gathered
    information will be saved in the attribute `fits_header_entries`. If the
    key INSTRUME, WAVELNTH or DATE-OBS / DATE_OBS is available, the attribute
    `instrument`, `wavemin` and `wavemax` or `observation_time_start` is set,
    respectively. If the wavelength unit can be read, the values of `wavemin`
    and `wavemax` are converted to nm (nanometres). The value of the `file`
    parameter is used to set the attribute `path` of each generated database
    entry.
    Parameters
    ----------
    file : str or file-like object
        Either a path pointing to a FITS file or a an opened file-like object.
        If an opened file object, its mode must be one of the following rb,
        rb+, or ab+.
    default_waveunit : str, optional
        The wavelength unit that is used for a header if it cannot be
        found.
    Raises
    ------
    sunpy.database.WaveunitNotFoundError
        If `default_waveunit` is not given and the wavelength unit cannot
        be found in one of the FITS headers
    sunpy.WaveunitNotConvertibleError
        If a wavelength unit could be found but cannot be used to create an
        instance of the type ``astropy.units.Unit``. This can be the case
        for example if a FITS header has the key `WAVEUNIT` with the value
        `nonsense`.
    Examples
    --------
    >>> from sunpy.database.tables import entries_from_file
    >>> import sunpy.data
    >>> sunpy.data.download_sample_data(overwrite=False)  # doctest: +SKIP
    >>> import sunpy.data.sample
    >>> entries = list(entries_from_file(sunpy.data.sample.SWAP_LEVEL1_IMAGE))
    >>> len(entries)
    1
    >>> entry = entries.pop()
    >>> entry.instrument
    'SWAP'
    >>> entry.observation_time_start, entry.observation_time_end
    (datetime.datetime(2012, 1, 1, 0, 16, 7, 836000), None)
    >>> entry.wavemin, entry.wavemax
    (17.400000000000002, 17.400000000000002)
    >>> len(entry.fits_header_entries)
    111
    """
    # NOTE(review): the parameter name `file` shadows the builtin; kept as-is
    # because it is part of the public signature (keyword callers).
    headers = fits.get_header(file)
    # Remember where the data came from so `path` can be set on each entry.
    if isinstance(file, (str, six.text_type)):
        filename = file
    else:
        filename = getattr(file, 'name', None)
    # A FITS file may contain several HDUs -> several headers -> one
    # database entry per header.
    for header in headers:
        entry = DatabaseEntry(path=filename)
        for key, value in six.iteritems(header):
            # Yes, it is possible to have an empty key in a FITS file.
            # Example: sunpy.data.sample.EIT_195_IMAGE
            # Don't ask me why this could be a good idea.
            if key == '':
                value = str(value)
            elif key == 'KEYCOMMENTS':
                # KEYCOMMENTS is a dict of per-key comments; each becomes its
                # own FitsKeyComment row rather than a header entry.
                for k, v in six.iteritems(value):
                    entry.fits_key_comments.append(FitsKeyComment(k, v))
                continue
            entry.fits_header_entries.append(FitsHeaderEntry(key, value))
        # Resolve the wavelength unit: header first, then the caller-supplied
        # default; `unit` stays None if neither is available.
        waveunit = fits.extract_waveunit(header)
        if waveunit is None:
            waveunit = default_waveunit
        unit = None
        if waveunit is not None:
            try:
                unit = Unit(waveunit)
            except ValueError:
                raise WaveunitNotConvertibleError(waveunit)
        # Derive the typed attributes from the collected header entries.
        for header_entry in entry.fits_header_entries:
            key, value = header_entry.key, header_entry.value
            if key == 'INSTRUME':
                entry.instrument = value
            elif key == 'WAVELNTH':
                if unit is None:
                    raise WaveunitNotFoundError(file)
                # use the value of `unit` to convert the wavelength to nm
                entry.wavemin = entry.wavemax = unit.to(
                    nm, value, equivalencies.spectral())
            # NOTE: the key DATE-END or DATE_END is not part of the official
            # FITS standard, but many FITS files use it in their header
            elif key in ('DATE-END', 'DATE_END'):
                entry.observation_time_end = parse_time(value)
            elif key in ('DATE-OBS', 'DATE_OBS'):
                entry.observation_time_start = parse_time(value)
        yield entry
def entries_from_dir(fitsdir, recursive=False, pattern='*',
                     default_waveunit=None):
    """Yield ``(DatabaseEntry, path)`` pairs for FITS files found in *fitsdir*.

    Files are detected by reading their content, not by extension; *pattern*
    (an :func:`fnmatch.filter` pattern applied to the full path) may be used
    to avoid reading entire directories when the extension is known. With
    ``recursive=True`` subdirectories are searched as well.

    Parameters
    ----------
    fitsdir : string
        The directory in which to look for FITS files.
    recursive : bool, optional
        If True, subdirectories are searched too. Defaults to False.
    pattern : string, optional
        fnmatch pattern used to pre-filter candidate paths before they are
        read. The default collects every file.
    default_waveunit : str, optional
        Fallback wavelength unit, passed through to
        :func:`entries_from_file`.

    Returns
    -------
    generator of (DatabaseEntry, str) pairs
        One pair per database entry, together with the absolute path of the
        file the entry was built from.
    """
    for dirpath, dirnames, filenames in os.walk(fitsdir):
        candidates = (
            os.path.join(dirpath, basename) for basename in filenames)
        for filepath in fnmatch.filter(candidates, pattern):
            try:
                filetype = sunpy_filetools._detect_filetype(filepath)
            except (sunpy_filetools.UnrecognizedFileTypeError,
                    sunpy_filetools.InvalidJPEG2000FileExtension):
                # Unreadable or unsupported files are simply skipped.
                continue
            if filetype != 'fits':
                continue
            for entry in entries_from_file(filepath, default_waveunit):
                yield entry, filepath
        if not recursive:
            # os.walk yields the top directory first; bailing out here
            # restricts the search to that single directory.
            break
def display_entries(database_entries, columns, sort=False):
    """Generate a table to display the database entries.

    Parameters
    ----------
    database_entries : iterable of :class:`DatabaseEntry` instances
        The database entries will be the rows in the resulting table.
    columns : iterable of str
        The columns that will be displayed in the resulting table. Possible
        values for the strings are all attributes of :class:`DatabaseEntry`.
    sort : bool (optional)
        If True, sorts the entries before displaying them.

    Returns
    -------
    str
        A formatted table that can be printed on the console or written to a
        file.

    Raises
    ------
    TypeError
        If `database_entries` yields no entries, or if `columns` is empty
        while there is at least one entry.
    """
    # BUG FIX: materialize `columns` once -- it is iterated several times
    # below (header row, ruler row, once per entry) and a one-shot iterator
    # previously produced a silently broken table.
    columns = list(columns)
    header = [columns]
    rulers = [['-' * len(col) for col in columns]]
    data = []
    for entry in database_entries:
        # Preserved semantics: empty `columns` is only an error once we know
        # there is at least one entry (an empty iterable errors out below).
        if not columns:
            raise TypeError('at least one column must be given')
        row = []
        for col in columns:
            if col == 'starred':
                row.append('Yes' if entry.starred else 'No')
            elif col == 'tags':
                # genexp replaces the Python-2-only `imap`; output unchanged.
                row.append(', '.join(str(tag) for tag in entry.tags) or 'N/A')
            # do not display microseconds in datetime columns
            elif col in (
                    'observation_time_start',
                    'observation_time_end',
                    'download_time'):
                time = getattr(entry, col, None)
                row.append(
                    'N/A' if time is None else time.strftime(TIME_FORMAT))
            else:
                row.append(str(getattr(entry, col) or 'N/A'))
        data.append(row)
    if not data:
        raise TypeError('given iterable is empty')
    if sort:
        data.sort()
    return print_table(header + rulers + data)
import unittest
from unittest.mock import MagicMock, call
from qtt.instrument_drivers.virtualAwg.awgs.ZurichInstrumentsHDAWG8 import ZurichInstrumentsHDAWG8
from qtt.instrument_drivers.virtualAwg.awgs.common import AwgCommonError
class TestZurichInstrumentsHDAWG8(unittest.TestCase):
    """Unit tests for the ZurichInstrumentsHDAWG8 virtual-AWG backend.

    The hardware driver is replaced by a MagicMock subclass, so every call
    the backend makes can be recorded and asserted without real hardware.
    """

    def setUp(self):
        class ZIHDAWG8(MagicMock):
            # Named MagicMock subclass so the mocked driver carries the
            # expected type name while still recording all calls.
            pass
        self.awg = ZIHDAWG8()
        self.zi_hdawg8 = ZurichInstrumentsHDAWG8(self.awg, 0)

    def test_enable_outputs(self):
        # Default call enables all eight channels.
        self.zi_hdawg8.enable_outputs()
        calls = [call.enable_channel(ch) for ch in range(0, 8)]
        self.awg.assert_has_calls(calls)
        # Channel numbers outside 0..7 are rejected.
        with self.assertRaises(AwgCommonError):
            self.zi_hdawg8.enable_outputs([0, 1, 2, 3, 8])
        self.zi_hdawg8.enable_outputs([6, 7])
        # BUG FIX: was range(6, 7), which asserted only channel 6 although
        # channels 6 and 7 were both enabled above.
        calls = [call.enable_channel(ch) for ch in range(6, 8)]
        self.awg.assert_has_calls(calls)

    def test_disable_outputs(self):
        # Default call disables all eight channels.
        self.zi_hdawg8.disable_outputs()
        calls = [call.disable_channel(ch) for ch in range(0, 8)]
        self.awg.assert_has_calls(calls)
        with self.assertRaises(AwgCommonError):
            self.zi_hdawg8.disable_outputs([0, 1, 2, 3, 8])
        self.zi_hdawg8.disable_outputs([6, 7])
        # BUG FIX: was range(6, 7); see test_enable_outputs.
        calls = [call.disable_channel(ch) for ch in range(6, 8)]
        self.awg.assert_has_calls(calls)

    def test_change_setting(self):
        self.awg.get.return_value = 0
        self.zi_hdawg8.change_setting('sampling_rate', 2.4e9)
        self.assertEqual(self.zi_hdawg8.retrieve_setting('sampling_rate'), 2.4e9)
        # 'gain' is not changeable through change_setting.
        with self.assertRaises(ValueError):
            self.zi_hdawg8.change_setting('gain', 0.5)

    def test_update_sampling_rate(self):
        # Supported rates map to device indices 0..13.
        # NOTE(review): 1200.0 (index 11) breaks the roughly-halving pattern
        # of the table (a value near 1.17e6 would be expected); confirm
        # against the driver's sampling-rate table before changing it here.
        sample_rates = [2400000000.0, 1200000000.0, 600000000.0, 300000000.0, 150000000.0, 72000000.0, 37500000.0,
                        18750000.0, 9400000.0, 4500000.0, 2340000.0, 1200.0, 586000.0, 293000.0]
        for sample_rate in sample_rates:
            self.zi_hdawg8.update_sampling_rate(sample_rate)
        calls = [call.set('awgs_0_time', i) for i in range(0, 14)]
        self.awg.assert_has_calls(calls)
        # Unsupported rates are rejected.
        with self.assertRaises(ValueError):
            self.zi_hdawg8.update_sampling_rate(99)

    def test_retrieve_sampling_rate(self):
        # Device index 5 corresponds to a sampling rate of 72 MHz.
        sampling_rate_index = 5
        self.awg.get.return_value = sampling_rate_index
        self.assertEqual(72e6, self.zi_hdawg8.retrieve_sampling_rate())

    def test_update_gain(self):
        # The gain is applied to the output range of every channel.
        self.zi_hdawg8.update_gain(0.5)
        calls = [call.set('sigouts_{}_range'.format(ch), 0.5) for ch in range(8)]
        self.awg.assert_has_calls(calls)

    def test_retrieve_gain(self):
        self.awg.get.return_value = 0.2
        self.assertEqual(0.2, self.zi_hdawg8.retrieve_gain())
        # Mismatching per-channel gains must raise.
        with self.assertRaises(ValueError):
            self.awg.get.side_effect = lambda v: v
            self.zi_hdawg8.retrieve_gain()

    def test_upload_waveforms(self):
        sequence_names = ['seq1', 'seq2', 'seq3']
        sequence_channels = [(1, 1), (1, 0, 1), (2, 0)]
        sequence_items = [range(10), range(1, 11), range(2, 12)]
        self.awg.generate_csv_sequence_program.return_value = 'program'
        self.zi_hdawg8.upload_waveforms(sequence_names, sequence_channels, sequence_items)
        calls = [call.waveform_to_csv('seq1', range(10)),
                 call.waveform_to_csv('seq2', range(1, 11)),
                 call.waveform_to_csv('seq3', range(2, 12)),
                 call.generate_csv_sequence_program(sequence_names, [2, 2, 3]),
                 call.upload_sequence_program(0, 'program')]
        self.awg.assert_has_calls(calls)
from unittest.mock import MagicMock, call
from qtt.instrument_drivers.virtualAwg.awgs.ZurichInstrumentsHDAWG8 import ZurichInstrumentsHDAWG8
from qtt.instrument_drivers.virtualAwg.awgs.common import AwgCommonError
class TestZurichInstrumentsHDAWG8(unittest.TestCase):
    """Unit tests for the ZurichInstrumentsHDAWG8 virtual-AWG backend.

    NOTE(review): this appears to be a verbatim duplicated copy of the same
    test module earlier in this dump; the same fixes are applied here.
    """

    def setUp(self):
        class ZIHDAWG8(MagicMock):
            # Named MagicMock subclass so the mocked driver carries the
            # expected type name while still recording all calls.
            pass
        self.awg = ZIHDAWG8()
        self.zi_hdawg8 = ZurichInstrumentsHDAWG8(self.awg, 0)

    def test_enable_outputs(self):
        # Default call enables all eight channels.
        self.zi_hdawg8.enable_outputs()
        calls = [call.enable_channel(ch) for ch in range(0, 8)]
        self.awg.assert_has_calls(calls)
        # Channel numbers outside 0..7 are rejected.
        with self.assertRaises(AwgCommonError):
            self.zi_hdawg8.enable_outputs([0, 1, 2, 3, 8])
        self.zi_hdawg8.enable_outputs([6, 7])
        # BUG FIX: was range(6, 7), which asserted only channel 6 although
        # channels 6 and 7 were both enabled above.
        calls = [call.enable_channel(ch) for ch in range(6, 8)]
        self.awg.assert_has_calls(calls)

    def test_disable_outputs(self):
        # Default call disables all eight channels.
        self.zi_hdawg8.disable_outputs()
        calls = [call.disable_channel(ch) for ch in range(0, 8)]
        self.awg.assert_has_calls(calls)
        with self.assertRaises(AwgCommonError):
            self.zi_hdawg8.disable_outputs([0, 1, 2, 3, 8])
        self.zi_hdawg8.disable_outputs([6, 7])
        # BUG FIX: was range(6, 7); see test_enable_outputs.
        calls = [call.disable_channel(ch) for ch in range(6, 8)]
        self.awg.assert_has_calls(calls)

    def test_change_setting(self):
        self.awg.get.return_value = 0
        self.zi_hdawg8.change_setting('sampling_rate', 2.4e9)
        self.assertEqual(self.zi_hdawg8.retrieve_setting('sampling_rate'), 2.4e9)
        # 'gain' is not changeable through change_setting.
        with self.assertRaises(ValueError):
            self.zi_hdawg8.change_setting('gain', 0.5)

    def test_update_sampling_rate(self):
        # Supported rates map to device indices 0..13.
        # NOTE(review): 1200.0 (index 11) breaks the roughly-halving pattern
        # of the table (a value near 1.17e6 would be expected); confirm
        # against the driver's sampling-rate table before changing it here.
        sample_rates = [2400000000.0, 1200000000.0, 600000000.0, 300000000.0, 150000000.0, 72000000.0, 37500000.0,
                        18750000.0, 9400000.0, 4500000.0, 2340000.0, 1200.0, 586000.0, 293000.0]
        for sample_rate in sample_rates:
            self.zi_hdawg8.update_sampling_rate(sample_rate)
        calls = [call.set('awgs_0_time', i) for i in range(0, 14)]
        self.awg.assert_has_calls(calls)
        # Unsupported rates are rejected.
        with self.assertRaises(ValueError):
            self.zi_hdawg8.update_sampling_rate(99)

    def test_retrieve_sampling_rate(self):
        # Device index 5 corresponds to a sampling rate of 72 MHz.
        sampling_rate_index = 5
        self.awg.get.return_value = sampling_rate_index
        self.assertEqual(72e6, self.zi_hdawg8.retrieve_sampling_rate())

    def test_update_gain(self):
        # The gain is applied to the output range of every channel.
        self.zi_hdawg8.update_gain(0.5)
        calls = [call.set('sigouts_{}_range'.format(ch), 0.5) for ch in range(8)]
        self.awg.assert_has_calls(calls)

    def test_retrieve_gain(self):
        self.awg.get.return_value = 0.2
        self.assertEqual(0.2, self.zi_hdawg8.retrieve_gain())
        # Mismatching per-channel gains must raise.
        with self.assertRaises(ValueError):
            self.awg.get.side_effect = lambda v: v
            self.zi_hdawg8.retrieve_gain()

    def test_upload_waveforms(self):
        sequence_names = ['seq1', 'seq2', 'seq3']
        sequence_channels = [(1, 1), (1, 0, 1), (2, 0)]
        sequence_items = [range(10), range(1, 11), range(2, 12)]
        self.awg.generate_csv_sequence_program.return_value = 'program'
        self.zi_hdawg8.upload_waveforms(sequence_names, sequence_channels, sequence_items)
        calls = [call.waveform_to_csv('seq1', range(10)),
                 call.waveform_to_csv('seq2', range(1, 11)),
                 call.waveform_to_csv('seq3', range(2, 12)),
                 call.generate_csv_sequence_program(sequence_names, [2, 2, 3]),
                 call.upload_sequence_program(0, 'program')]
        self.awg.assert_has_calls(calls)
import uuid
import os
from cloudify_rest_client.exceptions import CloudifyClientError
from integration_tests import AgentlessTestCase
from integration_tests.tests.utils import get_resource as resource
from integration_tests.tests.utils import (
verify_deployment_env_created,
wait_for_deployment_deletion_to_complete
)
from integration_tests.tests.utils import do_retries
from integration_tests.framework.constants import (PLUGIN_STORAGE_DIR,
CLOUDIFY_USER)
class TestDeploymentWorkflows(AgentlessTestCase):
    """Agentless integration tests for deployment workflows: workflow
    listing, blueprint-supplied workflow parameters and deletion of a
    deployment whose delete hook raises.
    """

    def test_deployment_workflows(self):
        # A deployment made from the custom-mapping blueprint must expose
        # the built-in workflows plus the custom one.
        dsl_path = resource("dsl/custom_workflow_mapping.yaml")
        deployment, _ = self.deploy_application(dsl_path)
        deployment_id = deployment.id
        workflows = self.client.deployments.get(deployment_id).workflows
        self.assertEqual(12, len(workflows))
        wf_ids = [x.name for x in workflows]
        self.assertIn('uninstall', wf_ids)
        self.assertIn('install', wf_ids)
        self.assertIn('execute_operation', wf_ids)
        self.assertIn('custom', wf_ids)
        self.assertIn('scale', wf_ids)
        self.assertIn('heal', wf_ids)
        self.assertIn('install_new_agents', wf_ids)
        self.assertIn('update', wf_ids)
        self.assertIn('start', wf_ids)
        self.assertIn('stop', wf_ids)
        self.assertIn('restart', wf_ids)

    def test_workflow_parameters_pass_from_blueprint(self):
        # Parameters declared on the workflow in the blueprint must reach
        # the executed operation unchanged.
        dsl_path = resource('dsl/workflow_parameters.yaml')
        _id = uuid.uuid1()
        blueprint_id = 'blueprint_{0}'.format(_id)
        deployment_id = 'deployment_{0}'.format(_id)
        self.client.blueprints.upload(dsl_path, blueprint_id)
        self.client.deployments.create(blueprint_id, deployment_id,
                                       skip_plugins_validation=True)
        do_retries(verify_deployment_env_created, 30,
                   deployment_id=deployment_id)
        execution = self.client.executions.start(deployment_id,
                                                 'custom_execute_operation')
        self.wait_for_execution_to_end(execution)
        invocations = self.get_plugin_data(
            plugin_name='testmockoperations',
            deployment_id=deployment_id
        )['mock_operation_invocation']
        self.assertEqual(1, len(invocations))
        self.assertDictEqual(invocations[0], {'test_key': 'test_value'})

    def test_get_workflow_parameters(self):
        # The REST API must report the parameter schema (defaults included)
        # exactly as declared in the blueprint.
        dsl_path = resource('dsl/workflow_parameters.yaml')
        _id = uuid.uuid1()
        blueprint_id = 'blueprint_{0}'.format(_id)
        deployment_id = 'deployment_{0}'.format(_id)
        self.client.blueprints.upload(dsl_path, blueprint_id)
        self.client.deployments.create(blueprint_id, deployment_id,
                                       skip_plugins_validation=True)
        workflows = self.client.deployments.get(deployment_id).workflows
        execute_op_workflow = next(wf for wf in workflows if
                                   wf.name == 'another_execute_operation')
        expected_params = {
            u'node_id': {u'default': u'test_node'},
            u'operation': {},
            u'properties': {
                u'default': {
                    u'key': u'test_key',
                    u'value': u'test_value'
                }
            }
        }
        self.assertEqual(expected_params, execute_op_workflow.parameters)

    def test_delete_botched_deployment(self):
        # Even when the delete hook of the mock agent raises, deleting the
        # deployment must succeed and the deployment must be gone afterwards.
        dsl_path = resource('dsl/basic.yaml')
        _id = uuid.uuid1()
        blueprint_id = 'blueprint_{0}'.format(_id)
        deployment_id = 'deployment_{0}'.format(_id)
        data = {deployment_id: {'raise_exception_on_delete': True}}
        agent_json_path = os.path.join(PLUGIN_STORAGE_DIR, 'agent.json')
        self.write_data_to_file_on_manager(
            data,
            agent_json_path,
            to_json=True,
            owner=CLOUDIFY_USER
        )
        self.client.blueprints.upload(dsl_path, blueprint_id)
        self.client.deployments.create(blueprint_id, deployment_id,
                                       skip_plugins_validation=True)
        execution = self.client.executions.list(deployment_id=deployment_id)[0]
        self.wait_for_execution_to_end(execution)
        self.client.deployments.delete(deployment_id)
        wait_for_deployment_deletion_to_complete(deployment_id)
        # IDIOM FIX: replaced try/self.fail/except plus the deprecated
        # assertEquals with the assertRaises context manager.
        with self.assertRaises(CloudifyClientError) as ctx:
            self.client.deployments.get(deployment_id)
        self.assertEqual(404, ctx.exception.status_code)
import uuid
import os
from cloudify_rest_client.exceptions import CloudifyClientError
from integration_tests import AgentlessTestCase
from integration_tests.tests.utils import get_resource as resource
from integration_tests.tests.utils import (
verify_deployment_env_created,
wait_for_deployment_deletion_to_complete
)
from integration_tests.tests.utils import do_retries
from integration_tests.framework.constants import (PLUGIN_STORAGE_DIR,
CLOUDIFY_USER)
class TestDeploymentWorkflows(AgentlessTestCase):
    """Agentless integration tests for deployment workflows.

    NOTE(review): this appears to be a verbatim duplicated copy of the same
    test module earlier in this dump; the same fixes are applied here.
    """

    def test_deployment_workflows(self):
        # A deployment made from the custom-mapping blueprint must expose
        # the built-in workflows plus the custom one.
        dsl_path = resource("dsl/custom_workflow_mapping.yaml")
        deployment, _ = self.deploy_application(dsl_path)
        deployment_id = deployment.id
        workflows = self.client.deployments.get(deployment_id).workflows
        self.assertEqual(12, len(workflows))
        wf_ids = [x.name for x in workflows]
        self.assertIn('uninstall', wf_ids)
        self.assertIn('install', wf_ids)
        self.assertIn('execute_operation', wf_ids)
        self.assertIn('custom', wf_ids)
        self.assertIn('scale', wf_ids)
        self.assertIn('heal', wf_ids)
        self.assertIn('install_new_agents', wf_ids)
        self.assertIn('update', wf_ids)
        self.assertIn('start', wf_ids)
        self.assertIn('stop', wf_ids)
        self.assertIn('restart', wf_ids)

    def test_workflow_parameters_pass_from_blueprint(self):
        # Parameters declared on the workflow in the blueprint must reach
        # the executed operation unchanged.
        dsl_path = resource('dsl/workflow_parameters.yaml')
        _id = uuid.uuid1()
        blueprint_id = 'blueprint_{0}'.format(_id)
        deployment_id = 'deployment_{0}'.format(_id)
        self.client.blueprints.upload(dsl_path, blueprint_id)
        self.client.deployments.create(blueprint_id, deployment_id,
                                       skip_plugins_validation=True)
        do_retries(verify_deployment_env_created, 30,
                   deployment_id=deployment_id)
        execution = self.client.executions.start(deployment_id,
                                                 'custom_execute_operation')
        self.wait_for_execution_to_end(execution)
        invocations = self.get_plugin_data(
            plugin_name='testmockoperations',
            deployment_id=deployment_id
        )['mock_operation_invocation']
        self.assertEqual(1, len(invocations))
        self.assertDictEqual(invocations[0], {'test_key': 'test_value'})

    def test_get_workflow_parameters(self):
        # The REST API must report the parameter schema (defaults included)
        # exactly as declared in the blueprint.
        dsl_path = resource('dsl/workflow_parameters.yaml')
        _id = uuid.uuid1()
        blueprint_id = 'blueprint_{0}'.format(_id)
        deployment_id = 'deployment_{0}'.format(_id)
        self.client.blueprints.upload(dsl_path, blueprint_id)
        self.client.deployments.create(blueprint_id, deployment_id,
                                       skip_plugins_validation=True)
        workflows = self.client.deployments.get(deployment_id).workflows
        execute_op_workflow = next(wf for wf in workflows if
                                   wf.name == 'another_execute_operation')
        expected_params = {
            u'node_id': {u'default': u'test_node'},
            u'operation': {},
            u'properties': {
                u'default': {
                    u'key': u'test_key',
                    u'value': u'test_value'
                }
            }
        }
        self.assertEqual(expected_params, execute_op_workflow.parameters)

    def test_delete_botched_deployment(self):
        # Even when the delete hook of the mock agent raises, deleting the
        # deployment must succeed and the deployment must be gone afterwards.
        dsl_path = resource('dsl/basic.yaml')
        _id = uuid.uuid1()
        blueprint_id = 'blueprint_{0}'.format(_id)
        deployment_id = 'deployment_{0}'.format(_id)
        data = {deployment_id: {'raise_exception_on_delete': True}}
        agent_json_path = os.path.join(PLUGIN_STORAGE_DIR, 'agent.json')
        self.write_data_to_file_on_manager(
            data,
            agent_json_path,
            to_json=True,
            owner=CLOUDIFY_USER
        )
        self.client.blueprints.upload(dsl_path, blueprint_id)
        self.client.deployments.create(blueprint_id, deployment_id,
                                       skip_plugins_validation=True)
        execution = self.client.executions.list(deployment_id=deployment_id)[0]
        self.wait_for_execution_to_end(execution)
        self.client.deployments.delete(deployment_id)
        wait_for_deployment_deletion_to_complete(deployment_id)
        # IDIOM FIX: replaced try/self.fail/except plus the deprecated
        # assertEquals with the assertRaises context manager.
        with self.assertRaises(CloudifyClientError) as ctx:
            self.client.deployments.get(deployment_id)
        self.assertEqual(404, ctx.exception.status_code)
import unittest
from fabric_cf.actor.core.common.constants import Constants
from fabric_cf.actor.core.kernel.kernel_tick import KernelTick
class TickTest(unittest.TestCase):
    """Tests for KernelTick: construction defaults, property handling and
    the start/stop state machine.
    """

    @classmethod
    def setUpClass(cls):
        # BUG FIX: this container bootstrap used to live directly in the
        # class body and therefore ran as a side effect of merely importing
        # this module; setUpClass defers it until the tests actually run.
        from fabric_cf.actor.core.container.globals import Globals
        Globals.config_file = "./config/config.test.yaml"
        Constants.SUPERBLOCK_LOCATION = './state_recovery.lock'
        from fabric_cf.actor.core.container.globals import GlobalsSingleton
        GlobalsSingleton.get().initialize()

    def get_tick(self):
        """Return a fresh KernelTick instance under test."""
        return KernelTick()

    def test_a_create(self):
        # A newly initialized tick has default timing and is stopped.
        tick = self.get_tick()
        tick.initialize()
        self.assertEqual(0, tick.get_beginning_of_time())
        self.assertEqual(1, tick.get_cycle_millis())
        self.assertEqual(False, tick.is_manual())
        self.assertEqual(True, tick.stopped)
        self.assertIsNotNone(tick.clock)
        self.assertIsNotNone(tick.logger)
        self.assertIsNotNone(tick.subscribers)

    def test_b_properties(self):
        beginning = 1000
        cycle_length = 234
        tick = self.get_tick()
        tick.set_beginning_of_time(value=beginning)
        tick.set_cycle_millis(cycle_millis=cycle_length)
        tick.initialize()
        self.assertEqual(beginning, tick.get_beginning_of_time())
        self.assertEqual(cycle_length, tick.get_cycle_millis())
        # Changing the cycle length after initialize() must be rejected.
        # IDIOM FIX: assertRaises replaces the manual try/except flag.
        with self.assertRaises(Exception):
            tick.set_cycle_millis(cycle_millis=cycle_length + 10)
        self.assertEqual(beginning, tick.get_beginning_of_time())

    def test_c_start_stop(self):
        tick = self.get_tick()
        tick.initialize()
        # tick() is illegal before start().
        with self.assertRaises(Exception):
            tick.tick()
        tick.start()
        # start() twice in a row is illegal.
        with self.assertRaises(Exception):
            tick.start()
        tick.stop()
        # stop() twice in a row is illegal.
        with self.assertRaises(Exception):
            tick.stop()
        tick.start()
from fabric_cf.actor.core.common.constants import Constants
from fabric_cf.actor.core.kernel.kernel_tick import KernelTick
class TickTest(unittest.TestCase):
    """Tests for KernelTick.

    NOTE(review): this appears to be a verbatim duplicated copy of the same
    test module earlier in this dump; the same fixes are applied here.
    """

    @classmethod
    def setUpClass(cls):
        # BUG FIX: this container bootstrap used to live directly in the
        # class body and therefore ran as a side effect of merely importing
        # this module; setUpClass defers it until the tests actually run.
        from fabric_cf.actor.core.container.globals import Globals
        Globals.config_file = "./config/config.test.yaml"
        Constants.SUPERBLOCK_LOCATION = './state_recovery.lock'
        from fabric_cf.actor.core.container.globals import GlobalsSingleton
        GlobalsSingleton.get().initialize()

    def get_tick(self):
        """Return a fresh KernelTick instance under test."""
        return KernelTick()

    def test_a_create(self):
        # A newly initialized tick has default timing and is stopped.
        tick = self.get_tick()
        tick.initialize()
        self.assertEqual(0, tick.get_beginning_of_time())
        self.assertEqual(1, tick.get_cycle_millis())
        self.assertEqual(False, tick.is_manual())
        self.assertEqual(True, tick.stopped)
        self.assertIsNotNone(tick.clock)
        self.assertIsNotNone(tick.logger)
        self.assertIsNotNone(tick.subscribers)

    def test_b_properties(self):
        beginning = 1000
        cycle_length = 234
        tick = self.get_tick()
        tick.set_beginning_of_time(value=beginning)
        tick.set_cycle_millis(cycle_millis=cycle_length)
        tick.initialize()
        self.assertEqual(beginning, tick.get_beginning_of_time())
        self.assertEqual(cycle_length, tick.get_cycle_millis())
        # Changing the cycle length after initialize() must be rejected.
        # IDIOM FIX: assertRaises replaces the manual try/except flag.
        with self.assertRaises(Exception):
            tick.set_cycle_millis(cycle_millis=cycle_length + 10)
        self.assertEqual(beginning, tick.get_beginning_of_time())

    def test_c_start_stop(self):
        tick = self.get_tick()
        tick.initialize()
        # tick() is illegal before start().
        with self.assertRaises(Exception):
            tick.tick()
        tick.start()
        # start() twice in a row is illegal.
        with self.assertRaises(Exception):
            tick.start()
        tick.stop()
        # stop() twice in a row is illegal.
        with self.assertRaises(Exception):
            tick.stop()
        tick.start()
"""Tests for combinator layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from trax.backend import numpy as np
from trax.layers import base
from trax.layers import combinators as cb
from trax.layers import core
from trax.layers import normalization
from trax.shapes import ShapeDtype
class CombinatorLayerTest(absltest.TestCase):
def test_drop(self):
layer = cb.Drop()
input_signature = ShapeDtype((3, 2))
expected_shape = ()
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_dup(self):
layer = cb.Dup()
input_signature = ShapeDtype((3, 2))
expected_shape = ((3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_swap(self):
layer = cb.Swap()
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((4, 7), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_no_op(self):
layer = cb.Serial(None)
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_no_op_list(self):
layer = cb.Serial([])
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_one_in_one_out(self):
layer = cb.Serial(core.Div(divisor=2.0))
input_signature = ShapeDtype((3, 2))
expected_shape = (3, 2)
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_div_div(self):
layer = cb.Serial(core.Div(divisor=2.0), core.Div(divisor=5.0))
input_signature = ShapeDtype((3, 2))
expected_shape = (3, 2)
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_dup_dup(self):
layer = cb.Serial(cb.Dup(), cb.Dup())
input_signature = ShapeDtype((3, 2))
expected_shape = ((3, 2), (3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_with_side_outputs_div_div(self):
def some_layer():
return cb.Parallel(core.Div(divisor=2.0), core.Div(divisor=5.0))
layer = cb.SerialWithSideOutputs([some_layer(), some_layer()])
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 2)),
ShapeDtype((5, 2)))
expected_shape = ((3, 2), (4, 2), (5, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_parallel_dup_dup(self):
layer = cb.Parallel(cb.Dup(), cb.Dup())
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (3, 2), (4, 7), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_parallel_div_div(self):
layer = cb.Parallel(core.Div(divisor=0.5), core.Div(divisor=3.0))
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_parallel_no_ops(self):
layer = cb.Parallel([], None)
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_branch_noop_dup(self):
layer = cb.Branch([], cb.Dup())
input_signature = ShapeDtype((3, 2))
expected_shape = ((3, 2), (3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_branch_add_div(self):
layer = cb.Branch(cb.Add(), core.Div(divisor=0.5))
input_signature = (ShapeDtype((3, 2)), ShapeDtype((3, 2)))
expected_shape = ((3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_scan_basic(self):
@base.layer(n_in=2, n_out=2)
def add(x, **unused_kwargs):
res = x[0] + x[1]
return res, res
scan_layer = cb.Scan(add()) # pylint: disable=no-value-for-parameter
input_signature = (ShapeDtype((3, 2, 7)), ShapeDtype((2, 7)))
expected_shape = ((3, 2, 7), (2, 7))
output_shape = base.check_shape_agreement(scan_layer, input_signature)
self.assertEqual(output_shape, expected_shape)
inp = (np.array([1, 2, 3]), np.array(0))
o, v = scan_layer(inp)
self.assertEqual(int(v), 6)
self.assertEqual([int(x) for x in o], [1, 3, 6])
def test_scan_axis1(self):
@base.layer(n_in=2, n_out=2)
def add(x, **unused_kwargs):
res = x[0] + x[1]
return res, res
scan = cb.Scan(add(), axis=1) # pylint: disable=no-value-for-parameter
input_signature = (ShapeDtype((3, 2, 7)), ShapeDtype((3, 7)))
expected_shape = ((3, 2, 7), (3, 7))
output_shape = base.check_shape_agreement(scan, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_scan_multiinput(self):
@base.layer(n_in=3, n_out=2)
def foo(x, **unused_kwargs):
a, b, carry = x
return a + b, b, carry + 1
scan = cb.Scan(foo(), axis=1) # pylint: disable=no-value-for-parameter
input_signature = (ShapeDtype((3, 2, 7)), ShapeDtype((3, 2, 7)),
ShapeDtype((3, 7)))
expected_shape = ((3, 2, 7), (3, 2, 7), (3, 7))
output_shape = base.check_shape_agreement(scan, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_fn_layer_example(self):
layer = cb.Fn(lambda x, y: (x + y, np.concatenate([x, y], axis=0)))
input_signature = (ShapeDtype((2, 7)), ShapeDtype((2, 7)))
expected_shape = ((2, 7), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
inp = (np.array([2]), np.array([3]))
x, xs = layer(inp)
self.assertEqual(int(x), 5)
self.assertEqual([int(y) for y in xs], [2, 3])
def test_fn_layer_fails_wrong_f(self):
with self.assertRaisesRegexp(ValueError, 'default arg'):
cb.Fn(lambda x, sth=None: x)
with self.assertRaisesRegexp(ValueError, 'keyword arg'):
cb.Fn(lambda x, **kwargs: x)
def test_fn_layer_varargs_n_in(self):
with self.assertRaisesRegexp(ValueError, 'variable arg'):
cb.Fn(lambda *args: args[0])
# Check that varargs work when n_in is set.
id_layer = cb.Fn(lambda *args: args[0], n_in=1)
input_signature = ShapeDtype((2, 7))
expected_shape = (2, 7)
output_shape = base.check_shape_agreement(id_layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_fn_layer_difficult_n_out(self):
with self.assertRaisesRegexp(ValueError, 'n_out'):
# Determining the output of this layer is hard with dummies.
cb.Fn(lambda x: np.concatencate([x, x], axis=4))
# Check that this layer works when n_out is set.
layer = cb.Fn(lambda x: np.concatenate([x, x], axis=4), n_out=1)
input_signature = ShapeDtype((2, 1, 2, 2, 3))
expected_shape = (2, 1, 2, 2, 6)
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_input_signatures_serial(self):
layer = cb.Serial(core.Div(divisor=2.0), core.Div(divisor=5.0))
self.assertIsNone(layer.input_signature)
layer._set_input_signature_recursive(ShapeDtype((3, 2)))
self.assertEqual(layer.input_signature, ShapeDtype((3, 2)))
self.assertLen(layer.sublayers, 2)
for sublayer in layer.sublayers:
self.assertEqual(sublayer.input_signature, ShapeDtype((3, 2)))
def test_input_signatures_serial_batch_norm(self):
# Include a layer that actively uses state.
input_signature = ShapeDtype((3, 28, 28))
batch_norm = normalization.BatchNorm()
relu = core.Relu()
batch_norm_and_relu = cb.Serial(batch_norm, relu)
batch_norm_and_relu.init(input_signature)
# Check for correct shapes entering and exiting the batch_norm layer.
# And the code should run without errors.
batch_norm_and_relu._set_input_signature_recursive(input_signature)
self.assertEqual(batch_norm.input_signature, input_signature)
self.assertEqual(relu.input_signature, input_signature)
def test_input_signatures_parallel(self):
layer = cb.Parallel(core.Div(divisor=0.5), core.Div(divisor=3.0))
self.assertIsNone(layer.input_signature)
layer._set_input_signature_recursive((ShapeDtype((3, 2)),
ShapeDtype((4, 7))))
self.assertEqual(layer.input_signature,
(ShapeDtype((3, 2)), ShapeDtype((4, 7))))
self.assertLen(layer.sublayers, 2)
sublayer_0, sublayer_1 = layer.sublayers
self.assertEqual(sublayer_0.input_signature, ShapeDtype((3, 2)))
self.assertEqual(sublayer_1.input_signature, ShapeDtype((4, 7)))
if __name__ == '__main__':
absltest.main() | trax/layers/combinators_test.py |
"""Tests for combinator layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from trax.backend import numpy as np
from trax.layers import base
from trax.layers import combinators as cb
from trax.layers import core
from trax.layers import normalization
from trax.shapes import ShapeDtype
class CombinatorLayerTest(absltest.TestCase):
def test_drop(self):
layer = cb.Drop()
input_signature = ShapeDtype((3, 2))
expected_shape = ()
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_dup(self):
layer = cb.Dup()
input_signature = ShapeDtype((3, 2))
expected_shape = ((3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_swap(self):
layer = cb.Swap()
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((4, 7), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_no_op(self):
layer = cb.Serial(None)
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_no_op_list(self):
layer = cb.Serial([])
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_one_in_one_out(self):
layer = cb.Serial(core.Div(divisor=2.0))
input_signature = ShapeDtype((3, 2))
expected_shape = (3, 2)
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_div_div(self):
layer = cb.Serial(core.Div(divisor=2.0), core.Div(divisor=5.0))
input_signature = ShapeDtype((3, 2))
expected_shape = (3, 2)
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_dup_dup(self):
layer = cb.Serial(cb.Dup(), cb.Dup())
input_signature = ShapeDtype((3, 2))
expected_shape = ((3, 2), (3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_serial_with_side_outputs_div_div(self):
def some_layer():
return cb.Parallel(core.Div(divisor=2.0), core.Div(divisor=5.0))
layer = cb.SerialWithSideOutputs([some_layer(), some_layer()])
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 2)),
ShapeDtype((5, 2)))
expected_shape = ((3, 2), (4, 2), (5, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_parallel_dup_dup(self):
layer = cb.Parallel(cb.Dup(), cb.Dup())
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (3, 2), (4, 7), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_parallel_div_div(self):
layer = cb.Parallel(core.Div(divisor=0.5), core.Div(divisor=3.0))
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_parallel_no_ops(self):
layer = cb.Parallel([], None)
input_signature = (ShapeDtype((3, 2)), ShapeDtype((4, 7)))
expected_shape = ((3, 2), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_branch_noop_dup(self):
layer = cb.Branch([], cb.Dup())
input_signature = ShapeDtype((3, 2))
expected_shape = ((3, 2), (3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_branch_add_div(self):
layer = cb.Branch(cb.Add(), core.Div(divisor=0.5))
input_signature = (ShapeDtype((3, 2)), ShapeDtype((3, 2)))
expected_shape = ((3, 2), (3, 2))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_scan_basic(self):
@base.layer(n_in=2, n_out=2)
def add(x, **unused_kwargs):
res = x[0] + x[1]
return res, res
scan_layer = cb.Scan(add()) # pylint: disable=no-value-for-parameter
input_signature = (ShapeDtype((3, 2, 7)), ShapeDtype((2, 7)))
expected_shape = ((3, 2, 7), (2, 7))
output_shape = base.check_shape_agreement(scan_layer, input_signature)
self.assertEqual(output_shape, expected_shape)
inp = (np.array([1, 2, 3]), np.array(0))
o, v = scan_layer(inp)
self.assertEqual(int(v), 6)
self.assertEqual([int(x) for x in o], [1, 3, 6])
def test_scan_axis1(self):
@base.layer(n_in=2, n_out=2)
def add(x, **unused_kwargs):
res = x[0] + x[1]
return res, res
scan = cb.Scan(add(), axis=1) # pylint: disable=no-value-for-parameter
input_signature = (ShapeDtype((3, 2, 7)), ShapeDtype((3, 7)))
expected_shape = ((3, 2, 7), (3, 7))
output_shape = base.check_shape_agreement(scan, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_scan_multiinput(self):
@base.layer(n_in=3, n_out=2)
def foo(x, **unused_kwargs):
a, b, carry = x
return a + b, b, carry + 1
scan = cb.Scan(foo(), axis=1) # pylint: disable=no-value-for-parameter
input_signature = (ShapeDtype((3, 2, 7)), ShapeDtype((3, 2, 7)),
ShapeDtype((3, 7)))
expected_shape = ((3, 2, 7), (3, 2, 7), (3, 7))
output_shape = base.check_shape_agreement(scan, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_fn_layer_example(self):
layer = cb.Fn(lambda x, y: (x + y, np.concatenate([x, y], axis=0)))
input_signature = (ShapeDtype((2, 7)), ShapeDtype((2, 7)))
expected_shape = ((2, 7), (4, 7))
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
inp = (np.array([2]), np.array([3]))
x, xs = layer(inp)
self.assertEqual(int(x), 5)
self.assertEqual([int(y) for y in xs], [2, 3])
def test_fn_layer_fails_wrong_f(self):
with self.assertRaisesRegexp(ValueError, 'default arg'):
cb.Fn(lambda x, sth=None: x)
with self.assertRaisesRegexp(ValueError, 'keyword arg'):
cb.Fn(lambda x, **kwargs: x)
def test_fn_layer_varargs_n_in(self):
with self.assertRaisesRegexp(ValueError, 'variable arg'):
cb.Fn(lambda *args: args[0])
# Check that varargs work when n_in is set.
id_layer = cb.Fn(lambda *args: args[0], n_in=1)
input_signature = ShapeDtype((2, 7))
expected_shape = (2, 7)
output_shape = base.check_shape_agreement(id_layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_fn_layer_difficult_n_out(self):
with self.assertRaisesRegexp(ValueError, 'n_out'):
# Determining the output of this layer is hard with dummies.
cb.Fn(lambda x: np.concatencate([x, x], axis=4))
# Check that this layer works when n_out is set.
layer = cb.Fn(lambda x: np.concatenate([x, x], axis=4), n_out=1)
input_signature = ShapeDtype((2, 1, 2, 2, 3))
expected_shape = (2, 1, 2, 2, 6)
output_shape = base.check_shape_agreement(layer, input_signature)
self.assertEqual(output_shape, expected_shape)
def test_input_signatures_serial(self):
layer = cb.Serial(core.Div(divisor=2.0), core.Div(divisor=5.0))
self.assertIsNone(layer.input_signature)
layer._set_input_signature_recursive(ShapeDtype((3, 2)))
self.assertEqual(layer.input_signature, ShapeDtype((3, 2)))
self.assertLen(layer.sublayers, 2)
for sublayer in layer.sublayers:
self.assertEqual(sublayer.input_signature, ShapeDtype((3, 2)))
def test_input_signatures_serial_batch_norm(self):
# Include a layer that actively uses state.
input_signature = ShapeDtype((3, 28, 28))
batch_norm = normalization.BatchNorm()
relu = core.Relu()
batch_norm_and_relu = cb.Serial(batch_norm, relu)
batch_norm_and_relu.init(input_signature)
# Check for correct shapes entering and exiting the batch_norm layer.
# And the code should run without errors.
batch_norm_and_relu._set_input_signature_recursive(input_signature)
self.assertEqual(batch_norm.input_signature, input_signature)
self.assertEqual(relu.input_signature, input_signature)
def test_input_signatures_parallel(self):
layer = cb.Parallel(core.Div(divisor=0.5), core.Div(divisor=3.0))
self.assertIsNone(layer.input_signature)
layer._set_input_signature_recursive((ShapeDtype((3, 2)),
ShapeDtype((4, 7))))
self.assertEqual(layer.input_signature,
(ShapeDtype((3, 2)), ShapeDtype((4, 7))))
self.assertLen(layer.sublayers, 2)
sublayer_0, sublayer_1 = layer.sublayers
self.assertEqual(sublayer_0.input_signature, ShapeDtype((3, 2)))
self.assertEqual(sublayer_1.input_signature, ShapeDtype((4, 7)))
if __name__ == '__main__':
absltest.main() | 0.866669 | 0.583619 |
from copy import deepcopy
from datetime import datetime, date
from json import load as json_in_str
from os.path import isfile, exists, join as path_join
from random import choice
from string import digits, ascii_letters
from typing import Any, Type, Union
from unittest import TestCase
from marshmallow import Schema
from mongoengine import Document
from mongoengine.base import TopLevelDocumentMetaclass
class CommonTestCase(TestCase):
test_db_name = None
db = None
client = None
app_context = None
maxDiff = None
authorized = False
url = None
request_method = None
test_docs = []
test_data_file_name = None
_base_dir = None
models_map = None
counter_map = {}
template_url = None
user_for_auth = None
password_for_auth = '<PASSWORD>'
@classmethod
def setUpClass(cls, create_app, config, db, *args):
"""
Start Flask app end check test database name in current db
:param create_app: Function for create Flask app
:param config: Config for Flask app
:param db: Database for tests
"""
# Start flask app
app = create_app(config)
cls.client = app.test_client()
cls.app_context = app.app_context()
cls.app_context.push()
cls.db = db
cls._prepare_database(config=config, db=db)
@classmethod
def _prepare_database(cls, config, db):
"""Prepare test database"""
cls.test_db_name = db.connection._MongoClient__default_database_name
# Check test database name in test db
if cls.test_db_name in db.connection.list_database_names():
db.connection.drop_database(cls.test_db_name)
# Create all collections from dev db
for collection_name in list(set([item._get_collection_name() for item in cls.models_map.values()])):
db.connection[cls.test_db_name].create_collection(name=collection_name)
@classmethod
def tearDownClass(cls):
"""Delete test data and stop Flask app"""
# Удаление тестовой базы и завершение Flask приложения
cls.db.connection.drop_database(cls.test_db_name)
cls.app_context.pop()
@classmethod
def setUp(cls):
if not cls.request_method:
raise AssertionError("Not found request method!")
@classmethod
def create_user(cls, key: str = 'user', password: str = None, **other_data) -> Document:
"""
Create user and set password
:param key: Key data for user. (In *.json file)
:param password: <PASSWORD>
:param other_data: Other fields
:return Created user
"""
password = password if password else cls.password_for_auth
user = cls.generate_test_data(key=key, **other_data)
user.set_password(password)
user.save()
return user
def auth(self,
username: str = None,
password: str = None,
auth_url: str = '/api/login/',
blocked_user: bool = False,
not_found_user: bool = False,
bad_auth: bool = False):
"""
Authorization function.
:param auth_url URL for authorization
:param username Username
:param password Password
:param blocked_user
:param not_found_user
"""
self.client.cookie_jar.clear()
self.authorized = False
username = username if username else self.user_for_auth.email
password = password if password else <PASSWORD>
status_code = 200
if bad_auth:
status_code = 400
elif blocked_user:
status_code = 403
elif not_found_user:
status_code = 404
json_request = {"email": username, "password": password}
json_response = self._send_request(url=auth_url, params=json_request, expected_status_code=status_code,
request_method=self.client.post)
if blocked_user:
self.assertIn('errors', json_response)
self.assertIn("email", json_response['errors'])
self.assertEqual("The user is blocked.", json_response['errors']["email"])
elif not_found_user:
self.assertIn('errors', json_response)
self.assertIn("email", json_response['errors'])
self.assertEqual("No user found for this email address.", json_response['errors']["email"])
else:
self.authorized = True
self.assertIn("email", json_response)
self.assertEqual(username, json_response["email"])
def validate_invalid_doc_id(self, id_in_data: bool = False, field: str = 'pk', bad_id: str = 'a1',
status_code: int = 400, many: bool = False):
"""
Validate invalid identifier
:param id_in_data
:param field
:param bad_id
:param status_code
:param many
"""
if many:
bad_id = [bad_id]
if id_in_data:
request_data = {field: bad_id}
url = self.url
else:
request_data = {}
url = '/'.join(self.url.split('/')[:-2] + [bad_id])
json_response = self._send_request(url=url, params=request_data, expected_status_code=status_code)
if many:
return self.assertIn('Could not find document.', json_response['errors'][field])
self.assertIn('Could not find document.', json_response['errors'][field])
def validate_not_found_doc(self,
id_in_data: bool = False,
field: str = 'pk',
status_code: int = 400,
not_found_id: str = '555555555555555555555555',
many: bool = False,
check_error: bool = True):
"""
Validate error: Could not find document.
:param id_in_data True/False. (False = id in url)
:param field Field (Only id_in_data=True)
:param not_found_id Not found id
:param status_code Expected status code
:param many
"""
if id_in_data:
if many:
not_found_id = [not_found_id]
request_data = {field: not_found_id}
url = self.url
else:
request_data = {}
url = self.template_url.format(**{field: not_found_id})
json_response = self._send_request(url=url, params=request_data, expected_status_code=status_code)
if check_error:
self.assertIn(f'Could not find document.', json_response['errors'][field])
return json_response
def validate_forbidden_access(self, role_keys: list):
"""
Validate forbidden access
:param role_keys List not allowed roles
"""
for role in role_keys:
self.client.cookie_jar.clear()
email = f'{<EMAIL>'
password = '<PASSWORD>'
self.create_user(email=email, password=password, role=role)
self.auth(username=email, password=password)
json_response = self._send_request(expected_status_code=403)
self.assertIn('errors', json_response)
self.assertIn("role", json_response['errors'])
self.assertEqual(f"insufficient rights for {role} role", json_response['errors']['role'])
def validate_field_in_bad_request(self,
field_name: str,
valid_type: Any = None,
bad_data: list = None,
field_is_required: bool = False,
required_data: dict = None):
"""
Success validate field in bad request
:param field_name Field name
:param valid_type Valid type for this field
:param bad_data Bad data
:param field_is_required Field is required in request? True/False
:param required_data Required data for request
"""
data = {}
bad_data = bad_data if bad_data else self.generate_bad_data(valid_type=valid_type)
json_response = None
for invalid_param in bad_data:
if required_data:
data.update(required_data)
data[field_name] = invalid_param
json_response = self._send_request(params=data, expected_status_code=400)
self.assertIn('errors', json_response)
self.assertIn(field_name, json_response['errors'])
if field_is_required:
self.validate_required_field(field_name)
return json_response
def validate_required_field(self, field_name: str):
"""
Validate required field
:param field_name Field is required in request
"""
json_response = self._send_request(params={"test": "data"}, expected_status_code=400)
self.assertIn('errors', json_response)
self.assertIn(field_name, json_response['errors'])
self.assertIn('Missing data for required field.', json_response['errors'][field_name])
def validate_error_parse_json(self):
"""Check request. Error, if not json in request"""
json_response = self._send_request(expected_status_code=400)
self.assertIn('errors', json_response)
self.assertIn('common', json_response['errors'])
self.assertIn('Cannot parse json', json_response['errors']['common'])
def validate_json(self, response_json, schema):
"""Validate json response"""
self.assertIsNotNone(response_json)
validation_errors = schema(unknown='exclude').validate(response_json)
if validation_errors:
print(f"Ошибки при валидации ответа: \n{validation_errors}")
self.assertDictEqual(validation_errors, {})
def validate_response(self, return_schema: Type[Schema], limit: int = None):
"""
Validate response and limit from GET method
:param return_schema Marshmallow Schema for validate response
:param limit Check limit
"""
json_response = self._send_request()
self.validate_json(json_response, return_schema)
if limit:
self.assertEqual(len(json_response['items']), limit)
def validate_offset(self, return_schema):
"""
Validate offset. GET Method
:param return_schema Marshmallow Schema for validate response
"""
json_response = self._send_request(params={'limit': 2})
self.validate_json(json_response, return_schema)
total_count = json_response['total_count']
# Set second identifier to var
self.assertEqual(len(json_response['items']), 2)
second_doc_id = json_response['items'][1]['id']
# Request offset=1&limit=1, the identifier specified in second_doc_id is expected
json_response = self._send_request(params={'limit': 1, 'offset': 1})
self.validate_json(json_response, return_schema)
self.assertEqual(json_response['total_count'], total_count)
self.assertEqual(len(json_response['items']), 1)
self.assertEqual(json_response['items'][0]['id'], second_doc_id)
def validate_filter(self,
return_schema: Type[Schema],
field: str,
value: Union[bool, str, int, list],
check_value: bool = True,
icontains: bool = False):
"""
Validate filtered response
:param return_schema Marshmallow Schema for validate response
:param field Filter by field
:param value value filter
:param check_value Check value in response
:param icontains True/False
"""
json_response = self._send_request(params={field: value})
self.validate_json(json_response, return_schema)
items = json_response['items']
if check_value:
for item in items:
self.assertIn(value, item[field]) if icontains else self.assertEqual(value, item[field])
return items
def validate_sorting(self, field_name: str, return_schema: Type[Schema], reverse: bool = True):
"""
Validate sorting
:param field_name Order by field name
:param return_schema Return schema
:param reverse Reverse sorting
"""
json_response = self._send_request(params={"order_by": f"-{field_name}" if reverse else field_name})
self.validate_json(json_response, return_schema)
first_iteration = True
self.assertGreater(json_response["total_count"], 0)
prev_value = None
for item in json_response['items']:
if first_iteration:
prev_value = item[field_name]
first_iteration = False
continue
if reverse:
self.assertLessEqual(item[field_name], prev_value)
else:
self.assertGreaterEqual(item[field_name], prev_value)
def create_success(self, model, required_data):
"""Create success. Only required fields"""
json_response = self._send_request(params=required_data, expected_status_code=201)
instance = model.objects.filter(pk=json_response['id']).first()
self.assertNotEqual(instance, None)
instance.delete()
def edit_success(self, edit_obj, edit_field: str, new_value: Union[str, list, int], check_new_value=True):
"""
Success edit object.
:param edit_obj Object for edit
:param edit_field Edit field
:param new_value New value
:param check_new_value Check new value in edit field. True/False
"""
url = '/'.join(self.url.split('/')[:-2] + [str(edit_obj.id)])
json_response = self._send_request(url=url, params={edit_field: new_value})
self.assertIn('status', json_response)
self.assertEqual('success', json_response['status'])
edit_obj.reload()
if check_new_value:
self.assertEqual(getattr(edit_obj, edit_field), new_value)
def edit_success_all_fields(self, edit_obj, data: dict, check_new_values: bool = True):
"""
Success edit object. (All fields)
:param edit_obj Object for edit
:param data Data
:param check_new_values Check new values. True/False
"""
url = '/'.join(self.url.split('/')[:-2] + [str(edit_obj.id)])
json_response = self._send_request(url=url, params=data)
self.assertIn('status', json_response)
self.assertEqual('success', json_response['status'])
edit_obj.reload()
if check_new_values:
self._check_new_values(edit_obj, expected_values=data)
def delete_success(self, delete_obj, deleted_state='deleted'):
"""
Success delete object
:param delete_obj Object for delete
:param deleted_state Deleted state. For check deleted doc.
"""
json_response = self._send_request(params={"id": str(delete_obj.id)})
self.assertIn('status', json_response)
self.assertEqual('success', json_response['status'])
delete_obj.reload()
self.assertEqual(getattr(delete_obj, "state"), deleted_state)
def check_response(self, response, status_code=200):
self.assertEqual(status_code, response.status_code)
self.assertTrue(response.is_json)
try:
return response.json
except Exception:
self.assertTrue(False)
return None
@classmethod
def generate_test_data(cls, key: str, many: bool = False, count: int = 21, **other_fields):
"""
Generate test data for devices tests. This method reading file ./test_data.json
:param key Model name in data json
:param many Create many instances. True/False
:param count Count create instances. Only many=True.
:param other_fields Other data for create or update default data
"""
if not cls.test_data_file_name or not cls._base_dir:
raise AssertionError("Error! ")
if not (model := cls.models_map.get(key)):
raise AssertionError("Error! ")
other_data = other_fields if other_fields else {}
count_create = count if many else 1
instance = None
instances = []
def get_data_from_file():
"""Read data in json file"""
path = path_join(cls._base_dir, "backend", 'app', 'tests', cls.test_data_file_name)
if exists(path) and isfile(path):
with open(path, encoding='utf-8') as file:
return json_in_str(file).get(key)
else:
raise AssertionError(f'File not found! {path}')
raw_data = get_data_from_file()
raw_data.update(other_data)
for i in range(count_create):
data = cls._counter_data(key_object=key, raw_data=raw_data)
instance = model.objects.create(**data)
instances.append(instance)
cls.test_docs.append(instance)
if not many or count_create == 1:
return instance
else:
return instances
def generate_bad_data(self, valid_type=None, max_length=None, min_length=None):
self.assertIsNotNone(valid_type)
invalid_data_map = {
int: [None, True, "", {}, [], "string", "string1", {"key": "value"}, ["item1"], [1, 2], 1.45],
float: [None, True, "", {}, [], "string", "string1", {"key": "value"}, ["item1"], [1, 2]],
str: [None, True, {}, [], 1, {"key": "value"}, ["item1"], [1, 2]],
bool: [None, "", {}, [], 123, "string", "string1", {"key": "value"}, ["item1"], [1, 2], 1.45],
list: [None, "", {}, 123, "string", "string1", {"key": "value"}, 1.45],
"date": [None, True, {}, [], 1, "string", {"key": "value"}, ["item1"], [1, 2], '2020-01-01 10:10'],
"datetime": [None, True, {}, [], 1, "string", {"key": "value"}, ["item1"], [1, 2], '2020-01-01'],
"email": [1, None, True, [], {}, "", "string", {"k": "v"}, ["i"], [1], 1.2],
"doc_id": [None, True, {}, [], {"key": "value"}, ["item1"], [1, 2]],
}
bad_data = invalid_data_map[valid_type]
# TODO Сделать более универсальным max_length min_length
if max_length is not None:
bad_item = ""
for item in range(max_length + 1):
bad_item += "s"
bad_data.append(bad_item)
if min_length is not None:
if valid_type == str:
bad_item = ''.join(choice(ascii_letters + digits)
for _ in range(1, min_length))
bad_data.append(bad_item)
else:
bad_data.append(0)
return bad_data
@classmethod
def _counter_data(cls, key_object, raw_data):
new_data = {}
for field, value in deepcopy(raw_data).items():
if isinstance(value, str) and '{i}' in value:
cls.counter_map.setdefault(key_object, {})
last_count = cls.counter_map[key_object].get(field, 0)
cls.counter_map[key_object][field] = last_count + 1
new_data[field] = value.format(i=cls.counter_map[key_object][field])
else:
new_data[field] = value
return new_data
def _send_request(self,
url: str = None,
params: dict = None,
return_to_json: bool = True,
expected_status_code: int = 200,
request_method: Any = None):
"""
Send request method.
:param url String url for request
:param params Parameters for request
:param return_to_json True/False
:param expected_status_code Allowed status code in response
:return Response or json_response
"""
url_for_request = url if url else self.url
request_method = request_method if request_method else self.request_method
if params:
request_params = {"json": params}
if request_method == self.client.get:
request_params['query_string'] = request_params.pop('json', {})
else:
request_params = {}
response = request_method(url_for_request, **request_params)
self.assertEqual(expected_status_code, response.status_code)
if return_to_json:
return self.check_response(response, status_code=expected_status_code)
return response
def _check_new_values(self, document, expected_values):
"""
Check new values in document.
:param document Document for check
:param expected_values Expected values
"""
def convert_value_to_str(value):
if isinstance(value, str):
return value
elif isinstance(value.__class__, TopLevelDocumentMetaclass):
return str(value.id)
else:
raise AssertionError("Error convert to string: unknown type")
document.reload()
for field, exp_value in expected_values.items():
cur_value = getattr(document, field)
if isinstance(cur_value, list) and isinstance(exp_value, list):
for sub_value in cur_value:
self.assertIn(convert_value_to_str(sub_value), exp_value)
elif isinstance(cur_value, datetime):
self.assertEqual(datetime.strptime(exp_value, "%Y-%m-%dT%H:%M:%S.%fZ"), cur_value)
elif isinstance(cur_value, date):
self.assertEqual(datetime.strptime(exp_value, "%Y-%m-%d").date(), cur_value)
elif field == 'password':
self.assertEqual(document.check_password(exp_value), True)
else:
self.assertEqual(exp_value, convert_value_to_str(cur_value)) | ms_tools/flask/test_case/nosql.py | from copy import deepcopy
from datetime import datetime, date
from json import load as json_in_str
from os.path import isfile, exists, join as path_join
from random import choice
from string import digits, ascii_letters
from typing import Any, Type, Union
from unittest import TestCase
from marshmallow import Schema
from mongoengine import Document
from mongoengine.base import TopLevelDocumentMetaclass
class CommonTestCase(TestCase):
test_db_name = None
db = None
client = None
app_context = None
maxDiff = None
authorized = False
url = None
request_method = None
test_docs = []
test_data_file_name = None
_base_dir = None
models_map = None
counter_map = {}
template_url = None
user_for_auth = None
password_for_auth = '<PASSWORD>'
@classmethod
def setUpClass(cls, create_app, config, db, *args):
"""
Start Flask app end check test database name in current db
:param create_app: Function for create Flask app
:param config: Config for Flask app
:param db: Database for tests
"""
# Start flask app
app = create_app(config)
cls.client = app.test_client()
cls.app_context = app.app_context()
cls.app_context.push()
cls.db = db
cls._prepare_database(config=config, db=db)
@classmethod
def _prepare_database(cls, config, db):
"""Prepare test database"""
cls.test_db_name = db.connection._MongoClient__default_database_name
# Check test database name in test db
if cls.test_db_name in db.connection.list_database_names():
db.connection.drop_database(cls.test_db_name)
# Create all collections from dev db
for collection_name in list(set([item._get_collection_name() for item in cls.models_map.values()])):
db.connection[cls.test_db_name].create_collection(name=collection_name)
@classmethod
def tearDownClass(cls):
"""Delete test data and stop Flask app"""
# Удаление тестовой базы и завершение Flask приложения
cls.db.connection.drop_database(cls.test_db_name)
cls.app_context.pop()
@classmethod
def setUp(cls):
if not cls.request_method:
raise AssertionError("Not found request method!")
@classmethod
def create_user(cls, key: str = 'user', password: str = None, **other_data) -> Document:
"""
Create user and set password
:param key: Key data for user. (In *.json file)
:param password: <PASSWORD>
:param other_data: Other fields
:return Created user
"""
password = password if password else cls.password_for_auth
user = cls.generate_test_data(key=key, **other_data)
user.set_password(password)
user.save()
return user
def auth(self,
username: str = None,
password: str = None,
auth_url: str = '/api/login/',
blocked_user: bool = False,
not_found_user: bool = False,
bad_auth: bool = False):
"""
Authorization function.
:param auth_url URL for authorization
:param username Username
:param password Password
:param blocked_user
:param not_found_user
"""
self.client.cookie_jar.clear()
self.authorized = False
username = username if username else self.user_for_auth.email
password = password if password else <PASSWORD>
status_code = 200
if bad_auth:
status_code = 400
elif blocked_user:
status_code = 403
elif not_found_user:
status_code = 404
json_request = {"email": username, "password": password}
json_response = self._send_request(url=auth_url, params=json_request, expected_status_code=status_code,
request_method=self.client.post)
if blocked_user:
self.assertIn('errors', json_response)
self.assertIn("email", json_response['errors'])
self.assertEqual("The user is blocked.", json_response['errors']["email"])
elif not_found_user:
self.assertIn('errors', json_response)
self.assertIn("email", json_response['errors'])
self.assertEqual("No user found for this email address.", json_response['errors']["email"])
else:
self.authorized = True
self.assertIn("email", json_response)
self.assertEqual(username, json_response["email"])
def validate_invalid_doc_id(self, id_in_data: bool = False, field: str = 'pk', bad_id: str = 'a1',
status_code: int = 400, many: bool = False):
"""
Validate invalid identifier
:param id_in_data
:param field
:param bad_id
:param status_code
:param many
"""
if many:
bad_id = [bad_id]
if id_in_data:
request_data = {field: bad_id}
url = self.url
else:
request_data = {}
url = '/'.join(self.url.split('/')[:-2] + [bad_id])
json_response = self._send_request(url=url, params=request_data, expected_status_code=status_code)
if many:
return self.assertIn('Could not find document.', json_response['errors'][field])
self.assertIn('Could not find document.', json_response['errors'][field])
def validate_not_found_doc(self,
id_in_data: bool = False,
field: str = 'pk',
status_code: int = 400,
not_found_id: str = '555555555555555555555555',
many: bool = False,
check_error: bool = True):
"""
Validate error: Could not find document.
:param id_in_data True/False. (False = id in url)
:param field Field (Only id_in_data=True)
:param not_found_id Not found id
:param status_code Expected status code
:param many
"""
if id_in_data:
if many:
not_found_id = [not_found_id]
request_data = {field: not_found_id}
url = self.url
else:
request_data = {}
url = self.template_url.format(**{field: not_found_id})
json_response = self._send_request(url=url, params=request_data, expected_status_code=status_code)
if check_error:
self.assertIn(f'Could not find document.', json_response['errors'][field])
return json_response
def validate_forbidden_access(self, role_keys: list):
"""
Validate forbidden access
:param role_keys List not allowed roles
"""
for role in role_keys:
self.client.cookie_jar.clear()
email = f'{<EMAIL>'
password = '<PASSWORD>'
self.create_user(email=email, password=password, role=role)
self.auth(username=email, password=password)
json_response = self._send_request(expected_status_code=403)
self.assertIn('errors', json_response)
self.assertIn("role", json_response['errors'])
self.assertEqual(f"insufficient rights for {role} role", json_response['errors']['role'])
def validate_field_in_bad_request(self,
field_name: str,
valid_type: Any = None,
bad_data: list = None,
field_is_required: bool = False,
required_data: dict = None):
"""
Success validate field in bad request
:param field_name Field name
:param valid_type Valid type for this field
:param bad_data Bad data
:param field_is_required Field is required in request? True/False
:param required_data Required data for request
"""
data = {}
bad_data = bad_data if bad_data else self.generate_bad_data(valid_type=valid_type)
json_response = None
for invalid_param in bad_data:
if required_data:
data.update(required_data)
data[field_name] = invalid_param
json_response = self._send_request(params=data, expected_status_code=400)
self.assertIn('errors', json_response)
self.assertIn(field_name, json_response['errors'])
if field_is_required:
self.validate_required_field(field_name)
return json_response
def validate_required_field(self, field_name: str):
"""
Validate required field
:param field_name Field is required in request
"""
json_response = self._send_request(params={"test": "data"}, expected_status_code=400)
self.assertIn('errors', json_response)
self.assertIn(field_name, json_response['errors'])
self.assertIn('Missing data for required field.', json_response['errors'][field_name])
def validate_error_parse_json(self):
"""Check request. Error, if not json in request"""
json_response = self._send_request(expected_status_code=400)
self.assertIn('errors', json_response)
self.assertIn('common', json_response['errors'])
self.assertIn('Cannot parse json', json_response['errors']['common'])
def validate_json(self, response_json, schema):
"""Validate json response"""
self.assertIsNotNone(response_json)
validation_errors = schema(unknown='exclude').validate(response_json)
if validation_errors:
print(f"Ошибки при валидации ответа: \n{validation_errors}")
self.assertDictEqual(validation_errors, {})
def validate_response(self, return_schema: Type[Schema], limit: int = None):
"""
Validate response and limit from GET method
:param return_schema Marshmallow Schema for validate response
:param limit Check limit
"""
json_response = self._send_request()
self.validate_json(json_response, return_schema)
if limit:
self.assertEqual(len(json_response['items']), limit)
def validate_offset(self, return_schema):
"""
Validate offset. GET Method
:param return_schema Marshmallow Schema for validate response
"""
json_response = self._send_request(params={'limit': 2})
self.validate_json(json_response, return_schema)
total_count = json_response['total_count']
# Set second identifier to var
self.assertEqual(len(json_response['items']), 2)
second_doc_id = json_response['items'][1]['id']
# Request offset=1&limit=1, the identifier specified in second_doc_id is expected
json_response = self._send_request(params={'limit': 1, 'offset': 1})
self.validate_json(json_response, return_schema)
self.assertEqual(json_response['total_count'], total_count)
self.assertEqual(len(json_response['items']), 1)
self.assertEqual(json_response['items'][0]['id'], second_doc_id)
def validate_filter(self,
return_schema: Type[Schema],
field: str,
value: Union[bool, str, int, list],
check_value: bool = True,
icontains: bool = False):
"""
Validate filtered response
:param return_schema Marshmallow Schema for validate response
:param field Filter by field
:param value value filter
:param check_value Check value in response
:param icontains True/False
"""
json_response = self._send_request(params={field: value})
self.validate_json(json_response, return_schema)
items = json_response['items']
if check_value:
for item in items:
self.assertIn(value, item[field]) if icontains else self.assertEqual(value, item[field])
return items
def validate_sorting(self, field_name: str, return_schema: Type[Schema], reverse: bool = True):
"""
Validate sorting
:param field_name Order by field name
:param return_schema Return schema
:param reverse Reverse sorting
"""
json_response = self._send_request(params={"order_by": f"-{field_name}" if reverse else field_name})
self.validate_json(json_response, return_schema)
first_iteration = True
self.assertGreater(json_response["total_count"], 0)
prev_value = None
for item in json_response['items']:
if first_iteration:
prev_value = item[field_name]
first_iteration = False
continue
if reverse:
self.assertLessEqual(item[field_name], prev_value)
else:
self.assertGreaterEqual(item[field_name], prev_value)
def create_success(self, model, required_data):
"""Create success. Only required fields"""
json_response = self._send_request(params=required_data, expected_status_code=201)
instance = model.objects.filter(pk=json_response['id']).first()
self.assertNotEqual(instance, None)
instance.delete()
def edit_success(self, edit_obj, edit_field: str, new_value: Union[str, list, int], check_new_value=True):
"""
Success edit object.
:param edit_obj Object for edit
:param edit_field Edit field
:param new_value New value
:param check_new_value Check new value in edit field. True/False
"""
url = '/'.join(self.url.split('/')[:-2] + [str(edit_obj.id)])
json_response = self._send_request(url=url, params={edit_field: new_value})
self.assertIn('status', json_response)
self.assertEqual('success', json_response['status'])
edit_obj.reload()
if check_new_value:
self.assertEqual(getattr(edit_obj, edit_field), new_value)
def edit_success_all_fields(self, edit_obj, data: dict, check_new_values: bool = True):
"""
Success edit object. (All fields)
:param edit_obj Object for edit
:param data Data
:param check_new_values Check new values. True/False
"""
url = '/'.join(self.url.split('/')[:-2] + [str(edit_obj.id)])
json_response = self._send_request(url=url, params=data)
self.assertIn('status', json_response)
self.assertEqual('success', json_response['status'])
edit_obj.reload()
if check_new_values:
self._check_new_values(edit_obj, expected_values=data)
def delete_success(self, delete_obj, deleted_state='deleted'):
"""
Success delete object
:param delete_obj Object for delete
:param deleted_state Deleted state. For check deleted doc.
"""
json_response = self._send_request(params={"id": str(delete_obj.id)})
self.assertIn('status', json_response)
self.assertEqual('success', json_response['status'])
delete_obj.reload()
self.assertEqual(getattr(delete_obj, "state"), deleted_state)
def check_response(self, response, status_code=200):
self.assertEqual(status_code, response.status_code)
self.assertTrue(response.is_json)
try:
return response.json
except Exception:
self.assertTrue(False)
return None
@classmethod
def generate_test_data(cls, key: str, many: bool = False, count: int = 21, **other_fields):
"""
Generate test data for devices tests. This method reading file ./test_data.json
:param key Model name in data json
:param many Create many instances. True/False
:param count Count create instances. Only many=True.
:param other_fields Other data for create or update default data
"""
if not cls.test_data_file_name or not cls._base_dir:
raise AssertionError("Error! ")
if not (model := cls.models_map.get(key)):
raise AssertionError("Error! ")
other_data = other_fields if other_fields else {}
count_create = count if many else 1
instance = None
instances = []
def get_data_from_file():
"""Read data in json file"""
path = path_join(cls._base_dir, "backend", 'app', 'tests', cls.test_data_file_name)
if exists(path) and isfile(path):
with open(path, encoding='utf-8') as file:
return json_in_str(file).get(key)
else:
raise AssertionError(f'File not found! {path}')
raw_data = get_data_from_file()
raw_data.update(other_data)
for i in range(count_create):
data = cls._counter_data(key_object=key, raw_data=raw_data)
instance = model.objects.create(**data)
instances.append(instance)
cls.test_docs.append(instance)
if not many or count_create == 1:
return instance
else:
return instances
def generate_bad_data(self, valid_type=None, max_length=None, min_length=None):
self.assertIsNotNone(valid_type)
invalid_data_map = {
int: [None, True, "", {}, [], "string", "string1", {"key": "value"}, ["item1"], [1, 2], 1.45],
float: [None, True, "", {}, [], "string", "string1", {"key": "value"}, ["item1"], [1, 2]],
str: [None, True, {}, [], 1, {"key": "value"}, ["item1"], [1, 2]],
bool: [None, "", {}, [], 123, "string", "string1", {"key": "value"}, ["item1"], [1, 2], 1.45],
list: [None, "", {}, 123, "string", "string1", {"key": "value"}, 1.45],
"date": [None, True, {}, [], 1, "string", {"key": "value"}, ["item1"], [1, 2], '2020-01-01 10:10'],
"datetime": [None, True, {}, [], 1, "string", {"key": "value"}, ["item1"], [1, 2], '2020-01-01'],
"email": [1, None, True, [], {}, "", "string", {"k": "v"}, ["i"], [1], 1.2],
"doc_id": [None, True, {}, [], {"key": "value"}, ["item1"], [1, 2]],
}
bad_data = invalid_data_map[valid_type]
# TODO Сделать более универсальным max_length min_length
if max_length is not None:
bad_item = ""
for item in range(max_length + 1):
bad_item += "s"
bad_data.append(bad_item)
if min_length is not None:
if valid_type == str:
bad_item = ''.join(choice(ascii_letters + digits)
for _ in range(1, min_length))
bad_data.append(bad_item)
else:
bad_data.append(0)
return bad_data
@classmethod
def _counter_data(cls, key_object, raw_data):
new_data = {}
for field, value in deepcopy(raw_data).items():
if isinstance(value, str) and '{i}' in value:
cls.counter_map.setdefault(key_object, {})
last_count = cls.counter_map[key_object].get(field, 0)
cls.counter_map[key_object][field] = last_count + 1
new_data[field] = value.format(i=cls.counter_map[key_object][field])
else:
new_data[field] = value
return new_data
def _send_request(self,
url: str = None,
params: dict = None,
return_to_json: bool = True,
expected_status_code: int = 200,
request_method: Any = None):
"""
Send request method.
:param url String url for request
:param params Parameters for request
:param return_to_json True/False
:param expected_status_code Allowed status code in response
:return Response or json_response
"""
url_for_request = url if url else self.url
request_method = request_method if request_method else self.request_method
if params:
request_params = {"json": params}
if request_method == self.client.get:
request_params['query_string'] = request_params.pop('json', {})
else:
request_params = {}
response = request_method(url_for_request, **request_params)
self.assertEqual(expected_status_code, response.status_code)
if return_to_json:
return self.check_response(response, status_code=expected_status_code)
return response
def _check_new_values(self, document, expected_values):
"""
Check new values in document.
:param document Document for check
:param expected_values Expected values
"""
def convert_value_to_str(value):
if isinstance(value, str):
return value
elif isinstance(value.__class__, TopLevelDocumentMetaclass):
return str(value.id)
else:
raise AssertionError("Error convert to string: unknown type")
document.reload()
for field, exp_value in expected_values.items():
cur_value = getattr(document, field)
if isinstance(cur_value, list) and isinstance(exp_value, list):
for sub_value in cur_value:
self.assertIn(convert_value_to_str(sub_value), exp_value)
elif isinstance(cur_value, datetime):
self.assertEqual(datetime.strptime(exp_value, "%Y-%m-%dT%H:%M:%S.%fZ"), cur_value)
elif isinstance(cur_value, date):
self.assertEqual(datetime.strptime(exp_value, "%Y-%m-%d").date(), cur_value)
elif field == 'password':
self.assertEqual(document.check_password(exp_value), True)
else:
self.assertEqual(exp_value, convert_value_to_str(cur_value)) | 0.668231 | 0.130535 |
import re
from django.utils.translation import gettext as _
import publications.six as six
from publications.models import CustomLink, CustomFile, Publication, Type
from publisher.models import Publisher
# mapping of months
MONTHS = {
'jan': 1, 'january': 1,
'feb': 2, 'february': 2,
'mar': 3, 'march': 3,
'apr': 4, 'april': 4,
'may': 5,
'jun': 6, 'june': 6,
'jul': 7, 'july': 7,
'aug': 8, 'august': 8,
'sep': 9, 'september': 9,
'oct': 10, 'october': 10,
'nov': 11, 'november': 11,
'dec': 12, 'december': 12}
def populate(publications):
"""
Load custom links and files from database and attach to publications.
"""
customlinks = CustomLink.objects.filter(publication__in=publications)
customfiles = CustomFile.objects.filter(publication__in=publications)
publications_ = {}
for publication in publications:
publication.links = []
publication.files = []
publications_[publication.id] = publication
for link in customlinks:
publications_[link.publication_id].links.append(link)
for file in customfiles:
publications_[file.publication_id].files.append(file)
def populate_from_bib(bib):
publications = []
errors = {}
# publication types
types = Type.objects.all()
for entry in bib:
if 'title' in entry and \
'author' in entry and \
'year' in entry:
# parse authors
authors = entry['author'].split(' and ')
for i in range(len(authors)):
author = authors[i].split(',')
author = [author[-1]] + author[:-1]
authors[i] = ' '.join(author)
authors = ', '.join(authors)
# add missing keys
keys = [
'journal',
'booktitle',
'institution',
'url',
'doi',
'isbn',
'keywords',
'pages',
'note',
'abstract',
'month']
for key in keys:
if not key in entry:
entry[key] = ''
# map integer fields to integers
entry['month'] = MONTHS.get(entry['month'].lower(), 0)
entry['volume'] = entry.get('volume', None)
entry['number'] = entry.get('number', None)
if isinstance(entry['volume'], six.text_type):
entry['volume'] = int(re.sub('[^0-9]', '', entry['volume']))
if isinstance(entry['number'], six.text_type):
entry['number'] = int(re.sub('[^0-9]', '', entry['number']))
# remove whitespace characters (likely due to line breaks)
entry['url'] = re.sub(r'\s', '', entry['url'])
# determine type
type_id = None
for t in types:
if entry['type'] in t.bibtex_type_list:
type_id = t.id
break
if type_id is None:
errors['bibliography'] = 'Type "' + entry['type'] + '" unknown.'
break
publisher = None
entry['publisher'] = entry.get('publisher', None)
if entry['doi']:
doi_prefix = extract_doi_prefix(entry['doi'])
publisher = Publisher.objects.get(doi=doi_prefix)
if publisher is None and entry['publisher']:
#TODO shortname, maybe some sort of slug?
publisher, created = Publisher.objects.get_or_create(name=entry['publisher'])
entry['publisher'] = publisher
# add publication
publications.append(Publication(
type_id=type_id,
citekey=entry['key'],
title=entry['title'],
authors=authors,
year=entry['year'],
month=entry['month'],
journal=entry['journal'],
book_title=entry['booktitle'],
publisher=entry['publisher'],
institution=entry['institution'],
volume=entry['volume'],
number=entry['number'],
pages=entry['pages'],
note=entry['note'],
url=entry['url'],
doi=entry['doi'],
isbn=entry['isbn'],
external=False,
abstract=entry['abstract'],
keywords=entry['keywords']))
else:
errors['bibliography'] = _('Make sure that the keys title, author and year are present.')
break
return publications, errors
def extract_doi_prefix(doi):
m = re.search(r'10[\.\d+]+/', doi)
if m:
return m.group(0)[:-1] | publications/utils.py | import re
from django.utils.translation import gettext as _
import publications.six as six
from publications.models import CustomLink, CustomFile, Publication, Type
from publisher.models import Publisher
# mapping of months
MONTHS = {
'jan': 1, 'january': 1,
'feb': 2, 'february': 2,
'mar': 3, 'march': 3,
'apr': 4, 'april': 4,
'may': 5,
'jun': 6, 'june': 6,
'jul': 7, 'july': 7,
'aug': 8, 'august': 8,
'sep': 9, 'september': 9,
'oct': 10, 'october': 10,
'nov': 11, 'november': 11,
'dec': 12, 'december': 12}
def populate(publications):
"""
Load custom links and files from database and attach to publications.
"""
customlinks = CustomLink.objects.filter(publication__in=publications)
customfiles = CustomFile.objects.filter(publication__in=publications)
publications_ = {}
for publication in publications:
publication.links = []
publication.files = []
publications_[publication.id] = publication
for link in customlinks:
publications_[link.publication_id].links.append(link)
for file in customfiles:
publications_[file.publication_id].files.append(file)
def populate_from_bib(bib):
publications = []
errors = {}
# publication types
types = Type.objects.all()
for entry in bib:
if 'title' in entry and \
'author' in entry and \
'year' in entry:
# parse authors
authors = entry['author'].split(' and ')
for i in range(len(authors)):
author = authors[i].split(',')
author = [author[-1]] + author[:-1]
authors[i] = ' '.join(author)
authors = ', '.join(authors)
# add missing keys
keys = [
'journal',
'booktitle',
'institution',
'url',
'doi',
'isbn',
'keywords',
'pages',
'note',
'abstract',
'month']
for key in keys:
if not key in entry:
entry[key] = ''
# map integer fields to integers
entry['month'] = MONTHS.get(entry['month'].lower(), 0)
entry['volume'] = entry.get('volume', None)
entry['number'] = entry.get('number', None)
if isinstance(entry['volume'], six.text_type):
entry['volume'] = int(re.sub('[^0-9]', '', entry['volume']))
if isinstance(entry['number'], six.text_type):
entry['number'] = int(re.sub('[^0-9]', '', entry['number']))
# remove whitespace characters (likely due to line breaks)
entry['url'] = re.sub(r'\s', '', entry['url'])
# determine type
type_id = None
for t in types:
if entry['type'] in t.bibtex_type_list:
type_id = t.id
break
if type_id is None:
errors['bibliography'] = 'Type "' + entry['type'] + '" unknown.'
break
publisher = None
entry['publisher'] = entry.get('publisher', None)
if entry['doi']:
doi_prefix = extract_doi_prefix(entry['doi'])
publisher = Publisher.objects.get(doi=doi_prefix)
if publisher is None and entry['publisher']:
#TODO shortname, maybe some sort of slug?
publisher, created = Publisher.objects.get_or_create(name=entry['publisher'])
entry['publisher'] = publisher
# add publication
publications.append(Publication(
type_id=type_id,
citekey=entry['key'],
title=entry['title'],
authors=authors,
year=entry['year'],
month=entry['month'],
journal=entry['journal'],
book_title=entry['booktitle'],
publisher=entry['publisher'],
institution=entry['institution'],
volume=entry['volume'],
number=entry['number'],
pages=entry['pages'],
note=entry['note'],
url=entry['url'],
doi=entry['doi'],
isbn=entry['isbn'],
external=False,
abstract=entry['abstract'],
keywords=entry['keywords']))
else:
errors['bibliography'] = _('Make sure that the keys title, author and year are present.')
break
return publications, errors
def extract_doi_prefix(doi):
m = re.search(r'10[\.\d+]+/', doi)
if m:
return m.group(0)[:-1] | 0.213705 | 0.108472 |
import contextlib
import os
import threading
from textwrap import dedent
import unittest
import time
from test import support
from test.support import import_helper
_interpreters = import_helper.import_module('_xxsubinterpreters')
from test.support import interpreters
def _captured_script(script):
    """Wrap *script* so that everything it prints goes into a pipe.

    Returns ``(wrapped, reader)`` where *wrapped* is the script text to
    run and *reader* is a text-mode file object for the read end of the
    pipe.  The caller is responsible for closing *reader*.
    """
    r, w = os.pipe()
    # Re-indent the caller's script so it nests under redirect_stdout()
    # inside the template below (16 spaces matches {indented}'s column).
    indented = script.replace('\n', '\n                ')
    wrapped = dedent(f"""
        import contextlib
        with open({w}, 'w', encoding='utf-8') as spipe:
            with contextlib.redirect_stdout(spipe):
                {indented}
        """)
    return wrapped, open(r, encoding='utf-8')
def clean_up_interpreters():
    """Destroy every interpreter except the main one (id 0)."""
    for interp in interpreters.list_all():
        if interp.id == 0:
            # The main interpreter must never be closed.
            continue
        # A RuntimeError means the interpreter was already destroyed.
        with contextlib.suppress(RuntimeError):
            interp.close()
def _run_output(interp, request, channels=None):
    """Run *request* in *interp* and return everything it printed."""
    wrapped, reader = _captured_script(request)
    with reader:
        interp.run(wrapped, channels=channels)
        return reader.read()
@contextlib.contextmanager
def _running(interp):
    """Context manager that keeps *interp* busy for the duration.

    A background thread runs a script in *interp* that blocks reading
    from a pipe; on exit we write to the pipe to unblock the script,
    then join the thread.
    """
    r, w = os.pipe()
    def run():
        # Blocks inside interp until the write end is written to.
        interp.run(dedent(f"""
            # wait for "signal"
            with open({r}) as rpipe:
                rpipe.read()
            """))
    t = threading.Thread(target=run)
    t.start()
    yield
    # Unblock the script running in the subinterpreter.
    with open(w, 'w') as spipe:
        spipe.write('done')
    t.join()
class TestBase(unittest.TestCase):
    """Base class that destroys leftover subinterpreters after each test."""

    def tearDown(self):
        # Destroy any interpreters the test left behind so tests stay
        # independent of each other.
        clean_up_interpreters()
class CreateTests(TestBase):
    """Tests for interpreters.create()."""

    def test_in_main(self):
        # A newly created interpreter is an Interpreter instance and is
        # tracked by list_all().
        interp = interpreters.create()
        self.assertIsInstance(interp, interpreters.Interpreter)
        self.assertIn(interp, interpreters.list_all())

    def test_in_thread(self):
        # create() works from a non-main thread.  The lock makes the main
        # thread hold the thread back until after it has called create().
        lock = threading.Lock()
        interp = None
        def f():
            nonlocal interp
            interp = interpreters.create()
            lock.acquire()
            lock.release()
        t = threading.Thread(target=f)
        with lock:
            t.start()
        t.join()
        self.assertIn(interp, interpreters.list_all())

    def test_in_subinterpreter(self):
        # A subinterpreter can itself create an interpreter, and the new
        # one shows up in the main interpreter's list_all().
        main, = interpreters.list_all()
        interp = interpreters.create()
        out = _run_output(interp, dedent("""
            from test.support import interpreters
            interp = interpreters.create()
            print(interp.id)
            """))
        interp2 = interpreters.Interpreter(int(out))
        self.assertEqual(interpreters.list_all(), [main, interp, interp2])

    def test_after_destroy_all(self):
        # Creating still works after all previous subinterpreters were
        # destroyed.
        before = set(interpreters.list_all())
        # Create 3 subinterpreters.
        interp_lst = []
        for _ in range(3):
            interps = interpreters.create()
            interp_lst.append(interps)
        # Now destroy them.
        for interp in interp_lst:
            interp.close()
        # Finally, create another.
        interp = interpreters.create()
        self.assertEqual(set(interpreters.list_all()), before | {interp})

    def test_after_destroy_some(self):
        # Creating still works when only some subinterpreters were
        # destroyed; survivors remain listed.
        before = set(interpreters.list_all())
        # Create 3 subinterpreters.
        interp1 = interpreters.create()
        interp2 = interpreters.create()
        interp3 = interpreters.create()
        # Now destroy 2 of them.
        interp1.close()
        interp2.close()
        # Finally, create another.
        interp = interpreters.create()
        self.assertEqual(set(interpreters.list_all()), before | {interp3, interp})
class GetCurrentTests(TestBase):
    """Tests for interpreters.get_current()."""

    def test_main(self):
        # In the main interpreter, get_current() is the main interpreter.
        main = interpreters.get_main()
        current = interpreters.get_current()
        self.assertEqual(current, main)

    def test_subinterpreter(self):
        # In a subinterpreter, get_current() is that interpreter, not main.
        # Use the high-level interpreters.get_main() (not the low-level
        # _interpreters.get_main(), which returns a raw ID) so that both
        # sides of the comparison are Interpreter objects and the
        # assertNotEqual below is a meaningful id comparison rather than
        # a trivially-true type mismatch.  This also matches test_main.
        main = interpreters.get_main()
        interp = interpreters.create()
        out = _run_output(interp, dedent("""
            from test.support import interpreters
            cur = interpreters.get_current()
            print(cur.id)
            """))
        current = interpreters.Interpreter(int(out))
        self.assertNotEqual(current, main)
class ListAllTests(TestBase):
    """Tests for interpreters.list_all()."""

    def test_initial(self):
        # Before any create() calls, only the main interpreter exists.
        self.assertEqual(len(interpreters.list_all()), 1)

    def test_after_creating(self):
        # Newly created interpreters are listed after main, in creation
        # order.
        main = interpreters.get_current()
        first = interpreters.create()
        second = interpreters.create()
        ids = [interp.id for interp in interpreters.list_all()]
        self.assertEqual(ids, [main.id, first.id, second.id])

    def test_after_destroying(self):
        # A destroyed interpreter drops out of the list; the others keep
        # their relative order.
        main = interpreters.get_current()
        first = interpreters.create()
        second = interpreters.create()
        first.close()
        ids = [interp.id for interp in interpreters.list_all()]
        self.assertEqual(ids, [main.id, second.id])
class TestInterpreterAttrs(TestBase):
    """Tests for the public attributes of Interpreter objects."""

    def test_id_type(self):
        # Every interpreter's id is an InterpreterID, however obtained.
        for interp in (interpreters.get_main(),
                       interpreters.get_current(),
                       interpreters.create()):
            self.assertIsInstance(interp.id, _interpreters.InterpreterID)

    def test_main_id(self):
        # The main interpreter always has id 0.
        self.assertEqual(interpreters.get_main().id, 0)

    def test_custom_id(self):
        # An explicit integer id is kept; a string id is rejected.
        self.assertEqual(interpreters.Interpreter(1).id, 1)
        with self.assertRaises(TypeError):
            interpreters.Interpreter('1')

    def test_id_readonly(self):
        # The id attribute cannot be reassigned.
        interp = interpreters.Interpreter(1)
        with self.assertRaises(AttributeError):
            interp.id = 2

    @unittest.skip('not ready yet (see bpo-32604)')
    def test_main_isolated(self):
        self.assertFalse(interpreters.get_main().isolated)

    @unittest.skip('not ready yet (see bpo-32604)')
    def test_subinterpreter_isolated_default(self):
        self.assertFalse(interpreters.create().isolated)

    def test_subinterpreter_isolated_explicit(self):
        # The isolated flag passed to create() is reflected back.
        self.assertTrue(interpreters.create(isolated=True).isolated)
        self.assertFalse(interpreters.create(isolated=False).isolated)

    @unittest.skip('not ready yet (see bpo-32604)')
    def test_custom_isolated_default(self):
        self.assertFalse(interpreters.Interpreter(1).isolated)

    def test_custom_isolated_explicit(self):
        # The isolated flag passed to the constructor is reflected back.
        self.assertTrue(interpreters.Interpreter(1, isolated=True).isolated)
        self.assertFalse(interpreters.Interpreter(1, isolated=False).isolated)

    def test_isolated_readonly(self):
        # The isolated attribute cannot be reassigned.
        interp = interpreters.Interpreter(1)
        with self.assertRaises(AttributeError):
            interp.isolated = True

    def test_equality(self):
        # Interpreters compare equal to themselves and unequal to others.
        one = interpreters.create()
        another = interpreters.create()
        self.assertEqual(one, one)
        self.assertNotEqual(one, another)
class TestInterpreterIsRunning(TestBase):
def test_main(self):
main = interpreters.get_main()
self.assertTrue(main.is_running())
@unittest.skip('Fails on FreeBSD')
def test_subinterpreter(self):
interp = interpreters.create()
self.assertFalse(interp.is_running())
with _running(interp):
self.assertTrue(interp.is_running())
self.assertFalse(interp.is_running())
def test_from_subinterpreter(self):
interp = interpreters.create()
out = _run_output(interp, dedent(f"""
import _xxsubinterpreters as _interpreters
if _interpreters.is_running({interp.id}):
print(True)
else:
print(False)
"""))
self.assertEqual(out.strip(), 'True')
def test_already_destroyed(self):
interp = interpreters.create()
interp.close()
with self.assertRaises(RuntimeError):
interp.is_running()
def test_does_not_exist(self):
interp = interpreters.Interpreter(1_000_000)
with self.assertRaises(RuntimeError):
interp.is_running()
def test_bad_id(self):
interp = interpreters.Interpreter(-1)
with self.assertRaises(ValueError):
interp.is_running()
class TestInterpreterClose(TestBase):
def test_basic(self):
main = interpreters.get_main()
interp1 = interpreters.create()
interp2 = interpreters.create()
interp3 = interpreters.create()
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp2, interp3})
interp2.close()
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp3})
def test_all(self):
before = set(interpreters.list_all())
interps = set()
for _ in range(3):
interp = interpreters.create()
interps.add(interp)
self.assertEqual(set(interpreters.list_all()), before | interps)
for interp in interps:
interp.close()
self.assertEqual(set(interpreters.list_all()), before)
def test_main(self):
main, = interpreters.list_all()
with self.assertRaises(RuntimeError):
main.close()
def f():
with self.assertRaises(RuntimeError):
main.close()
t = threading.Thread(target=f)
t.start()
t.join()
def test_already_destroyed(self):
interp = interpreters.create()
interp.close()
with self.assertRaises(RuntimeError):
interp.close()
def test_does_not_exist(self):
interp = interpreters.Interpreter(1_000_000)
with self.assertRaises(RuntimeError):
interp.close()
def test_bad_id(self):
interp = interpreters.Interpreter(-1)
with self.assertRaises(ValueError):
interp.close()
def test_from_current(self):
main, = interpreters.list_all()
interp = interpreters.create()
out = _run_output(interp, dedent(f"""
from test.support import interpreters
interp = interpreters.Interpreter({int(interp.id)})
try:
interp.close()
except RuntimeError:
print('failed')
"""))
self.assertEqual(out.strip(), 'failed')
self.assertEqual(set(interpreters.list_all()), {main, interp})
def test_from_sibling(self):
main, = interpreters.list_all()
interp1 = interpreters.create()
interp2 = interpreters.create()
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp2})
interp1.run(dedent(f"""
from test.support import interpreters
interp2 = interpreters.Interpreter(int({interp2.id}))
interp2.close()
interp3 = interpreters.create()
interp3.close()
"""))
self.assertEqual(set(interpreters.list_all()), {main, interp1})
def test_from_other_thread(self):
interp = interpreters.create()
def f():
interp.close()
t = threading.Thread(target=f)
t.start()
t.join()
@unittest.skip('Fails on FreeBSD')
def test_still_running(self):
main, = interpreters.list_all()
interp = interpreters.create()
with _running(interp):
with self.assertRaises(RuntimeError):
interp.close()
self.assertTrue(interp.is_running())
class TestInterpreterRun(TestBase):
def test_success(self):
interp = interpreters.create()
script, file = _captured_script('print("it worked!", end="")')
with file:
interp.run(script)
out = file.read()
self.assertEqual(out, 'it worked!')
def test_in_thread(self):
interp = interpreters.create()
script, file = _captured_script('print("it worked!", end="")')
with file:
def f():
interp.run(script)
t = threading.Thread(target=f)
t.start()
t.join()
out = file.read()
self.assertEqual(out, 'it worked!')
@support.requires_fork()
def test_fork(self):
interp = interpreters.create()
import tempfile
with tempfile.NamedTemporaryFile('w+', encoding='utf-8') as file:
file.write('')
file.flush()
expected = 'spam spam spam spam spam'
script = dedent(f"""
import os
try:
os.fork()
except RuntimeError:
with open('{file.name}', 'w', encoding='utf-8') as out:
out.write('{expected}')
""")
interp.run(script)
file.seek(0)
content = file.read()
self.assertEqual(content, expected)
@unittest.skip('Fails on FreeBSD')
def test_already_running(self):
interp = interpreters.create()
with _running(interp):
with self.assertRaises(RuntimeError):
interp.run('print("spam")')
def test_does_not_exist(self):
interp = interpreters.Interpreter(1_000_000)
with self.assertRaises(RuntimeError):
interp.run('print("spam")')
def test_bad_id(self):
interp = interpreters.Interpreter(-1)
with self.assertRaises(ValueError):
interp.run('print("spam")')
def test_bad_script(self):
interp = interpreters.create()
with self.assertRaises(TypeError):
interp.run(10)
def test_bytes_for_script(self):
interp = interpreters.create()
with self.assertRaises(TypeError):
interp.run(b'print("spam")')
# test_xxsubinterpreters covers the remaining Interpreter.run() behavior.
class TestIsShareable(TestBase):
def test_default_shareables(self):
shareables = [
# singletons
None,
# builtin objects
b'spam',
'spam',
10,
-10,
]
for obj in shareables:
with self.subTest(obj):
shareable = interpreters.is_shareable(obj)
self.assertTrue(shareable)
def test_not_shareable(self):
class Cheese:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
class SubBytes(bytes):
"""A subclass of a shareable type."""
not_shareables = [
# singletons
True,
False,
NotImplemented,
...,
# builtin types and objects
type,
object,
object(),
Exception(),
100.0,
# user-defined types and objects
Cheese,
Cheese('Wensleydale'),
SubBytes(b'spam'),
]
for obj in not_shareables:
with self.subTest(repr(obj)):
self.assertFalse(
interpreters.is_shareable(obj))
class TestChannels(TestBase):
def test_create(self):
r, s = interpreters.create_channel()
self.assertIsInstance(r, interpreters.RecvChannel)
self.assertIsInstance(s, interpreters.SendChannel)
def test_list_all(self):
self.assertEqual(interpreters.list_all_channels(), [])
created = set()
for _ in range(3):
ch = interpreters.create_channel()
created.add(ch)
after = set(interpreters.list_all_channels())
self.assertEqual(after, created)
class TestRecvChannelAttrs(TestBase):
def test_id_type(self):
rch, _ = interpreters.create_channel()
self.assertIsInstance(rch.id, _interpreters.ChannelID)
def test_custom_id(self):
rch = interpreters.RecvChannel(1)
self.assertEqual(rch.id, 1)
with self.assertRaises(TypeError):
interpreters.RecvChannel('1')
def test_id_readonly(self):
rch = interpreters.RecvChannel(1)
with self.assertRaises(AttributeError):
rch.id = 2
def test_equality(self):
ch1, _ = interpreters.create_channel()
ch2, _ = interpreters.create_channel()
self.assertEqual(ch1, ch1)
self.assertNotEqual(ch1, ch2)
class TestSendChannelAttrs(TestBase):
def test_id_type(self):
_, sch = interpreters.create_channel()
self.assertIsInstance(sch.id, _interpreters.ChannelID)
def test_custom_id(self):
sch = interpreters.SendChannel(1)
self.assertEqual(sch.id, 1)
with self.assertRaises(TypeError):
interpreters.SendChannel('1')
def test_id_readonly(self):
sch = interpreters.SendChannel(1)
with self.assertRaises(AttributeError):
sch.id = 2
def test_equality(self):
_, ch1 = interpreters.create_channel()
_, ch2 = interpreters.create_channel()
self.assertEqual(ch1, ch1)
self.assertNotEqual(ch1, ch2)
class TestSendRecv(TestBase):
def test_send_recv_main(self):
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv()
self.assertEqual(obj, orig)
self.assertIsNot(obj, orig)
def test_send_recv_same_interpreter(self):
interp = interpreters.create()
interp.run(dedent("""
from test.support import interpreters
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv()
assert obj == orig, 'expected: obj == orig'
assert obj is not orig, 'expected: obj is not orig'
"""))
@unittest.skip('broken (see BPO-...)')
def test_send_recv_different_interpreters(self):
r1, s1 = interpreters.create_channel()
r2, s2 = interpreters.create_channel()
orig1 = b'spam'
s1.send_nowait(orig1)
out = _run_output(
interpreters.create(),
dedent(f"""
obj1 = r.recv()
assert obj1 == b'spam', 'expected: obj1 == orig1'
# When going to another interpreter we get a copy.
assert id(obj1) != {id(orig1)}, 'expected: obj1 is not orig1'
orig2 = b'eggs'
print(id(orig2))
s.send_nowait(orig2)
"""),
channels=dict(r=r1, s=s2),
)
obj2 = r2.recv()
self.assertEqual(obj2, b'eggs')
self.assertNotEqual(id(obj2), int(out))
def test_send_recv_different_threads(self):
r, s = interpreters.create_channel()
def f():
while True:
try:
obj = r.recv()
break
except interpreters.ChannelEmptyError:
time.sleep(0.1)
s.send(obj)
t = threading.Thread(target=f)
t.start()
orig = b'spam'
s.send(orig)
t.join()
obj = r.recv()
self.assertEqual(obj, orig)
self.assertIsNot(obj, orig)
def test_send_recv_nowait_main(self):
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv_nowait()
self.assertEqual(obj, orig)
self.assertIsNot(obj, orig)
def test_send_recv_nowait_main_with_default(self):
r, _ = interpreters.create_channel()
obj = r.recv_nowait(None)
self.assertIsNone(obj)
def test_send_recv_nowait_same_interpreter(self):
interp = interpreters.create()
interp.run(dedent("""
from test.support import interpreters
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv_nowait()
assert obj == orig, 'expected: obj == orig'
# When going back to the same interpreter we get the same object.
assert obj is not orig, 'expected: obj is not orig'
"""))
@unittest.skip('broken (see BPO-...)')
def test_send_recv_nowait_different_interpreters(self):
r1, s1 = interpreters.create_channel()
r2, s2 = interpreters.create_channel()
orig1 = b'spam'
s1.send_nowait(orig1)
out = _run_output(
interpreters.create(),
dedent(f"""
obj1 = r.recv_nowait()
assert obj1 == b'spam', 'expected: obj1 == orig1'
# When going to another interpreter we get a copy.
assert id(obj1) != {id(orig1)}, 'expected: obj1 is not orig1'
orig2 = b'eggs'
print(id(orig2))
s.send_nowait(orig2)
"""),
channels=dict(r=r1, s=s2),
)
obj2 = r2.recv_nowait()
self.assertEqual(obj2, b'eggs')
self.assertNotEqual(id(obj2), int(out))
def test_recv_channel_does_not_exist(self):
ch = interpreters.RecvChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.recv()
def test_send_channel_does_not_exist(self):
ch = interpreters.SendChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.send(b'spam')
def test_recv_nowait_channel_does_not_exist(self):
ch = interpreters.RecvChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.recv_nowait()
def test_send_nowait_channel_does_not_exist(self):
ch = interpreters.SendChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.send_nowait(b'spam')
def test_recv_nowait_empty(self):
ch, _ = interpreters.create_channel()
with self.assertRaises(interpreters.ChannelEmptyError):
ch.recv_nowait()
def test_recv_nowait_default(self):
default = object()
rch, sch = interpreters.create_channel()
obj1 = rch.recv_nowait(default)
sch.send_nowait(None)
sch.send_nowait(1)
sch.send_nowait(b'spam')
sch.send_nowait(b'eggs')
obj2 = rch.recv_nowait(default)
obj3 = rch.recv_nowait(default)
obj4 = rch.recv_nowait()
obj5 = rch.recv_nowait(default)
obj6 = rch.recv_nowait(default)
self.assertIs(obj1, default)
self.assertIs(obj2, None)
self.assertEqual(obj3, 1)
self.assertEqual(obj4, b'spam')
self.assertEqual(obj5, b'eggs')
self.assertIs(obj6, default) | Lib/test/test_interpreters.py | import contextlib
import os
import threading
from textwrap import dedent
import unittest
import time
from test import support
from test.support import import_helper
_interpreters = import_helper.import_module('_xxsubinterpreters')
from test.support import interpreters
def _captured_script(script):
r, w = os.pipe()
indented = script.replace('\n', '\n ')
wrapped = dedent(f"""
import contextlib
with open({w}, 'w', encoding='utf-8') as spipe:
with contextlib.redirect_stdout(spipe):
{indented}
""")
return wrapped, open(r, encoding='utf-8')
def clean_up_interpreters():
for interp in interpreters.list_all():
if interp.id == 0: # main
continue
try:
interp.close()
except RuntimeError:
pass # already destroyed
def _run_output(interp, request, channels=None):
script, rpipe = _captured_script(request)
with rpipe:
interp.run(script, channels=channels)
return rpipe.read()
@contextlib.contextmanager
def _running(interp):
r, w = os.pipe()
def run():
interp.run(dedent(f"""
# wait for "signal"
with open({r}) as rpipe:
rpipe.read()
"""))
t = threading.Thread(target=run)
t.start()
yield
with open(w, 'w') as spipe:
spipe.write('done')
t.join()
class TestBase(unittest.TestCase):
def tearDown(self):
clean_up_interpreters()
class CreateTests(TestBase):
def test_in_main(self):
interp = interpreters.create()
self.assertIsInstance(interp, interpreters.Interpreter)
self.assertIn(interp, interpreters.list_all())
def test_in_thread(self):
lock = threading.Lock()
interp = None
def f():
nonlocal interp
interp = interpreters.create()
lock.acquire()
lock.release()
t = threading.Thread(target=f)
with lock:
t.start()
t.join()
self.assertIn(interp, interpreters.list_all())
def test_in_subinterpreter(self):
main, = interpreters.list_all()
interp = interpreters.create()
out = _run_output(interp, dedent("""
from test.support import interpreters
interp = interpreters.create()
print(interp.id)
"""))
interp2 = interpreters.Interpreter(int(out))
self.assertEqual(interpreters.list_all(), [main, interp, interp2])
def test_after_destroy_all(self):
before = set(interpreters.list_all())
# Create 3 subinterpreters.
interp_lst = []
for _ in range(3):
interps = interpreters.create()
interp_lst.append(interps)
# Now destroy them.
for interp in interp_lst:
interp.close()
# Finally, create another.
interp = interpreters.create()
self.assertEqual(set(interpreters.list_all()), before | {interp})
def test_after_destroy_some(self):
before = set(interpreters.list_all())
# Create 3 subinterpreters.
interp1 = interpreters.create()
interp2 = interpreters.create()
interp3 = interpreters.create()
# Now destroy 2 of them.
interp1.close()
interp2.close()
# Finally, create another.
interp = interpreters.create()
self.assertEqual(set(interpreters.list_all()), before | {interp3, interp})
class GetCurrentTests(TestBase):
def test_main(self):
main = interpreters.get_main()
current = interpreters.get_current()
self.assertEqual(current, main)
def test_subinterpreter(self):
main = _interpreters.get_main()
interp = interpreters.create()
out = _run_output(interp, dedent("""
from test.support import interpreters
cur = interpreters.get_current()
print(cur.id)
"""))
current = interpreters.Interpreter(int(out))
self.assertNotEqual(current, main)
class ListAllTests(TestBase):
def test_initial(self):
interps = interpreters.list_all()
self.assertEqual(1, len(interps))
def test_after_creating(self):
main = interpreters.get_current()
first = interpreters.create()
second = interpreters.create()
ids = []
for interp in interpreters.list_all():
ids.append(interp.id)
self.assertEqual(ids, [main.id, first.id, second.id])
def test_after_destroying(self):
main = interpreters.get_current()
first = interpreters.create()
second = interpreters.create()
first.close()
ids = []
for interp in interpreters.list_all():
ids.append(interp.id)
self.assertEqual(ids, [main.id, second.id])
class TestInterpreterAttrs(TestBase):
def test_id_type(self):
main = interpreters.get_main()
current = interpreters.get_current()
interp = interpreters.create()
self.assertIsInstance(main.id, _interpreters.InterpreterID)
self.assertIsInstance(current.id, _interpreters.InterpreterID)
self.assertIsInstance(interp.id, _interpreters.InterpreterID)
def test_main_id(self):
main = interpreters.get_main()
self.assertEqual(main.id, 0)
def test_custom_id(self):
interp = interpreters.Interpreter(1)
self.assertEqual(interp.id, 1)
with self.assertRaises(TypeError):
interpreters.Interpreter('1')
def test_id_readonly(self):
interp = interpreters.Interpreter(1)
with self.assertRaises(AttributeError):
interp.id = 2
@unittest.skip('not ready yet (see bpo-32604)')
def test_main_isolated(self):
main = interpreters.get_main()
self.assertFalse(main.isolated)
@unittest.skip('not ready yet (see bpo-32604)')
def test_subinterpreter_isolated_default(self):
interp = interpreters.create()
self.assertFalse(interp.isolated)
def test_subinterpreter_isolated_explicit(self):
interp1 = interpreters.create(isolated=True)
interp2 = interpreters.create(isolated=False)
self.assertTrue(interp1.isolated)
self.assertFalse(interp2.isolated)
@unittest.skip('not ready yet (see bpo-32604)')
def test_custom_isolated_default(self):
interp = interpreters.Interpreter(1)
self.assertFalse(interp.isolated)
def test_custom_isolated_explicit(self):
interp1 = interpreters.Interpreter(1, isolated=True)
interp2 = interpreters.Interpreter(1, isolated=False)
self.assertTrue(interp1.isolated)
self.assertFalse(interp2.isolated)
def test_isolated_readonly(self):
interp = interpreters.Interpreter(1)
with self.assertRaises(AttributeError):
interp.isolated = True
def test_equality(self):
interp1 = interpreters.create()
interp2 = interpreters.create()
self.assertEqual(interp1, interp1)
self.assertNotEqual(interp1, interp2)
class TestInterpreterIsRunning(TestBase):
def test_main(self):
main = interpreters.get_main()
self.assertTrue(main.is_running())
@unittest.skip('Fails on FreeBSD')
def test_subinterpreter(self):
interp = interpreters.create()
self.assertFalse(interp.is_running())
with _running(interp):
self.assertTrue(interp.is_running())
self.assertFalse(interp.is_running())
def test_from_subinterpreter(self):
interp = interpreters.create()
out = _run_output(interp, dedent(f"""
import _xxsubinterpreters as _interpreters
if _interpreters.is_running({interp.id}):
print(True)
else:
print(False)
"""))
self.assertEqual(out.strip(), 'True')
def test_already_destroyed(self):
interp = interpreters.create()
interp.close()
with self.assertRaises(RuntimeError):
interp.is_running()
def test_does_not_exist(self):
interp = interpreters.Interpreter(1_000_000)
with self.assertRaises(RuntimeError):
interp.is_running()
def test_bad_id(self):
interp = interpreters.Interpreter(-1)
with self.assertRaises(ValueError):
interp.is_running()
class TestInterpreterClose(TestBase):
def test_basic(self):
main = interpreters.get_main()
interp1 = interpreters.create()
interp2 = interpreters.create()
interp3 = interpreters.create()
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp2, interp3})
interp2.close()
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp3})
def test_all(self):
before = set(interpreters.list_all())
interps = set()
for _ in range(3):
interp = interpreters.create()
interps.add(interp)
self.assertEqual(set(interpreters.list_all()), before | interps)
for interp in interps:
interp.close()
self.assertEqual(set(interpreters.list_all()), before)
def test_main(self):
main, = interpreters.list_all()
with self.assertRaises(RuntimeError):
main.close()
def f():
with self.assertRaises(RuntimeError):
main.close()
t = threading.Thread(target=f)
t.start()
t.join()
def test_already_destroyed(self):
interp = interpreters.create()
interp.close()
with self.assertRaises(RuntimeError):
interp.close()
def test_does_not_exist(self):
interp = interpreters.Interpreter(1_000_000)
with self.assertRaises(RuntimeError):
interp.close()
def test_bad_id(self):
interp = interpreters.Interpreter(-1)
with self.assertRaises(ValueError):
interp.close()
def test_from_current(self):
main, = interpreters.list_all()
interp = interpreters.create()
out = _run_output(interp, dedent(f"""
from test.support import interpreters
interp = interpreters.Interpreter({int(interp.id)})
try:
interp.close()
except RuntimeError:
print('failed')
"""))
self.assertEqual(out.strip(), 'failed')
self.assertEqual(set(interpreters.list_all()), {main, interp})
def test_from_sibling(self):
main, = interpreters.list_all()
interp1 = interpreters.create()
interp2 = interpreters.create()
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp2})
interp1.run(dedent(f"""
from test.support import interpreters
interp2 = interpreters.Interpreter(int({interp2.id}))
interp2.close()
interp3 = interpreters.create()
interp3.close()
"""))
self.assertEqual(set(interpreters.list_all()), {main, interp1})
def test_from_other_thread(self):
interp = interpreters.create()
def f():
interp.close()
t = threading.Thread(target=f)
t.start()
t.join()
@unittest.skip('Fails on FreeBSD')
def test_still_running(self):
main, = interpreters.list_all()
interp = interpreters.create()
with _running(interp):
with self.assertRaises(RuntimeError):
interp.close()
self.assertTrue(interp.is_running())
class TestInterpreterRun(TestBase):
def test_success(self):
interp = interpreters.create()
script, file = _captured_script('print("it worked!", end="")')
with file:
interp.run(script)
out = file.read()
self.assertEqual(out, 'it worked!')
def test_in_thread(self):
interp = interpreters.create()
script, file = _captured_script('print("it worked!", end="")')
with file:
def f():
interp.run(script)
t = threading.Thread(target=f)
t.start()
t.join()
out = file.read()
self.assertEqual(out, 'it worked!')
@support.requires_fork()
def test_fork(self):
interp = interpreters.create()
import tempfile
with tempfile.NamedTemporaryFile('w+', encoding='utf-8') as file:
file.write('')
file.flush()
expected = 'spam spam spam spam spam'
script = dedent(f"""
import os
try:
os.fork()
except RuntimeError:
with open('{file.name}', 'w', encoding='utf-8') as out:
out.write('{expected}')
""")
interp.run(script)
file.seek(0)
content = file.read()
self.assertEqual(content, expected)
@unittest.skip('Fails on FreeBSD')
def test_already_running(self):
interp = interpreters.create()
with _running(interp):
with self.assertRaises(RuntimeError):
interp.run('print("spam")')
def test_does_not_exist(self):
interp = interpreters.Interpreter(1_000_000)
with self.assertRaises(RuntimeError):
interp.run('print("spam")')
def test_bad_id(self):
interp = interpreters.Interpreter(-1)
with self.assertRaises(ValueError):
interp.run('print("spam")')
def test_bad_script(self):
interp = interpreters.create()
with self.assertRaises(TypeError):
interp.run(10)
def test_bytes_for_script(self):
interp = interpreters.create()
with self.assertRaises(TypeError):
interp.run(b'print("spam")')
# test_xxsubinterpreters covers the remaining Interpreter.run() behavior.
class TestIsShareable(TestBase):
def test_default_shareables(self):
shareables = [
# singletons
None,
# builtin objects
b'spam',
'spam',
10,
-10,
]
for obj in shareables:
with self.subTest(obj):
shareable = interpreters.is_shareable(obj)
self.assertTrue(shareable)
def test_not_shareable(self):
class Cheese:
def __init__(self, name):
self.name = name
def __str__(self):
return self.name
class SubBytes(bytes):
"""A subclass of a shareable type."""
not_shareables = [
# singletons
True,
False,
NotImplemented,
...,
# builtin types and objects
type,
object,
object(),
Exception(),
100.0,
# user-defined types and objects
Cheese,
Cheese('Wensleydale'),
SubBytes(b'spam'),
]
for obj in not_shareables:
with self.subTest(repr(obj)):
self.assertFalse(
interpreters.is_shareable(obj))
class TestChannels(TestBase):
def test_create(self):
r, s = interpreters.create_channel()
self.assertIsInstance(r, interpreters.RecvChannel)
self.assertIsInstance(s, interpreters.SendChannel)
def test_list_all(self):
self.assertEqual(interpreters.list_all_channels(), [])
created = set()
for _ in range(3):
ch = interpreters.create_channel()
created.add(ch)
after = set(interpreters.list_all_channels())
self.assertEqual(after, created)
class TestRecvChannelAttrs(TestBase):
def test_id_type(self):
rch, _ = interpreters.create_channel()
self.assertIsInstance(rch.id, _interpreters.ChannelID)
def test_custom_id(self):
rch = interpreters.RecvChannel(1)
self.assertEqual(rch.id, 1)
with self.assertRaises(TypeError):
interpreters.RecvChannel('1')
def test_id_readonly(self):
rch = interpreters.RecvChannel(1)
with self.assertRaises(AttributeError):
rch.id = 2
def test_equality(self):
ch1, _ = interpreters.create_channel()
ch2, _ = interpreters.create_channel()
self.assertEqual(ch1, ch1)
self.assertNotEqual(ch1, ch2)
class TestSendChannelAttrs(TestBase):
def test_id_type(self):
_, sch = interpreters.create_channel()
self.assertIsInstance(sch.id, _interpreters.ChannelID)
def test_custom_id(self):
sch = interpreters.SendChannel(1)
self.assertEqual(sch.id, 1)
with self.assertRaises(TypeError):
interpreters.SendChannel('1')
def test_id_readonly(self):
sch = interpreters.SendChannel(1)
with self.assertRaises(AttributeError):
sch.id = 2
def test_equality(self):
_, ch1 = interpreters.create_channel()
_, ch2 = interpreters.create_channel()
self.assertEqual(ch1, ch1)
self.assertNotEqual(ch1, ch2)
class TestSendRecv(TestBase):
def test_send_recv_main(self):
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv()
self.assertEqual(obj, orig)
self.assertIsNot(obj, orig)
def test_send_recv_same_interpreter(self):
interp = interpreters.create()
interp.run(dedent("""
from test.support import interpreters
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv()
assert obj == orig, 'expected: obj == orig'
assert obj is not orig, 'expected: obj is not orig'
"""))
@unittest.skip('broken (see BPO-...)')
def test_send_recv_different_interpreters(self):
r1, s1 = interpreters.create_channel()
r2, s2 = interpreters.create_channel()
orig1 = b'spam'
s1.send_nowait(orig1)
out = _run_output(
interpreters.create(),
dedent(f"""
obj1 = r.recv()
assert obj1 == b'spam', 'expected: obj1 == orig1'
# When going to another interpreter we get a copy.
assert id(obj1) != {id(orig1)}, 'expected: obj1 is not orig1'
orig2 = b'eggs'
print(id(orig2))
s.send_nowait(orig2)
"""),
channels=dict(r=r1, s=s2),
)
obj2 = r2.recv()
self.assertEqual(obj2, b'eggs')
self.assertNotEqual(id(obj2), int(out))
def test_send_recv_different_threads(self):
r, s = interpreters.create_channel()
def f():
while True:
try:
obj = r.recv()
break
except interpreters.ChannelEmptyError:
time.sleep(0.1)
s.send(obj)
t = threading.Thread(target=f)
t.start()
orig = b'spam'
s.send(orig)
t.join()
obj = r.recv()
self.assertEqual(obj, orig)
self.assertIsNot(obj, orig)
def test_send_recv_nowait_main(self):
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv_nowait()
self.assertEqual(obj, orig)
self.assertIsNot(obj, orig)
def test_send_recv_nowait_main_with_default(self):
r, _ = interpreters.create_channel()
obj = r.recv_nowait(None)
self.assertIsNone(obj)
def test_send_recv_nowait_same_interpreter(self):
interp = interpreters.create()
interp.run(dedent("""
from test.support import interpreters
r, s = interpreters.create_channel()
orig = b'spam'
s.send_nowait(orig)
obj = r.recv_nowait()
assert obj == orig, 'expected: obj == orig'
# When going back to the same interpreter we get the same object.
assert obj is not orig, 'expected: obj is not orig'
"""))
@unittest.skip('broken (see BPO-...)')
def test_send_recv_nowait_different_interpreters(self):
r1, s1 = interpreters.create_channel()
r2, s2 = interpreters.create_channel()
orig1 = b'spam'
s1.send_nowait(orig1)
out = _run_output(
interpreters.create(),
dedent(f"""
obj1 = r.recv_nowait()
assert obj1 == b'spam', 'expected: obj1 == orig1'
# When going to another interpreter we get a copy.
assert id(obj1) != {id(orig1)}, 'expected: obj1 is not orig1'
orig2 = b'eggs'
print(id(orig2))
s.send_nowait(orig2)
"""),
channels=dict(r=r1, s=s2),
)
obj2 = r2.recv_nowait()
self.assertEqual(obj2, b'eggs')
self.assertNotEqual(id(obj2), int(out))
def test_recv_channel_does_not_exist(self):
ch = interpreters.RecvChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.recv()
def test_send_channel_does_not_exist(self):
ch = interpreters.SendChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.send(b'spam')
def test_recv_nowait_channel_does_not_exist(self):
ch = interpreters.RecvChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.recv_nowait()
def test_send_nowait_channel_does_not_exist(self):
ch = interpreters.SendChannel(1_000_000)
with self.assertRaises(interpreters.ChannelNotFoundError):
ch.send_nowait(b'spam')
def test_recv_nowait_empty(self):
ch, _ = interpreters.create_channel()
with self.assertRaises(interpreters.ChannelEmptyError):
ch.recv_nowait()
def test_recv_nowait_default(self):
default = object()
rch, sch = interpreters.create_channel()
obj1 = rch.recv_nowait(default)
sch.send_nowait(None)
sch.send_nowait(1)
sch.send_nowait(b'spam')
sch.send_nowait(b'eggs')
obj2 = rch.recv_nowait(default)
obj3 = rch.recv_nowait(default)
obj4 = rch.recv_nowait()
obj5 = rch.recv_nowait(default)
obj6 = rch.recv_nowait(default)
self.assertIs(obj1, default)
self.assertIs(obj2, None)
self.assertEqual(obj3, 1)
self.assertEqual(obj4, b'spam')
self.assertEqual(obj5, b'eggs')
self.assertIs(obj6, default) | 0.425605 | 0.241093 |
from __future__ import annotations
import zipfile
import tarfile
import typing as T
from pathlib import Path
import tempfile
# zstandard (.zst decompression) is an optional third-party dependency;
# degrade to None at import time so the module always imports and
# extract_zst() can raise a clear ImportError only when actually used.
try:
    import zstandard
except ImportError:
    zstandard = None  # type: ignore
# Accepted anywhere a filesystem path is expected.
Pathlike = T.Union[str, Path]
def extract_zst(archive: Pathlike, out_path: Pathlike):
    """Decompress a Zstandard-compressed tarball and unpack its contents.

    Portable: works on Windows, Linux, macOS, etc.

    Parameters
    ----------
    archive: pathlib.Path or str
        the .zst archive to decompress
    out_path: pathlib.Path or str
        destination directory for the extracted files and directories
    """
    if zstandard is None:
        raise ImportError("pip install zstandard")

    src = Path(archive).expanduser().resolve()
    dest = Path(out_path).expanduser().resolve()
    # .resolve() guards against intermediate relative dirs that don't exist yet

    decomp = zstandard.ZstdDecompressor()
    # Stage the decompressed stream in a temporary .tar, then untar from there.
    with tempfile.TemporaryFile(suffix=".tar") as tmp:
        with src.open("rb") as fin:
            decomp.copy_stream(fin, tmp)
        tmp.seek(0)
        with tarfile.open(fileobj=tmp) as tar:
            tar.extractall(dest)
def extract_zip(archive: Pathlike, outpath: Pathlike):
outpath = Path(outpath).expanduser().resolve()
# need .resolve() in case intermediate relative dir doesn't exist
archive = Path(archive).expanduser().resolve()
with zipfile.ZipFile(archive) as z:
z.extractall(outpath)
def extract_tar(archive: Pathlike, outpath: Pathlike):
outpath = Path(outpath).expanduser().resolve()
# need .resolve() in case intermediate relative dir doesn't exist
archive = Path(archive).expanduser().resolve()
if not archive.is_file():
# tarfile gives confusing error on missing file
raise FileNotFoundError(archive)
try:
with tarfile.open(archive) as z:
z.extractall(outpath)
except tarfile.TarError as e:
raise RuntimeError(
f"""failed to extract {archive} with error {e}.
This file may be corrupt or system libz may be broken.
Try deleting {archive} or manually extracting it."""
) | src/gemini3d/archive.py | from __future__ import annotations
import zipfile
import tarfile
import typing as T
from pathlib import Path
import tempfile
try:
import zstandard
except ImportError:
zstandard = None # type: ignore
Pathlike = T.Union[str, Path]
def extract_zst(archive: Pathlike, out_path: Pathlike):
"""extract .zst file
works on Windows, Linux, MacOS, etc.
Parameters
----------
archive: pathlib.Path or str
.zst file to extract
out_path: pathlib.Path or str
directory to extract files and directories to
"""
if zstandard is None:
raise ImportError("pip install zstandard")
archive = Path(archive).expanduser().resolve()
out_path = Path(out_path).expanduser().resolve()
# need .resolve() in case intermediate relative dir doesn't exist
dctx = zstandard.ZstdDecompressor()
with tempfile.TemporaryFile(suffix=".tar") as ofh:
with archive.open("rb") as ifh:
dctx.copy_stream(ifh, ofh)
ofh.seek(0)
with tarfile.open(fileobj=ofh) as z:
z.extractall(out_path)
def extract_zip(archive: Pathlike, outpath: Pathlike):
outpath = Path(outpath).expanduser().resolve()
# need .resolve() in case intermediate relative dir doesn't exist
archive = Path(archive).expanduser().resolve()
with zipfile.ZipFile(archive) as z:
z.extractall(outpath)
def extract_tar(archive: Pathlike, outpath: Pathlike):
outpath = Path(outpath).expanduser().resolve()
# need .resolve() in case intermediate relative dir doesn't exist
archive = Path(archive).expanduser().resolve()
if not archive.is_file():
# tarfile gives confusing error on missing file
raise FileNotFoundError(archive)
try:
with tarfile.open(archive) as z:
z.extractall(outpath)
except tarfile.TarError as e:
raise RuntimeError(
f"""failed to extract {archive} with error {e}.
This file may be corrupt or system libz may be broken.
Try deleting {archive} or manually extracting it."""
) | 0.645008 | 0.235724 |
import os
import json
import requests
import urllib.parse
import concurrent.futures as thread
import urllib3
import getopt
import sys
import re
r = '\033[1;31m'
g = '\033[1;32m'
y = '\033[1;33m'
b = '\033[1;34m'
r_ = '\033[0;31m'
g_ = '\033[0;32m'
y_ = '\033[0;33m'
b_ = '\033[0;34m'
e = '\033[0m'
global _output
_output = []
global k_
k_ = {
'domain': None,
'threads': 1,
'd_list': None,
'proxy': None,
'output': None,
'timeout': None,
'process': False,
'user_agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.36 Safari/537.36',
'verbose': False,
'dict_len': 0
}
# index/lenght * 100
def PERCENT(x, y): return float(x)/float(y) * 100
services = {
'AWS/S3': {'error': r'The specified bucket does not exist'},
'BitBucket': {'error': r'Repository not found'},
'Github': {'error': r'There isn\\\'t a Github Pages site here\.'},
'Shopify': {'error': r'Sorry\, this shop is currently unavailable\.'},
'Fastly': {'error': r'Fastly error\: unknown domain\:'},
'Ghost': {'error': r'The thing you were looking for is no longer here\, or never was'},
'Heroku': {'error': r'no-such-app.html|<title>no such app</title>|herokucdn.com/error-pages/no-such-app.html'},
'Pantheon': {'error': r'The gods are wise, but do not know of the site which you seek.'},
'Tumbler': {'error': r'Whatever you were looking for doesn\\\'t currently exist at this address.'},
'Wordpress': {'error': r'Do you want to register'},
'TeamWork': {'error': r'Oops - We didn\'t find your site.'},
'Helpjuice': {'error': r'We could not find what you\'re looking for.'},
'Helpscout': {'error': r'No settings were found for this company:'},
'Cargo': {'error': r'<title>404 — File not found</title>'},
'Uservoice': {'error': r'This UserVoice subdomain is currently available!'},
'Surge': {'error': r'project not found'},
'Intercom': {'error': r'This page is reserved for artistic dogs\.|Uh oh\. That page doesn\'t exist</h1>'},
'Webflow': {'error': r'<p class=\"description\">The page you are looking for doesn\'t exist or has been moved.</p>'},
'Kajabi': {'error': r'<h1>The page you were looking for doesn\'t exist.</h1>'},
'Thinkific': {'error': r'You may have mistyped the address or the page may have moved.'},
'Tave': {'error': r'<h1>Error 404: Page Not Found</h1>'},
'Wishpond': {'error': r'<h1>https://www.wishpond.com/404?campaign=true'},
'Aftership': {'error': r'Oops.</h2><p class=\"text-muted text-tight\">The page you\'re looking for doesn\'t exist.'},
'Aha': {'error': r'There is no portal here \.\.\. sending you back to Aha!'},
'Tictail': {'error': r'to target URL: <a href=\"https://tictail.com|Start selling on Tictail.'},
'Brightcove': {'error': r'<p class=\"bc-gallery-error-code\">Error Code: 404</p>'},
'Bigcartel': {'error': r'<h1>Oops! We couldn’t find that page.</h1>'},
'ActiveCampaign': {'error': r'alt=\"LIGHTTPD - fly light.\"'},
'Campaignmonitor': {'error': r'Double check the URL or <a href=\"mailto:<EMAIL>'},
'Acquia': {'error': r'The site you are looking for could not be found.|If you are an Acquia Cloud customer and expect to see your site at this address'},
'Proposify': {'error': r'If you need immediate assistance, please contact <a href=\"mailto:<EMAIL>'},
'Simplebooklet': {'error': r'We can\'t find this <a href=\"https://simplebooklet.com'},
'GetResponse': {'error': r'With GetResponse Landing Pages, lead generation has never been easier'},
'Vend': {'error': r'Looks like you\'ve traveled too far into cyberspace.'},
'Jetbrains': {'error': r'is not a registered InCloud YouTrack.'},
'Smartling': {'error': r'Domain is not configured'},
'Pingdom': {'error': r'pingdom'},
'Tilda': {'error': r'Domain has been assigned'},
'Surveygizmo': {'error': r'data-html-name'},
'Mashery': {'error': r'Unrecognized domain <strong>'},
'Divio': {'error': r'Application not responding'},
'feedpress': {'error': r'The feed has not been found.'},
'readme': {'error': r'Project doesnt exist... yet!'},
'statuspage': {'error': r'You are being <a href=\'https>'},
'zendesk': {'error': r'Help Center Closed'},
'worksites.net': {'error': r'Hello! Sorry, but the webs>'}
}
def plus(string):
print('{0}[ + ]{1} {2}'.format(g, e, string))
def warn(string, exit=not 1):
print('{0}[ ! ]{1} {2}'.format(r, e, string))
if exit:
sys.exit()
def info(string):
print('{0}[ i ]{1} {2}'.format(y, e, string))
def _info():
return '{0}[ i ]{1} '.format(y, e)
def err(string):
print(r' |= [REGEX]: {0}{1}{2}'.format(y_, string, e))
def request(domain, proxy, timeout, user_agent):
url = checkurl(domain)
timeout = timeout
proxies = {
'http': proxy,
'https': proxy
}
redirect = True
headers = {
'User-Agent': user_agent
}
try:
req = requests.packages.urllib3.disable_warnings(
urllib3.exceptions.InsecureRequestWarning
)
req = requests.get(
url=url,
headers=headers,
verify=False,
allow_redirects=redirect,
timeout=int(timeout) if timeout != None else None,
proxies=proxies
)
return req.status_code, req.content
except Exception as err:
if k_.get('d_list'):
print("")
warn('Failed to establish a new connection for: %s' % (domain), 1)
else:
warn('Failed to establish a new connection for: %s' % (domain), 1)
def find(status, content, ok):
for service in services:
for values in services[service].items():
if re.findall(str(values[1]), str(content), re.I) and int(status) in range(201 if ok is False else 200, 599):
return str(service), str(values[1])
def banner():
print("\n /~\\")
print(" C oo ---------------")
print(" _( ^) |T|A|K|E|O|V|E|R|")
print("/ ~\\ ----------------")
print("#> by M'hamed (@m4ll0k) Outaadi")
print("#> http://github.com/m4ll0k")
print("-"*40)
def help(_exit_=False):
banner()
print("Usage: %s [OPTION]\n" % sys.argv[0])
print("\t-d\tSet domain URL (e.g: www.test.com)")
print("\t-t\tSet threads, default 1")
print("\t-l\tScan multiple targets in a text file")
print("\t-p\tUse a proxy to connect the target URL")
print("\t-o\tUse this settings for save a file, args=json or text")
print("\t-T\tSet a request timeout,default value is 20 seconds")
print("\t-k\tProcess 200 http code, cause more false positive")
print("\t-u\tSet custom user agent (e.g: takeover-bot)")
print("\t-v\tVerbose, print more info\n")
if _exit_:
sys.exit()
def checkpath(path):
if os.path.exists(path):
return path
elif os.path.isdir(path):
warn('"%s" is directory!', 1)
elif os.path.exists(path) is False:
warn('"%s" not exists!' % path, 1)
else:
warn('Error in: "%s"' % path, 1)
def readfile(path):
info('Read wordlist.. "%s"' % path)
return [x.strip() for x in open(checkpath(path), 'r')]
def checkurl(url):
o = urllib.parse.urlsplit(url)
if o.scheme not in ['http', 'https', '']:
warn('Scheme "%s" not supported!' % o.scheme, 1)
if o.netloc == '':
return 'http://' + o.path
elif o.netloc:
return o.scheme + '://' + o.netloc
else:
return 'http://' + o.netloc
def print_(string):
sys.stdout.write('\033[1K')
sys.stdout.write('\033[0G')
sys.stdout.write(string)
sys.stdout.flush()
def runner(k):
threadpool = thread.ThreadPoolExecutor(max_workers=k.get('threads'))
if k.get('verbose'):
info('Set %s threads..' % k.get('threads'))
futures = (threadpool.submit(requester, domain, k.get("proxy"), k.get("timeout"), k.get("user_agent"),
k.get("output"), k.get('process'), k.get('verbose')) for domain in k.get("domains"))
for i, results in enumerate(thread.as_completed(futures)):
if k.get('verbose') and k.get('d_list'):
str_ = "{i}{b:.2f}% Domain: {d}".format(
i=_info(),
b=PERCENT(int(i),
int(k.get('dict_len'))), d=k.get('domains')[i]
)
print_(str_)
else:
info('Domain: {}'.format(k.get('domains')[i]))
pass
def requester(domain, proxy, timeout, user_agent, output, ok, v):
code, html = request(domain, proxy, timeout, user_agent)
service, error = find(code, html, ok)
if service and error:
if output:
_output.append((domain, service, error))
if v and not k_.get('d_list'):
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
elif v and k_.get('d_list'):
print("")
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
else:
if k_.get('d_list'):
print("")
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
elif not k_.get('d_list'):
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
if v:
err(error)
def savejson(path, content, v):
if v and not k_.get('d_list'):
info('Writing file..')
elif v and k_.get('d_list'):
print("")
info("Writing file..")
a = {}
b = {"domains": {}}
for i in content:
a.update({i[0]: {'service': i[1], 'error': i[2]}})
b['domains'] = a
with open(path, 'w+') as outjsonfile:
json.dump(b, outjsonfile, indent=4)
outjsonfile.close()
info('Saved at '+path+'..')
def savetxt(path, content, v):
if v and not k_.get('d_list'):
info('Writing file..')
elif v and k_.get('d_list'):
print("")
info("Writing file..")
br = '-'*40
bf = '='*40
out = ''+br+'\n'
for i in content:
out += 'Domain\t: %s\n' % i[0]
out += 'Service\t: %s\n' % i[1]
out += 'Error\t: %s\n' % i[2]
out += ''+bf+'\n'
out += ''+br+'\n'
with open(path, 'w+') as outtxtfile:
outtxtfile.write(out)
outtxtfile.close()
info('Saved at '+path+'..')
def main():
# --
if len(sys.argv) < 2:
help(1)
try:
opts, args = getopt.getopt(sys.argv[1:],
'd:l:p:o:t:T::u:kv',
['d=', 'l=', 'p=', 'v', 'o=', 't=', 'T=', 'u=', 'k'])
except Exception as e:
warn(e, 1)
for o, a in opts:
if o == '-d':
k_['domain'] = a
if o == '-t':
k_['threads'] = int(a)
if o == '-l':
k_['d_list'] = a
if o == '-p':
k_['proxy'] = a
if o == '-o':
k_['output'] = a
if o == '-T':
k_['timeout'] = int(a)
if o == '-k':
k_['process'] = True
if o == '-u':
k_['user_agent'] = a
if o == '-v':
k_['verbose'] = True
if k_.get("domain") or k_.get("d_list"):
banner()
domains = []
if k_.get('verbose'):
info('Starting..')
if k_.get("d_list"):
domains.extend(readfile(k_.get("d_list")))
else:
domains.append(k_.get("domain"))
k_['domains'] = domains
k_['dict_len'] = len(domains)
runner(k_)
if k_.get("output"):
if '.txt' in k_.get('output'):
savetxt(k_.get('output'), _output, k_.get('verbose'))
elif '.json' in k_.get('output'):
savejson(k_.get('output'), _output, k_.get('verbose'))
else:
warn('Output Error: %s extension not supported, only .txt or .json' % k_.get(
'output').split('.')[1], 1)
elif k_.get('domain') is None and k_.get('d_list') is None:
help(1)
if __name__ == '__main__':
try:
main()
except (KeyboardInterrupt) as e:
sys.exit(0) | takeover.py |
import os
import json
import requests
import urllib.parse
import concurrent.futures as thread
import urllib3
import getopt
import sys
import re
r = '\033[1;31m'
g = '\033[1;32m'
y = '\033[1;33m'
b = '\033[1;34m'
r_ = '\033[0;31m'
g_ = '\033[0;32m'
y_ = '\033[0;33m'
b_ = '\033[0;34m'
e = '\033[0m'
global _output
_output = []
global k_
k_ = {
'domain': None,
'threads': 1,
'd_list': None,
'proxy': None,
'output': None,
'timeout': None,
'process': False,
'user_agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.36 Safari/537.36',
'verbose': False,
'dict_len': 0
}
# index/lenght * 100
def PERCENT(x, y): return float(x)/float(y) * 100
services = {
'AWS/S3': {'error': r'The specified bucket does not exist'},
'BitBucket': {'error': r'Repository not found'},
'Github': {'error': r'There isn\\\'t a Github Pages site here\.'},
'Shopify': {'error': r'Sorry\, this shop is currently unavailable\.'},
'Fastly': {'error': r'Fastly error\: unknown domain\:'},
'Ghost': {'error': r'The thing you were looking for is no longer here\, or never was'},
'Heroku': {'error': r'no-such-app.html|<title>no such app</title>|herokucdn.com/error-pages/no-such-app.html'},
'Pantheon': {'error': r'The gods are wise, but do not know of the site which you seek.'},
'Tumbler': {'error': r'Whatever you were looking for doesn\\\'t currently exist at this address.'},
'Wordpress': {'error': r'Do you want to register'},
'TeamWork': {'error': r'Oops - We didn\'t find your site.'},
'Helpjuice': {'error': r'We could not find what you\'re looking for.'},
'Helpscout': {'error': r'No settings were found for this company:'},
'Cargo': {'error': r'<title>404 — File not found</title>'},
'Uservoice': {'error': r'This UserVoice subdomain is currently available!'},
'Surge': {'error': r'project not found'},
'Intercom': {'error': r'This page is reserved for artistic dogs\.|Uh oh\. That page doesn\'t exist</h1>'},
'Webflow': {'error': r'<p class=\"description\">The page you are looking for doesn\'t exist or has been moved.</p>'},
'Kajabi': {'error': r'<h1>The page you were looking for doesn\'t exist.</h1>'},
'Thinkific': {'error': r'You may have mistyped the address or the page may have moved.'},
'Tave': {'error': r'<h1>Error 404: Page Not Found</h1>'},
'Wishpond': {'error': r'<h1>https://www.wishpond.com/404?campaign=true'},
'Aftership': {'error': r'Oops.</h2><p class=\"text-muted text-tight\">The page you\'re looking for doesn\'t exist.'},
'Aha': {'error': r'There is no portal here \.\.\. sending you back to Aha!'},
'Tictail': {'error': r'to target URL: <a href=\"https://tictail.com|Start selling on Tictail.'},
'Brightcove': {'error': r'<p class=\"bc-gallery-error-code\">Error Code: 404</p>'},
'Bigcartel': {'error': r'<h1>Oops! We couldn’t find that page.</h1>'},
'ActiveCampaign': {'error': r'alt=\"LIGHTTPD - fly light.\"'},
'Campaignmonitor': {'error': r'Double check the URL or <a href=\"mailto:<EMAIL>'},
'Acquia': {'error': r'The site you are looking for could not be found.|If you are an Acquia Cloud customer and expect to see your site at this address'},
'Proposify': {'error': r'If you need immediate assistance, please contact <a href=\"mailto:<EMAIL>'},
'Simplebooklet': {'error': r'We can\'t find this <a href=\"https://simplebooklet.com'},
'GetResponse': {'error': r'With GetResponse Landing Pages, lead generation has never been easier'},
'Vend': {'error': r'Looks like you\'ve traveled too far into cyberspace.'},
'Jetbrains': {'error': r'is not a registered InCloud YouTrack.'},
'Smartling': {'error': r'Domain is not configured'},
'Pingdom': {'error': r'pingdom'},
'Tilda': {'error': r'Domain has been assigned'},
'Surveygizmo': {'error': r'data-html-name'},
'Mashery': {'error': r'Unrecognized domain <strong>'},
'Divio': {'error': r'Application not responding'},
'feedpress': {'error': r'The feed has not been found.'},
'readme': {'error': r'Project doesnt exist... yet!'},
'statuspage': {'error': r'You are being <a href=\'https>'},
'zendesk': {'error': r'Help Center Closed'},
'worksites.net': {'error': r'Hello! Sorry, but the webs>'}
}
def plus(string):
print('{0}[ + ]{1} {2}'.format(g, e, string))
def warn(string, exit=not 1):
print('{0}[ ! ]{1} {2}'.format(r, e, string))
if exit:
sys.exit()
def info(string):
print('{0}[ i ]{1} {2}'.format(y, e, string))
def _info():
return '{0}[ i ]{1} '.format(y, e)
def err(string):
print(r' |= [REGEX]: {0}{1}{2}'.format(y_, string, e))
def request(domain, proxy, timeout, user_agent):
url = checkurl(domain)
timeout = timeout
proxies = {
'http': proxy,
'https': proxy
}
redirect = True
headers = {
'User-Agent': user_agent
}
try:
req = requests.packages.urllib3.disable_warnings(
urllib3.exceptions.InsecureRequestWarning
)
req = requests.get(
url=url,
headers=headers,
verify=False,
allow_redirects=redirect,
timeout=int(timeout) if timeout != None else None,
proxies=proxies
)
return req.status_code, req.content
except Exception as err:
if k_.get('d_list'):
print("")
warn('Failed to establish a new connection for: %s' % (domain), 1)
else:
warn('Failed to establish a new connection for: %s' % (domain), 1)
def find(status, content, ok):
for service in services:
for values in services[service].items():
if re.findall(str(values[1]), str(content), re.I) and int(status) in range(201 if ok is False else 200, 599):
return str(service), str(values[1])
def banner():
print("\n /~\\")
print(" C oo ---------------")
print(" _( ^) |T|A|K|E|O|V|E|R|")
print("/ ~\\ ----------------")
print("#> by M'hamed (@m4ll0k) Outaadi")
print("#> http://github.com/m4ll0k")
print("-"*40)
def help(_exit_=False):
banner()
print("Usage: %s [OPTION]\n" % sys.argv[0])
print("\t-d\tSet domain URL (e.g: www.test.com)")
print("\t-t\tSet threads, default 1")
print("\t-l\tScan multiple targets in a text file")
print("\t-p\tUse a proxy to connect the target URL")
print("\t-o\tUse this settings for save a file, args=json or text")
print("\t-T\tSet a request timeout,default value is 20 seconds")
print("\t-k\tProcess 200 http code, cause more false positive")
print("\t-u\tSet custom user agent (e.g: takeover-bot)")
print("\t-v\tVerbose, print more info\n")
if _exit_:
sys.exit()
def checkpath(path):
if os.path.exists(path):
return path
elif os.path.isdir(path):
warn('"%s" is directory!', 1)
elif os.path.exists(path) is False:
warn('"%s" not exists!' % path, 1)
else:
warn('Error in: "%s"' % path, 1)
def readfile(path):
info('Read wordlist.. "%s"' % path)
return [x.strip() for x in open(checkpath(path), 'r')]
def checkurl(url):
o = urllib.parse.urlsplit(url)
if o.scheme not in ['http', 'https', '']:
warn('Scheme "%s" not supported!' % o.scheme, 1)
if o.netloc == '':
return 'http://' + o.path
elif o.netloc:
return o.scheme + '://' + o.netloc
else:
return 'http://' + o.netloc
def print_(string):
sys.stdout.write('\033[1K')
sys.stdout.write('\033[0G')
sys.stdout.write(string)
sys.stdout.flush()
def runner(k):
threadpool = thread.ThreadPoolExecutor(max_workers=k.get('threads'))
if k.get('verbose'):
info('Set %s threads..' % k.get('threads'))
futures = (threadpool.submit(requester, domain, k.get("proxy"), k.get("timeout"), k.get("user_agent"),
k.get("output"), k.get('process'), k.get('verbose')) for domain in k.get("domains"))
for i, results in enumerate(thread.as_completed(futures)):
if k.get('verbose') and k.get('d_list'):
str_ = "{i}{b:.2f}% Domain: {d}".format(
i=_info(),
b=PERCENT(int(i),
int(k.get('dict_len'))), d=k.get('domains')[i]
)
print_(str_)
else:
info('Domain: {}'.format(k.get('domains')[i]))
pass
def requester(domain, proxy, timeout, user_agent, output, ok, v):
code, html = request(domain, proxy, timeout, user_agent)
service, error = find(code, html, ok)
if service and error:
if output:
_output.append((domain, service, error))
if v and not k_.get('d_list'):
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
elif v and k_.get('d_list'):
print("")
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
else:
if k_.get('d_list'):
print("")
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
elif not k_.get('d_list'):
plus('%s service found! Potential domain takeover found! - %s' %
(service, domain))
if v:
err(error)
def savejson(path, content, v):
if v and not k_.get('d_list'):
info('Writing file..')
elif v and k_.get('d_list'):
print("")
info("Writing file..")
a = {}
b = {"domains": {}}
for i in content:
a.update({i[0]: {'service': i[1], 'error': i[2]}})
b['domains'] = a
with open(path, 'w+') as outjsonfile:
json.dump(b, outjsonfile, indent=4)
outjsonfile.close()
info('Saved at '+path+'..')
def savetxt(path, content, v):
if v and not k_.get('d_list'):
info('Writing file..')
elif v and k_.get('d_list'):
print("")
info("Writing file..")
br = '-'*40
bf = '='*40
out = ''+br+'\n'
for i in content:
out += 'Domain\t: %s\n' % i[0]
out += 'Service\t: %s\n' % i[1]
out += 'Error\t: %s\n' % i[2]
out += ''+bf+'\n'
out += ''+br+'\n'
with open(path, 'w+') as outtxtfile:
outtxtfile.write(out)
outtxtfile.close()
info('Saved at '+path+'..')
def main():
# --
if len(sys.argv) < 2:
help(1)
try:
opts, args = getopt.getopt(sys.argv[1:],
'd:l:p:o:t:T::u:kv',
['d=', 'l=', 'p=', 'v', 'o=', 't=', 'T=', 'u=', 'k'])
except Exception as e:
warn(e, 1)
for o, a in opts:
if o == '-d':
k_['domain'] = a
if o == '-t':
k_['threads'] = int(a)
if o == '-l':
k_['d_list'] = a
if o == '-p':
k_['proxy'] = a
if o == '-o':
k_['output'] = a
if o == '-T':
k_['timeout'] = int(a)
if o == '-k':
k_['process'] = True
if o == '-u':
k_['user_agent'] = a
if o == '-v':
k_['verbose'] = True
if k_.get("domain") or k_.get("d_list"):
banner()
domains = []
if k_.get('verbose'):
info('Starting..')
if k_.get("d_list"):
domains.extend(readfile(k_.get("d_list")))
else:
domains.append(k_.get("domain"))
k_['domains'] = domains
k_['dict_len'] = len(domains)
runner(k_)
if k_.get("output"):
if '.txt' in k_.get('output'):
savetxt(k_.get('output'), _output, k_.get('verbose'))
elif '.json' in k_.get('output'):
savejson(k_.get('output'), _output, k_.get('verbose'))
else:
warn('Output Error: %s extension not supported, only .txt or .json' % k_.get(
'output').split('.')[1], 1)
elif k_.get('domain') is None and k_.get('d_list') is None:
help(1)
if __name__ == '__main__':
try:
main()
except (KeyboardInterrupt) as e:
sys.exit(0) | 0.233969 | 0.102394 |
import os
import unittest
from copy import deepcopy
from typing import Dict
import pytorch_lightning as pl # type: ignore
import torch
from d2go.config import CfgNode, temp_defrost
from d2go.runner import create_runner
from d2go.runner.callbacks.quantization import (
QuantizationAwareTraining,
)
from d2go.runner.lightning_task import GeneralizedRCNNTask
from d2go.utils.testing import meta_arch_helper as mah
from d2go.utils.testing.helper import tempdir
from detectron2.modeling import META_ARCH_REGISTRY
from detectron2.utils.events import EventStorage
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
from torch import Tensor
from torch.quantization.quantize_fx import prepare_qat_fx, convert_fx
class TestLightningTask(unittest.TestCase):
def _get_cfg(self, tmp_dir: str) -> CfgNode:
cfg = mah.create_detection_cfg(GeneralizedRCNNTask, tmp_dir)
cfg.TEST.EVAL_PERIOD = cfg.SOLVER.MAX_ITER
return cfg
def _get_trainer(self, output_dir: str) -> pl.Trainer:
checkpoint_callback = ModelCheckpoint(dirpath=output_dir, save_last=True)
return pl.Trainer(
max_steps=1,
limit_train_batches=1,
num_sanity_val_steps=0,
callbacks=[checkpoint_callback],
logger=None,
)
def _compare_state_dict(
self, state1: Dict[str, Tensor], state2: Dict[str, Tensor]
) -> bool:
if state1.keys() != state2.keys():
return False
for k in state1:
if not torch.allclose(state1[k], state2[k]):
return False
return True
@tempdir
def test_load_from_checkpoint(self, tmp_dir) -> None:
task = GeneralizedRCNNTask(self._get_cfg(tmp_dir))
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
ckpt_path = os.path.join(tmp_dir, "test.ckpt")
trainer.save_checkpoint(ckpt_path)
self.assertTrue(os.path.exists(ckpt_path))
# load model weights from checkpoint
task2 = GeneralizedRCNNTask.load_from_checkpoint(ckpt_path)
self.assertTrue(
self._compare_state_dict(
task.model.state_dict(), task2.model.state_dict()
)
)
@tempdir
def test_train_ema(self, tmp_dir):
cfg = self._get_cfg(tmp_dir)
cfg.MODEL_EMA.ENABLED = True
cfg.MODEL_EMA.DECAY = 0.7
task = GeneralizedRCNNTask(cfg)
init_state = deepcopy(task.model.state_dict())
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
for k, v in task.model.state_dict().items():
init_state[k].copy_(init_state[k] * 0.7 + 0.3 * v)
self.assertTrue(
self._compare_state_dict(init_state, task.ema_state.state_dict())
)
@tempdir
def test_load_ema_weights(self, tmp_dir):
cfg = self._get_cfg(tmp_dir)
cfg.MODEL_EMA.ENABLED = True
task = GeneralizedRCNNTask(cfg)
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
# load EMA weights from checkpoint
task2 = GeneralizedRCNNTask.load_from_checkpoint(
os.path.join(tmp_dir, "last.ckpt")
)
self.assertTrue(
self._compare_state_dict(
task.ema_state.state_dict(), task2.ema_state.state_dict()
)
)
# apply EMA weights to model
task2.ema_state.apply_to(task2.model)
self.assertTrue(
self._compare_state_dict(
task.ema_state.state_dict(), task2.model.state_dict()
)
)
def test_create_runner(self):
task_cls = create_runner(
f"{GeneralizedRCNNTask.__module__}.{GeneralizedRCNNTask.__name__}"
)
self.assertTrue(task_cls == GeneralizedRCNNTask)
@tempdir
def test_build_model(self, tmp_dir):
cfg = self._get_cfg(tmp_dir)
cfg.MODEL_EMA.ENABLED = True
task = GeneralizedRCNNTask(cfg)
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
# test building untrained model
model = GeneralizedRCNNTask.build_model(cfg)
self.assertTrue(model.training)
# test loading regular weights
with temp_defrost(cfg):
cfg.MODEL.WEIGHTS = os.path.join(tmp_dir, "last.ckpt")
model = GeneralizedRCNNTask.build_model(cfg, eval_only=True)
self.assertFalse(model.training)
self.assertTrue(
self._compare_state_dict(model.state_dict(), task.model.state_dict())
)
# test loading EMA weights
with temp_defrost(cfg):
cfg.MODEL.WEIGHTS = os.path.join(tmp_dir, "last.ckpt")
cfg.MODEL_EMA.USE_EMA_WEIGHTS_FOR_EVAL_ONLY = True
model = GeneralizedRCNNTask.build_model(cfg, eval_only=True)
self.assertFalse(model.training)
self.assertTrue(
self._compare_state_dict(
model.state_dict(), task.ema_state.state_dict()
)
)
@tempdir
def test_qat(self, tmp_dir):
@META_ARCH_REGISTRY.register()
class QuantizableDetMetaArchForTest(mah.DetMetaArchForTest):
custom_config_dict = {"preserved_attributes": ["preserved_attr"]}
def __init__(self, cfg):
super().__init__(cfg)
self.avgpool.preserved_attr = "foo"
self.avgpool.not_preserved_attr = "bar"
def prepare_for_quant(self, cfg):
self.avgpool = prepare_qat_fx(
self.avgpool,
{"": torch.quantization.get_default_qat_qconfig()},
self.custom_config_dict,
)
return self
def prepare_for_quant_convert(self, cfg):
self.avgpool = convert_fx(
self.avgpool, convert_custom_config_dict=self.custom_config_dict
)
return self
cfg = self._get_cfg(tmp_dir)
cfg.MODEL.META_ARCHITECTURE = "QuantizableDetMetaArchForTest"
cfg.QUANTIZATION.QAT.ENABLED = True
task = GeneralizedRCNNTask(cfg)
callbacks = [
QuantizationAwareTraining.from_config(cfg),
ModelCheckpoint(dirpath=task.cfg.OUTPUT_DIR, save_last=True),
]
trainer = pl.Trainer(
max_steps=1,
limit_train_batches=1,
num_sanity_val_steps=0,
callbacks=callbacks,
logger=None,
)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
prepared_avgpool = task._prepared.model.avgpool
self.assertEqual(prepared_avgpool.preserved_attr, "foo")
self.assertFalse(hasattr(prepared_avgpool, "not_preserved_attr"))
with temp_defrost(cfg):
cfg.MODEL.WEIGHTS = os.path.join(tmp_dir, "last.ckpt")
model = GeneralizedRCNNTask.build_model(cfg, eval_only=True)
self.assertTrue(isinstance(model.avgpool, torch.fx.GraphModule)) | tests/runner/test_runner_lightning_task.py |
import os
import unittest
from copy import deepcopy
from typing import Dict
import pytorch_lightning as pl # type: ignore
import torch
from d2go.config import CfgNode, temp_defrost
from d2go.runner import create_runner
from d2go.runner.callbacks.quantization import (
QuantizationAwareTraining,
)
from d2go.runner.lightning_task import GeneralizedRCNNTask
from d2go.utils.testing import meta_arch_helper as mah
from d2go.utils.testing.helper import tempdir
from detectron2.modeling import META_ARCH_REGISTRY
from detectron2.utils.events import EventStorage
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
from torch import Tensor
from torch.quantization.quantize_fx import prepare_qat_fx, convert_fx
class TestLightningTask(unittest.TestCase):
def _get_cfg(self, tmp_dir: str) -> CfgNode:
cfg = mah.create_detection_cfg(GeneralizedRCNNTask, tmp_dir)
cfg.TEST.EVAL_PERIOD = cfg.SOLVER.MAX_ITER
return cfg
def _get_trainer(self, output_dir: str) -> pl.Trainer:
checkpoint_callback = ModelCheckpoint(dirpath=output_dir, save_last=True)
return pl.Trainer(
max_steps=1,
limit_train_batches=1,
num_sanity_val_steps=0,
callbacks=[checkpoint_callback],
logger=None,
)
def _compare_state_dict(
self, state1: Dict[str, Tensor], state2: Dict[str, Tensor]
) -> bool:
if state1.keys() != state2.keys():
return False
for k in state1:
if not torch.allclose(state1[k], state2[k]):
return False
return True
@tempdir
def test_load_from_checkpoint(self, tmp_dir) -> None:
task = GeneralizedRCNNTask(self._get_cfg(tmp_dir))
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
ckpt_path = os.path.join(tmp_dir, "test.ckpt")
trainer.save_checkpoint(ckpt_path)
self.assertTrue(os.path.exists(ckpt_path))
# load model weights from checkpoint
task2 = GeneralizedRCNNTask.load_from_checkpoint(ckpt_path)
self.assertTrue(
self._compare_state_dict(
task.model.state_dict(), task2.model.state_dict()
)
)
@tempdir
def test_train_ema(self, tmp_dir):
cfg = self._get_cfg(tmp_dir)
cfg.MODEL_EMA.ENABLED = True
cfg.MODEL_EMA.DECAY = 0.7
task = GeneralizedRCNNTask(cfg)
init_state = deepcopy(task.model.state_dict())
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
for k, v in task.model.state_dict().items():
init_state[k].copy_(init_state[k] * 0.7 + 0.3 * v)
self.assertTrue(
self._compare_state_dict(init_state, task.ema_state.state_dict())
)
@tempdir
def test_load_ema_weights(self, tmp_dir):
cfg = self._get_cfg(tmp_dir)
cfg.MODEL_EMA.ENABLED = True
task = GeneralizedRCNNTask(cfg)
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
# load EMA weights from checkpoint
task2 = GeneralizedRCNNTask.load_from_checkpoint(
os.path.join(tmp_dir, "last.ckpt")
)
self.assertTrue(
self._compare_state_dict(
task.ema_state.state_dict(), task2.ema_state.state_dict()
)
)
# apply EMA weights to model
task2.ema_state.apply_to(task2.model)
self.assertTrue(
self._compare_state_dict(
task.ema_state.state_dict(), task2.model.state_dict()
)
)
def test_create_runner(self):
task_cls = create_runner(
f"{GeneralizedRCNNTask.__module__}.{GeneralizedRCNNTask.__name__}"
)
self.assertTrue(task_cls == GeneralizedRCNNTask)
@tempdir
def test_build_model(self, tmp_dir):
cfg = self._get_cfg(tmp_dir)
cfg.MODEL_EMA.ENABLED = True
task = GeneralizedRCNNTask(cfg)
trainer = self._get_trainer(tmp_dir)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
# test building untrained model
model = GeneralizedRCNNTask.build_model(cfg)
self.assertTrue(model.training)
# test loading regular weights
with temp_defrost(cfg):
cfg.MODEL.WEIGHTS = os.path.join(tmp_dir, "last.ckpt")
model = GeneralizedRCNNTask.build_model(cfg, eval_only=True)
self.assertFalse(model.training)
self.assertTrue(
self._compare_state_dict(model.state_dict(), task.model.state_dict())
)
# test loading EMA weights
with temp_defrost(cfg):
cfg.MODEL.WEIGHTS = os.path.join(tmp_dir, "last.ckpt")
cfg.MODEL_EMA.USE_EMA_WEIGHTS_FOR_EVAL_ONLY = True
model = GeneralizedRCNNTask.build_model(cfg, eval_only=True)
self.assertFalse(model.training)
self.assertTrue(
self._compare_state_dict(
model.state_dict(), task.ema_state.state_dict()
)
)
@tempdir
def test_qat(self, tmp_dir):
@META_ARCH_REGISTRY.register()
class QuantizableDetMetaArchForTest(mah.DetMetaArchForTest):
custom_config_dict = {"preserved_attributes": ["preserved_attr"]}
def __init__(self, cfg):
super().__init__(cfg)
self.avgpool.preserved_attr = "foo"
self.avgpool.not_preserved_attr = "bar"
def prepare_for_quant(self, cfg):
self.avgpool = prepare_qat_fx(
self.avgpool,
{"": torch.quantization.get_default_qat_qconfig()},
self.custom_config_dict,
)
return self
def prepare_for_quant_convert(self, cfg):
self.avgpool = convert_fx(
self.avgpool, convert_custom_config_dict=self.custom_config_dict
)
return self
cfg = self._get_cfg(tmp_dir)
cfg.MODEL.META_ARCHITECTURE = "QuantizableDetMetaArchForTest"
cfg.QUANTIZATION.QAT.ENABLED = True
task = GeneralizedRCNNTask(cfg)
callbacks = [
QuantizationAwareTraining.from_config(cfg),
ModelCheckpoint(dirpath=task.cfg.OUTPUT_DIR, save_last=True),
]
trainer = pl.Trainer(
max_steps=1,
limit_train_batches=1,
num_sanity_val_steps=0,
callbacks=callbacks,
logger=None,
)
with EventStorage() as storage:
task.storage = storage
trainer.fit(task)
prepared_avgpool = task._prepared.model.avgpool
self.assertEqual(prepared_avgpool.preserved_attr, "foo")
self.assertFalse(hasattr(prepared_avgpool, "not_preserved_attr"))
with temp_defrost(cfg):
cfg.MODEL.WEIGHTS = os.path.join(tmp_dir, "last.ckpt")
model = GeneralizedRCNNTask.build_model(cfg, eval_only=True)
self.assertTrue(isinstance(model.avgpool, torch.fx.GraphModule)) | 0.698535 | 0.392395 |
from rest_framework import status
from mayan.apps.documents.permissions import permission_document_type_view
from mayan.apps.documents.tests.mixins import DocumentTestMixin
from mayan.apps.rest_api.tests.base import BaseAPITestCase
from ..models import Workflow
from ..permissions import (
permission_workflow_create, permission_workflow_delete,
permission_workflow_edit, permission_workflow_transition,
permission_workflow_view
)
from .literals import (
TEST_WORKFLOW_LABEL, TEST_WORKFLOW_LABEL_EDITED,
TEST_WORKFLOW_STATE_LABEL, TEST_WORKFLOW_STATE_LABEL_EDITED,
TEST_WORKFLOW_TRANSITION_LABEL, TEST_WORKFLOW_TRANSITION_LABEL_EDITED
)
from .mixins import (
DocumentWorkflowAPIViewTestMixin, WorkflowAPIViewTestMixin,
WorkflowStateAPIViewTestMixin, WorkflowTestMixin,
WorkflowTransitionAPIViewTestMixin,
WorkflowTransitionFieldAPIViewTestMixin, WorkflowTransitionFieldTestMixin
)
class DocumentWorkflowsAPIViewTestCase(
    DocumentWorkflowAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """
    API tests for the workflow instances of a document: detail view, list
    view and log entry (transition) views, each exercised without access,
    with partial access and with full access grants.
    """
    auto_upload_test_document = False

    def _setup_workflow_and_document(self):
        # Shared fixture: a workflow attached to the test document type,
        # complete with states and a transition, plus a stub document so a
        # workflow instance is launched for it.
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()

    def test_workflow_instance_detail_view_no_permission(self):
        """The detail view is forbidden without any access grant."""
        self._setup_workflow_and_document()

        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('workflow', response.data)

    def test_workflow_instance_detail_view_with_workflow_access(self):
        """Workflow access alone is not enough to see the detail view."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('workflow', response.data)

    def test_workflow_instance_detail_view_with_document_access(self):
        """Document access alone hides the workflow instance (404)."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertNotIn('workflow', response.data)

    def test_workflow_instance_detail_view_with_full_access(self):
        """With both grants the detail view returns the workflow data."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['workflow']['label'], TEST_WORKFLOW_LABEL
        )

    def test_workflow_instance_list_view_no_permission(self):
        """The list view is forbidden without any access grant."""
        self._setup_workflow_and_document()

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Fixed: the original asserted the absence of a non-existent
        # 'result' key, which passed vacuously. Paginated DRF responses use
        # the 'results' key, as the other list tests in this module check.
        self.assertNotIn('results', response.data)

    def test_workflow_instance_list_view_with_document_access(self):
        """Document access yields an empty workflow instance list."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_workflow_instance_list_view_with_workflow_access(self):
        """Workflow access alone is not enough to list instances."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Fixed: 'result' -> 'results'; the original key never exists so
        # the assertion could not fail.
        self.assertNotIn('results', response.data)

    def test_workflow_instance_list_view_with_full_access(self):
        """With both grants the list view includes the instance."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['workflow']['label'],
            TEST_WORKFLOW_LABEL
        )

    def test_workflow_instance_log_entries_create_view_no_permission(self):
        """Transitioning without permission is rejected as a bad request."""
        self._setup_workflow_and_document()

        workflow_instance = self.test_document.workflows.first()

        response = self._request_test_workflow_instance_log_entry_create_api_view(
            workflow_instance=workflow_instance
        )
        # Bad request, not forbidden: without the transition permission the
        # requested transition is not a valid choice for the workflow
        # instance's current state.
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(workflow_instance.log_entries.count(), 0)

    def test_workflow_instance_log_entries_create_view_with_workflow_access(self):
        """With transition access a log entry is created."""
        self._setup_workflow_and_document()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_transition
        )

        workflow_instance = self.test_document.workflows.first()

        response = self._request_test_workflow_instance_log_entry_create_api_view(
            workflow_instance=workflow_instance
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        workflow_instance.refresh_from_db()
        self.assertEqual(
            workflow_instance.log_entries.first().transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_instance_log_entries_list_view_no_permission(self):
        """The log entry list view is forbidden without access."""
        self._setup_workflow_and_document()
        self._create_test_workflow_instance_log_entry()

        response = self._request_test_workflow_instance_log_entry_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('results', response.data)

    def test_workflow_instance_log_entries_list_view_with_document_access(self):
        """Document access is enough to list the log entries."""
        self._setup_workflow_and_document()
        self._create_test_workflow_instance_log_entry()

        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_log_entry_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['transition']['label'],
            TEST_WORKFLOW_TRANSITION_LABEL
        )
class WorkflowAPIViewTestCase(
    WorkflowAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """
    API tests for workflow CRUD views, workflow <-> document type
    attachment views and the per document type workflow list view.
    """
    auto_upload_test_document = False

    def test_workflow_create_view_no_permission(self):
        """Creation is forbidden without the create permission."""
        response = self._request_test_workflow_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_create_view_with_permission(self):
        """The create permission allows workflow creation."""
        self.grant_permission(permission=permission_workflow_create)

        response = self._request_test_workflow_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['label'], TEST_WORKFLOW_LABEL)
        self.assertEqual(Workflow.objects.count(), 1)

    def test_workflow_create_with_document_type_view_no_permission(self):
        """Creation with document types is forbidden without permission."""
        response = self._request_test_workflow_create_api_view(
            extra_data={
                'document_types_pk_list': '{}'.format(
                    self.test_document_type.pk
                )
            }
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_create_with_document_type_view_with_permission(self):
        """Creation can attach document types in the same request."""
        self.grant_permission(permission=permission_workflow_create)

        response = self._request_test_workflow_create_api_view(
            extra_data={
                'document_types_pk_list': '{}'.format(
                    self.test_document_type.pk
                )
            }
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Workflow.objects.count(), 1)

        workflow = Workflow.objects.first()
        self.assertQuerysetEqual(
            workflow.document_types.all(), (repr(self.test_document_type),)
        )
        self.assertEqual(response.data['id'], workflow.pk)

    def test_workflow_delete_view_no_permission(self):
        """Deletion without access returns 404 and keeps the workflow."""
        self._create_test_workflow()

        response = self._request_test_workflow_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(Workflow.objects.count(), 1)

    def test_workflow_delete_view_with_permission(self):
        """Delete access removes the workflow."""
        self._create_test_workflow()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_delete
        )

        response = self._request_test_workflow_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_detail_view_no_permission(self):
        """Detail view without access returns 404 with no data."""
        self._create_test_workflow()

        response = self._request_test_workflow_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertNotIn('label', response.data)

    def test_workflow_detail_view_with_access(self):
        """View access exposes the workflow detail."""
        self._create_test_workflow()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['label'], self.test_workflow.label)

    def test_workflow_document_type_create_view_no_permission(self):
        """Attaching a document type is forbidden without edit access."""
        self._create_test_workflow(add_document_type=False)

        response = self._request_test_workflow_document_type_list_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(self.test_workflow.document_types.count(), 0)

    def test_workflow_document_type_create_view_with_access(self):
        """Edit access allows attaching a document type to the workflow."""
        self._create_test_workflow(add_document_type=False)

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_document_type_list_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertQuerysetEqual(
            self.test_workflow.document_types.all(),
            (repr(self.test_document_type),)
        )

    def test_workflow_document_type_delete_view_no_permission(self):
        """Detaching a document type is forbidden without edit access."""
        self._create_test_workflow(add_document_type=True)

        response = self._request_test_workflow_document_type_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.document_types.count(), 1)

    def test_workflow_document_type_delete_view_with_access(self):
        """Edit access allows detaching the document type."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_document_type_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.document_types.count(), 0)

    def test_workflow_document_type_detail_view_no_permission(self):
        """Document type detail is forbidden without any grants."""
        self._create_test_workflow(add_document_type=True)

        response = self._request_test_workflow_document_type_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('label', response.data)

    def test_workflow_document_type_detail_view_with_workflow_access(self):
        """Workflow access alone yields 404 for the document type detail."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_document_type_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertNotIn('label', response.data)

    def test_workflow_document_type_detail_view_with_document_access(self):
        """Document type access alone is forbidden."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_test_workflow_document_type_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('label', response.data)

    def test_workflow_document_type_detail_view_with_access(self):
        """Both grants expose the attached document type's detail."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_document_type_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['label'], self.test_document_type.label
        )

    def test_workflow_document_type_list_view_no_permission(self):
        """The document type list of a workflow is forbidden without access."""
        self._create_test_workflow(add_document_type=True)

        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_workflow_document_type_list_view_with_workflow_access(self):
        """Workflow access alone lists zero document types."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_workflow_document_type_list_view_with_document_access(self):
        """Document type access alone is forbidden."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_workflow_document_type_list_view_with_access(self):
        """Both grants list the attached document type."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], self.test_document_type.label
        )

    def test_workflow_list_view_no_permission(self):
        """Without access the workflow list is empty but accessible."""
        self._create_test_workflow()

        response = self._request_test_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_workflow_list_view_with_access(self):
        """View access makes the workflow appear in the list."""
        self._create_test_workflow()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], self.test_workflow.label
        )

    def test_workflow_patch_view_no_permission(self):
        """PATCH without access returns 404 and leaves the label unchanged."""
        self._create_test_workflow()

        response = self._request_test_workflow_edit_patch_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL)

    def test_workflow_patch_view_with_access(self):
        """Edit access allows PATCH editing."""
        self._create_test_workflow()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_edit_patch_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL_EDITED)

    def test_workflow_put_view_no_permission(self):
        """PUT without access returns 404 and leaves the label unchanged."""
        self._create_test_workflow()

        response = self._request_test_workflow_edit_put_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL)

    def test_workflow_put_view_with_access(self):
        """Edit access allows PUT editing."""
        self._create_test_workflow()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_edit_put_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL_EDITED)

    def test_document_type_workflow_list_no_permission(self):
        """A document type's workflow list is forbidden without access."""
        self._create_test_workflow(add_document_type=True)

        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('results', response.data)

    def test_document_type_workflow_list_with_workflow_access(self):
        """Workflow access alone is still forbidden for this view."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('results', response.data)

    def test_document_type_workflow_list_with_document_access(self):
        """Document type access alone lists zero workflows."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_document_type_workflow_list_with_access(self):
        """Both grants list the workflow for the document type."""
        self._create_test_workflow(add_document_type=True)

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )

        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], self.test_workflow.label
        )
class WorkflowStatesAPIViewTestCase(
    WorkflowStateAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """API tests for workflow state create, delete, detail, list and edit."""
    auto_upload_test_document = False

    def _setup_workflow_with_state(self):
        # Shared fixture: a workflow with a single state.
        self._create_test_workflow()
        self._create_test_workflow_state()

    def test_workflow_state_create_view_no_permission(self):
        """State creation is forbidden without edit access."""
        self._create_test_workflow()

        response = self._request_test_workflow_state_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.states.count(), 0)

    def test_workflow_state_create_view_with_access(self):
        """Edit access allows state creation."""
        self._create_test_workflow()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_state_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        self.test_workflow.refresh_from_db()
        self.assertEqual(
            self.test_workflow.states.first().label, TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_delete_view_no_permission(self):
        """State deletion is forbidden without edit access."""
        self._setup_workflow_with_state()

        response = self._request_test_workflow_state_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.states.count(), 1)

    def test_workflow_state_delete_view_with_access(self):
        """Edit access allows state deletion."""
        self._setup_workflow_with_state()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_state_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.states.count(), 0)

    def test_workflow_state_detail_view_no_permission(self):
        """State detail is forbidden without view access."""
        self._setup_workflow_with_state()

        response = self._request_test_workflow_state_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('label', response.data)

    def test_workflow_state_detail_view_with_access(self):
        """View access exposes the state detail."""
        self._setup_workflow_with_state()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_state_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['label'], TEST_WORKFLOW_STATE_LABEL)

    def test_workflow_state_list_view_no_permission(self):
        """The state list is forbidden without view access."""
        self._setup_workflow_with_state()

        response = self._request_test_workflow_state_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Consistency fix: forbidden list responses are checked against the
        # paginated 'results' key elsewhere in this module; the original
        # checked 'label', which a list response would never carry anyway.
        self.assertNotIn('results', response.data)

    def test_workflow_state_list_view_with_access(self):
        """View access lists the workflow's states."""
        self._setup_workflow_with_state()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_state_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_edit_view_via_patch_no_permission(self):
        """PATCH without access is forbidden and the label is unchanged."""
        self._setup_workflow_with_state()

        response = self._request_test_workflow_state_edit_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_edit_view_via_patch_with_access(self):
        """Edit access allows editing the state via PATCH."""
        self._setup_workflow_with_state()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_state_edit_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL_EDITED
        )

    def test_workflow_state_edit_view_via_put_no_permission(self):
        """PUT without access is forbidden and the label is unchanged."""
        self._setup_workflow_with_state()

        response = self._request_test_workflow_state_edit_put_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_edit_view_via_put_with_access(self):
        """Edit access allows editing the state via PUT."""
        self._setup_workflow_with_state()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_state_edit_put_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL_EDITED
        )
class WorkflowTransitionAPIViewTestCase(
    WorkflowTransitionAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """API tests for workflow transition create, delete, detail, list and edit."""
    auto_upload_test_document = False

    def _setup_workflow_with_transition(self):
        # Shared fixture: a workflow with two states joined by a transition.
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()

    def test_workflow_transition_create_view_no_permission(self):
        """Transition creation is forbidden without edit access."""
        self._create_test_workflow()
        self._create_test_workflow_states()

        response = self._request_test_workflow_transition_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.transitions.count(), 0)

    def test_workflow_transition_create_view_with_access(self):
        """Edit access allows transition creation."""
        self._create_test_workflow()
        self._create_test_workflow_states()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_transition_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        self.test_workflow.refresh_from_db()
        self.assertEqual(
            self.test_workflow.transitions.first().label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_delete_view_no_permission(self):
        """Transition deletion is forbidden without edit access."""
        self._setup_workflow_with_transition()

        response = self._request_test_workflow_transition_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.transitions.count(), 1)

    def test_workflow_transition_delete_view_with_access(self):
        """Edit access allows transition deletion."""
        self._setup_workflow_with_transition()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_transition_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.transitions.count(), 0)

    def test_workflow_transition_detail_view_no_permission(self):
        """Transition detail is forbidden without view access."""
        self._setup_workflow_with_transition()

        response = self._request_test_workflow_transition_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('label', response.data)

    def test_workflow_transition_detail_view_with_access(self):
        """View access exposes the transition detail."""
        self._setup_workflow_with_transition()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_transition_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['label'], TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_list_view_no_permission(self):
        """The transition list is forbidden without view access."""
        self._setup_workflow_with_transition()

        response = self._request_test_workflow_transition_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn('results', response.data)

    def test_workflow_transition_list_view_with_access(self):
        """View access lists the workflow's transitions."""
        self._setup_workflow_with_transition()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_transition_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'],
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_transition_edit_view_via_patch_no_permission(self):
        """PATCH without access leaves label and states untouched."""
        self._setup_workflow_with_transition()

        response = self._request_test_workflow_transition_edit_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )
        self.assertEqual(
            self.test_workflow_transition.origin_state,
            self.test_workflow_state_1
        )
        self.assertEqual(
            self.test_workflow_transition.destination_state,
            self.test_workflow_state_2
        )

    def test_workflow_transition_edit_view_via_patch_with_access(self):
        """Edit access allows PATCH editing label and swapping states."""
        self._setup_workflow_with_transition()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_transition_edit_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL_EDITED
        )
        self.assertEqual(
            self.test_workflow_transition.origin_state,
            self.test_workflow_state_2
        )
        self.assertEqual(
            self.test_workflow_transition.destination_state,
            self.test_workflow_state_1
        )

    def test_workflow_transition_edit_view_via_put_no_permission(self):
        """PUT without access leaves label and states untouched."""
        self._setup_workflow_with_transition()

        # NOTE(review): the mixin method name ends in '_via', which looks
        # like a typo for '..._put_api_view'; it must be renamed in the
        # mixin (and both callers here) together, not in this module alone.
        response = self._request_test_workflow_transition_edit_put_api_view_via()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )
        self.assertEqual(
            self.test_workflow_transition.origin_state,
            self.test_workflow_state_1
        )
        self.assertEqual(
            self.test_workflow_transition.destination_state,
            self.test_workflow_state_2
        )

    def test_workflow_transition_edit_view_via_put_with_access(self):
        """Edit access allows PUT editing label and swapping states."""
        self._setup_workflow_with_transition()

        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        response = self._request_test_workflow_transition_edit_put_api_view_via()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL_EDITED
        )
        self.assertEqual(
            self.test_workflow_transition.origin_state,
            self.test_workflow_state_2
        )
        self.assertEqual(
            self.test_workflow_transition.destination_state,
            self.test_workflow_state_1
        )
class WorkflowTransitionFieldAPIViewTestCase(
    WorkflowTransitionFieldAPIViewTestMixin, DocumentTestMixin,
    WorkflowTestMixin, WorkflowTransitionFieldTestMixin, BaseAPITestCase
):
    """
    API tests for workflow transition field endpoints: create, delete,
    detail, edit (PATCH) and list. Each operation is exercised twice:
    without access (expecting HTTP 403 and no model change) and with the
    relevant permission granted via the workflow's ACL.
    """
    # Fixtures are created per test; no test document upload is needed.
    auto_upload_test_document = False

    def test_workflow_transition_field_create_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()

        transition_field_count = self.test_workflow_transition.fields.count()

        response = self._request_test_workflow_transition_field_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        # No field may have been added.
        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count
        )

    def test_workflow_transition_field_create_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        transition_field_count = self.test_workflow_transition.fields.count()

        response = self._request_test_workflow_transition_field_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count + 1
        )

    def test_workflow_transition_field_delete_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()

        transition_field_count = self.test_workflow_transition.fields.count()

        response = self._request_test_workflow_transition_field_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count
        )

    def test_workflow_transition_field_delete_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        transition_field_count = self.test_workflow_transition.fields.count()

        response = self._request_test_workflow_transition_field_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count - 1
        )

    def test_workflow_transition_field_detail_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()

        response = self._request_test_workflow_transition_field_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Fixed: this is a detail view, so the meaningful leak check is the
        # 'label' key (as in the other detail tests), not the list-only
        # 'results' key which never appears in a detail payload.
        self.assertFalse('label' in response.data)

    def test_workflow_transition_field_detail_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_transition_field_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['label'],
            self.test_workflow_transition_field.label
        )

    def test_workflow_transition_field_edit_via_patch_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()

        transition_field_label = self.test_workflow_transition_field.label

        response = self._request_test_workflow_transition_field_edit_via_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        # The label must be unchanged.
        self.test_workflow_transition_field.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition_field.label, transition_field_label
        )

    def test_workflow_transition_field_edit_via_patch_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )

        transition_field_label = self.test_workflow_transition_field.label

        response = self._request_test_workflow_transition_field_edit_via_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # The edit must have changed the label away from its original value.
        self.test_workflow_transition_field.refresh_from_db()
        self.assertNotEqual(
            self.test_workflow_transition_field.label, transition_field_label
        )

    def test_workflow_transition_field_list_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()

        response = self._request_test_workflow_transition_field_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('results' in response.data)

    def test_workflow_transition_field_list_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_transition_field_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'],
            self.test_workflow_transition_field.label
        )


# NOTE(review): the original line here was corrupted by an extraction
# artifact that fused this import with unrelated separator text. The import
# is kept because the classes below reference `status`.
from rest_framework import status
from mayan.apps.documents.permissions import permission_document_type_view
from mayan.apps.documents.tests.mixins import DocumentTestMixin
from mayan.apps.rest_api.tests.base import BaseAPITestCase
from ..models import Workflow
from ..permissions import (
permission_workflow_create, permission_workflow_delete,
permission_workflow_edit, permission_workflow_transition,
permission_workflow_view
)
from .literals import (
TEST_WORKFLOW_LABEL, TEST_WORKFLOW_LABEL_EDITED,
TEST_WORKFLOW_STATE_LABEL, TEST_WORKFLOW_STATE_LABEL_EDITED,
TEST_WORKFLOW_TRANSITION_LABEL, TEST_WORKFLOW_TRANSITION_LABEL_EDITED
)
from .mixins import (
DocumentWorkflowAPIViewTestMixin, WorkflowAPIViewTestMixin,
WorkflowStateAPIViewTestMixin, WorkflowTestMixin,
WorkflowTransitionAPIViewTestMixin,
WorkflowTransitionFieldAPIViewTestMixin, WorkflowTransitionFieldTestMixin
)
class DocumentWorkflowsAPIViewTestCase(
    DocumentWorkflowAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """
    API tests for workflow instances attached to documents: instance
    detail, instance list, and instance log entry create/list. Read
    access is checked along both axes — the workflow ACL and the
    document ACL — and only the combination grants visibility.
    """
    auto_upload_test_document = False

    def test_workflow_instance_detail_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()

        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('workflow' in response.data)

    def test_workflow_instance_detail_view_with_workflow_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        # Workflow access alone is not enough; the document is still hidden.
        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('workflow' in response.data)

    def test_workflow_instance_detail_view_with_document_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_detail_api_view()
        # 404 rather than 403: the document resolves but the workflow
        # instance is filtered out, so it appears not to exist.
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertFalse('workflow' in response.data)

    def test_workflow_instance_detail_view_with_full_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['workflow']['label'],
            TEST_WORKFLOW_LABEL
        )

    def test_workflow_instance_list_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Fixed: previously checked the non-existent key 'result', which
        # passed vacuously even if results had leaked.
        self.assertFalse('results' in response.data)

    def test_workflow_instance_list_view_with_document_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        # The list is reachable via document access, but the unviewable
        # workflow keeps the result set empty.
        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_workflow_instance_list_view_with_workflow_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Fixed: previously checked the non-existent key 'result', which
        # passed vacuously even if results had leaked.
        self.assertFalse('results' in response.data)

    def test_workflow_instance_list_view_with_full_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['workflow']['label'],
            TEST_WORKFLOW_LABEL
        )

    def test_workflow_instance_log_entries_create_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()

        workflow_instance = self.test_document.workflows.first()

        response = self._request_test_workflow_instance_log_entry_create_api_view(
            workflow_instance=workflow_instance
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # We get bad request because we try to create a transition for which
        # we don't have permission and therefore is not valid for this
        # workflow instance current state
        self.assertEqual(workflow_instance.log_entries.count(), 0)

    def test_workflow_instance_log_entries_create_view_with_workflow_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_transition
        )

        workflow_instance = self.test_document.workflows.first()

        response = self._request_test_workflow_instance_log_entry_create_api_view(
            workflow_instance=workflow_instance
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        workflow_instance.refresh_from_db()
        self.assertEqual(
            workflow_instance.log_entries.first().transition.label,
            TEST_WORKFLOW_TRANSITION_LABEL
        )

    def test_workflow_instance_log_entries_list_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self._create_test_workflow_instance_log_entry()

        response = self._request_test_workflow_instance_log_entry_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('results' in response.data)

    def test_workflow_instance_log_entries_list_view_with_document_access(self):
        self._create_test_workflow(add_document_type=True)
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_document_stub()
        self._create_test_workflow_instance_log_entry()
        self.grant_access(
            obj=self.test_document, permission=permission_workflow_view
        )

        response = self._request_test_workflow_instance_log_entry_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['transition']['label'],
            TEST_WORKFLOW_TRANSITION_LABEL
        )
class WorkflowAPIViewTestCase(
    WorkflowAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """
    API tests for the workflow endpoints: workflow CRUD, the
    workflow<->document type association endpoints, and the reverse
    document type -> workflow listing. Each behavior is tested with and
    without the required permission/ACL grant.

    NOTE(review): rejections are inconsistent in kind — some endpoints
    return 403, others 404 (object filtered out of the queryset).
    The expectations below pin the current behavior of each view.
    """
    # Workflow fixtures are created per test; no document upload needed.
    auto_upload_test_document = False

    def test_workflow_create_view_no_permission(self):
        response = self._request_test_workflow_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # No workflow may have been created.
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_create_view_with_permission(self):
        # Create is a global (role) permission, not an object ACL.
        self.grant_permission(permission=permission_workflow_create)
        response = self._request_test_workflow_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(
            response.data['label'], TEST_WORKFLOW_LABEL
        )
        self.assertEqual(Workflow.objects.count(), 1)

    def test_workflow_create_with_document_type_view_no_permission(self):
        response = self._request_test_workflow_create_api_view(
            extra_data={
                'document_types_pk_list': '{}'.format(
                    self.test_document_type.pk
                )
            }
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_create_with_document_type_view_with_permission(self):
        self.grant_permission(permission=permission_workflow_create)
        response = self._request_test_workflow_create_api_view(
            extra_data={
                'document_types_pk_list': '{}'.format(
                    self.test_document_type.pk
                )
            }
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Workflow.objects.count(), 1)
        # The created workflow must be linked to the given document type.
        workflow = Workflow.objects.first()
        self.assertQuerysetEqual(
            workflow.document_types.all(), (repr(self.test_document_type),)
        )
        self.assertEqual(response.data['id'], workflow.pk)

    def test_workflow_delete_view_no_permission(self):
        self._create_test_workflow()
        response = self._request_test_workflow_delete_api_view()
        # 404: the workflow is hidden from the user, not merely forbidden.
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(Workflow.objects.count(), 1)

    def test_workflow_delete_view_with_permission(self):
        self._create_test_workflow()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_delete
        )
        response = self._request_test_workflow_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Workflow.objects.count(), 0)

    def test_workflow_detail_view_no_permission(self):
        self._create_test_workflow()
        response = self._request_test_workflow_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertFalse('label' in response.data)

    def test_workflow_detail_view_with_access(self):
        self._create_test_workflow()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['label'], self.test_workflow.label)

    def test_workflow_document_type_create_view_no_permission(self):
        self._create_test_workflow(add_document_type=False)
        response = self._request_test_workflow_document_type_list_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(self.test_workflow.document_types.count(), 0)

    def test_workflow_document_type_create_view_with_access(self):
        self._create_test_workflow(add_document_type=False)
        self.grant_access(
            permission=permission_workflow_edit, obj=self.test_workflow
        )
        response = self._request_test_workflow_document_type_list_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertQuerysetEqual(
            self.test_workflow.document_types.all(),
            (repr(self.test_document_type),)
        )

    def test_workflow_document_type_delete_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        response = self._request_test_workflow_document_type_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow.refresh_from_db()
        # The association must still exist.
        self.assertEqual(self.test_workflow.document_types.count(), 1)

    def test_workflow_document_type_delete_view_with_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            permission=permission_workflow_edit, obj=self.test_workflow
        )
        response = self._request_test_workflow_document_type_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.document_types.count(), 0)

    def test_workflow_document_type_detail_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        response = self._request_test_workflow_document_type_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('label' in response.data)

    def test_workflow_document_type_detail_view_with_workflow_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_document_type_detail_api_view()
        # Workflow access alone: the document type is filtered out -> 404.
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertFalse('label' in response.data)

    def test_workflow_document_type_detail_view_with_document_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        response = self._request_test_workflow_document_type_detail_api_view()
        # Document type access alone: the workflow itself is forbidden -> 403.
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('label' in response.data)

    def test_workflow_document_type_detail_view_with_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_document_type_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['label'], self.test_document_type.label
        )

    def test_workflow_document_type_list_view_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_workflow_document_type_list_view_with_workflow_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The list is reachable but the unviewable document type is excluded.
        self.assertEqual(response.data['count'], 0)

    def test_workflow_document_type_list_view_with_document_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_workflow_document_type_list_view_with_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_document_type_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], self.test_document_type.label
        )

    def test_workflow_list_view_no_permission(self):
        self._create_test_workflow()
        response = self._request_test_workflow_list_api_view()
        # The list endpoint itself is public; unviewable workflows are
        # simply filtered out of the result set.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_workflow_list_view_with_access(self):
        self._create_test_workflow()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], self.test_workflow.label
        )

    def test_workflow_patch_view_no_permission(self):
        self._create_test_workflow()
        response = self._request_test_workflow_edit_patch_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.test_workflow.refresh_from_db()
        # The label must be unchanged.
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL)

    def test_workflow_patch_view_with_access(self):
        self._create_test_workflow()
        self.grant_access(
            permission=permission_workflow_edit, obj=self.test_workflow
        )
        response = self._request_test_workflow_edit_patch_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL_EDITED)

    def test_workflow_put_view_no_permission(self):
        self._create_test_workflow()
        response = self._request_test_workflow_edit_put_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL)

    def test_workflow_put_view_with_access(self):
        self._create_test_workflow()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )
        response = self._request_test_workflow_edit_put_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.label, TEST_WORKFLOW_LABEL_EDITED)

    def test_document_type_workflow_list_no_permission(self):
        self._create_test_workflow(add_document_type=True)
        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('results' in response.data)

    def test_document_type_workflow_list_with_workflow_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('results' in response.data)

    def test_document_type_workflow_list_with_document_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The unviewable workflow is excluded from the listing.
        self.assertEqual(response.data['count'], 0)

    def test_document_type_workflow_list_with_access(self):
        self._create_test_workflow(add_document_type=True)
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        self.grant_access(
            obj=self.test_document_type,
            permission=permission_document_type_view
        )
        response = self._request_test_document_type_workflow_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], self.test_workflow.label
        )
class WorkflowStatesAPIViewTestCase(
    WorkflowStateAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
    BaseAPITestCase
):
    """
    API tests for workflow state endpoints: create, delete, detail,
    list, and edit via PATCH and PUT. Access is granted through the
    parent workflow's ACL (edit permission for writes, view for reads).
    """
    # State fixtures are created per test; no document upload needed.
    auto_upload_test_document = False

    def test_workflow_state_create_view_no_permission(self):
        self._create_test_workflow()
        response = self._request_test_workflow_state_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow.refresh_from_db()
        # No state may have been created.
        self.assertEqual(self.test_workflow.states.count(), 0)

    def test_workflow_state_create_view_with_access(self):
        self._create_test_workflow()
        self.grant_access(
            permission=permission_workflow_edit, obj=self.test_workflow
        )
        response = self._request_test_workflow_state_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.test_workflow.refresh_from_db()
        self.assertEqual(
            self.test_workflow.states.first().label, TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_delete_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        response = self._request_test_workflow_state_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow.refresh_from_db()
        # The state must still exist.
        self.assertEqual(self.test_workflow.states.count(), 1)

    def test_workflow_state_delete_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )
        response = self._request_test_workflow_state_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.test_workflow.refresh_from_db()
        self.assertEqual(self.test_workflow.states.count(), 0)

    def test_workflow_state_detail_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        response = self._request_test_workflow_state_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('label' in response.data)

    def test_workflow_state_detail_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_state_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['label'], TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_list_view_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        response = self._request_test_workflow_state_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('label' in response.data)

    def test_workflow_state_list_view_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        self.grant_access(permission=permission_workflow_view, obj=self.test_workflow)
        response = self._request_test_workflow_state_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'], TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_edit_view_via_patch_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        response = self._request_test_workflow_state_edit_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow_state.refresh_from_db()
        # The label must be unchanged.
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_edit_view_via_patch_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        self.grant_access(permission=permission_workflow_edit, obj=self.test_workflow)
        response = self._request_test_workflow_state_edit_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL_EDITED
        )

    def test_workflow_state_edit_view_via_put_no_permission(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        response = self._request_test_workflow_state_edit_put_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL
        )

    def test_workflow_state_edit_view_via_put_with_access(self):
        self._create_test_workflow()
        self._create_test_workflow_state()
        self.grant_access(permission=permission_workflow_edit, obj=self.test_workflow)
        response = self._request_test_workflow_state_edit_put_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.test_workflow_state.refresh_from_db()
        self.assertEqual(
            self.test_workflow_state.label, TEST_WORKFLOW_STATE_LABEL_EDITED
        )
class WorkflowTransitionAPIViewTestCase(
WorkflowTransitionAPIViewTestMixin, DocumentTestMixin, WorkflowTestMixin,
BaseAPITestCase
):
auto_upload_test_document = False
def test_workflow_transition_create_view_no_permission(self):
self._create_test_workflow()
self._create_test_workflow_states()
response = self._request_test_workflow_transition_create_api_view()
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.test_workflow.refresh_from_db()
self.assertEqual(self.test_workflow.transitions.count(), 0)
def test_workflow_transition_create_view_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_test_workflow_transition_create_api_view()
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.test_workflow.refresh_from_db()
self.assertEqual(
self.test_workflow.transitions.first().label,
TEST_WORKFLOW_TRANSITION_LABEL
)
def test_workflow_transition_delete_view_no_permission(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
response = self._request_test_workflow_transition_delete_api_view()
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.test_workflow.refresh_from_db()
self.assertEqual(self.test_workflow.transitions.count(), 1)
def test_workflow_transition_delete_view_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_test_workflow_transition_delete_api_view()
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.test_workflow.refresh_from_db()
self.assertEqual(self.test_workflow.transitions.count(), 0)
def test_workflow_transition_detail_view_no_permission(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
response = self._request_test_workflow_transition_detail_api_view()
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertFalse('label' in response.data)
def test_workflow_transition_detail_view_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_view
)
response = self._request_test_workflow_transition_detail_api_view()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(
response.data['label'], TEST_WORKFLOW_TRANSITION_LABEL
)
def test_workflow_transition_list_view_no_permission(self):
    """List view without view access must 403 and return no results key."""
    self._create_test_workflow()
    self._create_test_workflow_states()
    self._create_test_workflow_transition()
    response = self._request_test_workflow_transition_list_api_view()
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.assertFalse('results' in response.data)
def test_workflow_transition_list_view_with_access(self):
    """With workflow view access the list view must 200 and include the transition."""
    self._create_test_workflow()
    self._create_test_workflow_states()
    self._create_test_workflow_transition()
    self.grant_access(
        obj=self.test_workflow, permission=permission_workflow_view
    )
    response = self._request_test_workflow_transition_list_api_view()
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(
        response.data['results'][0]['label'],
        TEST_WORKFLOW_TRANSITION_LABEL
    )
def test_workflow_transition_edit_view_via_patch_no_permission(self):
    """PATCH without edit access must 403 and leave label and states unchanged."""
    self._create_test_workflow()
    self._create_test_workflow_states()
    self._create_test_workflow_transition()
    response = self._request_test_workflow_transition_edit_patch_api_view()
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    # Label and both endpoint states must keep their original values.
    self.test_workflow_transition.refresh_from_db()
    self.assertEqual(
        self.test_workflow_transition.label,
        TEST_WORKFLOW_TRANSITION_LABEL
    )
    self.assertEqual(
        self.test_workflow_transition.origin_state,
        self.test_workflow_state_1
    )
    self.assertEqual(
        self.test_workflow_transition.destination_state,
        self.test_workflow_state_2
    )
def test_workflow_transition_edit_view_via_patch_with_access(self):
    """PATCH with edit access must 200, update the label and swap the states."""
    self._create_test_workflow()
    self._create_test_workflow_states()
    self._create_test_workflow_transition()
    self.grant_access(
        obj=self.test_workflow, permission=permission_workflow_edit
    )
    response = self._request_test_workflow_transition_edit_patch_api_view()
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # The edit request swaps origin and destination states.
    self.test_workflow_transition.refresh_from_db()
    self.assertEqual(
        self.test_workflow_transition.label,
        TEST_WORKFLOW_TRANSITION_LABEL_EDITED
    )
    self.assertEqual(
        self.test_workflow_transition.origin_state,
        self.test_workflow_state_2
    )
    self.assertEqual(
        self.test_workflow_transition.destination_state,
        self.test_workflow_state_1
    )
def test_workflow_transition_edit_view_via_put_no_permission(self):
    """PUT without edit access must 403 and leave label and states unchanged."""
    self._create_test_workflow()
    self._create_test_workflow_states()
    self._create_test_workflow_transition()
    response = self._request_test_workflow_transition_edit_put_api_view_via()
    self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    self.test_workflow_transition.refresh_from_db()
    self.assertEqual(
        self.test_workflow_transition.label,
        TEST_WORKFLOW_TRANSITION_LABEL
    )
    self.assertEqual(
        self.test_workflow_transition.origin_state,
        self.test_workflow_state_1
    )
    self.assertEqual(
        self.test_workflow_transition.destination_state,
        self.test_workflow_state_2
    )
def test_workflow_transition_edit_view_via_put_with_access(self):
    """PUT with edit access must 200, update the label and swap the states."""
    self._create_test_workflow()
    self._create_test_workflow_states()
    self._create_test_workflow_transition()
    self.grant_access(
        obj=self.test_workflow, permission=permission_workflow_edit
    )
    response = self._request_test_workflow_transition_edit_put_api_view_via()
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    # The edit request swaps origin and destination states.
    self.test_workflow_transition.refresh_from_db()
    self.assertEqual(
        self.test_workflow_transition.label,
        TEST_WORKFLOW_TRANSITION_LABEL_EDITED
    )
    self.assertEqual(
        self.test_workflow_transition.origin_state,
        self.test_workflow_state_2
    )
    self.assertEqual(
        self.test_workflow_transition.destination_state,
        self.test_workflow_state_1
    )
class WorkflowTransitionFieldAPIViewTestCase(
    WorkflowTransitionFieldAPIViewTestMixin, DocumentTestMixin,
    WorkflowTestMixin, WorkflowTransitionFieldTestMixin, BaseAPITestCase
):
    """API tests for workflow transition field views (create, delete,
    detail, edit, list), each exercised both without permission
    (expecting 403) and with the appropriate access granted.
    """

    # These tests never touch a document, so skip the upload fixture.
    auto_upload_test_document = False

    def test_workflow_transition_field_create_view_no_permission(self):
        """Creating a field without edit access must 403 and add nothing."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        transition_field_count = self.test_workflow_transition.fields.count()
        response = self._request_test_workflow_transition_field_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count
        )

    def test_workflow_transition_field_create_view_with_access(self):
        """With edit access the create view must 201 and add exactly one field."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )
        transition_field_count = self.test_workflow_transition.fields.count()
        response = self._request_test_workflow_transition_field_create_api_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.test_workflow_transition.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count + 1
        )

    def test_workflow_transition_field_delete_view_no_permission(self):
        """Deleting a field without edit access must 403 and keep the field."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        transition_field_count = self.test_workflow_transition.fields.count()
        response = self._request_test_workflow_transition_field_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count
        )

    def test_workflow_transition_field_delete_view_with_access(self):
        """With edit access the delete view must 204 and remove one field."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )
        transition_field_count = self.test_workflow_transition.fields.count()
        response = self._request_test_workflow_transition_field_delete_api_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(
            self.test_workflow_transition.fields.count(),
            transition_field_count - 1
        )

    def test_workflow_transition_field_detail_view_no_permission(self):
        """Detail view without view access must 403 and leak no data."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        response = self._request_test_workflow_transition_field_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('results' in response.data)

    def test_workflow_transition_field_detail_view_with_access(self):
        """With view access the detail view must 200 and return the field label."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_transition_field_detail_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['label'],
            self.test_workflow_transition_field.label
        )

    def test_workflow_transition_field_edit_via_patch_view_no_permission(self):
        """PATCH without edit access must 403 and leave the label unchanged."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        transition_field_label = self.test_workflow_transition_field.label
        response = self._request_test_workflow_transition_field_edit_via_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.test_workflow_transition_field.refresh_from_db()
        self.assertEqual(
            self.test_workflow_transition_field.label, transition_field_label
        )

    def test_workflow_transition_field_edit_via_patch_view_with_access(self):
        """PATCH with edit access must 200 and change the field label."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_edit
        )
        transition_field_label = self.test_workflow_transition_field.label
        response = self._request_test_workflow_transition_field_edit_via_patch_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.test_workflow_transition_field.refresh_from_db()
        self.assertNotEqual(
            self.test_workflow_transition_field.label, transition_field_label
        )

    def test_workflow_transition_field_list_view_no_permission(self):
        """List view without view access must 403 and return no results key."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        response = self._request_test_workflow_transition_field_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertFalse('results' in response.data)

    def test_workflow_transition_field_list_view_with_access(self):
        """With view access the list view must 200 and include the field."""
        self._create_test_workflow()
        self._create_test_workflow_states()
        self._create_test_workflow_transition()
        self._create_test_workflow_transition_field()
        self.grant_access(
            obj=self.test_workflow, permission=permission_workflow_view
        )
        response = self._request_test_workflow_transition_field_list_api_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['results'][0]['label'],
            self.test_workflow_transition_field.label
        )
import os
from os.path import join, dirname
import sys
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.exc import IntegrityError
import environs
import logging
import json
import lib.utils as utils
from config import get_configs_by_filename
from zephir_cluster_lookup import list_to_str
from zephir_cluster_lookup import valid_sql_in_clause_str
from zephir_cluster_lookup import invalid_sql_in_clause_str
def prepare_database(db_connect_str):
    """Create an engine/session pair and automap the cid_minting_store table.

    Returns a dict with keys 'engine', 'session' and 'table' (the mapped
    CidMintingStore class).
    """
    engine = create_engine(db_connect_str)
    session = Session(engine)
    base = automap_base()
    # Reflect the existing schema so automap can generate mapped classes.
    base.prepare(engine, reflect=True)
    return {
        'engine': engine,
        'session': session,
        'table': base.classes.cid_minting_store,
    }
def find_all(CidMintingStore, session):
    """Return every row of the cid_minting_store table."""
    return session.query(CidMintingStore).all()
def find_by_identifier(CidMintingStore, session, data_type, value):
    """Return the first row matching the (type, identifier) pair, or None."""
    matches = session.query(CidMintingStore).filter(
        CidMintingStore.type == data_type,
        CidMintingStore.identifier == value)
    return matches.first()
def find_query(engine, sql, params=None):
    """Execute *sql* with optional *params* and return rows as a list of dicts."""
    with engine.connect() as connection:
        rows = connection.execute(sql, params or ())
        return [dict(row) for row in rows.fetchall()]
def find_cids_by_ocns(engine, ocns_list):
    """Find matched CIDs by OCNs.

    Returns a dict with keys:
      'inquiry_ocns': the inquiry OCN list,
      'matched_cids': list of matched row dicts ({'cid': ...}),
      'min_cid': the lowest matched cid (None when no match),
      'num_of_cids': number of matched rows.
    """
    matched = {
        'inquiry_ocns': ocns_list,
        'matched_cids': [],
        'min_cid': None,
        'num_of_cids': 0
    }
    # Single-quoted, comma separated string suitable for an IN clause.
    ocns = list_to_str(ocns_list)
    if valid_sql_in_clause_str(ocns):
        # NOTE(review): the IN clause is built by string concatenation. It is
        # guarded by valid_sql_in_clause_str() above, but a parameterized
        # query would be safer if that validation ever changes.
        sql = ("SELECT cid FROM cid_minting_store "
               "WHERE type='ocn' AND identifier IN (" + ocns + ")")
        rows = find_query(engine, sql)
        if rows:
            matched['matched_cids'] = rows
            matched['min_cid'] = min(row.get("cid") for row in rows)
            matched['num_of_cids'] = len(rows)
    return matched
def find_cid_by_sysid(CidMintingStore, session, sysid):
    """Return {'inquiry_sys_id', 'matched_cid'} for *sysid*, or {} when absent."""
    record = find_by_identifier(CidMintingStore, session, 'sysid', sysid)
    if not record:
        return {}
    return {
        'inquiry_sys_id': sysid,
        'matched_cid': record.cid,
    }
def insert_a_record(session, record):
    """Add *record* to the session and commit it.

    Returns "Success" on commit, or "IntegrityError" (after rollback) when
    the row violates a database integrity constraint.
    """
    try:
        session.add(record)
        session.flush()
    except IntegrityError:
        session.rollback()
        logging.error("IntegrityError adding record")
        logging.info(
            "type: {}, value: {}, cid: {} ".format(
                record.type, record.identifier, record.cid))
        return "IntegrityError"
    session.commit()
    return "Success"
def usage(script_name):
    """Print command line usage and examples for *script_name*."""
    lines = [
        "Usage: {} env[dev|stg|prd] action[read|write] type[ocn|sysid] data[comma_separated_ocns|sys_id] cid".format(script_name),
        "{} dev read ocn 8727632,32882115".format(script_name),
        "{} dev read sysid uc1234567".format(script_name),
        "{} dev write ocn 30461866 011323406".format(script_name),
        "{} dev write sysid uc1234567 011323407".format(script_name),
    ]
    for line in lines:
        print(line)
def main():
    """Perform read and write actions on the cid_minting_store table, which
    stores identifier/CID pairs.

    Command line arguments:
      argv[1]: Server environment (Required). Can be test, dev, stg, or prd.
      argv[2]: Action. Can only be 'read' or 'write'
      argv[3]: Data type. Can only be 'ocn' and 'sysid'
      argv[4]: Data. OCNs or a local system ID.
        OCNs format: comma separated strings without spaces between any two
        values. For example: 8727632,32882115
        Local system ID: a string.
      argv[5]: A CID. Only required when Action='write'
    """
    # 4 positional arguments for "read", 5 for "write".
    if (len(sys.argv) != 5 and len(sys.argv) != 6):
        print("Parameter error.")
        usage(sys.argv[0])
        exit(1)
    env = sys.argv[1]
    action = sys.argv[2]
    data_type = sys.argv[3]
    data = sys.argv[4]
    cid = None
    if len(sys.argv) == 6:
        cid = sys.argv[5]
    # Validate each argument; any failure prints usage and exits non-zero.
    if env not in ["test", "dev", "stg", "prd"]:
        usage(sys.argv[0])
        exit(1)
    if action not in ["read", "write"]:
        usage(sys.argv[0])
        exit(1)
    if data_type not in ["ocn", "sysid"]:
        usage(sys.argv[0])
        exit(1)
    if action == "write" and cid == None:
        usage(sys.argv[0])
        exit(1)
    cmd_options = "cmd options: {} {} {} {}".format(env, action, data_type, data)
    if cid:
        cmd_options += " " + cid
    configs = get_configs_by_filename('config', 'cid_minting')
    logfile = configs['logpath']
    db_config = str(utils.db_connect_url(configs[env]['minter_db']))
    logging.basicConfig(
        level=logging.DEBUG,
        filename=logfile,
        format="%(asctime)s %(levelname)-4s %(message)s",
    )
    logging.info("Start " + os.path.basename(__file__))
    logging.info(cmd_options)
    # An OVERRIDE_DB_CONNECT_STR environment variable wins over the
    # configured connection URL (useful for tests).
    DB_CONNECT_STR = os.environ.get('OVERRIDE_DB_CONNECT_STR') or db_config
    db = prepare_database(DB_CONNECT_STR)
    engine = db['engine']
    session = db['session']
    CidMintingStore = db['table']
    results = {}
    if action == "read":
        if data_type == "ocn":
            results = find_cids_by_ocns(engine, data.split(","))
        if data_type == "sysid":
            results = find_cid_by_sysid(CidMintingStore, session, data)
        engine.dispose()
        # Results are emitted as JSON on stdout for the caller to consume.
        print(json.dumps(results))
        exit(0)
    if action == "write":
        record = CidMintingStore(type=data_type, identifier=data, cid=cid)
        inserted = insert_a_record(session, record)
        engine.dispose()
        # Non-zero exit signals the caller that the insert failed.
        if inserted != "Success":
            exit(1)
        else:
            exit(0)
# Script entry point.
if __name__ == "__main__":
    main()
from os.path import join, dirname
import sys
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.exc import IntegrityError
import environs
import logging
import json
import lib.utils as utils
from config import get_configs_by_filename
from zephir_cluster_lookup import list_to_str
from zephir_cluster_lookup import valid_sql_in_clause_str
from zephir_cluster_lookup import invalid_sql_in_clause_str
def prepare_database(db_connect_str):
    """Create an engine/session pair and automap the cid_minting_store table.

    Returns a dict with keys 'engine', 'session' and 'table' (the mapped
    CidMintingStore class).
    """
    engine = create_engine(db_connect_str)
    session = Session(engine)
    base = automap_base()
    # Reflect the existing schema so automap can generate mapped classes.
    base.prepare(engine, reflect=True)
    return {
        'engine': engine,
        'session': session,
        'table': base.classes.cid_minting_store,
    }
def find_all(CidMintingStore, session):
    """Return every row of the cid_minting_store table."""
    return session.query(CidMintingStore).all()
def find_by_identifier(CidMintingStore, session, data_type, value):
    """Return the first row matching the (type, identifier) pair, or None."""
    matches = session.query(CidMintingStore).filter(
        CidMintingStore.type == data_type,
        CidMintingStore.identifier == value)
    return matches.first()
def find_query(engine, sql, params=None):
    """Execute *sql* with optional *params* and return rows as a list of dicts."""
    with engine.connect() as connection:
        rows = connection.execute(sql, params or ())
        return [dict(row) for row in rows.fetchall()]
def find_cids_by_ocns(engine, ocns_list):
    """Find matched CIDs by OCNs.

    Returns a dict with keys:
      'inquiry_ocns': the inquiry OCN list,
      'matched_cids': list of matched row dicts ({'cid': ...}),
      'min_cid': the lowest matched cid (None when no match),
      'num_of_cids': number of matched rows.
    """
    matched = {
        'inquiry_ocns': ocns_list,
        'matched_cids': [],
        'min_cid': None,
        'num_of_cids': 0
    }
    # Single-quoted, comma separated string suitable for an IN clause.
    ocns = list_to_str(ocns_list)
    if valid_sql_in_clause_str(ocns):
        # NOTE(review): the IN clause is built by string concatenation. It is
        # guarded by valid_sql_in_clause_str() above, but a parameterized
        # query would be safer if that validation ever changes.
        sql = ("SELECT cid FROM cid_minting_store "
               "WHERE type='ocn' AND identifier IN (" + ocns + ")")
        rows = find_query(engine, sql)
        if rows:
            matched['matched_cids'] = rows
            matched['min_cid'] = min(row.get("cid") for row in rows)
            matched['num_of_cids'] = len(rows)
    return matched
def find_cid_by_sysid(CidMintingStore, session, sysid):
    """Return {'inquiry_sys_id', 'matched_cid'} for *sysid*, or {} when absent."""
    record = find_by_identifier(CidMintingStore, session, 'sysid', sysid)
    if not record:
        return {}
    return {
        'inquiry_sys_id': sysid,
        'matched_cid': record.cid,
    }
def insert_a_record(session, record):
    """Add *record* to the session and commit it.

    Returns "Success" on commit, or "IntegrityError" (after rollback) when
    the row violates a database integrity constraint.
    """
    try:
        session.add(record)
        session.flush()
    except IntegrityError:
        session.rollback()
        logging.error("IntegrityError adding record")
        logging.info(
            "type: {}, value: {}, cid: {} ".format(
                record.type, record.identifier, record.cid))
        return "IntegrityError"
    session.commit()
    return "Success"
def usage(script_name):
    """Print command line usage and examples for *script_name*."""
    lines = [
        "Usage: {} env[dev|stg|prd] action[read|write] type[ocn|sysid] data[comma_separated_ocns|sys_id] cid".format(script_name),
        "{} dev read ocn 8727632,32882115".format(script_name),
        "{} dev read sysid uc1234567".format(script_name),
        "{} dev write ocn 30461866 011323406".format(script_name),
        "{} dev write sysid uc1234567 011323407".format(script_name),
    ]
    for line in lines:
        print(line)
def main():
    """Perform read and write actions on the cid_minting_store table, which
    stores identifier/CID pairs.

    Command line arguments:
      argv[1]: Server environment (Required). Can be test, dev, stg, or prd.
      argv[2]: Action. Can only be 'read' or 'write'
      argv[3]: Data type. Can only be 'ocn' and 'sysid'
      argv[4]: Data. OCNs or a local system ID.
        OCNs format: comma separated strings without spaces between any two
        values. For example: 8727632,32882115
        Local system ID: a string.
      argv[5]: A CID. Only required when Action='write'
    """
    # 4 positional arguments for "read", 5 for "write".
    if (len(sys.argv) != 5 and len(sys.argv) != 6):
        print("Parameter error.")
        usage(sys.argv[0])
        exit(1)
    env = sys.argv[1]
    action = sys.argv[2]
    data_type = sys.argv[3]
    data = sys.argv[4]
    cid = None
    if len(sys.argv) == 6:
        cid = sys.argv[5]
    # Validate each argument; any failure prints usage and exits non-zero.
    if env not in ["test", "dev", "stg", "prd"]:
        usage(sys.argv[0])
        exit(1)
    if action not in ["read", "write"]:
        usage(sys.argv[0])
        exit(1)
    if data_type not in ["ocn", "sysid"]:
        usage(sys.argv[0])
        exit(1)
    if action == "write" and cid == None:
        usage(sys.argv[0])
        exit(1)
    cmd_options = "cmd options: {} {} {} {}".format(env, action, data_type, data)
    if cid:
        cmd_options += " " + cid
    configs = get_configs_by_filename('config', 'cid_minting')
    logfile = configs['logpath']
    db_config = str(utils.db_connect_url(configs[env]['minter_db']))
    logging.basicConfig(
        level=logging.DEBUG,
        filename=logfile,
        format="%(asctime)s %(levelname)-4s %(message)s",
    )
    logging.info("Start " + os.path.basename(__file__))
    logging.info(cmd_options)
    # An OVERRIDE_DB_CONNECT_STR environment variable wins over the
    # configured connection URL (useful for tests).
    DB_CONNECT_STR = os.environ.get('OVERRIDE_DB_CONNECT_STR') or db_config
    db = prepare_database(DB_CONNECT_STR)
    engine = db['engine']
    session = db['session']
    CidMintingStore = db['table']
    results = {}
    if action == "read":
        if data_type == "ocn":
            results = find_cids_by_ocns(engine, data.split(","))
        if data_type == "sysid":
            results = find_cid_by_sysid(CidMintingStore, session, data)
        engine.dispose()
        # Results are emitted as JSON on stdout for the caller to consume.
        print(json.dumps(results))
        exit(0)
    if action == "write":
        record = CidMintingStore(type=data_type, identifier=data, cid=cid)
        inserted = insert_a_record(session, record)
        engine.dispose()
        # Non-zero exit signals the caller that the insert failed.
        if inserted != "Success":
            exit(1)
        else:
            exit(0)
# Script entry point.
if __name__ == "__main__":
    main()
from openpyxl import load_workbook
import re

# Normalise the "Date Digitized" column (column 31) of the metadata sheet to
# ISO-8601 (YYYY-MM-DD), then flag any row whose value still has the wrong
# length.
FILENAME = 'aalh_iit_jeep_001.xlsx'
SHEET_NAME = 'Metadata Template'
FIRST_ROW = 7
LAST_ROW = 394
TARGET_COL = 31


def to_iso_date(value):
    """Return *value* ('YYYY/MM/DD' or 'M/D/YYYY' style) as 'YYYY-MM-DD'."""
    # Already year-first: just swap the separators.
    match = re.search(r'\d\d\d\d/\d\d/\d\d', value)
    if match:
        return match[0].replace('/', '-')
    # Otherwise assume month/day/year order and zero-pad month and day.
    parts = value.split('/')
    year = parts[2].strip()
    month = int(parts[0].strip())
    day = int(parts[1].strip())
    return '{}-{:02d}-{:02d}'.format(year, month, day)


def main():
    """Rewrite slash-separated dates in place, report each conversion, and
    warn about any dashed value that is not exactly 10 characters long."""
    wb = load_workbook(FILENAME)
    ws = wb[SHEET_NAME]
    for rownum in range(FIRST_ROW, LAST_ROW + 1):
        cell = ws.cell(row=rownum, column=TARGET_COL)
        original = cell.value
        # Convert only populated cells that still use slash separators.
        if original is not None and '/' in original:
            cell.value = to_iso_date(original)
            print(rownum, '|', original, '|', cell.value)
        # Sanity check: a correct ISO date is exactly 10 characters long.
        converted = cell.value
        if converted is not None and '-' in converted and len(converted) != 10:
            print('***CHECK THIS LINE FOR INCORRECT FORMATTING***')
    wb.save(FILENAME)


if __name__ == '__main__':
    main()
import re

# Normalise the "Date Digitized" column (column 31) of the metadata sheet to
# ISO-8601 (YYYY-MM-DD), then flag any row whose value still has the wrong
# length.
FILENAME = 'aalh_iit_jeep_001.xlsx'
SHEET_NAME = 'Metadata Template'
FIRST_ROW = 7
LAST_ROW = 394
TARGET_COL = 31


def to_iso_date(value):
    """Return *value* ('YYYY/MM/DD' or 'M/D/YYYY' style) as 'YYYY-MM-DD'."""
    # Already year-first: just swap the separators.
    match = re.search(r'\d\d\d\d/\d\d/\d\d', value)
    if match:
        return match[0].replace('/', '-')
    # Otherwise assume month/day/year order and zero-pad month and day.
    parts = value.split('/')
    year = parts[2].strip()
    month = int(parts[0].strip())
    day = int(parts[1].strip())
    return '{}-{:02d}-{:02d}'.format(year, month, day)


def main():
    """Rewrite slash-separated dates in place, report each conversion, and
    warn about any dashed value that is not exactly 10 characters long."""
    wb = load_workbook(FILENAME)
    ws = wb[SHEET_NAME]
    for rownum in range(FIRST_ROW, LAST_ROW + 1):
        cell = ws.cell(row=rownum, column=TARGET_COL)
        original = cell.value
        # Convert only populated cells that still use slash separators.
        if original is not None and '/' in original:
            cell.value = to_iso_date(original)
            print(rownum, '|', original, '|', cell.value)
        # Sanity check: a correct ISO date is exactly 10 characters long.
        converted = cell.value
        if converted is not None and '-' in converted and len(converted) != 10:
            print('***CHECK THIS LINE FOR INCORRECT FORMATTING***')
    wb.save(FILENAME)


if __name__ == '__main__':
    main()
import os
import sys
import mock
import pathlib
import pytest
import six
@pytest.mark.parametrize("nproc", [1])
def test_ccp4_scalerA(regression_test, ccp4, dials_data, run_in_tmpdir, nproc):
    """End-to-end exercise of CCP4ScalerA on the insulin test data,
    including JSON round-tripping of the scaler state and re-running the
    scale and prepare steps from the deserialized scaler."""
    if nproc is not None:
        from xia2.Handlers.Phil import PhilIndex

        PhilIndex.params.xia2.settings.multiprocessing.nproc = nproc
    template = dials_data("insulin").join("insulin_1_###.img").strpath
    tmpdir = run_in_tmpdir.strpath
    from xia2.Modules.Indexer.DialsIndexer import DialsIndexer
    from xia2.Modules.Refiner.DialsRefiner import DialsRefiner
    from xia2.Modules.Integrater.DialsIntegrater import DialsIntegrater
    from xia2.Modules.Scaler.CCP4ScalerA import CCP4ScalerA

    # Index the sweep from the image template.
    indexer = DialsIndexer()
    indexer.set_working_directory(tmpdir)
    from dxtbx.model.experiment_list import ExperimentListTemplateImporter

    importer = ExperimentListTemplateImporter([template])
    experiments = importer.experiments
    imageset = experiments.imagesets()[0]
    indexer.add_indexer_imageset(imageset)

    # Build the minimal crystal/wavelength/sample/sweep hierarchy.
    from xia2.Schema.XCrystal import XCrystal
    from xia2.Schema.XWavelength import XWavelength
    from xia2.Schema.XSweep import XSweep
    from xia2.Schema.XSample import XSample

    cryst = XCrystal("CRYST1", None)
    wav = XWavelength("WAVE1", cryst, imageset.get_beam().get_wavelength())
    samp = XSample("X1", cryst)
    directory, image = os.path.split(imageset.get_path(1))
    # NOTE(review): XSweep construction is wrapped so it sees an empty
    # sys.argv rather than pytest's arguments — presumably it inspects
    # argv at construction time; confirm against XSweep.
    with mock.patch.object(sys, "argv", []):
        sweep = XSweep("SWEEP1", wav, samp, directory=directory, image=image)
    indexer.set_indexer_sweep(sweep)

    # Chain indexer -> refiner -> integrater.
    refiner = DialsRefiner()
    refiner.set_working_directory(tmpdir)
    refiner.add_refiner_indexer(sweep.get_epoch(1), indexer)

    integrater = DialsIntegrater()
    integrater.set_output_format("hkl")
    integrater.set_working_directory(tmpdir)
    integrater.setup_from_image(imageset.get_path(1))
    integrater.set_integrater_refiner(refiner)
    # integrater.set_integrater_indexer(indexer)
    integrater.set_integrater_sweep(sweep)
    integrater.set_integrater_sweep_name("SWEEP1")
    integrater.set_integrater_project_info("CRYST1", "WAVE1", "SWEEP1")

    # Scale and check the merged output files exist.
    scaler = CCP4ScalerA(base_path=pathlib.Path(tmpdir))
    scaler.add_scaler_integrater(integrater)
    scaler.set_scaler_xcrystal(cryst)
    scaler.set_scaler_project_info("CRYST1", "WAVE1")

    check_scaler_files_exist(scaler)

    # test serialization of scaler
    json_str = scaler.as_json()
    # print json_str
    scaler2 = CCP4ScalerA.from_json(string=json_str)
    scaler2.set_scaler_xcrystal(cryst)

    check_scaler_files_exist(scaler2)

    # Force the scale step to re-run from the deserialized state.
    scaler2.set_scaler_done(False)

    check_scaler_files_exist(scaler2)

    # Force the prepare step to re-run; integraters must be re-registered
    # first since they are not serialized.
    scaler2._scalr_integraters = {}  # XXX
    scaler2.add_scaler_integrater(integrater)
    scaler2.set_scaler_prepare_done(False)

    check_scaler_files_exist(scaler2)
def check_scaler_files_exist(scaler):
    """Assert every merged reflection file reported by *scaler* exists on disk."""
    merged = scaler.get_scaled_merged_reflections()
    for filetype in ("mtz", "sca", "sca_unmerged"):
        assert filetype in merged
        entry = merged[filetype]
        # A single path is stored as a plain string, multiple paths as a dict.
        paths = [entry] if isinstance(entry, six.string_types) else entry.values()
        for path in paths:
            assert os.path.isfile(path)
import sys
import mock
import pathlib
import pytest
import six
@pytest.mark.parametrize("nproc", [1])
def test_ccp4_scalerA(regression_test, ccp4, dials_data, run_in_tmpdir, nproc):
    """End-to-end exercise of CCP4ScalerA on the insulin test data,
    including JSON round-tripping of the scaler state and re-running the
    scale and prepare steps from the deserialized scaler."""
    if nproc is not None:
        from xia2.Handlers.Phil import PhilIndex

        PhilIndex.params.xia2.settings.multiprocessing.nproc = nproc
    template = dials_data("insulin").join("insulin_1_###.img").strpath
    tmpdir = run_in_tmpdir.strpath
    from xia2.Modules.Indexer.DialsIndexer import DialsIndexer
    from xia2.Modules.Refiner.DialsRefiner import DialsRefiner
    from xia2.Modules.Integrater.DialsIntegrater import DialsIntegrater
    from xia2.Modules.Scaler.CCP4ScalerA import CCP4ScalerA

    # Index the sweep from the image template.
    indexer = DialsIndexer()
    indexer.set_working_directory(tmpdir)
    from dxtbx.model.experiment_list import ExperimentListTemplateImporter

    importer = ExperimentListTemplateImporter([template])
    experiments = importer.experiments
    imageset = experiments.imagesets()[0]
    indexer.add_indexer_imageset(imageset)

    # Build the minimal crystal/wavelength/sample/sweep hierarchy.
    from xia2.Schema.XCrystal import XCrystal
    from xia2.Schema.XWavelength import XWavelength
    from xia2.Schema.XSweep import XSweep
    from xia2.Schema.XSample import XSample

    cryst = XCrystal("CRYST1", None)
    wav = XWavelength("WAVE1", cryst, imageset.get_beam().get_wavelength())
    samp = XSample("X1", cryst)
    directory, image = os.path.split(imageset.get_path(1))
    # NOTE(review): XSweep construction is wrapped so it sees an empty
    # sys.argv rather than pytest's arguments — presumably it inspects
    # argv at construction time; confirm against XSweep.
    with mock.patch.object(sys, "argv", []):
        sweep = XSweep("SWEEP1", wav, samp, directory=directory, image=image)
    indexer.set_indexer_sweep(sweep)

    # Chain indexer -> refiner -> integrater.
    refiner = DialsRefiner()
    refiner.set_working_directory(tmpdir)
    refiner.add_refiner_indexer(sweep.get_epoch(1), indexer)

    integrater = DialsIntegrater()
    integrater.set_output_format("hkl")
    integrater.set_working_directory(tmpdir)
    integrater.setup_from_image(imageset.get_path(1))
    integrater.set_integrater_refiner(refiner)
    # integrater.set_integrater_indexer(indexer)
    integrater.set_integrater_sweep(sweep)
    integrater.set_integrater_sweep_name("SWEEP1")
    integrater.set_integrater_project_info("CRYST1", "WAVE1", "SWEEP1")

    # Scale and check the merged output files exist.
    scaler = CCP4ScalerA(base_path=pathlib.Path(tmpdir))
    scaler.add_scaler_integrater(integrater)
    scaler.set_scaler_xcrystal(cryst)
    scaler.set_scaler_project_info("CRYST1", "WAVE1")

    check_scaler_files_exist(scaler)

    # test serialization of scaler
    json_str = scaler.as_json()
    # print json_str
    scaler2 = CCP4ScalerA.from_json(string=json_str)
    scaler2.set_scaler_xcrystal(cryst)

    check_scaler_files_exist(scaler2)

    # Force the scale step to re-run from the deserialized state.
    scaler2.set_scaler_done(False)

    check_scaler_files_exist(scaler2)

    # Force the prepare step to re-run; integraters must be re-registered
    # first since they are not serialized.
    scaler2._scalr_integraters = {}  # XXX
    scaler2.add_scaler_integrater(integrater)
    scaler2.set_scaler_prepare_done(False)

    check_scaler_files_exist(scaler2)
def check_scaler_files_exist(scaler):
    """Assert every merged reflection file reported by *scaler* exists on disk."""
    merged = scaler.get_scaled_merged_reflections()
    for filetype in ("mtz", "sca", "sca_unmerged"):
        assert filetype in merged
        entry = merged[filetype]
        # A single path is stored as a plain string, multiple paths as a dict.
        paths = [entry] if isinstance(entry, six.string_types) else entry.values()
        for path in paths:
            assert os.path.isfile(path)
import os
from instaloader import Instaloader
class Config:
    """Bot configuration sourced from environment variables.

    All attributes are read once at import time. Message templates (HELP,
    HOME_TEXT, HOME_TEXT_OWNER) are sent verbatim to Telegram.
    """

    # Telegram API credentials. Default "0" instead of "" so a missing
    # API_ID no longer crashes with ValueError from int("").
    API_ID = int(os.environ.get("API_ID", "0"))
    API_HASH = os.environ.get("API_HASH", "")
    BOT_TOKEN = os.environ.get("BOT_TOKEN", "")
    # Instagram account username used for login.
    USER = os.environ.get("INSTAGRAM_USERNAME", "")
    # Telegram user id of the bot owner.
    OWNER = os.environ.get("OWNER_ID", "")
    # Optional Telegram file id of a saved Instaloader session file.
    INSTA_SESSIONFILE_ID = os.environ.get("INSTA_SESSIONFILE_ID", None)
    S = "0"
    # Set of chat ids parsed from S; "0" yields {0}.
    STATUS = set(int(x) for x in S.split())
    # Shared Instaloader instance used for all downloads.
    L = Instaloader()
    HELP = """
You can Download almost anything From your Instagram Account.
<b>What Can Be Downloaded?:</b>
1. All posts of any Profile. (Both Public and Private,for private profiles you need to be a follower.)
2. All Posts from your feed.
3. Stories of any profile (Both Public and Private,for private profiles you need to be a follower.)
4. DP of any profile (No need to follow)
5. Followers and Followees List of any Profile.
6. List of followees who follows back the given username.
7. List of followees who are not following back the given username.
8. Stories of your Followees.
9. Tagged posts of any profile.
10. Your saved Posts.
11. IGTV videos.
12. Highlights from any profiles.
13. Any Public Post from Link(Post/Reels/IGTV)
<b>How to Download:</b>
Its Easy!!
You Need to login into your account by /login.
You have two Options:
1. From Username:
Just send any instagram username.
For Example:
<code>samantharuthprabhuoffl</code>
<code>subin_p_s_</code>
<code>_chill_manh_7</code>
2. From URL:
You can also sent a post link to download the post or video.
For Example:
<code>https://www.instagram.com/p/CL4QbUiLRNW/?utm_medium=copy_link</code>
<b>Available Commands and Usage</b>
/start - Check wheather bot alive.
/restart - Restart the bot (If you messed up anything use /restart.)
/help - Shows this menu.
/login - Login into your account.
/logout - Logout of your account.
/account - Shows the details of logged in account.
/posts <username> - Download posts of any username. Use /posts to download own posts or <code> /posts <username> </code>for others.
Example : <code>/posts samantharuthprabhuoffl</code>
/igtv <username> - Download IGTV videos from given username. If no username given, downloads your IGTV.
/feed <number of posts to download> - Downloads posts from your feed.If no number specified all posts from feed will be downloaded.
Example: <code>/feed 10</code> to download latest 10 posts from feed.
/saved <number of posts to download> - Downloads your saved posts. If no number specified all saved posts will be downloaded.
Example: <code>/saved 10</code> to download latest 10 saved posts.
/followers <username> - Get a list of all followers of given username. If no username given, then your list will be retrieved.
Example: <code>/followers samantharuthprabhuoffl</code>
/followees <username> - Get a list of all followees of given username. If no username given, then your list will be retrieved.
/fans <username> - Get a list of of followees who follow back the given username. If no username given, your list will be retrieved.
/notfollowing <username> - Get a list of followees who is not following back the given username.
/tagged <username> - Downloads all posts in which given username is tagged. If nothing given your tagged posts will be downloaded.
/story <username> - Downloads all stories from given username. If nothing given your stories will be downloaded.
/stories - Downloads all the stories of all your followees.
/highlights <username> - Downloads highlights from given username, If nothing given your highlights will be downloaded.
"""
    HOME_TEXT = """
<b>Helo, [{}](tg://user?id={})
This is a bot of [{}](www.instagram.com/{}) to manage his Instagram account.
I can only work for my master [{}](tg://user?id={}).
But you can Deploy the same bot for your use from the below source code.
Use /help to know What I can Do?</b>
"""
    HOME_TEXT_OWNER = """
<b>Helo, [{}](tg://user?id={})
I am your assistant to manage your Instagram account.
Use /help to know what I can do for you.</b>
"""
from instaloader import Instaloader
class Config:
    """Runtime configuration, read once at import time from environment variables.

    Holds Telegram/Instagram credentials, a shared ``Instaloader`` instance and
    the large HTML help/home message templates used by the bot.
    """

    # Telegram API credentials (from https://my.telegram.org).
    # Default "0" so a missing API_ID yields 0 instead of int("") raising
    # ValueError at import time.
    API_ID = int(os.environ.get("API_ID", "0"))
    API_HASH = os.environ.get("API_HASH", "")
    BOT_TOKEN = os.environ.get("BOT_TOKEN", "")
    # Instagram account the bot manages.
    USER = os.environ.get("INSTAGRAM_USERNAME", "")
    # Telegram user id of the bot owner; kept as a string as read from env.
    OWNER = os.environ.get("OWNER_ID", "")
    # Optional Telegram file id of a saved instaloader session file.
    INSTA_SESSIONFILE_ID = os.environ.get("INSTA_SESSIONFILE_ID", None)
    S = "0"
    # NOTE(review): numeric ids parsed from the space-separated S string;
    # presumably tracks chat/user state elsewhere -- confirm against handlers.
    STATUS = set(int(x) for x in S.split())
    # Shared Instaloader instance used for all downloads.
    L = Instaloader()
    HELP = """
You can Download almost anything From your Instagram Account.
<b>What Can Be Downloaded?:</b>
1. All posts of any Profile. (Both Public and Private,for private profiles you need to be a follower.)
2. All Posts from your feed.
3. Stories of any profile (Both Public and Private,for private profiles you need to be a follower.)
4. DP of any profile (No need to follow)
5. Followers and Followees List of any Profile.
6. List of followees who follows back the given username.
7. List of followees who are not following back the given username.
8. Stories of your Followees.
9. Tagged posts of any profile.
10. Your saved Posts.
11. IGTV videos.
12. Highlights from any profiles.
13. Any Public Post from Link(Post/Reels/IGTV)
<b>How to Download:</b>
Its Easy!!
You Need to login into your account by /login.
You have two Options:
1. From Username:
Just send any instagram username.
For Example:
<code>samantharuthprabhuoffl</code>
<code>subin_p_s_</code>
<code>_chill_manh_7</code>
2. From URL:
You can also sent a post link to download the post or video.
For Example:
<code>https://www.instagram.com/p/CL4QbUiLRNW/?utm_medium=copy_link</code>
<b>Available Commands and Usage</b>
/start - Check wheather bot alive.
/restart - Restart the bot (If you messed up anything use /restart.)
/help - Shows this menu.
/login - Login into your account.
/logout - Logout of your account.
/account - Shows the details of logged in account.
/posts <username> - Download posts of any username. Use /posts to download own posts or <code> /posts <username> </code>for others.
Example : <code>/posts samantharuthprabhuoffl</code>
/igtv <username> - Download IGTV videos from given username. If no username given, downloads your IGTV.
/feed <number of posts to download> - Downloads posts from your feed.If no number specified all posts from feed will be downloaded.
Example: <code>/feed 10</code> to download latest 10 posts from feed.
/saved <number of posts to download> - Downloads your saved posts. If no number specified all saved posts will be downloaded.
Example: <code>/saved 10</code> to download latest 10 saved posts.
/followers <username> - Get a list of all followers of given username. If no username given, then your list will be retrieved.
Example: <code>/followers samantharuthprabhuoffl</code>
/followees <username> - Get a list of all followees of given username. If no username given, then your list will be retrieved.
/fans <username> - Get a list of of followees who follow back the given username. If no username given, your list will be retrieved.
/notfollowing <username> - Get a list of followees who is not following back the given username.
/tagged <username> - Downloads all posts in which given username is tagged. If nothing given your tagged posts will be downloaded.
/story <username> - Downloads all stories from given username. If nothing given your stories will be downloaded.
/stories - Downloads all the stories of all your followees.
/highlights <username> - Downloads highlights from given username, If nothing given your highlights will be downloaded.
"""
    # NOTE(review): these templates mix Markdown links with HTML <b> tags;
    # verify which parse_mode the sender uses.
    HOME_TEXT = """
<b>Helo, [{}](tg://user?id={})
This is a bot of [{}](www.instagram.com/{}) to manage his Instagram account.
I can only work for my master [{}](tg://user?id={}).
But you can Deploy the same bot for your use from the below source code.
Use /help to know What I can Do?</b>
"""
    HOME_TEXT_OWNER = """
<b>Helo, [{}](tg://user?id={})
I am your assistant to manage your Instagram account.
Use /help to know what I can do for you.</b>
"""
import pytest
import stk
import os
from os.path import join
import logging
import sys
from collections import Counter, defaultdict
from .fixtures import *
# Route stk debug logging to stdout so pytest captures it per test.
logging.basicConfig(
    format='\n\n%(levelname)s:%(module)s:%(message)s',
    stream=sys.stdout
)
logging.getLogger('stk').setLevel(logging.DEBUG)
# Run tests in a directory so that generated files are easy to
# delete.
# NOTE(review): this chdir happens at conftest import time, so every
# relative path in the test session (e.g. the '..'-based fixtures below)
# is resolved from tests_output.
output_dir = 'tests_output'
if not os.path.exists(output_dir):
    os.mkdir(output_dir)
os.chdir(output_dir)
def pytest_addoption(parser):
    """Register CLI options pointing at optional external binaries."""
    for option_name in ('--macromodel_path', '--mopac_path', '--xtb_path'):
        parser.addoption(option_name, default='')
def pytest_generate_tests(metafunc):
    """Parametrize each path fixture with its matching CLI option value."""
    for fixture_name in ('macromodel_path', 'mopac_path', 'xtb_path'):
        if fixture_name in metafunc.fixturenames:
            option_value = metafunc.config.getoption(fixture_name)
            metafunc.parametrize(fixture_name, [option_value])
@pytest.fixture(scope='session')
def make_reactor():
    """Factory fixture producing an ``stk`` Reactor for a half-built molecule.

    The returned callable takes ``(building_blocks, topology_graph)`` and
    performs construction up to (but not including) the reaction step, so
    the Reactor itself can be exercised in isolation.
    """
    def inner(building_blocks, topology_graph):
        # Bypass __init__ deliberately: full construction would also run
        # the reactions this fixture wants to leave to the test.
        mol = stk.ConstructedMolecule.__new__(stk.ConstructedMolecule)
        mol.topology_graph = topology_graph
        mol.atoms = []
        mol.bonds = []
        mol.construction_bonds = []
        mol.func_groups = []
        mol.building_block_counter = Counter()
        mol._position_matrix = []
        # NOTE(review): immediately overwritten below; presumably a safe
        # default mirroring ConstructedMolecule.__init__ -- confirm.
        mol.building_block_vertices = defaultdict(list)
        mol.building_block_vertices = (
            topology_graph.assign_building_blocks_to_vertices(
                building_blocks=building_blocks
            )
        )
        # Clone vertices/edges (scale factor 1) and place the building
        # blocks on them, as TopologyGraph.construct() would.
        vertex_clones = tuple(
            mol.topology_graph._get_vertex_clones(mol, 1)
        )
        edge_clones = tuple(
            mol.topology_graph._get_edge_clones(vertex_clones, 1)
        )
        mol._edge_clones = edge_clones
        mol.topology_graph._prepare(mol)
        mol.topology_graph._place_building_blocks(
            mol=mol,
            vertices=vertex_clones,
            edges=edge_clones
        )
        return stk.molecular.reactor.Reactor(mol)
    return inner
@pytest.fixture(scope='session')
def mae_path():
    """Path to the sample MacroModel structure used by the tests."""
    # Relative to the tests_output working directory set at import time.
    return os.path.join('..', 'data', 'molecule.mae')
@pytest.fixture(scope='session')
def bb_dir():
    """Directory holding building-block initialization data files."""
    return os.path.join('..', 'data', 'building_block_init')
import stk
import os
from os.path import join
import logging
import sys
from collections import Counter, defaultdict
from .fixtures import *
logging.basicConfig(
format='\n\n%(levelname)s:%(module)s:%(message)s',
stream=sys.stdout
)
logging.getLogger('stk').setLevel(logging.DEBUG)
# Run tests in a directory so that that generated files are easy to
# delete.
output_dir = 'tests_output'
if not os.path.exists(output_dir):
os.mkdir(output_dir)
os.chdir(output_dir)
def pytest_addoption(parser):
parser.addoption('--macromodel_path', default='')
parser.addoption('--mopac_path', default='')
parser.addoption('--xtb_path', default='')
def pytest_generate_tests(metafunc):
if 'macromodel_path' in metafunc.fixturenames:
mm_path = metafunc.config.getoption('macromodel_path')
metafunc.parametrize('macromodel_path', [mm_path])
if 'mopac_path' in metafunc.fixturenames:
mopac_path = metafunc.config.getoption('mopac_path')
metafunc.parametrize('mopac_path', [mopac_path])
if 'xtb_path' in metafunc.fixturenames:
xtb_path = metafunc.config.getoption('xtb_path')
metafunc.parametrize('xtb_path', [xtb_path])
@pytest.fixture(scope='session')
def make_reactor():
def inner(building_blocks, topology_graph):
mol = stk.ConstructedMolecule.__new__(stk.ConstructedMolecule)
mol.topology_graph = topology_graph
mol.atoms = []
mol.bonds = []
mol.construction_bonds = []
mol.func_groups = []
mol.building_block_counter = Counter()
mol._position_matrix = []
mol.building_block_vertices = defaultdict(list)
mol.building_block_vertices = (
topology_graph.assign_building_blocks_to_vertices(
building_blocks=building_blocks
)
)
vertex_clones = tuple(
mol.topology_graph._get_vertex_clones(mol, 1)
)
edge_clones = tuple(
mol.topology_graph._get_edge_clones(vertex_clones, 1)
)
mol._edge_clones = edge_clones
mol.topology_graph._prepare(mol)
mol.topology_graph._place_building_blocks(
mol=mol,
vertices=vertex_clones,
edges=edge_clones
)
return stk.molecular.reactor.Reactor(mol)
return inner
@pytest.fixture(scope='session')
def mae_path():
return join('..', 'data', 'molecule.mae')
@pytest.fixture(scope='session')
def bb_dir():
return join('..', 'data', 'building_block_init') | 0.314998 | 0.127029 |
import unittest
from datafinder.core.configuration.icons import icon
from datafinder.core.error import ConfigurationError
from datafinder.persistence.error import PersistenceError
from datafinder_test.mocks import SimpleMock
__version__ = "$Revision-Id:$"
class IconTestCase(unittest.TestCase):
    """ Tests the parsing of a specific directory for suitable icon files. """
    # assertEquals/assertNotEquals are deprecated aliases that were removed
    # in Python 3.12; this class uses assertEqual/assertNotEqual instead.

    def setUp(self):
        """ Creates test setup. """
        self._directoryFileStorer = SimpleMock(identifier="/test")

    def testParsingSuccess(self):
        """ Tests the successful parsing of a directory for icon files. """
        # An icon counts only when both the 16px and 24px PNG variants of
        # the same base name exist; each case removes one variant.
        self._directoryFileStorer.value = [SimpleMock(name="a16.png"), SimpleMock(name="a24.png"),
                                           SimpleMock(name="b16.png"), SimpleMock(name="b24.png")]
        self.assertEqual(len(icon.parseIconDirectory(self._directoryFileStorer)), 2)
        self._directoryFileStorer.value = [SimpleMock(name="a6.png"), SimpleMock(name="a24.png"),
                                           SimpleMock(name="b16.png"), SimpleMock(name="b24.png")]
        self.assertEqual(len(icon.parseIconDirectory(self._directoryFileStorer)), 1)
        self._directoryFileStorer.value = [SimpleMock(name="a6.png"), SimpleMock(name="a24.png"),
                                           SimpleMock(name="b16.png"), SimpleMock(name="b24.ng")]
        self.assertEqual(len(icon.parseIconDirectory(self._directoryFileStorer)), 1)
        self._directoryFileStorer.value = [SimpleMock(name="a6.png"), SimpleMock(name="a24.png"),
                                           SimpleMock(name="b6.png"), SimpleMock(name="b24.ng")]
        self.assertEqual(len(icon.parseIconDirectory(self._directoryFileStorer)), 0)

    def testErrorHandling(self):
        """ Tests the error handling when parsing a directory for icon files. """
        # Files with empty names are ignored rather than raising.
        self._directoryFileStorer.value = [SimpleMock(name=""), SimpleMock(name="a24.png")]
        self.assertEqual(len(icon.parseIconDirectory(self._directoryFileStorer)), 0)
        # A PersistenceError from the storage layer must surface as a
        # ConfigurationError.
        self._directoryFileStorer.error = PersistenceError("")
        self.assertRaises(ConfigurationError, icon.parseIconDirectory, self._directoryFileStorer)

    def testIconComparison(self):
        """ Tests the comparison of icons. """
        anIcon = icon.Icon("a", "b", "c", "d")
        self.assertEqual(anIcon, anIcon)
        anotherIcon = icon.Icon("a", "b", "c", "d")
        self.assertEqual(anIcon, anotherIcon)
        # Equality presumably keys on baseName -- changing it breaks equality.
        anotherIcon.baseName = "d"
        self.assertNotEqual(anIcon, anotherIcon)
        self.assertNotEqual(anIcon, None)
from datafinder.core.configuration.icons import icon
from datafinder.core.error import ConfigurationError
from datafinder.persistence.error import PersistenceError
from datafinder_test.mocks import SimpleMock
__version__ = "$Revision-Id:$"
class IconTestCase(unittest.TestCase):
""" Tests the parsing of a specific directory for suitable icon files. """
def setUp(self):
""" Creates test setup. """
self._directoryFileStorer = SimpleMock(identifier="/test")
def testParsingSuccess(self):
""" Tests the successful parsing of a directory for icon files. """
self._directoryFileStorer.value = [SimpleMock(name="a16.png"), SimpleMock(name="a24.png"),
SimpleMock(name="b16.png"), SimpleMock(name="b24.png")]
self.assertEquals(len(icon.parseIconDirectory(self._directoryFileStorer)), 2)
self._directoryFileStorer.value = [SimpleMock(name="a6.png"), SimpleMock(name="a24.png"),
SimpleMock(name="b16.png"), SimpleMock(name="b24.png")]
self.assertEquals(len(icon.parseIconDirectory(self._directoryFileStorer)), 1)
self._directoryFileStorer.value = [SimpleMock(name="a6.png"), SimpleMock(name="a24.png"),
SimpleMock(name="b16.png"), SimpleMock(name="b24.ng")]
self.assertEquals(len(icon.parseIconDirectory(self._directoryFileStorer)), 1)
self._directoryFileStorer.value = [SimpleMock(name="a6.png"), SimpleMock(name="a24.png"),
SimpleMock(name="b6.png"), SimpleMock(name="b24.ng")]
self.assertEquals(len(icon.parseIconDirectory(self._directoryFileStorer)), 0)
def testErrorHandling(self):
""" Tests the error handling when parsing a directory for icon files. """
self._directoryFileStorer.value = [SimpleMock(name=""), SimpleMock(name="a24.png")]
self.assertEquals(len(icon.parseIconDirectory(self._directoryFileStorer)), 0)
self._directoryFileStorer.error = PersistenceError("")
self.assertRaises(ConfigurationError, icon.parseIconDirectory, self._directoryFileStorer)
def testIconComparison(self):
""" Tests the comparison of icons. """
anIcon = icon.Icon("a", "b", "c", "d")
self.assertEquals(anIcon, anIcon)
anotherIcon = icon.Icon("a", "b", "c", "d")
self.assertEquals(anIcon, anotherIcon)
anotherIcon.baseName = "d"
self.assertNotEquals(anIcon, anotherIcon)
self.assertNotEquals(anIcon, None) | 0.586286 | 0.314037 |
"""Tests for Kubeflow V2 step builder."""
from typing import Any, Dict
from kfp.pipeline_spec import pipeline_spec_pb2 as pipeline_pb2
import tensorflow as tf
from tfx import components
from tfx.dsl.components.common import importer
from tfx.dsl.components.common import resolver
from tfx.dsl.experimental import latest_artifacts_resolver
from tfx.dsl.experimental import latest_blessed_model_resolver
from tfx.extensions.google_cloud_big_query.example_gen import component as big_query_example_gen_component
from tfx.orchestration import data_types
from tfx.orchestration.kubeflow.v2 import step_builder
from tfx.orchestration.kubeflow.v2 import test_utils
from tfx.proto import example_gen_pb2
from tfx.types import channel
from tfx.types import channel_utils
from tfx.types import standard_artifacts
_TEST_CMDS = ('python', '-m', 'my_entrypoint.app_module')
class StepBuilderTest(tf.test.TestCase):
  """Checks StepBuilder output protos against golden pbtxt files."""

  def _sole(self, d: Dict[Any, Any]) -> Any:
    """Asserts the dictionary has length 1 and returns the only value."""
    self.assertLen(d, 1)
    return list(d.values())[0]

  def _assert_expected_protos(self, prefix, component_def, step_spec,
                              deployment_config):
    """Compares the three build outputs against golden files.

    Golden files must be named 'expected_<prefix>_component.pbtxt',
    'expected_<prefix>_task.pbtxt' and 'expected_<prefix>_executor.pbtxt'.
    This helper replaces the triple assertProtoEquals block that was
    previously copy-pasted into every test method.
    """
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            f'expected_{prefix}_component.pbtxt',
            pipeline_pb2.ComponentSpec()), component_def)
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            f'expected_{prefix}_task.pbtxt',
            pipeline_pb2.PipelineTaskSpec()), step_spec)
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            f'expected_{prefix}_executor.pbtxt',
            pipeline_pb2.PipelineDeploymentConfig()), deployment_config)

  def testBuildTask(self):
    query = 'SELECT * FROM TABLE'
    bq_example_gen = big_query_example_gen_component.BigQueryExampleGen(
        query=query)
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=bq_example_gen,
        image='gcr.io/tensorflow/tfx:latest',
        deployment_config=deployment_config,
        component_defs=component_defs,
        enable_cache=True)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('bq_example_gen', actual_component_def,
                                 actual_step_spec, deployment_config)

  def testBuildContainerTask(self):
    task = test_utils.DummyProducerComponent(
        output1=channel_utils.as_channel([standard_artifacts.Model()]),
        param1='value1',
    )
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=task,
        image='gcr.io/tensorflow/tfx:latest',  # Note this has no effect here.
        deployment_config=deployment_config,
        component_defs=component_defs)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('dummy_container_spec', actual_component_def,
                                 actual_step_spec, deployment_config)

  def testBuildContainerTask2(self):
    # Function-based variant of testBuildContainerTask; expects the same
    # golden protos.
    task = test_utils.dummy_producer_component(
        output1=channel_utils.as_channel([standard_artifacts.Model()]),
        param1='value1',
    )
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=task,
        image='gcr.io/tensorflow/tfx:latest',
        deployment_config=deployment_config,
        component_defs=component_defs)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('dummy_container_spec', actual_component_def,
                                 actual_step_spec, deployment_config)

  def testBuildFileBasedExampleGen(self):
    beam_pipeline_args = ['runner=DataflowRunner']
    example_gen = components.CsvExampleGen(input_base='path/to/data/root')
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=example_gen,
        image='gcr.io/tensorflow/tfx:latest',
        image_cmds=_TEST_CMDS,
        beam_pipeline_args=beam_pipeline_args,
        deployment_config=deployment_config,
        component_defs=component_defs)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('csv_example_gen', actual_component_def,
                                 actual_step_spec, deployment_config)

  def testBuildFileBasedExampleGenWithInputConfig(self):
    input_config = example_gen_pb2.Input(splits=[
        example_gen_pb2.Input.Split(name='train', pattern='*train.tfr'),
        example_gen_pb2.Input.Split(name='eval', pattern='*test.tfr')
    ])
    example_gen = components.ImportExampleGen(
        input_base='path/to/data/root', input_config=input_config)
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=example_gen,
        image='gcr.io/tensorflow/tfx:latest',
        deployment_config=deployment_config,
        component_defs=component_defs)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('import_example_gen', actual_component_def,
                                 actual_step_spec, deployment_config)

  def testBuildImporter(self):
    impt = importer.Importer(
        source_uri='m/y/u/r/i',
        properties={
            'split_names': '["train", "eval"]',
        },
        custom_properties={
            'str_custom_property': 'abc',
            'int_custom_property': 123,
        },
        artifact_type=standard_artifacts.Examples).with_id('my_importer')
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=impt,
        deployment_config=deployment_config,
        component_defs=component_defs)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('importer', actual_component_def,
                                 actual_step_spec, deployment_config)

  def testBuildLatestBlessedModelResolverSucceed(self):
    # This resolver expands into TWO steps (model-blessing + model), so the
    # shared single-step helper does not apply to its component/task checks.
    latest_blessed_resolver = resolver.Resolver(
        strategy_class=latest_blessed_model_resolver.LatestBlessedModelResolver,
        model=channel.Channel(type=standard_artifacts.Model),
        model_blessing=channel.Channel(
            type=standard_artifacts.ModelBlessing)).with_id('my_resolver2')
    test_pipeline_info = data_types.PipelineInfo(
        pipeline_name='test-pipeline', pipeline_root='gs://path/to/my/root')
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    my_builder = step_builder.StepBuilder(
        node=latest_blessed_resolver,
        deployment_config=deployment_config,
        pipeline_info=test_pipeline_info,
        component_defs=component_defs)
    actual_step_specs = my_builder.build()
    model_blessing_resolver_id = 'my_resolver2-model-blessing-resolver'
    model_resolver_id = 'my_resolver2-model-resolver'
    self.assertSameElements(actual_step_specs.keys(),
                            [model_blessing_resolver_id, model_resolver_id])
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            'expected_latest_blessed_model_resolver_component_1.pbtxt',
            pipeline_pb2.ComponentSpec()),
        component_defs[model_blessing_resolver_id])
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            'expected_latest_blessed_model_resolver_task_1.pbtxt',
            pipeline_pb2.PipelineTaskSpec()),
        actual_step_specs[model_blessing_resolver_id])
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            'expected_latest_blessed_model_resolver_component_2.pbtxt',
            pipeline_pb2.ComponentSpec()), component_defs[model_resolver_id])
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            'expected_latest_blessed_model_resolver_task_2.pbtxt',
            pipeline_pb2.PipelineTaskSpec()),
        actual_step_specs[model_resolver_id])
    self.assertProtoEquals(
        test_utils.get_proto_from_test_data(
            'expected_latest_blessed_model_resolver_executor.pbtxt',
            pipeline_pb2.PipelineDeploymentConfig()), deployment_config)

  def testBuildLatestArtifactResolverSucceed(self):
    latest_model_resolver = resolver.Resolver(
        strategy_class=latest_artifacts_resolver.LatestArtifactsResolver,
        model=channel.Channel(type=standard_artifacts.Model),
        examples=channel.Channel(
            type=standard_artifacts.Examples)).with_id('my_resolver')
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    test_pipeline_info = data_types.PipelineInfo(
        pipeline_name='test-pipeline', pipeline_root='gs://path/to/my/root')
    my_builder = step_builder.StepBuilder(
        node=latest_model_resolver,
        deployment_config=deployment_config,
        pipeline_info=test_pipeline_info,
        component_defs=component_defs)
    actual_step_spec = self._sole(my_builder.build())
    actual_component_def = self._sole(component_defs)
    self._assert_expected_protos('latest_artifact_resolver',
                                 actual_component_def, actual_step_spec,
                                 deployment_config)
# Allows running the test module directly (outside of a test runner).
if __name__ == '__main__':
  tf.test.main()
from typing import Any, Dict
from kfp.pipeline_spec import pipeline_spec_pb2 as pipeline_pb2
import tensorflow as tf
from tfx import components
from tfx.dsl.components.common import importer
from tfx.dsl.components.common import resolver
from tfx.dsl.experimental import latest_artifacts_resolver
from tfx.dsl.experimental import latest_blessed_model_resolver
from tfx.extensions.google_cloud_big_query.example_gen import component as big_query_example_gen_component
from tfx.orchestration import data_types
from tfx.orchestration.kubeflow.v2 import step_builder
from tfx.orchestration.kubeflow.v2 import test_utils
from tfx.proto import example_gen_pb2
from tfx.types import channel
from tfx.types import channel_utils
from tfx.types import standard_artifacts
_TEST_CMDS = ('python', '-m', 'my_entrypoint.app_module')
class StepBuilderTest(tf.test.TestCase):
def _sole(self, d: Dict[Any, Any]) -> Any:
"""Asserts the dictionary has length 1 and returns the only value."""
self.assertLen(d, 1)
return list(d.values())[0]
def testBuildTask(self):
query = 'SELECT * FROM TABLE'
bq_example_gen = big_query_example_gen_component.BigQueryExampleGen(
query=query)
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=bq_example_gen,
image='gcr.io/tensorflow/tfx:latest',
deployment_config=deployment_config,
component_defs=component_defs,
enable_cache=True)
actual_step_spec = self._sole(my_builder.build())
actual_component_def = self._sole(component_defs)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_bq_example_gen_component.pbtxt',
pipeline_pb2.ComponentSpec()), actual_component_def)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_bq_example_gen_task.pbtxt',
pipeline_pb2.PipelineTaskSpec()), actual_step_spec)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_bq_example_gen_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildContainerTask(self):
task = test_utils.DummyProducerComponent(
output1=channel_utils.as_channel([standard_artifacts.Model()]),
param1='value1',
)
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=task,
image='gcr.io/tensorflow/tfx:latest', # Note this has no effect here.
deployment_config=deployment_config,
component_defs=component_defs)
actual_step_spec = self._sole(my_builder.build())
actual_component_def = self._sole(component_defs)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_dummy_container_spec_component.pbtxt',
pipeline_pb2.ComponentSpec()), actual_component_def)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_dummy_container_spec_task.pbtxt',
pipeline_pb2.PipelineTaskSpec()), actual_step_spec)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_dummy_container_spec_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildContainerTask2(self):
task = test_utils.dummy_producer_component(
output1=channel_utils.as_channel([standard_artifacts.Model()]),
param1='value1',
)
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=task,
image='gcr.io/tensorflow/tfx:latest',
deployment_config=deployment_config,
component_defs=component_defs)
actual_step_spec = self._sole(my_builder.build())
actual_component_def = self._sole(component_defs)
# Same as in testBuildContainerTask
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_dummy_container_spec_component.pbtxt',
pipeline_pb2.ComponentSpec()), actual_component_def)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_dummy_container_spec_task.pbtxt',
pipeline_pb2.PipelineTaskSpec()), actual_step_spec)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_dummy_container_spec_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildFileBasedExampleGen(self):
beam_pipeline_args = ['runner=DataflowRunner']
example_gen = components.CsvExampleGen(input_base='path/to/data/root')
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=example_gen,
image='gcr.io/tensorflow/tfx:latest',
image_cmds=_TEST_CMDS,
beam_pipeline_args=beam_pipeline_args,
deployment_config=deployment_config,
component_defs=component_defs)
actual_step_spec = self._sole(my_builder.build())
actual_component_def = self._sole(component_defs)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_csv_example_gen_component.pbtxt',
pipeline_pb2.ComponentSpec()), actual_component_def)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_csv_example_gen_task.pbtxt',
pipeline_pb2.PipelineTaskSpec()), actual_step_spec)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_csv_example_gen_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildFileBasedExampleGenWithInputConfig(self):
input_config = example_gen_pb2.Input(splits=[
example_gen_pb2.Input.Split(name='train', pattern='*train.tfr'),
example_gen_pb2.Input.Split(name='eval', pattern='*test.tfr')
])
example_gen = components.ImportExampleGen(
input_base='path/to/data/root', input_config=input_config)
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=example_gen,
image='gcr.io/tensorflow/tfx:latest',
deployment_config=deployment_config,
component_defs=component_defs)
actual_step_spec = self._sole(my_builder.build())
actual_component_def = self._sole(component_defs)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_import_example_gen_component.pbtxt',
pipeline_pb2.ComponentSpec()), actual_component_def)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_import_example_gen_task.pbtxt',
pipeline_pb2.PipelineTaskSpec()), actual_step_spec)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_import_example_gen_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildImporter(self):
impt = importer.Importer(
source_uri='m/y/u/r/i',
properties={
'split_names': '["train", "eval"]',
},
custom_properties={
'str_custom_property': 'abc',
'int_custom_property': 123,
},
artifact_type=standard_artifacts.Examples).with_id('my_importer')
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=impt,
deployment_config=deployment_config,
component_defs=component_defs)
actual_step_spec = self._sole(my_builder.build())
actual_component_def = self._sole(component_defs)
self.assertProtoEquals(
test_utils.get_proto_from_test_data('expected_importer_component.pbtxt',
pipeline_pb2.ComponentSpec()),
actual_component_def)
self.assertProtoEquals(
test_utils.get_proto_from_test_data('expected_importer_task.pbtxt',
pipeline_pb2.PipelineTaskSpec()),
actual_step_spec)
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_importer_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildLatestBlessedModelResolverSucceed(self):
latest_blessed_resolver = resolver.Resolver(
strategy_class=latest_blessed_model_resolver.LatestBlessedModelResolver,
model=channel.Channel(type=standard_artifacts.Model),
model_blessing=channel.Channel(
type=standard_artifacts.ModelBlessing)).with_id('my_resolver2')
test_pipeline_info = data_types.PipelineInfo(
pipeline_name='test-pipeline', pipeline_root='gs://path/to/my/root')
deployment_config = pipeline_pb2.PipelineDeploymentConfig()
component_defs = {}
my_builder = step_builder.StepBuilder(
node=latest_blessed_resolver,
deployment_config=deployment_config,
pipeline_info=test_pipeline_info,
component_defs=component_defs)
actual_step_specs = my_builder.build()
model_blessing_resolver_id = 'my_resolver2-model-blessing-resolver'
model_resolver_id = 'my_resolver2-model-resolver'
self.assertSameElements(actual_step_specs.keys(),
[model_blessing_resolver_id, model_resolver_id])
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_latest_blessed_model_resolver_component_1.pbtxt',
pipeline_pb2.ComponentSpec()),
component_defs[model_blessing_resolver_id])
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_latest_blessed_model_resolver_task_1.pbtxt',
pipeline_pb2.PipelineTaskSpec()),
actual_step_specs[model_blessing_resolver_id])
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_latest_blessed_model_resolver_component_2.pbtxt',
pipeline_pb2.ComponentSpec()), component_defs[model_resolver_id])
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_latest_blessed_model_resolver_task_2.pbtxt',
pipeline_pb2.PipelineTaskSpec()),
actual_step_specs[model_resolver_id])
self.assertProtoEquals(
test_utils.get_proto_from_test_data(
'expected_latest_blessed_model_resolver_executor.pbtxt',
pipeline_pb2.PipelineDeploymentConfig()), deployment_config)
def testBuildLatestArtifactResolverSucceed(self):
    """The latest-artifact resolver expands into a single resolver step."""
    resolver_node = resolver.Resolver(
        strategy_class=latest_artifacts_resolver.LatestArtifactsResolver,
        model=channel.Channel(type=standard_artifacts.Model),
        examples=channel.Channel(
            type=standard_artifacts.Examples)).with_id('my_resolver')
    deployment_config = pipeline_pb2.PipelineDeploymentConfig()
    component_defs = {}
    pipeline_info = data_types.PipelineInfo(
        pipeline_name='test-pipeline', pipeline_root='gs://path/to/my/root')
    builder = step_builder.StepBuilder(
        node=resolver_node,
        deployment_config=deployment_config,
        pipeline_info=pipeline_info,
        component_defs=component_defs)
    step_spec = self._sole(builder.build())
    component_def = self._sole(component_defs)

    # Each produced spec must match its golden pbtxt, in the original order.
    for filename, proto, actual in (
        ('expected_latest_artifact_resolver_component.pbtxt',
         pipeline_pb2.ComponentSpec(), component_def),
        ('expected_latest_artifact_resolver_task.pbtxt',
         pipeline_pb2.PipelineTaskSpec(), step_spec),
        ('expected_latest_artifact_resolver_executor.pbtxt',
         pipeline_pb2.PipelineDeploymentConfig(), deployment_config),
    ):
        self.assertProtoEquals(
            test_utils.get_proto_from_test_data(filename, proto), actual)
if __name__ == '__main__':
tf.test.main() | 0.865948 | 0.458712 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_database_database_actions
short_description: Perform actions on a Database resource in Oracle Cloud Infrastructure
description:
- Perform actions on a Database resource in Oracle Cloud Infrastructure
- For I(action=restore), restore a Database based on the request parameters you provide.
version_added: "2.9"
author: Oracle (@oracle)
options:
database_id:
description:
- The database L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm).
type: str
aliases: ["id"]
required: true
database_scn:
description:
- Restores using the backup with the System Change Number (SCN) specified.
type: str
timestamp:
description:
- Restores to the timestamp specified.
type: str
latest:
description:
- Restores to the last known good state with the least possible data loss.
type: bool
action:
description:
- The action to perform on the Database.
type: str
required: true
choices:
- "restore"
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Perform action restore on database
oci_database_database_actions:
database_id: ocid1.database.oc1..xxxxxxEXAMPLExxxxxx
action: restore
"""
RETURN = """
database:
description:
- Details of the Database resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the database.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
character_set:
description:
- The character set for the database.
returned: on success
type: string
sample: character_set_example
ncharacter_set:
description:
- The national character set for the database.
returned: on success
type: string
sample: ncharacter_set_example
db_home_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the Database Home.
returned: on success
type: string
sample: ocid1.dbhome.oc1..xxxxxxEXAMPLExxxxxx
db_system_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the DB system.
returned: on success
type: string
sample: ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx
vm_cluster_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the VM cluster.
returned: on success
type: string
sample: ocid1.vmcluster.oc1..xxxxxxEXAMPLExxxxxx
db_name:
description:
- The database name.
returned: on success
type: string
sample: db_name_example
pdb_name:
description:
- The name of the pluggable database. The name must begin with an alphabetic character and can contain a maximum of eight alphanumeric
characters. Special characters are not permitted. Pluggable database should not be same as database name.
returned: on success
type: string
sample: pdb_name_example
db_workload:
description:
- The database workload type.
returned: on success
type: string
sample: db_workload_example
db_unique_name:
description:
- A system-generated name for the database to ensure uniqueness within an Oracle Data Guard group (a primary database and its standby
databases). The unique name cannot be changed.
returned: on success
type: string
sample: db_unique_name_example
lifecycle_details:
description:
- Additional information about the current lifecycleState.
returned: on success
type: string
sample: lifecycle_details_example
lifecycle_state:
description:
- The current state of the database.
returned: on success
type: string
sample: PROVISIONING
time_created:
description:
- The date and time the database was created.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
last_backup_timestamp:
description:
- The date and time when the latest database backup was created.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
db_backup_config:
description:
- ""
returned: on success
type: complex
contains:
auto_backup_enabled:
description:
- If set to true, configures automatic backups. If you previously used RMAN or dbcli to configure backups and then you switch to using
the Console or the API for backups, a new backup configuration is created and associated with your database. This means that you can
no longer rely on your previously configured unmanaged backups to work.
returned: on success
type: bool
sample: true
recovery_window_in_days:
description:
- Number of days between the current and the earliest point of recoverability covered by automatic backups.
This value applies to automatic backups only. After a new automatic backup has been created, Oracle removes old automatic backups that
are created before the window.
When the value is updated, it is applied to all existing automatic backups.
returned: on success
type: int
sample: 56
auto_backup_window:
description:
- Time window selected for initiating automatic backup for the database system. There are twelve available two-hour time windows. If no
option is selected, a start time between 12:00 AM to 7:00 AM in the region of the database is automatically chosen. For example, if
the user selects SLOT_TWO from the enum list, the automatic backup job will start in between 2:00 AM (inclusive) to 4:00 AM
(exclusive).
- "Example: `SLOT_TWO`"
returned: on success
type: string
sample: SLOT_TWO
backup_destination_details:
description:
- Backup destination details.
returned: on success
type: complex
contains:
type:
description:
- Type of the database backup destination.
returned: on success
type: string
sample: NFS
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the backup destination.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
vpc_user:
description:
- For a RECOVERY_APPLIANCE backup destination, the Virtual Private Catalog (VPC) user that is used to access the Recovery
Appliance.
returned: on success
type: string
sample: vpc_user_example
vpc_password:
description:
- For a RECOVERY_APPLIANCE backup destination, the password for the VPC user that is used to access the Recovery Appliance.
returned: on success
type: string
sample: vpc_password_example
internet_proxy:
description:
- Proxy URL to connect to object store.
returned: on success
type: string
sample: internet_proxy_example
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
connection_strings:
description:
- The Connection strings used to connect to the Oracle Database.
returned: on success
type: complex
contains:
cdb_default:
description:
- Host name based CDB Connection String.
returned: on success
type: string
sample: cdb_default_example
cdb_ip_default:
description:
- IP based CDB Connection String.
returned: on success
type: string
sample: cdb_ip_default_example
all_connection_strings:
description:
- All connection strings to use to connect to the Database.
returned: on success
type: dict
sample: {}
source_database_point_in_time_recovery_timestamp:
description:
- Point in time recovery timeStamp of the source database at which cloned database system is cloned from the source database system, as
described in L(RFC 3339,https://tools.ietf.org/rfc/rfc3339)
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
database_software_image_id:
description:
- The database software image L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm)
returned: on success
type: string
sample: ocid1.databasesoftwareimage.oc1..xxxxxxEXAMPLExxxxxx
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"character_set": "character_set_example",
"ncharacter_set": "ncharacter_set_example",
"db_home_id": "ocid1.dbhome.oc1..xxxxxxEXAMPLExxxxxx",
"db_system_id": "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx",
"vm_cluster_id": "ocid1.vmcluster.oc1..xxxxxxEXAMPLExxxxxx",
"db_name": "db_name_example",
"pdb_name": "pdb_name_example",
"db_workload": "db_workload_example",
"db_unique_name": "db_unique_name_example",
"lifecycle_details": "lifecycle_details_example",
"lifecycle_state": "PROVISIONING",
"time_created": "2013-10-20T19:20:30+01:00",
"last_backup_timestamp": "2013-10-20T19:20:30+01:00",
"db_backup_config": {
"auto_backup_enabled": true,
"recovery_window_in_days": 56,
"auto_backup_window": "SLOT_TWO",
"backup_destination_details": [{
"type": "NFS",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"vpc_user": "vpc_user_example",
"vpc_password": "<PASSWORD>",
"internet_proxy": "internet_proxy_example"
}]
},
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"connection_strings": {
"cdb_default": "cdb_default_example",
"cdb_ip_default": "cdb_ip_default_example",
"all_connection_strings": {}
},
"source_database_point_in_time_recovery_timestamp": "2013-10-20T19:20:30+01:00",
"database_software_image_id": "ocid1.databasesoftwareimage.oc1..xxxxxxEXAMPLExxxxxx"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
# The OCI python SDK is an optional dependency: record availability here and
# fail from main() with a clear message instead of crashing at import time.
try:
    from oci.work_requests import WorkRequestClient
    from oci.database import DatabaseClient
    from oci.database.models import RestoreDatabaseDetails

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class DatabaseActionsHelperGen(OCIActionsHelperBase):
    """Generated helper for actions on a Database resource.

    Supported actions:
        restore
    """

    def __init__(self, *args, **kwargs):
        super(DatabaseActionsHelperGen, self).__init__(*args, **kwargs)
        # Restore completion is tracked through a work request, so keep a
        # dedicated work-request client alongside the database client.
        self.work_request_client = WorkRequestClient(
            self.client._config, **self.client._kwargs
        )

    @staticmethod
    def get_module_resource_id_param():
        # Name of the module parameter that identifies the resource.
        return "database_id"

    def get_module_resource_id(self):
        # OCID of the database this invocation operates on.
        return self.module.params.get("database_id")

    def get_get_fn(self):
        return self.client.get_database

    def get_resource(self):
        """Fetch the current state of the database, with retry/backoff."""
        database_id = self.module.params.get("database_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_database, database_id=database_id,
        )

    def restore(self):
        """Restore the database and wait on the resulting work request."""
        details = oci_common_utils.convert_input_data_to_model_class(
            self.module.params, RestoreDatabaseDetails
        )
        action = self.module.params.get("action")
        operation = "{0}_{1}".format(
            action.upper(), oci_common_utils.ACTION_OPERATION_KEY
        )
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.restore_database,
            call_fn_args=(),
            call_fn_kwargs=dict(
                database_id=self.module.params.get("database_id"),
                restore_database_details=details,
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=operation,
            waiter_client=self.work_request_client,
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )
# Resolve an optional user-supplied customization class; a no-op placeholder
# is returned when none is registered.
DatabaseActionsHelperCustom = get_custom_class("DatabaseActionsHelperCustom")


class ResourceHelper(DatabaseActionsHelperCustom, DatabaseActionsHelperGen):
    # MRO puts custom behavior (if any) ahead of the generated helper.
    pass
def main():
    """Module entry point: build the argument spec, run the action, exit."""
    argument_spec = oci_common_utils.get_common_arg_spec(
        supports_create=False, supports_wait=True
    )
    argument_spec.update(
        database_id=dict(aliases=["id"], type="str", required=True),
        database_scn=dict(type="str"),
        timestamp=dict(type="str"),
        latest=dict(type="bool"),
        action=dict(type="str", required=True, choices=["restore"]),
    )

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    # Fail fast with a clear message when the optional SDK is missing.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    helper = ResourceHelper(
        module=module,
        resource_type="database",
        service_client_class=DatabaseClient,
        namespace="database",
    )
    module.exit_json(**helper.perform_action(module.params.get("action")))
if __name__ == "__main__":
main() | plugins/modules/oci_database_database_actions.py |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_database_database_actions
short_description: Perform actions on a Database resource in Oracle Cloud Infrastructure
description:
- Perform actions on a Database resource in Oracle Cloud Infrastructure
- For I(action=restore), restore a Database based on the request parameters you provide.
version_added: "2.9"
author: Oracle (@oracle)
options:
database_id:
description:
- The database L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm).
type: str
aliases: ["id"]
required: true
database_scn:
description:
- Restores using the backup with the System Change Number (SCN) specified.
type: str
timestamp:
description:
- Restores to the timestamp specified.
type: str
latest:
description:
- Restores to the last known good state with the least possible data loss.
type: bool
action:
description:
- The action to perform on the Database.
type: str
required: true
choices:
- "restore"
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Perform action restore on database
oci_database_database_actions:
database_id: ocid1.database.oc1..xxxxxxEXAMPLExxxxxx
action: restore
"""
RETURN = """
database:
description:
- Details of the Database resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the database.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
character_set:
description:
- The character set for the database.
returned: on success
type: string
sample: character_set_example
ncharacter_set:
description:
- The national character set for the database.
returned: on success
type: string
sample: ncharacter_set_example
db_home_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the Database Home.
returned: on success
type: string
sample: ocid1.dbhome.oc1..xxxxxxEXAMPLExxxxxx
db_system_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the DB system.
returned: on success
type: string
sample: ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx
vm_cluster_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the VM cluster.
returned: on success
type: string
sample: ocid1.vmcluster.oc1..xxxxxxEXAMPLExxxxxx
db_name:
description:
- The database name.
returned: on success
type: string
sample: db_name_example
pdb_name:
description:
- The name of the pluggable database. The name must begin with an alphabetic character and can contain a maximum of eight alphanumeric
characters. Special characters are not permitted. Pluggable database should not be same as database name.
returned: on success
type: string
sample: pdb_name_example
db_workload:
description:
- The database workload type.
returned: on success
type: string
sample: db_workload_example
db_unique_name:
description:
- A system-generated name for the database to ensure uniqueness within an Oracle Data Guard group (a primary database and its standby
databases). The unique name cannot be changed.
returned: on success
type: string
sample: db_unique_name_example
lifecycle_details:
description:
- Additional information about the current lifecycleState.
returned: on success
type: string
sample: lifecycle_details_example
lifecycle_state:
description:
- The current state of the database.
returned: on success
type: string
sample: PROVISIONING
time_created:
description:
- The date and time the database was created.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
last_backup_timestamp:
description:
- The date and time when the latest database backup was created.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
db_backup_config:
description:
- ""
returned: on success
type: complex
contains:
auto_backup_enabled:
description:
- If set to true, configures automatic backups. If you previously used RMAN or dbcli to configure backups and then you switch to using
the Console or the API for backups, a new backup configuration is created and associated with your database. This means that you can
no longer rely on your previously configured unmanaged backups to work.
returned: on success
type: bool
sample: true
recovery_window_in_days:
description:
- Number of days between the current and the earliest point of recoverability covered by automatic backups.
This value applies to automatic backups only. After a new automatic backup has been created, Oracle removes old automatic backups that
are created before the window.
When the value is updated, it is applied to all existing automatic backups.
returned: on success
type: int
sample: 56
auto_backup_window:
description:
- Time window selected for initiating automatic backup for the database system. There are twelve available two-hour time windows. If no
option is selected, a start time between 12:00 AM to 7:00 AM in the region of the database is automatically chosen. For example, if
the user selects SLOT_TWO from the enum list, the automatic backup job will start in between 2:00 AM (inclusive) to 4:00 AM
(exclusive).
- "Example: `SLOT_TWO`"
returned: on success
type: string
sample: SLOT_TWO
backup_destination_details:
description:
- Backup destination details.
returned: on success
type: complex
contains:
type:
description:
- Type of the database backup destination.
returned: on success
type: string
sample: NFS
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the backup destination.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
vpc_user:
description:
- For a RECOVERY_APPLIANCE backup destination, the Virtual Private Catalog (VPC) user that is used to access the Recovery
Appliance.
returned: on success
type: string
sample: vpc_user_example
vpc_password:
description:
- For a RECOVERY_APPLIANCE backup destination, the password for the VPC user that is used to access the Recovery Appliance.
returned: on success
type: string
sample: vpc_password_example
internet_proxy:
description:
- Proxy URL to connect to object store.
returned: on success
type: string
sample: internet_proxy_example
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
connection_strings:
description:
- The Connection strings used to connect to the Oracle Database.
returned: on success
type: complex
contains:
cdb_default:
description:
- Host name based CDB Connection String.
returned: on success
type: string
sample: cdb_default_example
cdb_ip_default:
description:
- IP based CDB Connection String.
returned: on success
type: string
sample: cdb_ip_default_example
all_connection_strings:
description:
- All connection strings to use to connect to the Database.
returned: on success
type: dict
sample: {}
source_database_point_in_time_recovery_timestamp:
description:
- Point in time recovery timeStamp of the source database at which cloned database system is cloned from the source database system, as
described in L(RFC 3339,https://tools.ietf.org/rfc/rfc3339)
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
database_software_image_id:
description:
- The database software image L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm)
returned: on success
type: string
sample: ocid1.databasesoftwareimage.oc1..xxxxxxEXAMPLExxxxxx
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"character_set": "character_set_example",
"ncharacter_set": "ncharacter_set_example",
"db_home_id": "ocid1.dbhome.oc1..xxxxxxEXAMPLExxxxxx",
"db_system_id": "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx",
"vm_cluster_id": "ocid1.vmcluster.oc1..xxxxxxEXAMPLExxxxxx",
"db_name": "db_name_example",
"pdb_name": "pdb_name_example",
"db_workload": "db_workload_example",
"db_unique_name": "db_unique_name_example",
"lifecycle_details": "lifecycle_details_example",
"lifecycle_state": "PROVISIONING",
"time_created": "2013-10-20T19:20:30+01:00",
"last_backup_timestamp": "2013-10-20T19:20:30+01:00",
"db_backup_config": {
"auto_backup_enabled": true,
"recovery_window_in_days": 56,
"auto_backup_window": "SLOT_TWO",
"backup_destination_details": [{
"type": "NFS",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"vpc_user": "vpc_user_example",
"vpc_password": "<PASSWORD>",
"internet_proxy": "internet_proxy_example"
}]
},
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"connection_strings": {
"cdb_default": "cdb_default_example",
"cdb_ip_default": "cdb_ip_default_example",
"all_connection_strings": {}
},
"source_database_point_in_time_recovery_timestamp": "2013-10-20T19:20:30+01:00",
"database_software_image_id": "ocid1.databasesoftwareimage.oc1..xxxxxxEXAMPLExxxxxx"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
# The OCI python SDK is an optional dependency: record availability here and
# fail from main() with a clear message instead of crashing at import time.
try:
    from oci.work_requests import WorkRequestClient
    from oci.database import DatabaseClient
    from oci.database.models import RestoreDatabaseDetails

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class DatabaseActionsHelperGen(OCIActionsHelperBase):
    """Generated helper for actions on a Database resource.

    Supported actions:
        restore
    """

    def __init__(self, *args, **kwargs):
        super(DatabaseActionsHelperGen, self).__init__(*args, **kwargs)
        # Restore completion is tracked through a work request, so keep a
        # dedicated work-request client alongside the database client.
        self.work_request_client = WorkRequestClient(
            self.client._config, **self.client._kwargs
        )

    @staticmethod
    def get_module_resource_id_param():
        # Name of the module parameter that identifies the resource.
        return "database_id"

    def get_module_resource_id(self):
        # OCID of the database this invocation operates on.
        return self.module.params.get("database_id")

    def get_get_fn(self):
        return self.client.get_database

    def get_resource(self):
        """Fetch the current state of the database, with retry/backoff."""
        database_id = self.module.params.get("database_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_database, database_id=database_id,
        )

    def restore(self):
        """Restore the database and wait on the resulting work request."""
        details = oci_common_utils.convert_input_data_to_model_class(
            self.module.params, RestoreDatabaseDetails
        )
        action = self.module.params.get("action")
        operation = "{0}_{1}".format(
            action.upper(), oci_common_utils.ACTION_OPERATION_KEY
        )
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.restore_database,
            call_fn_args=(),
            call_fn_kwargs=dict(
                database_id=self.module.params.get("database_id"),
                restore_database_details=details,
            ),
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=operation,
            waiter_client=self.work_request_client,
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )
# Resolve an optional user-supplied customization class; a no-op placeholder
# is returned when none is registered.
DatabaseActionsHelperCustom = get_custom_class("DatabaseActionsHelperCustom")


class ResourceHelper(DatabaseActionsHelperCustom, DatabaseActionsHelperGen):
    # MRO puts custom behavior (if any) ahead of the generated helper.
    pass
def main():
    """Module entry point: build the argument spec, run the action, exit."""
    argument_spec = oci_common_utils.get_common_arg_spec(
        supports_create=False, supports_wait=True
    )
    argument_spec.update(
        database_id=dict(aliases=["id"], type="str", required=True),
        database_scn=dict(type="str"),
        timestamp=dict(type="str"),
        latest=dict(type="bool"),
        action=dict(type="str", required=True, choices=["restore"]),
    )

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    # Fail fast with a clear message when the optional SDK is missing.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    helper = ResourceHelper(
        module=module,
        resource_type="database",
        service_client_class=DatabaseClient,
        namespace="database",
    )
    module.exit_json(**helper.perform_action(module.params.get("action")))
if __name__ == "__main__":
main() | 0.865039 | 0.332812 |
import pytest
import sklearn.decomposition
import sklearn.linear_model
from numpy.testing import assert_array_equal
from sklearn import datasets
from sklearn.model_selection import GridSearchCV, StratifiedKFold
from sklearn.pipeline import Pipeline
from baikal import Model, Input
from baikal.sklearn import SKLearnWrapper
from tests.helpers.sklearn_steps import PCA, LogisticRegression, RandomForestClassifier
# Shared test fixtures: the iris dataset plus deterministic search settings.
iris = datasets.load_iris()
x_data = iris.data
y_t_data = iris.target
random_state = 123  # seed shared by every estimator for reproducible results
verbose = 0
cv = StratifiedKFold(3)  # cv will default to KFold if the estimator is a baikal Model
def test_grid_search_cv():
    """GridSearchCV over a wrapped baikal model matches an sklearn Pipeline."""
    param_grid = {
        "pca__n_components": [2, 4],
        "logreg__C": [0.1, 1.0, 10],
        "logreg__penalty": ["l1", "l2"],
    }

    def make_gscv(estimator):
        # Both searches share identical settings so results are comparable.
        return GridSearchCV(
            estimator,
            param_grid,
            cv=cv,
            scoring="accuracy",
            return_train_score=True,
            verbose=verbose,
        )

    # baikal way
    def build_fn():
        x = Input()
        y_t = Input()
        h = PCA(random_state=random_state, name="pca")(x)
        y = LogisticRegression(
            random_state=random_state, solver="liblinear", name="logreg"
        )(h, y_t)
        return Model(x, y, y_t)

    sk_model = SKLearnWrapper(build_fn)
    assert isinstance(sk_model.model, Model)
    gscv_baikal = make_gscv(sk_model)
    gscv_baikal.fit(x_data, y_t_data)

    # traditional way
    pipe = Pipeline([
        ("pca", PCA(random_state=random_state)),
        ("logreg", LogisticRegression(random_state=random_state, solver="liblinear")),
    ])
    gscv_traditional = make_gscv(pipe)
    gscv_traditional.fit(x_data, y_t_data)

    # Both searches must agree on the winner and on every fold-averaged score.
    assert gscv_baikal.best_params_ == gscv_traditional.best_params_
    for score_key in ("mean_train_score", "mean_test_score"):
        assert_array_equal(
            gscv_traditional.cv_results_[score_key],
            gscv_baikal.cv_results_[score_key],
        )
def test_grid_search_cv_with_tunable_step():
    """Grid search can also swap a whole step (the classifier) in a baikal model.

    The "classifier" grid entry substitutes the step's estimator object itself,
    mirroring whole-step substitution in an sklearn Pipeline.
    """
    param_grid = {
        "classifier": [
            LogisticRegression(random_state=random_state),
            RandomForestClassifier(random_state=random_state),
        ],
        "pca__n_components": [2, 4],
    }

    # baikal way
    def build_fn():
        # Two-step graph: PCA feeding a (tunable) classifier step.
        x = Input()
        y_t = Input()
        h = PCA(random_state=random_state, name="pca")(x)
        y = LogisticRegression(random_state=random_state, name="classifier")(h, y_t)
        model = Model(x, y, y_t)
        return model

    sk_model = SKLearnWrapper(build_fn)
    gscv_baikal = GridSearchCV(
        sk_model,
        param_grid,
        cv=cv,
        scoring="accuracy",
        return_train_score=True,
        verbose=verbose,
    )
    gscv_baikal.fit(x_data, y_t_data)

    # traditional way
    pca = PCA(random_state=random_state)
    classifier = LogisticRegression(random_state=random_state)
    pipe = Pipeline([("pca", pca), ("classifier", classifier)])
    gscv_traditional = GridSearchCV(
        pipe,
        param_grid,
        cv=cv,
        scoring="accuracy",
        return_train_score=True,
        verbose=verbose,
    )
    gscv_traditional.fit(x_data, y_t_data)

    # Both searches must agree on the winner and on every fold-averaged score.
    assert gscv_baikal.best_params_ == gscv_traditional.best_params_
    assert_array_equal(
        gscv_traditional.cv_results_["mean_train_score"],
        gscv_baikal.cv_results_["mean_train_score"],
    )
    assert_array_equal(
        gscv_traditional.cv_results_["mean_test_score"],
        gscv_baikal.cv_results_["mean_test_score"],
    ) | tests/test_sklearn_wrapper.py | import pytest
import sklearn.decomposition
import sklearn.linear_model
from numpy.testing import assert_array_equal
from sklearn import datasets
from sklearn.model_selection import GridSearchCV, StratifiedKFold
from sklearn.pipeline import Pipeline
from baikal import Model, Input
from baikal.sklearn import SKLearnWrapper
from tests.helpers.sklearn_steps import PCA, LogisticRegression, RandomForestClassifier
# Shared test fixtures: the iris dataset plus deterministic search settings.
iris = datasets.load_iris()
x_data = iris.data
y_t_data = iris.target
random_state = 123  # seed shared by every estimator for reproducible results
verbose = 0
cv = StratifiedKFold(3)  # cv will default to KFold if the estimator is a baikal Model
def test_grid_search_cv():
    """GridSearchCV over a wrapped baikal model matches an sklearn Pipeline."""
    param_grid = {
        "pca__n_components": [2, 4],
        "logreg__C": [0.1, 1.0, 10],
        "logreg__penalty": ["l1", "l2"],
    }

    def make_gscv(estimator):
        # Both searches share identical settings so results are comparable.
        return GridSearchCV(
            estimator,
            param_grid,
            cv=cv,
            scoring="accuracy",
            return_train_score=True,
            verbose=verbose,
        )

    # baikal way
    def build_fn():
        x = Input()
        y_t = Input()
        h = PCA(random_state=random_state, name="pca")(x)
        y = LogisticRegression(
            random_state=random_state, solver="liblinear", name="logreg"
        )(h, y_t)
        return Model(x, y, y_t)

    sk_model = SKLearnWrapper(build_fn)
    assert isinstance(sk_model.model, Model)
    gscv_baikal = make_gscv(sk_model)
    gscv_baikal.fit(x_data, y_t_data)

    # traditional way
    pipe = Pipeline([
        ("pca", PCA(random_state=random_state)),
        ("logreg", LogisticRegression(random_state=random_state, solver="liblinear")),
    ])
    gscv_traditional = make_gscv(pipe)
    gscv_traditional.fit(x_data, y_t_data)

    # Both searches must agree on the winner and on every fold-averaged score.
    assert gscv_baikal.best_params_ == gscv_traditional.best_params_
    for score_key in ("mean_train_score", "mean_test_score"):
        assert_array_equal(
            gscv_traditional.cv_results_[score_key],
            gscv_baikal.cv_results_[score_key],
        )
def test_grid_search_cv_with_tunable_step():
    """Grid search can also swap a whole step (the classifier) in a baikal model.

    The "classifier" grid entry substitutes the step's estimator object itself,
    mirroring whole-step substitution in an sklearn Pipeline.
    """
    param_grid = {
        "classifier": [
            LogisticRegression(random_state=random_state),
            RandomForestClassifier(random_state=random_state),
        ],
        "pca__n_components": [2, 4],
    }

    # baikal way
    def build_fn():
        # Two-step graph: PCA feeding a (tunable) classifier step.
        x = Input()
        y_t = Input()
        h = PCA(random_state=random_state, name="pca")(x)
        y = LogisticRegression(random_state=random_state, name="classifier")(h, y_t)
        model = Model(x, y, y_t)
        return model

    sk_model = SKLearnWrapper(build_fn)
    gscv_baikal = GridSearchCV(
        sk_model,
        param_grid,
        cv=cv,
        scoring="accuracy",
        return_train_score=True,
        verbose=verbose,
    )
    gscv_baikal.fit(x_data, y_t_data)

    # traditional way
    pca = PCA(random_state=random_state)
    classifier = LogisticRegression(random_state=random_state)
    pipe = Pipeline([("pca", pca), ("classifier", classifier)])
    gscv_traditional = GridSearchCV(
        pipe,
        param_grid,
        cv=cv,
        scoring="accuracy",
        return_train_score=True,
        verbose=verbose,
    )
    gscv_traditional.fit(x_data, y_t_data)

    # Both searches must agree on the winner and on every fold-averaged score.
    assert gscv_baikal.best_params_ == gscv_traditional.best_params_
    assert_array_equal(
        gscv_traditional.cv_results_["mean_train_score"],
        gscv_baikal.cv_results_["mean_train_score"],
    )
    assert_array_equal(
        gscv_traditional.cv_results_["mean_test_score"],
        gscv_baikal.cv_results_["mean_test_score"],
    ) | 0.869632 | 0.604632
import collections
import tensorflow as tf
def load_data(filename, sep=' ', sep1=',', isCharacter=False):
label_list = []
features_list = []
with tf.gfile.GFile(filename, 'r') as f:
for line in f.readlines():
fields = line.strip().split(sep)
if len(fields) != 2:
continue
label = fields[0]
features = fields[1]
label_list.append(label)
if isCharacter:
features_list.append(list(features))
else:
features_list.append(features.split(sep1))
return label_list, features_list
def gen(filepath):
with tf.gfile.GFile(filepath, 'r') as f:
for line in f.readlines():
fields = line.strip().split(' ')
if len(fields) != 2:
continue
label = fields[0]
features = fields[1]
yield (label, features.split(','))
def build_word_dic(words_list, label_list, vocab_size=5000):
word_dic = dict()
word_dic['pad'] = 0
word_dic['unk'] = 1
all_words = []
for words in words_list:
all_words.extend(words)
counter = collections.Counter(all_words).most_common(vocab_size)
words, _ = list(zip(*counter))
for word in words:
word_dic[word] = len(word_dic)
label_set = set(label_list)
label_dic = dict()
for label in label_set:
label_dic[label] = len(label_dic)
return words, word_dic, label_set, label_dic
def build_dic_hash_table(word_dic, label_dic):
word_keys = tf.constant(list(word_dic.keys()))
word_values = tf.constant(list(word_dic.values()))
word_table = tf.contrib.lookup.HashTable(
tf.contrib.lookup.KeyValueTensorInitializer(word_keys, word_values), word_dic['unk'])
label_keys = tf.constant(list(label_dic.keys()))
label_values = tf.constant(list(label_dic.values()))
label_table = tf.contrib.lookup.HashTable(
tf.contrib.lookup.KeyValueTensorInitializer(label_keys, label_values), -1)
return word_table, label_table
def train_input_fn(label_list, features_list, shuffle_size, batch_size):
dataset = tf.data.Dataset.from_tensor_slices((label_list, features_list))
dataset = dataset.shuffle(shuffle_size).repeat().batch(batch_size)
return dataset
def build_table_from_text_file(filepath):
return tf.contrib.lookup.HashTable(
tf.contrib.lookup.TextFileInitializer(filepath, tf.string, 0, tf.int64, 1, delimiter=" "), -1)
if __name__ == '__main__':
# label_list, features_list = load_data('/tmp/1.csv')
# words, word_dic, labels, label_dic = build_word_dic(features_list, label_list)
# word_table, label_table = build_dic_hash_table(word_dic, label_dic)
sess = tf.InteractiveSession()
# word_out = word_table.lookup(tf.constant(list(word_dic.keys())))
# label_out = label_table.lookup(tf.constant(list(label_dic.keys())))
# tf.tables_initializer().run()
# print(word_out.eval())
# print(label_out.eval())
table = build_table_from_text_file('/tmp/2.csv')
out = table.lookup(tf.constant(['emerson']))
table.init.run()
print(out.eval()) | feng-ml-tf/src/data_helper.py | import collections
import tensorflow as tf
def load_data(filename, sep=' ', sep1=',', isCharacter=False):
label_list = []
features_list = []
with tf.gfile.GFile(filename, 'r') as f:
for line in f.readlines():
fields = line.strip().split(sep)
if len(fields) != 2:
continue
label = fields[0]
features = fields[1]
label_list.append(label)
if isCharacter:
features_list.append(list(features))
else:
features_list.append(features.split(sep1))
return label_list, features_list
def gen(filepath):
with tf.gfile.GFile(filepath, 'r') as f:
for line in f.readlines():
fields = line.strip().split(' ')
if len(fields) != 2:
continue
label = fields[0]
features = fields[1]
yield (label, features.split(','))
def build_word_dic(words_list, label_list, vocab_size=5000):
word_dic = dict()
word_dic['pad'] = 0
word_dic['unk'] = 1
all_words = []
for words in words_list:
all_words.extend(words)
counter = collections.Counter(all_words).most_common(vocab_size)
words, _ = list(zip(*counter))
for word in words:
word_dic[word] = len(word_dic)
label_set = set(label_list)
label_dic = dict()
for label in label_set:
label_dic[label] = len(label_dic)
return words, word_dic, label_set, label_dic
def build_dic_hash_table(word_dic, label_dic):
word_keys = tf.constant(list(word_dic.keys()))
word_values = tf.constant(list(word_dic.values()))
word_table = tf.contrib.lookup.HashTable(
tf.contrib.lookup.KeyValueTensorInitializer(word_keys, word_values), word_dic['unk'])
label_keys = tf.constant(list(label_dic.keys()))
label_values = tf.constant(list(label_dic.values()))
label_table = tf.contrib.lookup.HashTable(
tf.contrib.lookup.KeyValueTensorInitializer(label_keys, label_values), -1)
return word_table, label_table
def train_input_fn(label_list, features_list, shuffle_size, batch_size):
dataset = tf.data.Dataset.from_tensor_slices((label_list, features_list))
dataset = dataset.shuffle(shuffle_size).repeat().batch(batch_size)
return dataset
def build_table_from_text_file(filepath):
return tf.contrib.lookup.HashTable(
tf.contrib.lookup.TextFileInitializer(filepath, tf.string, 0, tf.int64, 1, delimiter=" "), -1)
if __name__ == '__main__':
# label_list, features_list = load_data('/tmp/1.csv')
# words, word_dic, labels, label_dic = build_word_dic(features_list, label_list)
# word_table, label_table = build_dic_hash_table(word_dic, label_dic)
sess = tf.InteractiveSession()
# word_out = word_table.lookup(tf.constant(list(word_dic.keys())))
# label_out = label_table.lookup(tf.constant(list(label_dic.keys())))
# tf.tables_initializer().run()
# print(word_out.eval())
# print(label_out.eval())
table = build_table_from_text_file('/tmp/2.csv')
out = table.lookup(tf.constant(['emerson']))
table.init.run()
print(out.eval()) | 0.354321 | 0.299293 |
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import zip
from builtins import object
from curation.tasks import update_instance, bulk_change_tracking_state, bulk_prepend_record_history, save_creation_to_citation
from django import forms
from django.http import QueryDict
from isisdata.models import *
import isisdata.tasks as dtasks
import curation.taskslib.citation_tasks as ctasks
import curation.taskslib.authority_tasks as atasks
from isisdata.filters import CitationFilter
import json
# TODO: refactor these actions to use bulk apply methods and then explicitly
# trigger search indexing (or whatever other post-save actions are needed).
class BaseAction(object):
def __init__(self):
if hasattr(self, 'default_value_field'):
self.value_field = self.default_value_field
if hasattr(self, 'default_value_field_kwargs'):
self.value_field_kwargs = self.default_value_field_kwargs
if hasattr(self, 'extra'):
self.extra_fields = self.extra
def get_value_field(self, **kwargs):
self.value_field_kwargs.update(kwargs)
return self.value_field(**self.value_field_kwargs)
def get_extra_fields(self, **kwargs):
if hasattr(self, 'extra_fields'):
return [(name, field(**kwargs)) for name, field, kwargs in self.extra_fields]
return []
def _build_filter_label(filter_params_raw):
citation_filter = CitationFilter(QueryDict(filter_params_raw, mutable=True))
filter_form = citation_filter.form
filter_data = {}
if filter_form.is_valid():
filter_data = filter_form.cleaned_data
return ', '.join([ '%s: %s' % (key, value) for key, value in list(filter_data.items()) if value ])
class PrependToRecordHistory(BaseAction):
model = Citation
label = u'Update record history'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Prepend to record history',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = bulk_prepend_record_history.delay(user.id, filter_params_raw,
value, task.id, type)
else:
result = bulk_prepend_record_history.delay(user.id, filter_params_raw,
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_explanation', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class StoreCreationDataToModel(BaseAction):
model = Citation
label = u'Store creation data to citations'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Storing creation data to citations',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Storing creation data'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = save_creation_to_citation.delay(user.id, filter_params_raw,
value, task.id, type)
result = save_creation_to_citation.delay(user.id, filter_params_raw,
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('created_native', '')
task.label = 'Storing creator in citation for set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class SetRecordStatus(BaseAction):
model = Citation
label = u'Set record status'
default_value_field = forms.ChoiceField
default_value_field_kwargs = {
'choices': CuratedMixin.STATUS_CHOICES,
'label': 'Set record status',
'widget': forms.widgets.Select(attrs={'class': 'action-value'}),
}
def apply(self, user, filter_params_raw, value, **extra):
# We need this to exist first so that we can keep it up to date as the
# group of tasks is executed.
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_value',
value, task.id, type)
else:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_value',
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_value', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class SetRecordStatusExplanation(BaseAction):
model = Citation
label = u'Set record status explanation'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Set record status explanation',
'widget': forms.widgets.TextInput(attrs={'class': 'action-value'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_explanation',
value, task.id, type)
else:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_explanation',
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_explanation', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
def get_tracking_transition_counts(qs):
states = list(zip(*qs.model.TRACKING_CHOICES))[0]
transitions = dict(list(zip(states, [qs.filter(tracking_state=state).count() for state in states])))
# bugfix for Zotero imports: tracking_state is None not "NO"
transitions[qs.model.NONE] += qs.filter(tracking_state=None).count()
return transitions
def get_allowable_transition_states():
from curation.tracking import TrackingWorkflow
return dict([(target, source) for source, target in TrackingWorkflow.transitions])
def get_transition_labels():
from curation.tracking import TrackingWorkflow
return dict(Tracking.TYPE_CHOICES)
class SetTrackingStatus(BaseAction):
model = Citation
label = u'Set record tracking status'
default_value_field = forms.ChoiceField
default_value_field_kwargs = {
'choices': Tracking.TYPE_CHOICES,
'label': 'Set record tracking status',
'widget': forms.widgets.Select(attrs={'class': 'action-value'}),
}
extra_js = 'curation/js/bulktracking.js'
extra_fields = (
('info', forms.CharField, {'label': 'Tracking Info', 'required': False, 'widget': forms.widgets.TextInput(attrs={'class': 'form-control', 'part_of': 'SetTrackingStatus', 'required': False})}),
('notes', forms.CharField, {'label': 'Tracking Notes', 'required': False, 'widget': forms.widgets.Textarea(attrs={'class': 'form-control', 'part_of': 'SetTrackingStatus', 'required': False})}),
)
@staticmethod
def get_extra_data(queryset=None, **kwargs):
transition_counts = json.dumps(get_tracking_transition_counts(queryset))
allowable_states = json.dumps(get_allowable_transition_states())
transition_labels = json.dumps(get_transition_labels())
return """
var settrackingstatus_data = {
transition_counts: %s,
allowable_states: %s,
transition_labels: %s
}""" % (transition_counts, allowable_states, transition_labels)
def apply(self, user, filter_params_raw, value, info='', notes='', **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = bulk_change_tracking_state.delay(user.id, filter_params_raw, value, info, notes, task.id, type)
else:
result = bulk_change_tracking_state.delay(user.id, filter_params_raw, value, info, notes, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_explanation', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class ReindexCitation(BaseAction):
model = Citation
label = u'Reindex citations'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Reindex citations',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Reindex citations'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
result = ctasks.reindex_citations.delay(user.id, filter_params_raw, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('reindex_citations', value)
task.label = 'Reindexing citations: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class ReindexAuthorities(BaseAction):
model = Authority
label = u'Reindex authorities'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Reindex authorities',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Reindex authorities'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
result = atasks.reindex_authorities.delay(user.id, filter_params_raw, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('reindex_authorities', value)
task.label = 'Reindexing authorities: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class DeleteDuplicateAttributes(BaseAction):
model = Authority
label = u'Delete Duplicate Attributes'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Delete Duplicate Attributes',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Delete Duplicate Attributes'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
result = atasks.delete_duplicate_attributes.delay(user.id, filter_params_raw, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('delete_duplicate_attributes', value)
task.label = 'Deleting Duplicate Attributes: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
AVAILABLE_ACTIONS = [SetRecordStatus, SetRecordStatusExplanation, SetTrackingStatus, PrependToRecordHistory, StoreCreationDataToModel, ReindexCitation]
AVAILABLE_ACTIONS_AUTHORITY = [StoreCreationDataToModel, ReindexAuthorities, DeleteDuplicateAttributes] | isiscb/curation/actions.py | from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import zip
from builtins import object
from curation.tasks import update_instance, bulk_change_tracking_state, bulk_prepend_record_history, save_creation_to_citation
from django import forms
from django.http import QueryDict
from isisdata.models import *
import isisdata.tasks as dtasks
import curation.taskslib.citation_tasks as ctasks
import curation.taskslib.authority_tasks as atasks
from isisdata.filters import CitationFilter
import json
# TODO: refactor these actions to use bulk apply methods and then explicitly
# trigger search indexing (or whatever other post-save actions are needed).
class BaseAction(object):
def __init__(self):
if hasattr(self, 'default_value_field'):
self.value_field = self.default_value_field
if hasattr(self, 'default_value_field_kwargs'):
self.value_field_kwargs = self.default_value_field_kwargs
if hasattr(self, 'extra'):
self.extra_fields = self.extra
def get_value_field(self, **kwargs):
self.value_field_kwargs.update(kwargs)
return self.value_field(**self.value_field_kwargs)
def get_extra_fields(self, **kwargs):
if hasattr(self, 'extra_fields'):
return [(name, field(**kwargs)) for name, field, kwargs in self.extra_fields]
return []
def _build_filter_label(filter_params_raw):
citation_filter = CitationFilter(QueryDict(filter_params_raw, mutable=True))
filter_form = citation_filter.form
filter_data = {}
if filter_form.is_valid():
filter_data = filter_form.cleaned_data
return ', '.join([ '%s: %s' % (key, value) for key, value in list(filter_data.items()) if value ])
class PrependToRecordHistory(BaseAction):
model = Citation
label = u'Update record history'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Prepend to record history',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = bulk_prepend_record_history.delay(user.id, filter_params_raw,
value, task.id, type)
else:
result = bulk_prepend_record_history.delay(user.id, filter_params_raw,
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_explanation', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class StoreCreationDataToModel(BaseAction):
model = Citation
label = u'Store creation data to citations'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Storing creation data to citations',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Storing creation data'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = save_creation_to_citation.delay(user.id, filter_params_raw,
value, task.id, type)
result = save_creation_to_citation.delay(user.id, filter_params_raw,
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('created_native', '')
task.label = 'Storing creator in citation for set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class SetRecordStatus(BaseAction):
model = Citation
label = u'Set record status'
default_value_field = forms.ChoiceField
default_value_field_kwargs = {
'choices': CuratedMixin.STATUS_CHOICES,
'label': 'Set record status',
'widget': forms.widgets.Select(attrs={'class': 'action-value'}),
}
def apply(self, user, filter_params_raw, value, **extra):
# We need this to exist first so that we can keep it up to date as the
# group of tasks is executed.
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_value',
value, task.id, type)
else:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_value',
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_value', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class SetRecordStatusExplanation(BaseAction):
model = Citation
label = u'Set record status explanation'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Set record status explanation',
'widget': forms.widgets.TextInput(attrs={'class': 'action-value'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_explanation',
value, task.id, type)
else:
result = dtasks.bulk_update_citations.delay(user.id,
filter_params_raw,
'record_status_explanation',
value, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_explanation', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
def get_tracking_transition_counts(qs):
states = list(zip(*qs.model.TRACKING_CHOICES))[0]
transitions = dict(list(zip(states, [qs.filter(tracking_state=state).count() for state in states])))
# bugfix for Zotero imports: tracking_state is None not "NO"
transitions[qs.model.NONE] += qs.filter(tracking_state=None).count()
return transitions
def get_allowable_transition_states():
from curation.tracking import TrackingWorkflow
return dict([(target, source) for source, target in TrackingWorkflow.transitions])
def get_transition_labels():
from curation.tracking import TrackingWorkflow
return dict(Tracking.TYPE_CHOICES)
class SetTrackingStatus(BaseAction):
model = Citation
label = u'Set record tracking status'
default_value_field = forms.ChoiceField
default_value_field_kwargs = {
'choices': Tracking.TYPE_CHOICES,
'label': 'Set record tracking status',
'widget': forms.widgets.Select(attrs={'class': 'action-value'}),
}
extra_js = 'curation/js/bulktracking.js'
extra_fields = (
('info', forms.CharField, {'label': 'Tracking Info', 'required': False, 'widget': forms.widgets.TextInput(attrs={'class': 'form-control', 'part_of': 'SetTrackingStatus', 'required': False})}),
('notes', forms.CharField, {'label': 'Tracking Notes', 'required': False, 'widget': forms.widgets.Textarea(attrs={'class': 'form-control', 'part_of': 'SetTrackingStatus', 'required': False})}),
)
@staticmethod
def get_extra_data(queryset=None, **kwargs):
transition_counts = json.dumps(get_tracking_transition_counts(queryset))
allowable_states = json.dumps(get_allowable_transition_states())
transition_labels = json.dumps(get_transition_labels())
return """
var settrackingstatus_data = {
transition_counts: %s,
allowable_states: %s,
transition_labels: %s
}""" % (transition_counts, allowable_states, transition_labels)
def apply(self, user, filter_params_raw, value, info='', notes='', **extra):
task = AsyncTask.objects.create()
type = extra['object_type'] if extra['object_type'] else None
if type:
result = bulk_change_tracking_state.delay(user.id, filter_params_raw, value, info, notes, task.id, type)
else:
result = bulk_change_tracking_state.delay(user.id, filter_params_raw, value, info, notes, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('record_status_explanation', value)
task.label = 'Updating set with filters: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class ReindexCitation(BaseAction):
model = Citation
label = u'Reindex citations'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Reindex citations',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Reindex citations'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
result = ctasks.reindex_citations.delay(user.id, filter_params_raw, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('reindex_citations', value)
task.label = 'Reindexing citations: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class ReindexAuthorities(BaseAction):
model = Authority
label = u'Reindex authorities'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Reindex authorities',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Reindex authorities'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
result = atasks.reindex_authorities.delay(user.id, filter_params_raw, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('reindex_authorities', value)
task.label = 'Reindexing authorities: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
class DeleteDuplicateAttributes(BaseAction):
model = Authority
label = u'Delete Duplicate Attributes'
default_value_field = forms.CharField
default_value_field_kwargs = {
'label': 'Delete Duplicate Attributes',
'widget': forms.widgets.Textarea(attrs={'class': 'action-value', 'readonly': True, 'initial': 'Delete Duplicate Attributes'}),
}
def apply(self, user, filter_params_raw, value, **extra):
task = AsyncTask.objects.create()
result = atasks.delete_duplicate_attributes.delay(user.id, filter_params_raw, task.id)
# We can use the AsyncResult's UUID to access this task later, e.g.
# to check the return value or task state.
task.async_uuid = result.id
task.value = ('delete_duplicate_attributes', value)
task.label = 'Deleting Duplicate Attributes: ' + _build_filter_label(filter_params_raw)
task.save()
return task.id
AVAILABLE_ACTIONS = [SetRecordStatus, SetRecordStatusExplanation, SetTrackingStatus, PrependToRecordHistory, StoreCreationDataToModel, ReindexCitation]
AVAILABLE_ACTIONS_AUTHORITY = [StoreCreationDataToModel, ReindexAuthorities, DeleteDuplicateAttributes] | 0.327561 | 0.139133 |
from django.db import connection, transaction
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from rest_framework.settings import api_settings
from rest_framework_recursive.fields import RecursiveField
from mayan.apps.documents.models import Document
from mayan.apps.documents.serializers import DocumentSerializer
from .models import Cabinet
class CabinetSerializer(serializers.ModelSerializer):
children = RecursiveField(
help_text=_('List of children cabinets.'), many=True, read_only=True
)
documents_count = serializers.SerializerMethodField(
help_text=_('Number of documents on this cabinet level.')
)
full_path = serializers.SerializerMethodField(
help_text=_(
'The name of this cabinet level appended to the names of its '
'ancestors.'
)
)
documents_url = serializers.HyperlinkedIdentityField(
help_text=_(
'URL of the API endpoint showing the list documents inside this '
'cabinet.'
), view_name='rest_api:cabinet-document-list'
)
parent_url = serializers.SerializerMethodField()
class Meta:
extra_kwargs = {
'url': {'view_name': 'rest_api:cabinet-detail'},
}
fields = (
'children', 'documents_count', 'documents_url', 'full_path', 'id',
'label', 'parent', 'parent_url', 'url'
)
model = Cabinet
def get_documents_count(self, obj):
return obj.get_document_count(user=self.context['request'].user)
def get_full_path(self, obj):
return obj.get_full_path()
def get_parent_url(self, obj):
if obj.parent:
return reverse(
'rest_api:cabinet-detail', args=(obj.parent.pk,),
format=self.context['format'],
request=self.context.get('request')
)
else:
return ''
class WritableCabinetSerializer(serializers.ModelSerializer):
documents_pk_list = serializers.CharField(
help_text=_(
'Comma separated list of document primary keys to add to this '
'cabinet.'
), required=False
)
# This is here because parent is optional in the model but the serializer
# sets it as required.
parent = serializers.PrimaryKeyRelatedField(
allow_null=True, queryset=Cabinet.objects.all(), required=False
)
class Meta:
fields = ('documents_pk_list', 'label', 'id', 'parent')
model = Cabinet
def _add_documents(self, documents_pk_list, instance):
instance.documents.add(
*Document.objects.filter(pk__in=documents_pk_list.split(','))
)
def create(self, validated_data):
documents_pk_list = validated_data.pop('documents_pk_list', '')
instance = super(WritableCabinetSerializer, self).create(validated_data)
if documents_pk_list:
self._add_documents(
documents_pk_list=documents_pk_list, instance=instance
)
return instance
def update(self, instance, validated_data):
documents_pk_list = validated_data.pop('documents_pk_list', '')
instance = super(WritableCabinetSerializer, self).update(
instance, validated_data
)
if documents_pk_list:
instance.documents.clear()
self._add_documents(
documents_pk_list=documents_pk_list, instance=instance
)
return instance
def run_validation(self, data=None):
# Copy data into a new dictionary since data is an immutable type
result = data.copy()
# Add None parent to keep validation from failing.
# This is here because parent is optional in the model but the serializer
# sets it as required.
result.setdefault('parent')
data = super(WritableCabinetSerializer, self).run_validation(result)
# Explicit validation of uniqueness of parent+label as the provided
# unique_together check in Meta is not working for all 100% cases
# when there is a FK in the unique_together tuple
# https://code.djangoproject.com/ticket/1751
with transaction.atomic():
if connection.vendor == 'oracle':
queryset = Cabinet.objects.filter(parent=data['parent'], label=data['label'])
else:
queryset = Cabinet.objects.select_for_update().filter(parent=data['parent'], label=data['label'])
if queryset.exists():
params = {
'model_name': _('Cabinet'),
'field_labels': _('Parent and Label')
}
raise serializers.ValidationError(
{
api_settings.NON_FIELD_ERRORS_KEY: [
_(
'%(model_name)s with this %(field_labels)s '
'already exists.'
) % params
],
},
)
return data
class CabinetDocumentSerializer(DocumentSerializer):
    """Document serializer that adds a cabinet scoped document URL.

    The extra ``cabinet_document_url`` points at the document as a member of
    the cabinet found in the serializer context; all inherited fields are
    read only.
    """
    cabinet_document_url = serializers.SerializerMethodField(
        help_text=_(
            'API URL pointing to a document in relation to the cabinet '
            'storing it. This URL is different than the canonical document '
            'URL.'
        )
    )

    class Meta(DocumentSerializer.Meta):
        fields = DocumentSerializer.Meta.fields + ('cabinet_document_url',)
        read_only_fields = DocumentSerializer.Meta.fields

    def get_cabinet_document_url(self, instance):
        """Return the per-cabinet URL for *instance*."""
        cabinet = self.context['cabinet']
        return reverse(
            'rest_api:cabinet-document',
            args=(cabinet.pk, instance.pk),
            request=self.context['request'],
            format=self.context['format']
        )
class NewCabinetDocumentSerializer(serializers.Serializer):
    """Serializer used to add documents to an existing cabinet.

    Accepts only a ``documents_pk_list`` string; the target cabinet is read
    from ``validated_data['cabinet']`` — presumably injected by the calling
    view, verify against the caller.
    """
    documents_pk_list = serializers.CharField(
        help_text=_(
            'Comma separated list of document primary keys to add to this '
            'cabinet.'
        )
    )
    def _add_documents(self, documents_pk_list, instance):
        # Resolve the comma separated PK string to Document objects and add
        # them to the cabinet's documents relation.
        instance.documents.add(
            *Document.objects.filter(pk__in=documents_pk_list.split(','))
        )
    def create(self, validated_data):
        """Add the listed documents to the cabinet in validated_data."""
        documents_pk_list = validated_data['documents_pk_list']
        if documents_pk_list:
            self._add_documents(
                documents_pk_list=documents_pk_list,
                instance=validated_data['cabinet']
            )
        return {'documents_pk_list': documents_pk_list}
from django.db import connection, transaction
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.reverse import reverse
from rest_framework.settings import api_settings
from rest_framework_recursive.fields import RecursiveField
from mayan.apps.documents.models import Document
from mayan.apps.documents.serializers import DocumentSerializer
from .models import Cabinet
class CabinetSerializer(serializers.ModelSerializer):
    """Read serializer for cabinets: children, counts, paths and URLs."""
    children = RecursiveField(
        help_text=_('List of children cabinets.'), many=True, read_only=True
    )
    documents_count = serializers.SerializerMethodField(
        help_text=_('Number of documents on this cabinet level.')
    )
    full_path = serializers.SerializerMethodField(
        help_text=_(
            'The name of this cabinet level appended to the names of its '
            'ancestors.'
        )
    )
    documents_url = serializers.HyperlinkedIdentityField(
        help_text=_(
            'URL of the API endpoint showing the list documents inside this '
            'cabinet.'
        ), view_name='rest_api:cabinet-document-list'
    )
    parent_url = serializers.SerializerMethodField()

    class Meta:
        extra_kwargs = {
            'url': {'view_name': 'rest_api:cabinet-detail'},
        }
        fields = (
            'children', 'documents_count', 'documents_url', 'full_path', 'id',
            'label', 'parent', 'parent_url', 'url'
        )
        model = Cabinet

    def get_documents_count(self, obj):
        """Number of documents on this level, scoped to the request user."""
        return obj.get_document_count(user=self.context['request'].user)

    def get_full_path(self, obj):
        """Label of this cabinet prefixed with its ancestors' labels."""
        return obj.get_full_path()

    def get_parent_url(self, obj):
        """API detail URL of the parent cabinet, or '' for a root cabinet."""
        parent = obj.parent
        if not parent:
            return ''
        return reverse(
            'rest_api:cabinet-detail', args=(parent.pk,),
            format=self.context['format'],
            request=self.context.get('request')
        )
class WritableCabinetSerializer(serializers.ModelSerializer):
    """Serializer used to create and update cabinets.

    Accepts an optional ``documents_pk_list`` string of comma separated
    document primary keys; on create the documents are added to the new
    cabinet, on update they replace the cabinet's current document set.
    """
    documents_pk_list = serializers.CharField(
        help_text=_(
            'Comma separated list of document primary keys to add to this '
            'cabinet.'
        ), required=False
    )
    # This is here because parent is optional in the model but the serializer
    # sets it as required.
    parent = serializers.PrimaryKeyRelatedField(
        allow_null=True, queryset=Cabinet.objects.all(), required=False
    )

    class Meta:
        fields = ('documents_pk_list', 'label', 'id', 'parent')
        model = Cabinet

    def _add_documents(self, documents_pk_list, instance):
        """Add the documents given by a comma separated PK string."""
        instance.documents.add(
            *Document.objects.filter(pk__in=documents_pk_list.split(','))
        )

    def create(self, validated_data):
        """Create the cabinet, then link any requested documents to it."""
        documents_pk_list = validated_data.pop('documents_pk_list', '')
        instance = super(WritableCabinetSerializer, self).create(validated_data)
        if documents_pk_list:
            self._add_documents(
                documents_pk_list=documents_pk_list, instance=instance
            )
        return instance

    def update(self, instance, validated_data):
        """Update the cabinet; a given PK list replaces its documents."""
        documents_pk_list = validated_data.pop('documents_pk_list', '')
        instance = super(WritableCabinetSerializer, self).update(
            instance, validated_data
        )
        if documents_pk_list:
            # Replace, not append: clear existing memberships first.
            instance.documents.clear()
            self._add_documents(
                documents_pk_list=documents_pk_list, instance=instance
            )
        return instance

    def run_validation(self, data=None):
        """Validate input and enforce parent+label uniqueness explicitly.

        Raises:
            serializers.ValidationError: when another cabinet already has
                the same parent and label combination.
        """
        # Copy data into a new dictionary since data is an immutable type
        result = data.copy()
        # Add None parent to keep validation from failing.
        # This is here because parent is optional in the model but the
        # serializer sets it as required.
        result.setdefault('parent')
        data = super(WritableCabinetSerializer, self).run_validation(result)
        # Explicit validation of uniqueness of parent+label as the provided
        # unique_together check in Meta is not working for all 100% cases
        # when there is a FK in the unique_together tuple
        # https://code.djangoproject.com/ticket/1751
        with transaction.atomic():
            queryset = Cabinet.objects.filter(
                parent=data['parent'], label=data['label']
            )
            if connection.vendor != 'oracle':
                # Lock matching rows to close the race against a concurrent
                # duplicate insert; skipped on Oracle as in the original
                # vendor check — presumably a backend limitation, TODO confirm.
                queryset = queryset.select_for_update()
            if self.instance is not None:
                # Bug fix: when updating, exclude the cabinet being edited so
                # that re-saving it with an unchanged parent/label pair does
                # not raise a false uniqueness error.
                queryset = queryset.exclude(pk=self.instance.pk)
            if queryset.exists():
                params = {
                    'model_name': _('Cabinet'),
                    'field_labels': _('Parent and Label')
                }
                raise serializers.ValidationError(
                    {
                        api_settings.NON_FIELD_ERRORS_KEY: [
                            _(
                                '%(model_name)s with this %(field_labels)s '
                                'already exists.'
                            ) % params
                        ],
                    },
                )
        return data
class CabinetDocumentSerializer(DocumentSerializer):
    """Extends the document serializer with a cabinet relative URL."""
    cabinet_document_url = serializers.SerializerMethodField(
        help_text=_(
            'API URL pointing to a document in relation to the cabinet '
            'storing it. This URL is different than the canonical document '
            'URL.'
        )
    )

    class Meta(DocumentSerializer.Meta):
        fields = DocumentSerializer.Meta.fields + ('cabinet_document_url',)
        read_only_fields = DocumentSerializer.Meta.fields

    def get_cabinet_document_url(self, instance):
        """URL addressing *instance* as a member of the context cabinet."""
        context = self.context
        return reverse(
            'rest_api:cabinet-document',
            args=(context['cabinet'].pk, instance.pk),
            request=context['request'], format=context['format']
        )
class NewCabinetDocumentSerializer(serializers.Serializer):
    """Serializer for attaching documents to an existing cabinet.

    Only ``documents_pk_list`` is a declared field; the target cabinet is
    read from ``validated_data['cabinet']`` — presumably supplied by the
    calling view, verify against the caller.
    """
    documents_pk_list = serializers.CharField(
        help_text=_(
            'Comma separated list of document primary keys to add to this '
            'cabinet.'
        )
    )
    def _add_documents(self, documents_pk_list, instance):
        # Split the comma separated PK string, resolve to Document objects
        # and add them to the cabinet's documents relation.
        instance.documents.add(
            *Document.objects.filter(pk__in=documents_pk_list.split(','))
        )
    def create(self, validated_data):
        """Add the listed documents to the cabinet in validated_data."""
        documents_pk_list = validated_data['documents_pk_list']
        if documents_pk_list:
            self._add_documents(
                documents_pk_list=documents_pk_list,
                instance=validated_data['cabinet']
            )
        return {'documents_pk_list': documents_pk_list}
import sys
print("Running tests with Python: {}".format(sys.version))
import scalopus
import time
import os
import unittest
import threading
try:
from thread import get_ident as thread_ident
except ImportError:
from threading import get_ident as thread_ident
# This is the same as test_tracing, except that it uses the exposer.
class TracingTesterWithExposer(unittest.TestCase):
    """Tracing round-trip test that publishes data through a DefaultExposer."""

    def setUp(self):
        # Loopback transport keeps everything inside this process.
        self.factory = scalopus.transport.TransportLoopbackFactory()
        self.exposer = scalopus.common.DefaultExposer("MyPythonProcess", self.factory)
        # Start each test from an empty trace-name mapping.
        scalopus.lib.lib.test_helpers.clearTraceNames()

    def test_tracing(self):
        trace_point = scalopus.tracing.TraceContext("MyTraceContext", trace_id=1337)
        scalopus.general.setThreadName("MyTestThread")
        for _ in range(3):
            with trace_point:
                time.sleep(0.1)
            time.sleep(0.1)
        # add an extra manual mapping.
        scalopus.tracing.setTraceName(10, "Ten")
        # There should be exactly one discoverable loopback server; connect.
        discovered = self.factory.discover()
        self.assertEqual(len(discovered), 1)
        client = self.factory.connect(discovered[0])
        # The trace-id -> name mappings must be retrievable for this process.
        mapping_client = scalopus.lib.tracing.EndpointTraceMapping()
        client.addEndpoint(mapping_client)
        mappings = mapping_client.mapping()
        pid = os.getpid()
        self.assertIn(pid, mappings)
        self.assertDictEqual({1337: "MyTraceContext", 10: "Ten"}, mappings[pid])
        # The process and thread names must also have been stored.
        processinfo_client = scalopus.lib.general.EndpointProcessInfo()
        client.addEndpoint(processinfo_client)
        info = processinfo_client.processInfo()
        self.assertEqual(info.name, "MyPythonProcess")
        self.assertDictEqual({thread_ident(): "MyTestThread"}, info.threads)
if __name__ == '__main__':
    unittest.main()
import sys
print("Running tests with Python: {}".format(sys.version))
import scalopus
import time
import os
import unittest
import threading
try:
from thread import get_ident as thread_ident
except ImportError:
from threading import get_ident as thread_ident
# This is the same as test_tracing, except that it uses the exposer.
class TracingTesterWithExposer(unittest.TestCase):
    """Same flow as test_tracing, but exposes the data via DefaultExposer."""
    def setUp(self):
        # Loopback transport: server and client live in this process.
        self.factory = scalopus.transport.TransportLoopbackFactory()
        self.exposer = scalopus.common.DefaultExposer("MyPythonProcess", self.factory)
        # Reset trace names so earlier tests cannot leak mappings in.
        scalopus.lib.lib.test_helpers.clearTraceNames()
    def test_tracing(self):
        """Emit traces, then read the mappings back through a client."""
        trace_point = scalopus.tracing.TraceContext("MyTraceContext", trace_id=1337)
        scalopus.general.setThreadName("MyTestThread")
        for i in range(3):
            with trace_point:
                time.sleep(0.1)
            time.sleep(0.1)
        # add an extra manual mapping.
        scalopus.tracing.setTraceName(10, "Ten")
        # now try to retrieve as much as possible, first check if we can connect to the loopback server
        # and make a client connection.
        clients = self.factory.discover()
        self.assertEqual(len(clients), 1)
        client = self.factory.connect(clients[0])
        # check if the mappings were stored.
        mapping_client = scalopus.lib.tracing.EndpointTraceMapping()
        client.addEndpoint(mapping_client)
        mappings = mapping_client.mapping()
        pid = os.getpid()
        self.assertIn(pid, mappings)
        self.assertDictEqual({1337: "MyTraceContext", 10:"Ten"}, mappings[pid])
        # check if the process name was stored.
        processinfo_client = scalopus.lib.general.EndpointProcessInfo()
        client.addEndpoint(processinfo_client)
        info = processinfo_client.processInfo()
        self.assertEqual(info.name, "MyPythonProcess")
        self.assertDictEqual({thread_ident(): "MyTestThread"}, info.threads)
if __name__ == '__main__':
    unittest.main()