hexsha
stringlengths
40
40
size
int64
2
1.02M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
245
max_stars_repo_name
stringlengths
6
130
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
245
max_issues_repo_name
stringlengths
6
130
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
245
max_forks_repo_name
stringlengths
6
130
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
1.02M
avg_line_length
float64
1
417k
max_line_length
int64
1
987k
alphanum_fraction
float64
0
1
content_no_comment
stringlengths
0
1.01M
is_comment_constant_removed
bool
1 class
is_sharp_comment_removed
bool
1 class
f73bd153e1be469d4bf090142eeaf0c8c121dfd4
737
py
Python
paragen/modules/search/abstract_search.py
godweiyang/ParaGen
9665d1244ea38a41fc06b4e0a7f6411985e2221f
[ "Apache-2.0" ]
50
2022-01-18T07:25:46.000Z
2022-03-14T13:06:18.000Z
paragen/modules/search/abstract_search.py
JiangtaoFeng/ParaGen
509334bf16e3674e009bb9dc37ecc38ae3b5c977
[ "Apache-2.0" ]
2
2022-01-19T09:36:42.000Z
2022-02-23T07:16:02.000Z
paragen/modules/search/abstract_search.py
JiangtaoFeng/ParaGen
509334bf16e3674e009bb9dc37ecc38ae3b5c977
[ "Apache-2.0" ]
6
2022-01-19T09:28:53.000Z
2022-03-10T10:20:08.000Z
from torch.nn import Module class AbstractSearch(Module): """ AbstractSearch is search algorithm on original neural model to perform special inference. """ def __init__(self): super().__init__() self._mode = 'infer' def build(self, *args, **kwargs): """ Build search algorithm with task instance """ raise NotImplementedError def forward(self, *args, **kwargs): """ Process forward of search algorithm. """ raise NotImplementedError def reset(self, mode): """ Reset encoder and switch running mode Args: mode: running mode in [train, valid, infer] """ self._mode = mode
22.333333
93
0.576662
from torch.nn import Module class AbstractSearch(Module): def __init__(self): super().__init__() self._mode = 'infer' def build(self, *args, **kwargs): raise NotImplementedError def forward(self, *args, **kwargs): raise NotImplementedError def reset(self, mode): self._mode = mode
true
true
f73bd166cfd3612d52cefc09b1c71b0426f1c099
1,991
py
Python
stellar_sdk/operation/utils.py
bantalon/py-stellar-base
b452f0f92be0387c3e78c8149103978788d7ec0f
[ "Apache-2.0" ]
null
null
null
stellar_sdk/operation/utils.py
bantalon/py-stellar-base
b452f0f92be0387c3e78c8149103978788d7ec0f
[ "Apache-2.0" ]
null
null
null
stellar_sdk/operation/utils.py
bantalon/py-stellar-base
b452f0f92be0387c3e78c8149103978788d7ec0f
[ "Apache-2.0" ]
null
null
null
from decimal import Decimal from typing import Optional, Union from ..asset import Asset from ..exceptions import ValueError, TypeError from ..keypair import Keypair from ..muxed_account import MuxedAccount from ..price import Price from ..strkey import StrKey _LOWER_LIMIT = "0" _UPPER_LIMIT = "922337203685.4775807" _EXPONENT = 7 def check_source(source: Optional[str]) -> None: if source is not None: check_muxed_ed25519_account(source) def check_ed25519_public_key(public_key: str) -> None: Keypair.from_public_key(public_key) def check_muxed_ed25519_account(muxed_account: str) -> None: StrKey.decode_muxed_account(muxed_account) def check_asset_code(asset_code: str) -> None: Asset.check_if_asset_code_is_valid(asset_code) def check_price(price: Union[str, Decimal, Price]) -> None: if not ( isinstance(price, str) or isinstance(price, Decimal) or isinstance(price, Price) ): raise TypeError( "amount should be type of {}, {} or {}.".format(str, Decimal, Price) ) if isinstance(price, str) or isinstance(price, Decimal): check_amount(price) def check_amount(amount: Union[str, Decimal]) -> None: if not (isinstance(amount, str) or isinstance(amount, Decimal)): raise TypeError("amount should be type of {} or {}.".format(str, Decimal)) amount = Decimal(amount) if abs(amount.as_tuple().exponent) > _EXPONENT: raise ValueError( "Value of '{}' must have at most 7 digits after the decimal.".format(amount) ) if amount < Decimal(_LOWER_LIMIT) or amount > Decimal(_UPPER_LIMIT): raise ValueError( "Value of '{}' must represent a positive number " "and the max valid value is {}.".format(amount, _UPPER_LIMIT) ) def parse_mux_account_from_account(account: Union[str, MuxedAccount]) -> MuxedAccount: if isinstance(account, str): return MuxedAccount.from_account(account) return account
31.109375
88
0.696133
from decimal import Decimal from typing import Optional, Union from ..asset import Asset from ..exceptions import ValueError, TypeError from ..keypair import Keypair from ..muxed_account import MuxedAccount from ..price import Price from ..strkey import StrKey _LOWER_LIMIT = "0" _UPPER_LIMIT = "922337203685.4775807" _EXPONENT = 7 def check_source(source: Optional[str]) -> None: if source is not None: check_muxed_ed25519_account(source) def check_ed25519_public_key(public_key: str) -> None: Keypair.from_public_key(public_key) def check_muxed_ed25519_account(muxed_account: str) -> None: StrKey.decode_muxed_account(muxed_account) def check_asset_code(asset_code: str) -> None: Asset.check_if_asset_code_is_valid(asset_code) def check_price(price: Union[str, Decimal, Price]) -> None: if not ( isinstance(price, str) or isinstance(price, Decimal) or isinstance(price, Price) ): raise TypeError( "amount should be type of {}, {} or {}.".format(str, Decimal, Price) ) if isinstance(price, str) or isinstance(price, Decimal): check_amount(price) def check_amount(amount: Union[str, Decimal]) -> None: if not (isinstance(amount, str) or isinstance(amount, Decimal)): raise TypeError("amount should be type of {} or {}.".format(str, Decimal)) amount = Decimal(amount) if abs(amount.as_tuple().exponent) > _EXPONENT: raise ValueError( "Value of '{}' must have at most 7 digits after the decimal.".format(amount) ) if amount < Decimal(_LOWER_LIMIT) or amount > Decimal(_UPPER_LIMIT): raise ValueError( "Value of '{}' must represent a positive number " "and the max valid value is {}.".format(amount, _UPPER_LIMIT) ) def parse_mux_account_from_account(account: Union[str, MuxedAccount]) -> MuxedAccount: if isinstance(account, str): return MuxedAccount.from_account(account) return account
true
true
f73bd18b9f473cc357560a995441df733d0f54c0
6,550
py
Python
datasets.py
smarsu/mtcnn
98c3839e250b18c310efa920bc6289a70379f07d
[ "MIT" ]
null
null
null
datasets.py
smarsu/mtcnn
98c3839e250b18c310efa920bc6289a70379f07d
[ "MIT" ]
null
null
null
datasets.py
smarsu/mtcnn
98c3839e250b18c310efa920bc6289a70379f07d
[ "MIT" ]
null
null
null
# -------------------------------------------------------- # Face Datasets # Licensed under The MIT License [see LICENSE for details] # Copyright 2019 smarsu. All Rights Reserved. # -------------------------------------------------------- import os.path as osp import numpy as np class Dataset(object): """The base class of dataset. self._train_datas = [n, str] self._train_labels = [n, list of box] """ def __init__(self): pass @property def size(self): """Return the number of train datas.""" raise NotImplementedError def train_datas_debug(self, batch_size): """Yield batch size train datas per step. Train datas should be shuffled. Args: batch_size: int, > 0 """ if not isinstance(batch_size, int): raise ValueError('In Dataset, batch_size should be int, get ' '{}'.format(type(batch_size))) if batch_size <= 0: raise ValueError('In Dataset, batch_size should larger equal to ' '1, get {}'.format(batch_size)) indices = list(range(batch_size)) datas = [] # for label, we have box and landmark which is 0. datas.append([self._train_datas[:batch_size], self._train_labels[:batch_size]]) return datas def train_datas(self, batch_size): """Yield batch size train datas per step. Train datas should be shuffled. Args: batch_size: int, > 0 """ if not isinstance(batch_size, int): raise ValueError('In Dataset, batch_size should be int, get ' '{}'.format(type(batch_size))) if batch_size <= 0: raise ValueError('In Dataset, batch_size should larger equal to ' '1, get {}'.format(batch_size)) indices = list(range(self.size)) np.random.shuffle(indices) epoch_size = self.size // batch_size * batch_size self._train_datas = self._train_datas[indices][:epoch_size] # [epoch_size, ...] self._train_labels = self._train_labels[indices][:epoch_size] # [epoch_size, ...] datas = [] for i in range(self.size // batch_size): # for label, we have box and landmark which is 0. 
datas.append([self._train_datas[i*batch_size:(i+1)*batch_size], self._train_labels[i*batch_size:(i+1)*batch_size]]) return datas def merge(self, other): """Merge the other datas to self. Args: other: Dataset """ self._train_datas = np.concatenate( [self._train_datas, other._train_datas], 0) self._train_labels = np.concatenate( [self._train_labels, other._train_labels], 0) class WiderFace(Dataset): def __init__(self, train_image_path, label_path, value_image_path=None, test_image_path=None): """ TODO(smarsu): Add way to read `value_image_path` and `test_image_path`. Add way to read `value_label_path` and `test_label_path`. Args: train_image_path: str, the path of train images. label_path: str """ self._data_map = {} self.train_image_path = train_image_path self.label_path = label_path self.train_label_path = self.label_path self._train_datas, self._train_labels = self._read_train_datas() @property def size(self): """Return the number of train datas. Assert the size of self._train_datas and self._train_labels is equal. """ return len(self._train_datas) def data_map(self, key): """""" if key not in self._data_map: raise KeyError('{} not in the data map.'.format(key)) return self._data_map[key] def _real_image_path(self, path): """Get real path of image. self.train_image_path + '/' + path Args: path: str, the image name(id) of labels. """ return osp.join(self.train_image_path, path) def _read_train_datas(self): """The special way to read wider face labels. Args: label_path: str, """ with open(self.train_label_path, 'r') as fb: lines = fb.readlines() return self._parse_raw_labels(lines) def _parse_raw_labels(self, lines): """Parse raw str lines to python object. 
Args: lines: list of str, with the structure of File name Number of bounding box x1, y1, w, h, blur, expression, illumination, invalid, occlusion, pose Returns: images: numpy array, [n], image paths labels: numpy array, [n, 4], [x1, y1, x2, y2] """ images = [] labels = [] idx = 0 while idx < len(lines): image_path = lines[idx].strip() images.append(self._real_image_path(image_path)) idx += 1 num = int(lines[idx]) idx += 1 labels_ = [] for _ in range(num): x1, y1, w, h, blur, expression, illumination, invalid, \ occlusion, pose = [int(v) for v in lines[idx].strip().split()] x2, y2 = x1 + w - 1, y1 + h - 1 # -1 to get the read x2, y2 labels_.append([x1, y1, x2, y2]) idx += 1 labels.append(np.array(labels_)) self._data_map[self._real_image_path(image_path)] = np.array(labels_) return np.array(images), np.array(labels) if __name__ == '__main__': import time # Test wider face dataset wider = WiderFace('/datasets/wider/images', '/datasets/wider/wider_face_split/wider_face_train_bbx_gt.txt') t1 = time.time() for data, label in wider.train_datas(32): print(data, label) t2 = time.time() print('Time for read wider dataset:', t2 - t1) # 2.467153787612915s with `print` print(type(wider._train_datas)) print(type(wider._train_labels))
32.26601
90
0.528244
import os.path as osp import numpy as np class Dataset(object): def __init__(self): pass @property def size(self): raise NotImplementedError def train_datas_debug(self, batch_size): if not isinstance(batch_size, int): raise ValueError('In Dataset, batch_size should be int, get ' '{}'.format(type(batch_size))) if batch_size <= 0: raise ValueError('In Dataset, batch_size should larger equal to ' '1, get {}'.format(batch_size)) indices = list(range(batch_size)) datas = [] datas.append([self._train_datas[:batch_size], self._train_labels[:batch_size]]) return datas def train_datas(self, batch_size): if not isinstance(batch_size, int): raise ValueError('In Dataset, batch_size should be int, get ' '{}'.format(type(batch_size))) if batch_size <= 0: raise ValueError('In Dataset, batch_size should larger equal to ' '1, get {}'.format(batch_size)) indices = list(range(self.size)) np.random.shuffle(indices) epoch_size = self.size // batch_size * batch_size self._train_datas = self._train_datas[indices][:epoch_size] self._train_labels = self._train_labels[indices][:epoch_size] datas = [] for i in range(self.size // batch_size): datas.append([self._train_datas[i*batch_size:(i+1)*batch_size], self._train_labels[i*batch_size:(i+1)*batch_size]]) return datas def merge(self, other): self._train_datas = np.concatenate( [self._train_datas, other._train_datas], 0) self._train_labels = np.concatenate( [self._train_labels, other._train_labels], 0) class WiderFace(Dataset): def __init__(self, train_image_path, label_path, value_image_path=None, test_image_path=None): self._data_map = {} self.train_image_path = train_image_path self.label_path = label_path self.train_label_path = self.label_path self._train_datas, self._train_labels = self._read_train_datas() @property def size(self): return len(self._train_datas) def data_map(self, key): if key not in self._data_map: raise KeyError('{} not in the data map.'.format(key)) return self._data_map[key] def _real_image_path(self, path): return 
osp.join(self.train_image_path, path) def _read_train_datas(self): with open(self.train_label_path, 'r') as fb: lines = fb.readlines() return self._parse_raw_labels(lines) def _parse_raw_labels(self, lines): images = [] labels = [] idx = 0 while idx < len(lines): image_path = lines[idx].strip() images.append(self._real_image_path(image_path)) idx += 1 num = int(lines[idx]) idx += 1 labels_ = [] for _ in range(num): x1, y1, w, h, blur, expression, illumination, invalid, \ occlusion, pose = [int(v) for v in lines[idx].strip().split()] x2, y2 = x1 + w - 1, y1 + h - 1 labels_.append([x1, y1, x2, y2]) idx += 1 labels.append(np.array(labels_)) self._data_map[self._real_image_path(image_path)] = np.array(labels_) return np.array(images), np.array(labels) if __name__ == '__main__': import time wider = WiderFace('/datasets/wider/images', '/datasets/wider/wider_face_split/wider_face_train_bbx_gt.txt') t1 = time.time() for data, label in wider.train_datas(32): print(data, label) t2 = time.time() print('Time for read wider dataset:', t2 - t1) print(type(wider._train_datas)) print(type(wider._train_labels))
true
true
f73bd1bb0270072247ca3475da9fed4222ddf23e
164
py
Python
learning/drafts/random_nums.py
medale/pyschool
2bec74d602e3d323e6645c9ec661086cfa076037
[ "Apache-2.0" ]
null
null
null
learning/drafts/random_nums.py
medale/pyschool
2bec74d602e3d323e6645c9ec661086cfa076037
[ "Apache-2.0" ]
null
null
null
learning/drafts/random_nums.py
medale/pyschool
2bec74d602e3d323e6645c9ec661086cfa076037
[ "Apache-2.0" ]
null
null
null
import random def random_nums(): random_float = random.random() random_int = random.randint(1, 100) random_elem = random.choice(['heads', 'tails'])
16.4
51
0.670732
import random def random_nums(): random_float = random.random() random_int = random.randint(1, 100) random_elem = random.choice(['heads', 'tails'])
true
true
f73bd205e0400dad8111ce0cd70ef44926b2fd7f
4,930
py
Python
examples/learning_graph_embedding_and_predicting.py
YannCabanes/geomstats
ce3f4bab6cd59c2f071371a46e336086771d0493
[ "MIT" ]
743
2018-05-23T02:23:29.000Z
2022-03-29T22:59:22.000Z
examples/learning_graph_embedding_and_predicting.py
YannCabanes/geomstats
ce3f4bab6cd59c2f071371a46e336086771d0493
[ "MIT" ]
1,119
2018-05-15T05:29:38.000Z
2022-03-31T18:27:02.000Z
examples/learning_graph_embedding_and_predicting.py
YannCabanes/geomstats
ce3f4bab6cd59c2f071371a46e336086771d0493
[ "MIT" ]
159
2018-05-23T17:49:24.000Z
2022-03-30T16:44:47.000Z
"""Learning embedding of graph using Poincare Ball Model.""" import matplotlib.patches as mpatches import matplotlib.pyplot as plt import geomstats.backend as gs import geomstats.visualization as visualization from geomstats.datasets.prepare_graph_data import HyperbolicEmbedding from geomstats.datasets.utils import load_karate_graph from geomstats.learning.kmeans import RiemannianKMeans from geomstats.learning.kmedoids import RiemannianKMedoids def main(): """Learning Poincaré graph embedding. Learns Poincaré Ball embedding by using Riemannian gradient descent algorithm. Then K-means is applied to learn labels of each data sample. """ gs.random.seed(1234) karate_graph = load_karate_graph() hyperbolic_embedding = HyperbolicEmbedding(max_epochs=3) embeddings = hyperbolic_embedding.embed(karate_graph) colors = {1: "b", 2: "r"} group_1 = mpatches.Patch(color=colors[1], label="Group 1") group_2 = mpatches.Patch(color=colors[2], label="Group 2") circle = visualization.PoincareDisk(point_type="ball") _, ax = plt.subplots(figsize=(8, 8)) ax.axes.xaxis.set_visible(False) ax.axes.yaxis.set_visible(False) circle.set_ax(ax) circle.draw(ax=ax) for i_embedding, embedding in enumerate(embeddings): x_coords = embedding[0] y_coords = embedding[1] pt_id = i_embedding plt.scatter(x_coords, y_coords, c=colors[karate_graph.labels[pt_id][0]], s=150) ax.annotate(pt_id, (x_coords, y_coords)) plt.tick_params(which="both") plt.title("Poincare Ball Embedding of the Karate Club Network") plt.legend(handles=[group_1, group_2]) plt.show() n_clusters = 2 kmeans = RiemannianKMeans( metric=hyperbolic_embedding.manifold.metric, n_clusters=n_clusters, init="random", ) centroids = kmeans.fit(X=embeddings) labels = kmeans.predict(X=embeddings) colors = ["g", "c", "m"] circle = visualization.PoincareDisk(point_type="ball") _, ax2 = plt.subplots(figsize=(8, 8)) circle.set_ax(ax2) circle.draw(ax=ax2) ax2.axes.xaxis.set_visible(False) ax2.axes.yaxis.set_visible(False) group_1_predicted = 
mpatches.Patch(color=colors[0], label="Predicted Group 1") group_2_predicted = mpatches.Patch(color=colors[1], label="Predicted Group 2") group_centroids = mpatches.Patch(color=colors[2], label="Cluster centroids") for _ in range(n_clusters): for i_embedding, embedding in enumerate(embeddings): x_coords = embedding[0] y_coords = embedding[1] pt_id = i_embedding if labels[i_embedding] == 0: color = colors[0] else: color = colors[1] plt.scatter(x_coords, y_coords, c=color, s=150) ax2.annotate(pt_id, (x_coords, y_coords)) for _, centroid in enumerate(centroids): x_coords = centroid[0] y_coords = centroid[1] plt.scatter( x_coords, y_coords, c=colors[2], marker="*", s=150, ) plt.title("K-means applied to Karate club embedding") plt.legend(handles=[group_1_predicted, group_2_predicted, group_centroids]) plt.show() kmedoid = RiemannianKMedoids( metric=hyperbolic_embedding.manifold.metric, n_clusters=n_clusters, init="random", n_jobs=2, ) centroids = kmedoid.fit(data=embeddings, max_iter=100) labels = kmedoid.predict(data=embeddings) colors = ["g", "c", "m"] circle = visualization.PoincareDisk(point_type="ball") _, ax2 = plt.subplots(figsize=(8, 8)) circle.set_ax(ax2) circle.draw(ax=ax2) ax2.axes.xaxis.set_visible(False) ax2.axes.yaxis.set_visible(False) group_1_predicted = mpatches.Patch(color=colors[0], label="Predicted Group 1") group_2_predicted = mpatches.Patch(color=colors[1], label="Predicted Group 2") group_centroids = mpatches.Patch(color=colors[2], label="Cluster centroids") for _ in range(n_clusters): for i_embedding, embedding in enumerate(embeddings): x_coords = embedding[0] y_coords = embedding[1] pt_id = i_embedding if labels[i_embedding] == 0: color = colors[0] else: color = colors[1] plt.scatter(x_coords, y_coords, c=color, s=150) ax2.annotate(pt_id, (x_coords, y_coords)) for _, centroid in enumerate(centroids): x_coords = centroid[0] y_coords = centroid[1] plt.scatter( x_coords, y_coords, c=colors[2], marker="*", s=150, ) plt.title("K-Medoids applied to 
Karate club embedding") plt.legend(handles=[group_1_predicted, group_2_predicted, group_centroids]) plt.show() if __name__ == "__main__": main()
32.866667
87
0.65071
import matplotlib.patches as mpatches import matplotlib.pyplot as plt import geomstats.backend as gs import geomstats.visualization as visualization from geomstats.datasets.prepare_graph_data import HyperbolicEmbedding from geomstats.datasets.utils import load_karate_graph from geomstats.learning.kmeans import RiemannianKMeans from geomstats.learning.kmedoids import RiemannianKMedoids def main(): gs.random.seed(1234) karate_graph = load_karate_graph() hyperbolic_embedding = HyperbolicEmbedding(max_epochs=3) embeddings = hyperbolic_embedding.embed(karate_graph) colors = {1: "b", 2: "r"} group_1 = mpatches.Patch(color=colors[1], label="Group 1") group_2 = mpatches.Patch(color=colors[2], label="Group 2") circle = visualization.PoincareDisk(point_type="ball") _, ax = plt.subplots(figsize=(8, 8)) ax.axes.xaxis.set_visible(False) ax.axes.yaxis.set_visible(False) circle.set_ax(ax) circle.draw(ax=ax) for i_embedding, embedding in enumerate(embeddings): x_coords = embedding[0] y_coords = embedding[1] pt_id = i_embedding plt.scatter(x_coords, y_coords, c=colors[karate_graph.labels[pt_id][0]], s=150) ax.annotate(pt_id, (x_coords, y_coords)) plt.tick_params(which="both") plt.title("Poincare Ball Embedding of the Karate Club Network") plt.legend(handles=[group_1, group_2]) plt.show() n_clusters = 2 kmeans = RiemannianKMeans( metric=hyperbolic_embedding.manifold.metric, n_clusters=n_clusters, init="random", ) centroids = kmeans.fit(X=embeddings) labels = kmeans.predict(X=embeddings) colors = ["g", "c", "m"] circle = visualization.PoincareDisk(point_type="ball") _, ax2 = plt.subplots(figsize=(8, 8)) circle.set_ax(ax2) circle.draw(ax=ax2) ax2.axes.xaxis.set_visible(False) ax2.axes.yaxis.set_visible(False) group_1_predicted = mpatches.Patch(color=colors[0], label="Predicted Group 1") group_2_predicted = mpatches.Patch(color=colors[1], label="Predicted Group 2") group_centroids = mpatches.Patch(color=colors[2], label="Cluster centroids") for _ in range(n_clusters): for i_embedding, 
embedding in enumerate(embeddings): x_coords = embedding[0] y_coords = embedding[1] pt_id = i_embedding if labels[i_embedding] == 0: color = colors[0] else: color = colors[1] plt.scatter(x_coords, y_coords, c=color, s=150) ax2.annotate(pt_id, (x_coords, y_coords)) for _, centroid in enumerate(centroids): x_coords = centroid[0] y_coords = centroid[1] plt.scatter( x_coords, y_coords, c=colors[2], marker="*", s=150, ) plt.title("K-means applied to Karate club embedding") plt.legend(handles=[group_1_predicted, group_2_predicted, group_centroids]) plt.show() kmedoid = RiemannianKMedoids( metric=hyperbolic_embedding.manifold.metric, n_clusters=n_clusters, init="random", n_jobs=2, ) centroids = kmedoid.fit(data=embeddings, max_iter=100) labels = kmedoid.predict(data=embeddings) colors = ["g", "c", "m"] circle = visualization.PoincareDisk(point_type="ball") _, ax2 = plt.subplots(figsize=(8, 8)) circle.set_ax(ax2) circle.draw(ax=ax2) ax2.axes.xaxis.set_visible(False) ax2.axes.yaxis.set_visible(False) group_1_predicted = mpatches.Patch(color=colors[0], label="Predicted Group 1") group_2_predicted = mpatches.Patch(color=colors[1], label="Predicted Group 2") group_centroids = mpatches.Patch(color=colors[2], label="Cluster centroids") for _ in range(n_clusters): for i_embedding, embedding in enumerate(embeddings): x_coords = embedding[0] y_coords = embedding[1] pt_id = i_embedding if labels[i_embedding] == 0: color = colors[0] else: color = colors[1] plt.scatter(x_coords, y_coords, c=color, s=150) ax2.annotate(pt_id, (x_coords, y_coords)) for _, centroid in enumerate(centroids): x_coords = centroid[0] y_coords = centroid[1] plt.scatter( x_coords, y_coords, c=colors[2], marker="*", s=150, ) plt.title("K-Medoids applied to Karate club embedding") plt.legend(handles=[group_1_predicted, group_2_predicted, group_centroids]) plt.show() if __name__ == "__main__": main()
true
true
f73bd25ec6cf4c0e5139d31719717efc84f6ced8
5,798
py
Python
tests/test_config_parser.py
songchenwen/icloud-drive-docker
7188dfbcc34e29ddbeeb1324c62ea77bed8f0576
[ "BSD-3-Clause" ]
null
null
null
tests/test_config_parser.py
songchenwen/icloud-drive-docker
7188dfbcc34e29ddbeeb1324c62ea77bed8f0576
[ "BSD-3-Clause" ]
null
null
null
tests/test_config_parser.py
songchenwen/icloud-drive-docker
7188dfbcc34e29ddbeeb1324c62ea77bed8f0576
[ "BSD-3-Clause" ]
null
null
null
__author__ = 'Mandar Patil (mandarons@pm.me)' import os import unittest from src import config_parser, constants class TestConfigParser(unittest.TestCase): def setUp(self) -> None: pass def tearDown(self) -> None: pass def test_read_config_valids(self): # Default config path self.assertIsNotNone(config_parser.read_config()) # Overridden config path self.assertIsNotNone(config_parser.read_config(config_path=constants.DEFAULT_CONFIG_FILE_PATH)) def test_read_config_invalids(self): # Invalid config path self.assertIsNone(config_parser.read_config(config_path='invalid/path')) # None config path self.assertIsNone(config_parser.read_config(config_path=None)) def test_get_sync_interval_valids(self): # Given sync interval config = config_parser.read_config() self.assertEqual(config['settings']['sync_interval'], config_parser.get_sync_interval(config=config)) # Default sync interval del config['settings']['sync_interval'] self.assertEqual(constants.DEFAULT_SYNC_INTERVAL_SEC, config_parser.get_sync_interval(config=config)) def test_get_sync_interval_invalids(self): # None config self.assertEqual(constants.DEFAULT_SYNC_INTERVAL_SEC, config_parser.get_sync_interval(config=None)) def test_prepare_destination_valids(self): config = config_parser.read_config() # Given destination actual = config_parser.prepare_destination(config=config) self.assertEqual(os.path.abspath(config['settings']['destination']), actual) self.assertTrue(os.path.exists(actual)) self.assertTrue(os.path.isdir(actual)) os.rmdir(actual) # Default destination del config['settings']['destination'] actual = config_parser.prepare_destination(config=config) self.assertEqual(os.path.abspath(constants.DEFAULT_DRIVE_DESTINATION), actual) self.assertTrue(os.path.exists(actual)) self.assertTrue(os.path.isdir(actual)) os.rmdir(actual) def test_prepare_destination_invalids(self): # None config actual = config_parser.prepare_destination(config=None) self.assertEqual(os.path.abspath(constants.DEFAULT_DRIVE_DESTINATION), 
actual) self.assertTrue(os.path.exists(actual)) self.assertTrue(os.path.isdir(actual)) os.rmdir(actual) def test_get_username_valids(self): config = config_parser.read_config() # Given username self.assertEqual(config['credentials']['username'], config_parser.get_username(config=config)) def test_get_username_invalids(self): config = config_parser.read_config() # None config self.assertIsNone(config_parser.get_username(config=None)) # Empty username config['credentials']['username'] = '' self.assertIsNone(config_parser.get_username(config=config)) def test_get_remove_obsolete_valids(self): config = config_parser.read_config() config['settings']['remove_obsolete'] = True self.assertTrue(config_parser.get_remove_obsolete(config=config)) del config['settings']['remove_obsolete'] self.assertFalse(config_parser.get_remove_obsolete(config=config)) def test_get_remove_obsolete_invalids(self): self.assertFalse(config_parser.get_remove_obsolete(config=None)) def test_get_verbose_valids(self): config = config_parser.read_config() self.assertEqual(config['settings']['verbose'],config_parser.get_verbose(config=config)) config['settings']['verbose'] = True self.assertTrue(config_parser.get_verbose(config=config)) def test_get_verbose_invalids(self): config = config_parser.read_config() config['settings']['verbose'] = None self.assertFalse(config_parser.get_verbose(config=config)) del config['settings']['verbose'] self.assertFalse(config_parser.get_verbose(config=config)) del config['settings'] self.assertFalse(config_parser.get_verbose(config=config)) def test_get_smtp_no_tls(self): config = {'smtp':{'no_tls':True}} self.assertTrue(config_parser.get_smtp_no_tls(config=config)) config = {'smtp':{'no_tls':False}} self.assertFalse(config_parser.get_smtp_no_tls(config=config)) del config['smtp']['no_tls'] self.assertFalse(config_parser.get_smtp_no_tls(config=config)) def test_get_smtp_email_valids(self): # Given email config = {'smtp':{'email':'user@test.com'}} 
self.assertEqual(config['smtp']['email'], config_parser.get_smtp_email(config=config)) def test_smtp_email_invalids(self): self.assertIsNone(config_parser.get_smtp_email(config=None)) def test_get_smtp_host_valids(self): # Given host config = {'smtp':{'host':'smtp.test.com'}} self.assertEqual(config['smtp']['host'], config_parser.get_smtp_host(config=config)) def test_smtp_host_invalids(self): self.assertIsNone(config_parser.get_smtp_host(config=None)) def test_get_smtp_port_valids(self): # Given port config = {'smtp':{'port':'587'}} self.assertEqual(config['smtp']['port'], config_parser.get_smtp_port(config=config)) def test_smtp_port_invalids(self): self.assertIsNone(config_parser.get_smtp_port(config=None)) def test_get_smtp_password_valids(self): # Given password config = {'smtp':{'password':'password'}} self.assertEqual(config['smtp']['password'], config_parser.get_smtp_password(config=config)) def test_smtp_password_invalids(self): self.assertIsNone(config_parser.get_smtp_password(config=None))
40.830986
109
0.703518
__author__ = 'Mandar Patil (mandarons@pm.me)' import os import unittest from src import config_parser, constants class TestConfigParser(unittest.TestCase): def setUp(self) -> None: pass def tearDown(self) -> None: pass def test_read_config_valids(self): self.assertIsNotNone(config_parser.read_config()) self.assertIsNotNone(config_parser.read_config(config_path=constants.DEFAULT_CONFIG_FILE_PATH)) def test_read_config_invalids(self): self.assertIsNone(config_parser.read_config(config_path='invalid/path')) self.assertIsNone(config_parser.read_config(config_path=None)) def test_get_sync_interval_valids(self): config = config_parser.read_config() self.assertEqual(config['settings']['sync_interval'], config_parser.get_sync_interval(config=config)) del config['settings']['sync_interval'] self.assertEqual(constants.DEFAULT_SYNC_INTERVAL_SEC, config_parser.get_sync_interval(config=config)) def test_get_sync_interval_invalids(self): self.assertEqual(constants.DEFAULT_SYNC_INTERVAL_SEC, config_parser.get_sync_interval(config=None)) def test_prepare_destination_valids(self): config = config_parser.read_config() actual = config_parser.prepare_destination(config=config) self.assertEqual(os.path.abspath(config['settings']['destination']), actual) self.assertTrue(os.path.exists(actual)) self.assertTrue(os.path.isdir(actual)) os.rmdir(actual) del config['settings']['destination'] actual = config_parser.prepare_destination(config=config) self.assertEqual(os.path.abspath(constants.DEFAULT_DRIVE_DESTINATION), actual) self.assertTrue(os.path.exists(actual)) self.assertTrue(os.path.isdir(actual)) os.rmdir(actual) def test_prepare_destination_invalids(self): actual = config_parser.prepare_destination(config=None) self.assertEqual(os.path.abspath(constants.DEFAULT_DRIVE_DESTINATION), actual) self.assertTrue(os.path.exists(actual)) self.assertTrue(os.path.isdir(actual)) os.rmdir(actual) def test_get_username_valids(self): config = config_parser.read_config() 
self.assertEqual(config['credentials']['username'], config_parser.get_username(config=config)) def test_get_username_invalids(self): config = config_parser.read_config() self.assertIsNone(config_parser.get_username(config=None)) config['credentials']['username'] = '' self.assertIsNone(config_parser.get_username(config=config)) def test_get_remove_obsolete_valids(self): config = config_parser.read_config() config['settings']['remove_obsolete'] = True self.assertTrue(config_parser.get_remove_obsolete(config=config)) del config['settings']['remove_obsolete'] self.assertFalse(config_parser.get_remove_obsolete(config=config)) def test_get_remove_obsolete_invalids(self): self.assertFalse(config_parser.get_remove_obsolete(config=None)) def test_get_verbose_valids(self): config = config_parser.read_config() self.assertEqual(config['settings']['verbose'],config_parser.get_verbose(config=config)) config['settings']['verbose'] = True self.assertTrue(config_parser.get_verbose(config=config)) def test_get_verbose_invalids(self): config = config_parser.read_config() config['settings']['verbose'] = None self.assertFalse(config_parser.get_verbose(config=config)) del config['settings']['verbose'] self.assertFalse(config_parser.get_verbose(config=config)) del config['settings'] self.assertFalse(config_parser.get_verbose(config=config)) def test_get_smtp_no_tls(self): config = {'smtp':{'no_tls':True}} self.assertTrue(config_parser.get_smtp_no_tls(config=config)) config = {'smtp':{'no_tls':False}} self.assertFalse(config_parser.get_smtp_no_tls(config=config)) del config['smtp']['no_tls'] self.assertFalse(config_parser.get_smtp_no_tls(config=config)) def test_get_smtp_email_valids(self): config = {'smtp':{'email':'user@test.com'}} self.assertEqual(config['smtp']['email'], config_parser.get_smtp_email(config=config)) def test_smtp_email_invalids(self): self.assertIsNone(config_parser.get_smtp_email(config=None)) def test_get_smtp_host_valids(self): config = 
{'smtp':{'host':'smtp.test.com'}} self.assertEqual(config['smtp']['host'], config_parser.get_smtp_host(config=config)) def test_smtp_host_invalids(self): self.assertIsNone(config_parser.get_smtp_host(config=None)) def test_get_smtp_port_valids(self): config = {'smtp':{'port':'587'}} self.assertEqual(config['smtp']['port'], config_parser.get_smtp_port(config=config)) def test_smtp_port_invalids(self): self.assertIsNone(config_parser.get_smtp_port(config=None)) def test_get_smtp_password_valids(self): config = {'smtp':{'password':'password'}} self.assertEqual(config['smtp']['password'], config_parser.get_smtp_password(config=config)) def test_smtp_password_invalids(self): self.assertIsNone(config_parser.get_smtp_password(config=None))
true
true
f73bd334cf9a9c49b25222410d6a13cf4e8cb04d
4,386
py
Python
codesearch/retrieval.py
chinchila/codesearch
eba4c08d9098cbbfbf9caf252c608b61a63d1d1a
[ "BSD-3-Clause" ]
30
2020-08-27T13:54:23.000Z
2022-03-17T03:41:05.000Z
codesearch/retrieval.py
pritam004/Neural-Code-Search
9b91ce40e8a727932302436c4de2eddb04ebb396
[ "BSD-3-Clause" ]
1
2021-06-07T11:22:52.000Z
2021-06-07T11:22:52.000Z
codesearch/retrieval.py
pritam004/Neural-Code-Search
9b91ce40e8a727932302436c4de2eddb04ebb396
[ "BSD-3-Clause" ]
4
2021-06-06T02:52:40.000Z
2022-01-26T07:05:58.000Z
# © 2020 Nokia # # Licensed under the BSD 3 Clause license # # SPDX-License-Identifier: BSD-3-Clause # ============================================ import functools import sys import numpy as np from codesearch.utils import Saveable from codesearch.data_config import DESCRIPTION_FIELD, CODE_FIELD class RetrievalModel(Saveable): def __init__(self): super().__init__() self._id2snippet = {} def query(self, query, n=10, projection=[], **kwargs): n = min(n, len(self._id2snippet)) if projection and "id" not in projection: projection = ["id"] + projection sims = self.similarities(query, **kwargs) return_sims = "score" in projection or not projection if return_sims: ranking_ids, sims = self._sims_to_ranking(sims, n, return_sims) else: ranking_ids = self._sims_to_ranking(sims, n, return_sims) sims = None ranking_snippets = [dict(self._id2snippet[id_]) for id_ in ranking_ids] if sims: for s, sim in zip(ranking_snippets, sims): s["score"] = sim if projection: ranking_snippets = [ {f: r[f] for f in projection} for r in ranking_snippets] return ranking_snippets def log_query_results(self, queries, relevant_ids=[], projection=[DESCRIPTION_FIELD, CODE_FIELD], log_file=None): if log_file: stdout = sys.stdout with open(log_file, "a") as f: sys.stdout = f self._log_query_results(queries, relevant_ids, projection) sys.stdout = stdout else: self._log_query_results(queries, relevant_ids, projection) def _log_query_results(self, queries, relevant_ids=[], projection=[DESCRIPTION_FIELD, CODE_FIELD]): line1 = "*" * 40 line2 = "-" * 40 line3 = "-" * 10 for q in queries: results = self.query(q, n=5, projection=projection) print(line1) print(f"QUERY: {q}") print(line1) print() for i, r in enumerate(results): annotation = "" if relevant_ids and r["id"] in relevant_ids: annotation = " - RELEVANT" print(line2) print(f"RANK {i+1}{annotation}") print(line2) for f in projection: if "\n" in str(r[f]): print(f"{f.upper()}:") print(r[f]) else: print(f"{f.upper()}: {r[f]}") print(line2) print() print() 
print() def _sims_to_ranking(self, sims, n, return_sims): idxs = np.argpartition(sims, -n)[-n:] top_idxs = idxs[np.argsort(sims[idxs])][::-1] snippet_ids = [self.get_snippetid(top_idx) for top_idx in top_idxs] if return_sims: top_sims = [sims[top_idx] for top_idx in top_idxs] return snippet_ids, top_sims return snippet_ids def query_batch(self, queries, n=10, return_sims=False, **kwargs): sims_batch = self.similarities_batch(tuple(queries), **kwargs) snippet_ids_batch = [] for sims in sims_batch: snippet_ids = self._sims_to_ranking(sims, n, return_sims) snippet_ids_batch.append(snippet_ids) return snippet_ids_batch def add_snippets(self, snippets): for s in snippets: self._id2snippet[s["id"]] = s def get_snippetid(self, idx): pass def similarities(self, query, **kwargs): pass def save(self, path): super().save(path) class RetrievalEnsemble(RetrievalModel): def __init__(self, retrieval_models, weights): super().__init__() self.retrieval_models = retrieval_models self.weights = weights def similarities(self, query, model_kwargs=None): N = len(self.snippets) sims = np.zeros(shape=(N,), dtype=np.float32) for i, model in enumerate(self.retrieval_models): weight = self.weights[i] if self.weights else 1. sims += (weight * model.similarities(query)) return sims
33.480916
117
0.553808
import functools import sys import numpy as np from codesearch.utils import Saveable from codesearch.data_config import DESCRIPTION_FIELD, CODE_FIELD class RetrievalModel(Saveable): def __init__(self): super().__init__() self._id2snippet = {} def query(self, query, n=10, projection=[], **kwargs): n = min(n, len(self._id2snippet)) if projection and "id" not in projection: projection = ["id"] + projection sims = self.similarities(query, **kwargs) return_sims = "score" in projection or not projection if return_sims: ranking_ids, sims = self._sims_to_ranking(sims, n, return_sims) else: ranking_ids = self._sims_to_ranking(sims, n, return_sims) sims = None ranking_snippets = [dict(self._id2snippet[id_]) for id_ in ranking_ids] if sims: for s, sim in zip(ranking_snippets, sims): s["score"] = sim if projection: ranking_snippets = [ {f: r[f] for f in projection} for r in ranking_snippets] return ranking_snippets def log_query_results(self, queries, relevant_ids=[], projection=[DESCRIPTION_FIELD, CODE_FIELD], log_file=None): if log_file: stdout = sys.stdout with open(log_file, "a") as f: sys.stdout = f self._log_query_results(queries, relevant_ids, projection) sys.stdout = stdout else: self._log_query_results(queries, relevant_ids, projection) def _log_query_results(self, queries, relevant_ids=[], projection=[DESCRIPTION_FIELD, CODE_FIELD]): line1 = "*" * 40 line2 = "-" * 40 line3 = "-" * 10 for q in queries: results = self.query(q, n=5, projection=projection) print(line1) print(f"QUERY: {q}") print(line1) print() for i, r in enumerate(results): annotation = "" if relevant_ids and r["id"] in relevant_ids: annotation = " - RELEVANT" print(line2) print(f"RANK {i+1}{annotation}") print(line2) for f in projection: if "\n" in str(r[f]): print(f"{f.upper()}:") print(r[f]) else: print(f"{f.upper()}: {r[f]}") print(line2) print() print() print() def _sims_to_ranking(self, sims, n, return_sims): idxs = np.argpartition(sims, -n)[-n:] top_idxs = idxs[np.argsort(sims[idxs])][::-1] 
snippet_ids = [self.get_snippetid(top_idx) for top_idx in top_idxs] if return_sims: top_sims = [sims[top_idx] for top_idx in top_idxs] return snippet_ids, top_sims return snippet_ids def query_batch(self, queries, n=10, return_sims=False, **kwargs): sims_batch = self.similarities_batch(tuple(queries), **kwargs) snippet_ids_batch = [] for sims in sims_batch: snippet_ids = self._sims_to_ranking(sims, n, return_sims) snippet_ids_batch.append(snippet_ids) return snippet_ids_batch def add_snippets(self, snippets): for s in snippets: self._id2snippet[s["id"]] = s def get_snippetid(self, idx): pass def similarities(self, query, **kwargs): pass def save(self, path): super().save(path) class RetrievalEnsemble(RetrievalModel): def __init__(self, retrieval_models, weights): super().__init__() self.retrieval_models = retrieval_models self.weights = weights def similarities(self, query, model_kwargs=None): N = len(self.snippets) sims = np.zeros(shape=(N,), dtype=np.float32) for i, model in enumerate(self.retrieval_models): weight = self.weights[i] if self.weights else 1. sims += (weight * model.similarities(query)) return sims
true
true
f73bd3c4156de1c614cdd8360761e6080312f66a
470
py
Python
876. Middle of the Linked List.py
rohitpatwa/leetcode
f4826763e8f154cac9134d53b154b8299acd39a8
[ "Xnet", "X11", "CECILL-B" ]
1
2020-07-15T20:48:27.000Z
2020-07-15T20:48:27.000Z
876. Middle of the Linked List.py
rohitpatwa/leetcode
f4826763e8f154cac9134d53b154b8299acd39a8
[ "Xnet", "X11", "CECILL-B" ]
null
null
null
876. Middle of the Linked List.py
rohitpatwa/leetcode
f4826763e8f154cac9134d53b154b8299acd39a8
[ "Xnet", "X11", "CECILL-B" ]
null
null
null
# Use slow pointer, fast pointer approach # Definition for singly-linked list. # class ListNode(object): # def __init__(self, x): # self.val = x # self.next = None class Solution(object): def middleNode(self, head): """ :type head: ListNode :rtype: ListNode """ fast = head while fast and fast.next: head = head.next fast = fast.next.next return head
22.380952
41
0.53617
class Solution(object): def middleNode(self, head): fast = head while fast and fast.next: head = head.next fast = fast.next.next return head
true
true
f73bd3e16dc98f8ade8ad548b8b995c01e0d8edf
11,059
py
Python
tests/v2/test_users.py
dennisdnyce/Questioner
7d981019786d20ee5352b3379b6c34f3636417c0
[ "MIT" ]
null
null
null
tests/v2/test_users.py
dennisdnyce/Questioner
7d981019786d20ee5352b3379b6c34f3636417c0
[ "MIT" ]
7
2019-01-06T16:12:44.000Z
2022-03-08T21:09:34.000Z
tests/v2/test_users.py
dennisdnyce/Questioner
7d981019786d20ee5352b3379b6c34f3636417c0
[ "MIT" ]
2
2019-01-09T07:04:49.000Z
2019-01-09T13:08:29.000Z
import os import json import unittest import pytest from app import create_app from app.api.v2.models.user_models import UserRegistration from app.api.v2.views.user_views import myuser from app.api.v2.utils.validators import validate_users from app.api.v2.models.database_test import QuestionerTestDatabase connector = QuestionerTestDatabase() class TestUserRegistration(unittest.TestCase): ''' This class represents the User Registration test case ''' def setUp(self): ''' define test variables and initialize the app ''' self.app = create_app(config='testing') self.client = self.app.test_client() connector.destroy_questioner_test_tables() connector.create_questioner_test_tables() self.user ={ 'firstname': 'dennis', 'lastname': 'juma', 'othername': 'wafula', 'phoneNumber': '0716714835', 'username': 'dennisd', 'isAdmin': 'True', 'email': 'wafula@gmail.com', 'password': 'thisispass', 'confirm_password': 'thisispass' } def test_user_registration_no_firstname(self): ''' tests that a user cannot signup without a firstname ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='', lastname='jumaa', othername='wafula', username="dennisdnycd", phoneNumber='0713714835', isAdmin='True', email="jumaspayd@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("firstname is required", response_msg["error"]) def test_user_registration_no_lastname(self): ''' tests that a user cannot signup without a lastname ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennis', lastname='', othername='wafula', username="dennisdnye", phoneNumber='0713714835', isAdmin='True', email="jumaspaye@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = 
json.loads(response.data.decode("UTF-8")) self.assertIn("lastname is required", response_msg["error"]) def test_user_registration_no_othername(self): ''' tests that a user cannot signup without othername ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennis', lastname='jumaa', othername='', username="dennisdnyf", phoneNumber='0713714835', isAdmin='True', email="jumaspayf@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("othername is required", response_msg["error"]) def test_user_registration_no_phone(self): ''' tests that a user cannot signup without a phone ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennis', lastname='jumaa', othername='wafula', username="dennisdnyg", phoneNumber='', isAdmin='True', email="jumaspayg@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("phoneNumber is required", response_msg["error"]) def test_user_registration_no_username(self): ''' tests that a user cannot signup without a username ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="", phoneNumber='0713714835', isAdmin='True', email="jumaspayh@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Username is required", response_msg["error"]) def test_user_registration_invalid_username(self): ''' tests that a user cannot signup with an invalid username ''' response = self.client.post("/api/v2/auth/signup", 
data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dny", phoneNumber='0713714835', isAdmin='True', email="jumaspayi@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid username, make sure its 5 to 12 characters long", response_msg["error"]) def test_user_registration_no_password(self): ''' tests that a user cannot signup without a password ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1j", phoneNumber='0713714835', isAdmin='True', email="jumaspayj@gmail.com", password="", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password is required", response_msg["error"]) def test_user_registration_no_password_confirmation(self): ''' tests that a user cannot signup without password confirmation ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1k", phoneNumber='0713714835', isAdmin='True', email="jumaspayk@gmail.com", password="thisispass", confirm_password="")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password confirmation is required", response_msg["error"]) def test_user_registration_password_mismatch(self): ''' tests that a user cannot signup without confirming registered password ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1l", phoneNumber='0713714835', isAdmin='True', email="jumaspayl@gmail.com", 
password="thisispass2", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password mismatch", response_msg["error"]) def test_user_registration_password_too_short(self): ''' tests that a user cannot signup with password length less than 8 characters ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnycem2", phoneNumber='0713714835', isAdmin='True', email="jumaspaym2@gmail.com", password="thisisp", confirm_password="thisisp")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password length should be atleast 8 characters long and atmost 12 characters long", response_msg["error"]) def test_user_registration_password_too_long(self): ''' tests that a user cannot signup with password length more than 12 characters ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyceu1", phoneNumber='0713714835', isAdmin='True', email="jumaspayu3n@gmail.com", password="thisispasswordlong", confirm_password="thisispasswordlong")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password length should be atleast 8 characters long and atmost 12 characters long", response_msg["error"]) def test_user_registration_no_email(self): ''' tests that a user cannot signup without an email address ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1op", phoneNumber='0713714835', isAdmin='True', email="", password="thisispass", confirm_password="thisispass")), 
content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Email is required", response_msg["error"]) def test_user_registration_invalid_email1(self): ''' tests that a user cannot signup with an invalid email address ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyceq1", phoneNumber='0713714835', isAdmin='True', email="jumaspay", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid email", response_msg["error"]) def test_user_registration_invalid_email2(self): ''' tests that a user cannot signup with an invalid email address ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnycer1", phoneNumber='0713714835', isAdmin='True', email="jumaspay3@gmail", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid email", response_msg["error"]) def test_user_registration_invalid_email3(self): ''' tests that a user cannot signup with an invalid email address ''' response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyces1", phoneNumber='0713714835', isAdmin='True', email="jumaspay3@gmail.", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid email", response_msg["error"]) def tearDown(self): ''' destroys the test variables after the tests 
finish executing ''' connector.destroy_questioner_test_tables() ''' make tests conveniently executable ''' if __name__ == '__main__': unittest.main()
67.432927
185
0.707116
import os import json import unittest import pytest from app import create_app from app.api.v2.models.user_models import UserRegistration from app.api.v2.views.user_views import myuser from app.api.v2.utils.validators import validate_users from app.api.v2.models.database_test import QuestionerTestDatabase connector = QuestionerTestDatabase() class TestUserRegistration(unittest.TestCase): def setUp(self): self.app = create_app(config='testing') self.client = self.app.test_client() connector.destroy_questioner_test_tables() connector.create_questioner_test_tables() self.user ={ 'firstname': 'dennis', 'lastname': 'juma', 'othername': 'wafula', 'phoneNumber': '0716714835', 'username': 'dennisd', 'isAdmin': 'True', 'email': 'wafula@gmail.com', 'password': 'thisispass', 'confirm_password': 'thisispass' } def test_user_registration_no_firstname(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='', lastname='jumaa', othername='wafula', username="dennisdnycd", phoneNumber='0713714835', isAdmin='True', email="jumaspayd@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("firstname is required", response_msg["error"]) def test_user_registration_no_lastname(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennis', lastname='', othername='wafula', username="dennisdnye", phoneNumber='0713714835', isAdmin='True', email="jumaspaye@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("lastname is required", response_msg["error"]) def test_user_registration_no_othername(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennis', 
lastname='jumaa', othername='', username="dennisdnyf", phoneNumber='0713714835', isAdmin='True', email="jumaspayf@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("othername is required", response_msg["error"]) def test_user_registration_no_phone(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennis', lastname='jumaa', othername='wafula', username="dennisdnyg", phoneNumber='', isAdmin='True', email="jumaspayg@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("phoneNumber is required", response_msg["error"]) def test_user_registration_no_username(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="", phoneNumber='0713714835', isAdmin='True', email="jumaspayh@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Username is required", response_msg["error"]) def test_user_registration_invalid_username(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dny", phoneNumber='0713714835', isAdmin='True', email="jumaspayi@gmail.com", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid username, make sure its 5 to 12 characters long", response_msg["error"]) def 
test_user_registration_no_password(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1j", phoneNumber='0713714835', isAdmin='True', email="jumaspayj@gmail.com", password="", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password is required", response_msg["error"]) def test_user_registration_no_password_confirmation(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1k", phoneNumber='0713714835', isAdmin='True', email="jumaspayk@gmail.com", password="thisispass", confirm_password="")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password confirmation is required", response_msg["error"]) def test_user_registration_password_mismatch(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1l", phoneNumber='0713714835', isAdmin='True', email="jumaspayl@gmail.com", password="thisispass2", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password mismatch", response_msg["error"]) def test_user_registration_password_too_short(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnycem2", phoneNumber='0713714835', isAdmin='True', email="jumaspaym2@gmail.com", password="thisisp", confirm_password="thisisp")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = 
json.loads(response.data.decode("UTF-8")) self.assertIn("Password length should be atleast 8 characters long and atmost 12 characters long", response_msg["error"]) def test_user_registration_password_too_long(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyceu1", phoneNumber='0713714835', isAdmin='True', email="jumaspayu3n@gmail.com", password="thisispasswordlong", confirm_password="thisispasswordlong")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Password length should be atleast 8 characters long and atmost 12 characters long", response_msg["error"]) def test_user_registration_no_email(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyce1op", phoneNumber='0713714835', isAdmin='True', email="", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 406) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Email is required", response_msg["error"]) def test_user_registration_invalid_email1(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyceq1", phoneNumber='0713714835', isAdmin='True', email="jumaspay", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid email", response_msg["error"]) def test_user_registration_invalid_email2(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnycer1", phoneNumber='0713714835', 
isAdmin='True', email="jumaspay3@gmail", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid email", response_msg["error"]) def test_user_registration_invalid_email3(self): response = self.client.post("/api/v2/auth/signup", data=json.dumps(dict(firstname='dennisa', lastname='jumaa', othername='wafula', username="dnyces1", phoneNumber='0713714835', isAdmin='True', email="jumaspay3@gmail.", password="thisispass", confirm_password="thisispass")), content_type="application/json") self.assertEqual(response.status_code, 422) response_msg = json.loads(response.data.decode("UTF-8")) self.assertIn("Invalid email", response_msg["error"]) def tearDown(self): connector.destroy_questioner_test_tables() if __name__ == '__main__': unittest.main()
true
true
f73bd557f268ab27de93314a2c8acfc4ab2c6627
32,036
py
Python
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/_packet_captures_operations.py
vbarbaresi/azure-sdk-for-python
397ba46c51d001ff89c66b170f5576cf8f49c05f
[ "MIT" ]
8
2021-01-13T23:44:08.000Z
2021-03-17T10:13:36.000Z
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/_packet_captures_operations.py
vbarbaresi/azure-sdk-for-python
397ba46c51d001ff89c66b170f5576cf8f49c05f
[ "MIT" ]
null
null
null
sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/_packet_captures_operations.py
vbarbaresi/azure-sdk-for-python
397ba46c51d001ff89c66b170f5576cf8f49c05f
[ "MIT" ]
null
null
null
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class PacketCapturesOperations(object): """PacketCapturesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2017_08_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def _create_initial( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str parameters, # type: "models.PacketCapture" **kwargs # type: Any ): # type: (...) -> "models.PacketCaptureResult" cls = kwargs.pop('cls', None) # type: ClsType["models.PacketCaptureResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json, text/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'PacketCapture') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response 
= self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('PacketCaptureResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} # type: ignore def begin_create( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str parameters, # type: "models.PacketCapture" **kwargs # type: Any ): # type: (...) -> LROPoller["models.PacketCaptureResult"] """Create and start a packet capture on the specified VM. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_watcher_name: The name of the network watcher. :type network_watcher_name: str :param packet_capture_name: The name of the packet capture session. :type packet_capture_name: str :param parameters: Parameters that define the create packet capture operation. :type parameters: ~azure.mgmt.network.v2017_08_01.models.PacketCapture :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either PacketCaptureResult or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_08_01.models.PacketCaptureResult] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.PacketCaptureResult"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._create_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('PacketCaptureResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} # type: ignore def get( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) -> "models.PacketCaptureResult" """Gets a packet capture session by name. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :param network_watcher_name: The name of the network watcher. :type network_watcher_name: str :param packet_capture_name: The name of the packet capture session. :type packet_capture_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: PacketCaptureResult, or the result of cls(response) :rtype: ~azure.mgmt.network.v2017_08_01.models.PacketCaptureResult :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PacketCaptureResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" accept = "application/json, text/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) deserialized = self._deserialize('PacketCaptureResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} # type: ignore def _delete_initial( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, 
None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} # type: ignore def begin_delete( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Deletes the specified packet capture session. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_watcher_name: The name of the network watcher. :type network_watcher_name: str :param packet_capture_name: The name of the packet capture session. :type packet_capture_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} # type: ignore def _stop_initial( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/stop'} # type: ignore def begin_stop( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Stops a specified packet capture session. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :param network_watcher_name: The name of the network watcher. :type network_watcher_name: str :param packet_capture_name: The name of the packet capture session. :type packet_capture_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._stop_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = 
{'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/stop'} # type: ignore def _get_status_initial( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) -> "models.PacketCaptureQueryStatusResult" cls = kwargs.pop('cls', None) # type: ClsType["models.PacketCaptureQueryStatusResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" accept = "application/json, text/json" # Construct URL url = self._get_status_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: 
deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _get_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/queryStatus'} # type: ignore def begin_get_status( self, resource_group_name, # type: str network_watcher_name, # type: str packet_capture_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller["models.PacketCaptureQueryStatusResult"] """Query the status of a running packet capture session. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_watcher_name: The name of the Network Watcher resource. :type network_watcher_name: str :param packet_capture_name: The name given to the packet capture session. :type packet_capture_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either PacketCaptureQueryStatusResult or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_08_01.models.PacketCaptureQueryStatusResult] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["models.PacketCaptureQueryStatusResult"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._get_status_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/queryStatus'} # type: ignore def list( self, resource_group_name, # type: str network_watcher_name, # type: str **kwargs # type: Any ): # type: (...) 
-> Iterable["models.PacketCaptureListResult"] """Lists all packet capture sessions within the specified resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param network_watcher_name: The name of the Network Watcher resource. :type network_watcher_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PacketCaptureListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_08_01.models.PacketCaptureListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PacketCaptureListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" accept = "application/json, text/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def 
extract_data(pipeline_response): deserialized = self._deserialize('PacketCaptureListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures'} # type: ignore
49.362096
240
0.667
from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models if TYPE_CHECKING: from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class PacketCapturesOperations(object): models = models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def _create_initial( self, resource_group_name, network_watcher_name, packet_capture_name, parameters, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json, text/json" url = self._create_initial.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(parameters, 'PacketCapture') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('PacketCaptureResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} def begin_create( self, resource_group_name, network_watcher_name, packet_capture_name, parameters, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._create_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('PacketCaptureResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = 
ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} def get( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" accept = "application/json, text/json" url = self.get.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('PacketCaptureResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} def _delete_initial( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" url = self._delete_initial.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} def begin_delete( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}'} def _stop_initial( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" url = self._stop_initial.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': 
self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/stop'} def begin_stop( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._stop_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( 
polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/stop'} def _get_status_initial( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" accept = "application/json, text/json" url = self._get_status_initial.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'packetCaptureName': self._serialize.url("packet_capture_name", packet_capture_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = 
self._deserialize('PacketCaptureQueryStatusResult', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _get_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/queryStatus'} def begin_get_status( self, resource_group_name, network_watcher_name, packet_capture_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._get_status_initial( resource_group_name=resource_group_name, network_watcher_name=network_watcher_name, packet_capture_name=packet_capture_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/queryStatus'} def list( self, 
resource_group_name, network_watcher_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2017-08-01" accept = "application/json, text/json" def prepare_request(next_link=None): header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: url = self.list.metadata['url'] path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('PacketCaptureListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures'}
true
true
f73bd73b96026f6ece6556dc2babadc1d614547e
2,001
py
Python
generator.py
Victorwz/fast-weights-pytorch
cef79631c9e41ae48fedbbc11e52be0cf084bfde
[ "Apache-2.0" ]
2
2020-04-22T20:01:40.000Z
2020-05-07T03:48:02.000Z
generator.py
Victorwz/fast-weights-pytorch
cef79631c9e41ae48fedbbc11e52be0cf084bfde
[ "Apache-2.0" ]
null
null
null
generator.py
Victorwz/fast-weights-pytorch
cef79631c9e41ae48fedbbc11e52be0cf084bfde
[ "Apache-2.0" ]
null
null
null
import numpy as np import random import pickle num_train = 60000 num_val = 10000 num_test = 10000 step_num = 4 elem_num = 26 + 10 + 1 x_train = np.zeros([num_train, step_num * 2 + 3, elem_num], dtype=np.float32) x_val = np.zeros([num_val, step_num * 2 + 3, elem_num], dtype=np.float32) x_test = np.zeros([num_test, step_num * 2 + 3, elem_num], dtype=np.float32) y_train = np.zeros([num_train, elem_num], dtype=np.float32) y_val = np.zeros([num_val, elem_num], dtype=np.float32) y_test = np.zeros([num_test, elem_num], dtype=np.float32) def get_one_hot(c): a = np.zeros([elem_num]) if ord('a') <= ord(c) <= ord('z'): a[ord(c) - ord('a')] = 1 elif ord('0') <= ord(c) <= ord('9'): a[ord(c) - ord('0') + 26] = 1 else: a[-1] = 1 return a def generate_one(): a = np.zeros([step_num * 2 + 3, elem_num]) d = {} st = '' for i in range(0, step_num): c = random.randint(0, 25) while d.has_key(c): c = random.randint(0, 25) b = random.randint(0, 9) d[c] = b s, t = chr(c + ord('a')), chr(b + ord('0')) st += s + t a[i*2] = get_one_hot(s) a[i*2+1] = get_one_hot(t) s = random.choice(d.keys()) t = chr(s + ord('a')) r = chr(d[s] + ord('0')) a[step_num * 2] = get_one_hot('?') a[step_num * 2 + 1] = get_one_hot('?') a[step_num * 2 + 2] = get_one_hot(t) st += '??' + t + r e = get_one_hot(r) return a, e if __name__ == '__main__': for i in range(0, num_train): x_train[i], y_train[i] = generate_one() for i in range(0, num_test): x_test[i], y_test[i] = generate_one() for i in range(0, num_val): x_val[i], y_val[i] = generate_one() d = { 'x_train': x_train, 'x_test': x_test, 'x_val': x_val, 'y_train': y_train, 'y_test': y_test, 'y_val': y_val } with open('associative-retrieval.pkl', 'wb') as f: pickle.dump(d, f, protocol=2)
25.987013
77
0.541229
import numpy as np import random import pickle num_train = 60000 num_val = 10000 num_test = 10000 step_num = 4 elem_num = 26 + 10 + 1 x_train = np.zeros([num_train, step_num * 2 + 3, elem_num], dtype=np.float32) x_val = np.zeros([num_val, step_num * 2 + 3, elem_num], dtype=np.float32) x_test = np.zeros([num_test, step_num * 2 + 3, elem_num], dtype=np.float32) y_train = np.zeros([num_train, elem_num], dtype=np.float32) y_val = np.zeros([num_val, elem_num], dtype=np.float32) y_test = np.zeros([num_test, elem_num], dtype=np.float32) def get_one_hot(c): a = np.zeros([elem_num]) if ord('a') <= ord(c) <= ord('z'): a[ord(c) - ord('a')] = 1 elif ord('0') <= ord(c) <= ord('9'): a[ord(c) - ord('0') + 26] = 1 else: a[-1] = 1 return a def generate_one(): a = np.zeros([step_num * 2 + 3, elem_num]) d = {} st = '' for i in range(0, step_num): c = random.randint(0, 25) while d.has_key(c): c = random.randint(0, 25) b = random.randint(0, 9) d[c] = b s, t = chr(c + ord('a')), chr(b + ord('0')) st += s + t a[i*2] = get_one_hot(s) a[i*2+1] = get_one_hot(t) s = random.choice(d.keys()) t = chr(s + ord('a')) r = chr(d[s] + ord('0')) a[step_num * 2] = get_one_hot('?') a[step_num * 2 + 1] = get_one_hot('?') a[step_num * 2 + 2] = get_one_hot(t) st += '??' + t + r e = get_one_hot(r) return a, e if __name__ == '__main__': for i in range(0, num_train): x_train[i], y_train[i] = generate_one() for i in range(0, num_test): x_test[i], y_test[i] = generate_one() for i in range(0, num_val): x_val[i], y_val[i] = generate_one() d = { 'x_train': x_train, 'x_test': x_test, 'x_val': x_val, 'y_train': y_train, 'y_test': y_test, 'y_val': y_val } with open('associative-retrieval.pkl', 'wb') as f: pickle.dump(d, f, protocol=2)
true
true
f73bd882a3ea329ad2d594ef0164253c0426c469
2,975
py
Python
DeepFashion2/model.py
lordtt13/Cybint-AI-projects
87ad57a2e9c432483c2256408dd15762b7897b56
[ "Apache-2.0" ]
2
2020-02-19T06:50:59.000Z
2020-02-19T06:51:04.000Z
DeepFashion2/model.py
lordtt13/Cybint-AI-projects
87ad57a2e9c432483c2256408dd15762b7897b56
[ "Apache-2.0" ]
null
null
null
DeepFashion2/model.py
lordtt13/Cybint-AI-projects
87ad57a2e9c432483c2256408dd15762b7897b56
[ "Apache-2.0" ]
1
2020-02-14T06:00:50.000Z
2020-02-14T06:00:50.000Z
from __future__ import print_function import config from config import * from utils import * # INPUT: # VGG16 - block5_pool (MaxPooling2D) (None, 7, 7, 512) # OUTPUT: # Branch1 - Class Prediction # Branch2 - IOU Prediction # NOTE: Both models in create_model_train() and create_model_predict() should be exaclty same def create_model(is_input_bottleneck, is_load_weights, input_shape, output_classes, optimizer='Adagrad', learn_rate=None, decay=0.0, momentum=0.0, activation='relu', dropout_rate=0.5): logging.debug('input_shape {}'.format(input_shape)) logging.debug('input_shape {}'.format(type(input_shape))) # Optimizer optimizer, learn_rate = get_optimizer(optimizer, learn_rate, decay, momentum) # Train if is_input_bottleneck is True: model_inputs = Input(shape=(input_shape)) common_inputs = model_inputs # Predict else: #input_shape = (img_width, img_height, 3) base_model = applications.VGG16(weights='imagenet', include_top=False, input_shape=input_shape) #base_model = applications.inception_v3.InceptionV3(include_top=False, weights='imagenet', input_shape=input_shape) logging.debug('base_model inputs {}'.format(base_model.input)) # shape=(?, 224, 224, 3) logging.debug('base_model outputs {}'.format(base_model.output)) # shape=(?, 7, 7, 512) model_inputs = base_model.input common_inputs = base_model.output ## Model Classification x = Flatten()(common_inputs) x = Dense(256, activation='tanh')(x) x = Dropout(dropout_rate)(x) predictions_class = Dense(output_classes, activation='softmax', name='predictions_class')(x) ## Model (Regression) IOU score x = Flatten()(common_inputs) x = Dense(256, activation='tanh')(x) x = Dropout(dropout_rate)(x) x = Dense(256, activation='tanh')(x) x = Dropout(dropout_rate)(x) predictions_iou = Dense(1, activation='sigmoid', name='predictions_iou')(x) ## Create Model model = Model(inputs=model_inputs, outputs=[predictions_class, predictions_iou]) # logging.debug('model summary {}'.format(model.summary())) ## Load weights if is_load_weights 
is True: model.load_weights(top_model_weights_path_load, by_name=True) ## Compile model.compile(optimizer=optimizer, loss={'predictions_class': 'sparse_categorical_crossentropy', 'predictions_iou': 'mean_squared_error'}, metrics=['accuracy'], loss_weights={'predictions_class': predictions_class_weight, 'predictions_iou': predictions_iou_weight}) logging.info('optimizer:{} learn_rate:{} decay:{} momentum:{} activation:{} dropout_rate:{}'.format( optimizer, learn_rate, decay, momentum, activation, dropout_rate)) return model
39.666667
184
0.65916
from __future__ import print_function import config from config import * from utils import * def create_model(is_input_bottleneck, is_load_weights, input_shape, output_classes, optimizer='Adagrad', learn_rate=None, decay=0.0, momentum=0.0, activation='relu', dropout_rate=0.5): logging.debug('input_shape {}'.format(input_shape)) logging.debug('input_shape {}'.format(type(input_shape))) optimizer, learn_rate = get_optimizer(optimizer, learn_rate, decay, momentum) if is_input_bottleneck is True: model_inputs = Input(shape=(input_shape)) common_inputs = model_inputs else: base_model = applications.VGG16(weights='imagenet', include_top=False, input_shape=input_shape) logging.debug('base_model inputs {}'.format(base_model.input)) logging.debug('base_model outputs {}'.format(base_model.output)) model_inputs = base_model.input common_inputs = base_model.output mon_inputs) x = Dense(256, activation='tanh')(x) x = Dropout(dropout_rate)(x) predictions_class = Dense(output_classes, activation='softmax', name='predictions_class')(x) ts) x = Dense(256, activation='tanh')(x) x = Dropout(dropout_rate)(x) x = Dense(256, activation='tanh')(x) x = Dropout(dropout_rate)(x) predictions_iou = Dense(1, activation='sigmoid', name='predictions_iou')(x) odel(inputs=model_inputs, outputs=[predictions_class, predictions_iou]) d_weights is True: model.load_weights(top_model_weights_path_load, by_name=True) l.compile(optimizer=optimizer, loss={'predictions_class': 'sparse_categorical_crossentropy', 'predictions_iou': 'mean_squared_error'}, metrics=['accuracy'], loss_weights={'predictions_class': predictions_class_weight, 'predictions_iou': predictions_iou_weight}) logging.info('optimizer:{} learn_rate:{} decay:{} momentum:{} activation:{} dropout_rate:{}'.format( optimizer, learn_rate, decay, momentum, activation, dropout_rate)) return model
true
true
f73bd9fef40d9af195f1e1eb4ae5b47154f56eb1
461
py
Python
sim/Simulator.py
adhocmaster/pyns
607feb56baf0900535130195163eac331e131a2e
[ "MIT" ]
1
2021-06-15T06:21:14.000Z
2021-06-15T06:21:14.000Z
sim/Simulator.py
adhocmaster/pyns
607feb56baf0900535130195163eac331e131a2e
[ "MIT" ]
null
null
null
sim/Simulator.py
adhocmaster/pyns
607feb56baf0900535130195163eac331e131a2e
[ "MIT" ]
1
2021-06-15T06:21:18.000Z
2021-06-15T06:21:18.000Z
from abc import ABC class Simulator(ABC): def __init__(self): self.stats = {} self.stats['dataInFlight'] = [] self.stats['dataInQueue'] = [] self.stats['packetsInFlight'] = [] self.stats['packetsInQueue'] = [] self.stats['queueSize'] = [] self.stats['packetsSent'] = [] self.stats['packetsAcked'] = [] self.stats['totalPacketsSent'] = [] self.stats['totalPacketsAcked'] = []
28.8125
44
0.553145
from abc import ABC class Simulator(ABC): def __init__(self): self.stats = {} self.stats['dataInFlight'] = [] self.stats['dataInQueue'] = [] self.stats['packetsInFlight'] = [] self.stats['packetsInQueue'] = [] self.stats['queueSize'] = [] self.stats['packetsSent'] = [] self.stats['packetsAcked'] = [] self.stats['totalPacketsSent'] = [] self.stats['totalPacketsAcked'] = []
true
true
f73bda976a8336d5699bf9fa8b6ceb17884743d6
5,187
py
Python
src/kids/cache/__init__.py
vaab/kids.cache
668f3b966877c4a0855d60e05cc3706cf37e4570
[ "BSD-2-Clause" ]
25
2015-08-03T08:39:49.000Z
2021-02-10T13:49:10.000Z
src/kids/cache/__init__.py
vaab/kids.cache
668f3b966877c4a0855d60e05cc3706cf37e4570
[ "BSD-2-Clause" ]
10
2015-02-02T02:12:04.000Z
2018-05-24T09:12:40.000Z
src/kids/cache/__init__.py
vaab/kids.cache
668f3b966877c4a0855d60e05cc3706cf37e4570
[ "BSD-2-Clause" ]
4
2016-11-28T17:59:13.000Z
2019-10-03T10:19:20.000Z
# Package placeholder import threading import functools import collections CacheInfo = collections.namedtuple( 'CacheInfo', 'type hits misses maxsize currsize') def make_key(obj, typed=True): args, kwargs = obj key = (tuple(args), tuple(sorted(kwargs.items()))) if typed: key += tuple(type(v) for v in args) key += tuple(type(v) for _, v in sorted(kwargs.items())) return key def is_hashable(obj): try: hash(obj) return True except Exception: ## pylint: disable-msg=W0703 return False def make_key_hippie(obj, typed=True): """Return hashable structure from non-hashable structure using hippie means dict and set are sorted and their content subjected to same hippie means. Note that the key identifies the current content of the structure. """ ftype = type if typed else lambda o: None if is_hashable(obj): ## DO NOT RETURN hash(obj), as hash collision would generate bad ## cache collisions. return obj, ftype(obj) ## should we try to convert to frozen{set,dict} to get the C ## hashing function speed ? But the convertion has a cost also. if isinstance(obj, set): obj = sorted(obj) if isinstance(obj, (list, tuple)): return tuple(make_key_hippie(e, typed) for e in obj) if isinstance(obj, dict): return tuple(sorted(((make_key_hippie(k, typed), make_key_hippie(v, typed)) for k, v in obj.items()))) raise ValueError( "%r can not be hashed. Try providing a custom key function." % obj) def hashing(typed=True, strict=False): """Returns a typed and/or strict key callable. A strict key callable will fail on traditionaly non-hashable object, while a strict=False hashing will use hippie hashing that can hash mutable object. A typed key callable will use type of each object in the hash and will distinguish with same hash but different type (example: 2 and 2.0). """ hashable_struct_producer = make_key if strict else make_key_hippie def _make_key(*args, **kwargs): ## use a list to avoid using hash of tuples... 
return hashable_struct_producer([list(args), kwargs], typed=typed) return _make_key SUPPORTED_DECORATOR = { property: lambda f: f.fget, classmethod: lambda f: f.__func__, staticmethod: lambda f: f.__func__, } def undecorate(func): """Returns the decorator and the undecorated function of given object.""" orig_call_wrapper = lambda x: x for call_wrapper, unwrap in SUPPORTED_DECORATOR.items(): if isinstance(func, call_wrapper): func = unwrap(func) orig_call_wrapper = call_wrapper break return orig_call_wrapper, func ## inspired by cachetools.decorators.cachedfunc def cachedfunc(cache_store, key=make_key_hippie): context = threading.RLock() ## stats lock def decorator(func): stats = [0, 0] wrapper, wrapped = undecorate(func) @functools.wraps(wrapped) def _cache_wrapper(*args, **kwargs): k = key(*args, **kwargs) with context: try: result = cache_store[k] stats[0] += 1 return result except KeyError: stats[1] += 1 result = wrapped(*args, **kwargs) with context: try: cache_store[k] = result except ValueError: ## Value 'too large', only casted with cachetools stores. pass return result ## mimic's python3 ``lru_cache`` facilities. def cache_info(): with context: hits, misses = stats maxsize = getattr(cache_store, "maxsize", None) currsize = getattr(cache_store, "currsize", len(cache_store)) return CacheInfo( type(cache_store).__name__, hits, misses, maxsize, currsize) def cache_clear(): with context: cache_store.clear() _cache_wrapper.cache_info = cache_info _cache_wrapper.cache_clear = cache_clear return wrapper(_cache_wrapper) return decorator def cache(*args, **kwargs): """The @cache decorator Compatility with using ``@cache()`` and ``@cache`` is managed in the current function. """ ## only one argument ? 
if len(args) == 1 and len(kwargs) == 0 and \ (callable(args[0]) or \ isinstance(args[0], tuple(SUPPORTED_DECORATOR.keys()))): return _cache_w_args(args[0]) return lambda f: _cache_w_args(f, *args, **kwargs) ## No locking mecanism because this should be implemented in the Cache ## objects if needed. def _cache_w_args(f, use=None, cache_factory=dict, key=None, strict=False, typed=False): if key is None: key = hashing(strict=strict, typed=typed) if use is None: use = cache_factory() return cachedfunc(cache_store=use, key=key)(f) hippie_hashing = hashing()
30.511765
79
0.614806
import threading import functools import collections CacheInfo = collections.namedtuple( 'CacheInfo', 'type hits misses maxsize currsize') def make_key(obj, typed=True): args, kwargs = obj key = (tuple(args), tuple(sorted(kwargs.items()))) if typed: key += tuple(type(v) for v in args) key += tuple(type(v) for _, v in sorted(kwargs.items())) return key def is_hashable(obj): try: hash(obj) return True except Exception: make_key_hippie(obj, typed=True): ftype = type if typed else lambda o: None if is_hashable(obj): j) if isinstance(obj, dict): return tuple(sorted(((make_key_hippie(k, typed), make_key_hippie(v, typed)) for k, v in obj.items()))) raise ValueError( "%r can not be hashed. Try providing a custom key function." % obj) def hashing(typed=True, strict=False): hashable_struct_producer = make_key if strict else make_key_hippie def _make_key(*args, **kwargs): t(args), kwargs], typed=typed) return _make_key SUPPORTED_DECORATOR = { property: lambda f: f.fget, classmethod: lambda f: f.__func__, staticmethod: lambda f: f.__func__, } def undecorate(func): orig_call_wrapper = lambda x: x for call_wrapper, unwrap in SUPPORTED_DECORATOR.items(): if isinstance(func, call_wrapper): func = unwrap(func) orig_call_wrapper = call_wrapper break return orig_call_wrapper, func ie): context = threading.RLock() corator(func): stats = [0, 0] wrapper, wrapped = undecorate(func) @functools.wraps(wrapped) def _cache_wrapper(*args, **kwargs): k = key(*args, **kwargs) with context: try: result = cache_store[k] stats[0] += 1 return result except KeyError: stats[1] += 1 result = wrapped(*args, **kwargs) with context: try: cache_store[k] = result except ValueError: h context: hits, misses = stats maxsize = getattr(cache_store, "maxsize", None) currsize = getattr(cache_store, "currsize", len(cache_store)) return CacheInfo( type(cache_store).__name__, hits, misses, maxsize, currsize) def cache_clear(): with context: cache_store.clear() _cache_wrapper.cache_info = cache_info 
_cache_wrapper.cache_clear = cache_clear return wrapper(_cache_wrapper) return decorator def cache(*args, **kwargs): ## only one argument ? if len(args) == 1 and len(kwargs) == 0 and \ (callable(args[0]) or \ isinstance(args[0], tuple(SUPPORTED_DECORATOR.keys()))): return _cache_w_args(args[0]) return lambda f: _cache_w_args(f, *args, **kwargs) ## No locking mecanism because this should be implemented in the Cache ## objects if needed. def _cache_w_args(f, use=None, cache_factory=dict, key=None, strict=False, typed=False): if key is None: key = hashing(strict=strict, typed=typed) if use is None: use = cache_factory() return cachedfunc(cache_store=use, key=key)(f) hippie_hashing = hashing()
true
true
f73bdbe602e188715f74357a12989a8527413446
2,383
py
Python
horovod/spark/mpi_run.py
hgao10/horovod_simulation
3678a7d1d424931f48af4b53ef3293073af71c2e
[ "Apache-2.0" ]
null
null
null
horovod/spark/mpi_run.py
hgao10/horovod_simulation
3678a7d1d424931f48af4b53ef3293073af71c2e
[ "Apache-2.0" ]
null
null
null
horovod/spark/mpi_run.py
hgao10/horovod_simulation
3678a7d1d424931f48af4b53ef3293073af71c2e
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Uber Technologies, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== import os import sys from horovod.run.mpi_run import mpi_run as hr_mpi_run from horovod.run.common.util import codec, secret def mpi_run(settings, nics, driver, env, stdout=None, stderr=None): """ Runs mpirun. :param settings: Settings for running MPI. Note: settings.num_proc and settings.hosts must not be None. :param nics: Interfaces to include by MPI. :param driver: The Spark driver service that tasks are connected to. :param env: Environment dictionary to use for running MPI. :param stdout: Stdout of the mpi process. Only used when settings.run_func_mode is True. :param stderr: Stderr of the mpi process. Only used when settings.run_func_mode is True. """ if env is None: env = os.environ.copy() # Pass secret key through the environment variables. 
env[secret.HOROVOD_SECRET_KEY] = codec.dumps_base64(settings.key) rsh_agent = (sys.executable, '-m', 'horovod.spark.driver.mpirun_rsh', codec.dumps_base64(driver.addresses()), codec.dumps_base64(settings)) settings.extra_mpi_args = ('{extra_mpi_args} -x NCCL_DEBUG=INFO -mca plm_rsh_agent "{rsh_agent}"' .format(extra_mpi_args=settings.extra_mpi_args if settings.extra_mpi_args else '', rsh_agent=' '.join(rsh_agent))) command = (sys.executable, '-m', 'horovod.spark.task.mpirun_exec_fn', codec.dumps_base64(driver.addresses()), codec.dumps_base64(settings)) hr_mpi_run(settings, nics, env, command, stdout=stdout, stderr=stderr)
43.327273
113
0.652539
import os import sys from horovod.run.mpi_run import mpi_run as hr_mpi_run from horovod.run.common.util import codec, secret def mpi_run(settings, nics, driver, env, stdout=None, stderr=None): if env is None: env = os.environ.copy() env[secret.HOROVOD_SECRET_KEY] = codec.dumps_base64(settings.key) rsh_agent = (sys.executable, '-m', 'horovod.spark.driver.mpirun_rsh', codec.dumps_base64(driver.addresses()), codec.dumps_base64(settings)) settings.extra_mpi_args = ('{extra_mpi_args} -x NCCL_DEBUG=INFO -mca plm_rsh_agent "{rsh_agent}"' .format(extra_mpi_args=settings.extra_mpi_args if settings.extra_mpi_args else '', rsh_agent=' '.join(rsh_agent))) command = (sys.executable, '-m', 'horovod.spark.task.mpirun_exec_fn', codec.dumps_base64(driver.addresses()), codec.dumps_base64(settings)) hr_mpi_run(settings, nics, env, command, stdout=stdout, stderr=stderr)
true
true
f73bdc331de92a67cf6fdaa7ca361d0d53cdf647
1,080
py
Python
widgy/generic/__init__.py
isopets/django-widgy
de6920b2b25699888eee1cc9b4d28f2aa5905ce1
[ "Apache-2.0" ]
168
2015-01-04T17:22:45.000Z
2022-01-28T09:53:35.000Z
widgy/generic/__init__.py
isopets/django-widgy
de6920b2b25699888eee1cc9b4d28f2aa5905ce1
[ "Apache-2.0" ]
82
2015-01-09T18:14:32.000Z
2020-10-08T18:13:07.000Z
widgy/generic/__init__.py
isopets/django-widgy
de6920b2b25699888eee1cc9b4d28f2aa5905ce1
[ "Apache-2.0" ]
61
2015-01-09T17:16:51.000Z
2021-07-03T08:52:27.000Z
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation class ProxyGenericForeignKey(GenericForeignKey): def __init__(self, *args, **kwargs): kwargs['for_concrete_model'] = False super(ProxyGenericForeignKey, self).__init__(*args, **kwargs) class ProxyGenericRelation(GenericRelation): def __init__(self, *args, **kwargs): kwargs['for_concrete_model'] = False super(ProxyGenericRelation, self).__init__(*args, **kwargs) class WidgyGenericForeignKey(ProxyGenericForeignKey): def __get__(self, instance, instance_type=None): try: return super(WidgyGenericForeignKey, self).__get__(instance, instance_type) except AttributeError: # The model for this content type couldn't be loaded. Use an # UnknownWidget instead. from widgy.models import UnknownWidget ret = UnknownWidget(getattr(instance, self.ct_field), getattr(instance, self.fk_field), instance) ret.node = instance ret.warn() return ret
38.571429
109
0.690741
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation class ProxyGenericForeignKey(GenericForeignKey): def __init__(self, *args, **kwargs): kwargs['for_concrete_model'] = False super(ProxyGenericForeignKey, self).__init__(*args, **kwargs) class ProxyGenericRelation(GenericRelation): def __init__(self, *args, **kwargs): kwargs['for_concrete_model'] = False super(ProxyGenericRelation, self).__init__(*args, **kwargs) class WidgyGenericForeignKey(ProxyGenericForeignKey): def __get__(self, instance, instance_type=None): try: return super(WidgyGenericForeignKey, self).__get__(instance, instance_type) except AttributeError: # UnknownWidget instead. from widgy.models import UnknownWidget ret = UnknownWidget(getattr(instance, self.ct_field), getattr(instance, self.fk_field), instance) ret.node = instance ret.warn() return ret
true
true
f73bdd24d80de4652f229c6057b882f0ba52dc44
4,108
py
Python
tests/test_flights/test_flight.py
PatrickCmd/flight-booking-application
a521932530d622c6eef46ea6a0f968c8267b622e
[ "MIT" ]
null
null
null
tests/test_flights/test_flight.py
PatrickCmd/flight-booking-application
a521932530d622c6eef46ea6a0f968c8267b622e
[ "MIT" ]
8
2020-02-12T01:01:51.000Z
2022-03-11T23:59:40.000Z
tests/test_flights/test_flight.py
PatrickCmd/flight-booking-application
a521932530d622c6eef46ea6a0f968c8267b622e
[ "MIT" ]
1
2019-08-13T19:04:11.000Z
2019-08-13T19:04:11.000Z
from tests.base_test_case import FlightBaseTestCase class TestFlight(FlightBaseTestCase): def test_create_flight(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) params = { "name": "Entebbe to Denver", "origin": "Entebbe", "destination": "Denver", "departure": "2019-08-02T08:00:00Z", "arrival": "2019-08-03T07:00:00Z", "aircraft": "Vintage", "status": "ON_TIME", "number": "KPQYWT72839", "capacity": 120, } response = self.client.post(self.flight_uri, params, format="json") self.assertEqual( response.status_code, 201, "Expected Response Code 201, received {0} instead.".format( response.status_code ), ) self.assertIn("Entebbe to Denver", str(response.data)) self.assertIn("capacity", str(response.data)) def test_create_flight_when_not_admin_user(self): login_uri = "/fbs-api/users/login/" params_user = {"email": "test@testuser.com", "password": "Testuser12344#"} self.set_authorization_header(login_uri, params_user) params = { "name": "Entebbe to Denver", "origin": "Entebbe", "destination": "Denver", "departure": "2019-08-02T08:00:00Z", "arrival": "2019-08-03T07:00:00Z", "aircraft": "Vintage", "status": "ON_TIME", "number": "KPQYWT72839", "capacity": 120, } response = self.client.post(self.flight_uri, params, format="json") self.assertEqual( response.status_code, 403, "Expected Response Code 403, received {0} instead.".format( response.status_code ), ) self.assertNotIn("Entebbe to Denver", str(response.data)) self.assertIn( "You don't have permissions to carry out this action.", str(response.data) ) def test_view_flights(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) response = self.client.get(self.flight_uri, format="json") self.assertEqual( response.status_code, 200, "Expected Response Code 200, 
received {0} instead.".format( response.status_code ), ) def test_get_flight_details(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) response = self.client.get(f"{self.flight_uri}{self.flight.pk}", format="json") self.assertEqual( response.status_code, 200, "Expected Response Code 200, received {0} instead.".format( response.status_code ), ) self.assertNotIn("Entebbe to Denver", str(response.data)) self.assertIn("test flight", str(response.data)) def test_get_non_existant_flight_details(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) pk = 10 response = self.client.get(f"{self.flight_uri}{pk}", format="json") self.assertEqual( response.status_code, 404, "Expected Response Code 404, received {0} instead.".format( response.status_code ), ) self.assertIn("Not found", str(response.data))
36.353982
87
0.56037
from tests.base_test_case import FlightBaseTestCase class TestFlight(FlightBaseTestCase): def test_create_flight(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) params = { "name": "Entebbe to Denver", "origin": "Entebbe", "destination": "Denver", "departure": "2019-08-02T08:00:00Z", "arrival": "2019-08-03T07:00:00Z", "aircraft": "Vintage", "status": "ON_TIME", "number": "KPQYWT72839", "capacity": 120, } response = self.client.post(self.flight_uri, params, format="json") self.assertEqual( response.status_code, 201, "Expected Response Code 201, received {0} instead.".format( response.status_code ), ) self.assertIn("Entebbe to Denver", str(response.data)) self.assertIn("capacity", str(response.data)) def test_create_flight_when_not_admin_user(self): login_uri = "/fbs-api/users/login/" params_user = {"email": "test@testuser.com", "password": "Testuser12344#"} self.set_authorization_header(login_uri, params_user) params = { "name": "Entebbe to Denver", "origin": "Entebbe", "destination": "Denver", "departure": "2019-08-02T08:00:00Z", "arrival": "2019-08-03T07:00:00Z", "aircraft": "Vintage", "status": "ON_TIME", "number": "KPQYWT72839", "capacity": 120, } response = self.client.post(self.flight_uri, params, format="json") self.assertEqual( response.status_code, 403, "Expected Response Code 403, received {0} instead.".format( response.status_code ), ) self.assertNotIn("Entebbe to Denver", str(response.data)) self.assertIn( "You don't have permissions to carry out this action.", str(response.data) ) def test_view_flights(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) response = self.client.get(self.flight_uri, format="json") self.assertEqual( response.status_code, 200, "Expected Response Code 200, 
received {0} instead.".format( response.status_code ), ) def test_get_flight_details(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) response = self.client.get(f"{self.flight_uri}{self.flight.pk}", format="json") self.assertEqual( response.status_code, 200, "Expected Response Code 200, received {0} instead.".format( response.status_code ), ) self.assertNotIn("Entebbe to Denver", str(response.data)) self.assertIn("test flight", str(response.data)) def test_get_non_existant_flight_details(self): login_uri = "/fbs-api/users/login/" params_user = { "email": "test@testadminuser.com", "password": "Testadminuser12344#", } self.set_authorization_header(login_uri, params_user) pk = 10 response = self.client.get(f"{self.flight_uri}{pk}", format="json") self.assertEqual( response.status_code, 404, "Expected Response Code 404, received {0} instead.".format( response.status_code ), ) self.assertIn("Not found", str(response.data))
true
true
f73bdd63271386f7b70e3a2f607104f30a30ed3e
2,413
py
Python
tensorflow_examples/lite/model_maker/__init__.py
ufocruz/examples
e1142ebb269802b5b518219a19cba2f6e1f54258
[ "Apache-2.0" ]
3
2021-05-04T20:00:41.000Z
2021-09-14T08:56:05.000Z
tensorflow_examples/lite/model_maker/__init__.py
ufocruz/examples
e1142ebb269802b5b518219a19cba2f6e1f54258
[ "Apache-2.0" ]
null
null
null
tensorflow_examples/lite/model_maker/__init__.py
ufocruz/examples
e1142ebb269802b5b518219a19cba2f6e1f54258
[ "Apache-2.0" ]
1
2021-04-20T17:33:19.000Z
2021-04-20T17:33:19.000Z
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A transfer learning library to train custom TFLite models.""" from tensorflow_examples.lite.model_maker.core import compat from tensorflow_examples.lite.model_maker.core.data_util import audio_dataloader from tensorflow_examples.lite.model_maker.core.data_util import image_dataloader from tensorflow_examples.lite.model_maker.core.data_util import object_detector_dataloader from tensorflow_examples.lite.model_maker.core.data_util import text_dataloader from tensorflow_examples.lite.model_maker.core.data_util.audio_dataloader import DataLoader as AudioDataLoader from tensorflow_examples.lite.model_maker.core.data_util.image_dataloader import ImageClassifierDataLoader from tensorflow_examples.lite.model_maker.core.data_util.object_detector_dataloader import DataLoader as ObjectDetectorDataloader from tensorflow_examples.lite.model_maker.core.data_util.recommendation_dataloader import RecommendationDataLoader from tensorflow_examples.lite.model_maker.core.data_util.text_dataloader import QuestionAnswerDataLoader from tensorflow_examples.lite.model_maker.core.data_util.text_dataloader import TextClassifierDataLoader from tensorflow_examples.lite.model_maker.core.export_format import ExportFormat from tensorflow_examples.lite.model_maker.core.task import audio_classifier from tensorflow_examples.lite.model_maker.core.task import configs from 
tensorflow_examples.lite.model_maker.core.task import image_classifier from tensorflow_examples.lite.model_maker.core.task import model_spec from tensorflow_examples.lite.model_maker.core.task import object_detector from tensorflow_examples.lite.model_maker.core.task import question_answer from tensorflow_examples.lite.model_maker.core.task import recommendation from tensorflow_examples.lite.model_maker.core.task import text_classifier
61.871795
129
0.862826
from tensorflow_examples.lite.model_maker.core import compat from tensorflow_examples.lite.model_maker.core.data_util import audio_dataloader from tensorflow_examples.lite.model_maker.core.data_util import image_dataloader from tensorflow_examples.lite.model_maker.core.data_util import object_detector_dataloader from tensorflow_examples.lite.model_maker.core.data_util import text_dataloader from tensorflow_examples.lite.model_maker.core.data_util.audio_dataloader import DataLoader as AudioDataLoader from tensorflow_examples.lite.model_maker.core.data_util.image_dataloader import ImageClassifierDataLoader from tensorflow_examples.lite.model_maker.core.data_util.object_detector_dataloader import DataLoader as ObjectDetectorDataloader from tensorflow_examples.lite.model_maker.core.data_util.recommendation_dataloader import RecommendationDataLoader from tensorflow_examples.lite.model_maker.core.data_util.text_dataloader import QuestionAnswerDataLoader from tensorflow_examples.lite.model_maker.core.data_util.text_dataloader import TextClassifierDataLoader from tensorflow_examples.lite.model_maker.core.export_format import ExportFormat from tensorflow_examples.lite.model_maker.core.task import audio_classifier from tensorflow_examples.lite.model_maker.core.task import configs from tensorflow_examples.lite.model_maker.core.task import image_classifier from tensorflow_examples.lite.model_maker.core.task import model_spec from tensorflow_examples.lite.model_maker.core.task import object_detector from tensorflow_examples.lite.model_maker.core.task import question_answer from tensorflow_examples.lite.model_maker.core.task import recommendation from tensorflow_examples.lite.model_maker.core.task import text_classifier
true
true
f73bdd8a837582d5bc07ef5d0cc81f29fba71831
3,737
py
Python
drmaa2/reservation_info.py
gridengine/drmaa2-python
36e84e8dc0079c9e3d772c1536f07ecb1e435684
[ "Apache-2.0" ]
10
2019-05-28T23:17:39.000Z
2022-01-14T08:52:54.000Z
drmaa2/reservation_info.py
iamh2o/drmaa2-python
36e84e8dc0079c9e3d772c1536f07ecb1e435684
[ "Apache-2.0" ]
5
2019-11-01T10:50:19.000Z
2021-12-13T11:56:19.000Z
drmaa2/reservation_info.py
iamh2o/drmaa2-python
36e84e8dc0079c9e3d772c1536f07ecb1e435684
[ "Apache-2.0" ]
2
2019-02-26T16:36:07.000Z
2019-10-29T02:02:06.000Z
#!/usr/bin/env python # ___INFO__MARK_BEGIN__ ####################################################################################### # Copyright 2008-2021 Univa Corporation (acquired and owned by Altair Engineering Inc.) # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. # # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # limitations under the License. ####################################################################################### # ___INFO__MARK_END__ from ctypes import POINTER from ctypes import pointer from .drmaa2_ctypes import drmaa2_rinfo from .drmaa2_object import Drmaa2Object from .drmaa2_exceptions import InvalidArgument class ReservationInfo(Drmaa2Object): """ High-level DRMAA2 reservation info class. """ reservation_id = Drmaa2Object.StringDescriptor('reservationId') """ Reservation id (str). """ reservation_name = Drmaa2Object.StringDescriptor('reservationName') """ Reservation name (str). """ reserved_start_time = Drmaa2Object.TimeDescriptor('reservedStartTime') """ Reserved start time (datetime). """ reserved_end_time = Drmaa2Object.TimeDescriptor('reservedEndTime') """ Reserved end time (datetime). """ users_acl = Drmaa2Object.StringListDescriptor('usersACL') """ Users ACL ([str]). """ reserved_slots = Drmaa2Object.LongLongDescriptor('reservedSlots') """ Reserved slots (long). """ reserved_machines = Drmaa2Object.StringListDescriptor('reservedMachines') """ Reserved machines ([str]). """ implementation_specific = Drmaa2Object.ImplSpecDescriptor('implementationSpecific') """ Implementation specific dictionary ({str:str}). 
""" def __init__(self, reservation_info={}): """ Constructor. :param reservation_info: Input structure representing the object; this structure is typically a dictionary, but it can also be a low-level drmaa2_rinfo struct. :type reservation_info: dict :raises InvalidArgument: in case of an invalid input argument. :raises Drmaa2Exception: for all other errors. >>> ri = ReservationInfo({'reservation_name' : 'sv-01'}) >>> print(ri.reservation_name) sv-01 >>> ri.reserved_slots = 3 """ Drmaa2Object.__init__(self) if isinstance(reservation_info, dict): self._struct = self.get_drmaa2_library().drmaa2_rinfo_create() self.init_impl_spec_key_values() self.from_dict(reservation_info) elif isinstance(reservation_info, POINTER(drmaa2_rinfo)): self._struct = reservation_info else: raise InvalidArgument('Invalid argument: %s' % str(reservation_info)) def __del__(self): # self.get_drmaa2_library().drmaa2_rinfo_free(pointer(self._struct)) pass @classmethod def get_implementation_specific_keys(cls): """ Retrieve list of implementation-specific keys. :returns: String list of implementation-specific keys. >>> print(ReservationInfo.get_implementation_specific_keys()) ['uge_ri_ar_json'] """ if cls.implementation_specific_keys is None: cls.implementation_specific_keys = cls.to_py_string_list(cls.get_drmaa2_library().drmaa2_rinfo_impl_spec()) return cls.implementation_specific_keys
41.065934
167
0.676478
true
true
f73bdde09b10c8d24414c68d205cd8684a52d650
1,979
py
Python
alibi_detect/base.py
Clusks/alibi-detect
b39406a6cf88f315f401562d4fea93a42aa6dcc1
[ "ECL-2.0", "Apache-2.0", "CC0-1.0" ]
1
2021-01-19T09:13:12.000Z
2021-01-19T09:13:12.000Z
alibi_detect/base.py
Clusks/alibi-detect
b39406a6cf88f315f401562d4fea93a42aa6dcc1
[ "ECL-2.0", "Apache-2.0", "CC0-1.0" ]
null
null
null
alibi_detect/base.py
Clusks/alibi-detect
b39406a6cf88f315f401562d4fea93a42aa6dcc1
[ "ECL-2.0", "Apache-2.0", "CC0-1.0" ]
null
null
null
from abc import ABC, abstractmethod import copy import numpy as np from typing import Dict DEFAULT_META = { "name": None, "detector_type": None, # online or offline "data_type": None # tabular, image or time-series } # type: Dict def outlier_prediction_dict(): data = { 'instance_score': None, 'feature_score': None, 'is_outlier': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) def adversarial_prediction_dict(): data = { 'instance_score': None, 'is_adversarial': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) def adversarial_correction_dict(): data = { 'instance_score': None, 'is_adversarial': None, 'corrected': None, 'no_defense': None, 'defense': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) def concept_drift_dict(): data = { 'is_drift': None, 'distance': None, 'p_val': None, 'threshold': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) class BaseDetector(ABC): """ Base class for outlier detection algorithms. """ def __init__(self): self.meta = copy.deepcopy(DEFAULT_META) self.meta['name'] = self.__class__.__name__ def __repr__(self): return self.__class__.__name__ @property def meta(self) -> Dict: return self._meta @meta.setter def meta(self, value: Dict): if not isinstance(value, dict): raise TypeError('meta must be a dictionary') self._meta = value @abstractmethod def score(self, X: np.ndarray): pass @abstractmethod def predict(self, X: np.ndarray): pass class FitMixin(ABC): @abstractmethod def fit(self, X: np.ndarray) -> None: pass class ThresholdMixin(ABC): @abstractmethod def infer_threshold(self, X: np.ndarray) -> None: pass
21.988889
62
0.608893
from abc import ABC, abstractmethod import copy import numpy as np from typing import Dict DEFAULT_META = { "name": None, "detector_type": None, "data_type": None } def outlier_prediction_dict(): data = { 'instance_score': None, 'feature_score': None, 'is_outlier': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) def adversarial_prediction_dict(): data = { 'instance_score': None, 'is_adversarial': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) def adversarial_correction_dict(): data = { 'instance_score': None, 'is_adversarial': None, 'corrected': None, 'no_defense': None, 'defense': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) def concept_drift_dict(): data = { 'is_drift': None, 'distance': None, 'p_val': None, 'threshold': None } return copy.deepcopy({"data": data, "meta": DEFAULT_META}) class BaseDetector(ABC): def __init__(self): self.meta = copy.deepcopy(DEFAULT_META) self.meta['name'] = self.__class__.__name__ def __repr__(self): return self.__class__.__name__ @property def meta(self) -> Dict: return self._meta @meta.setter def meta(self, value: Dict): if not isinstance(value, dict): raise TypeError('meta must be a dictionary') self._meta = value @abstractmethod def score(self, X: np.ndarray): pass @abstractmethod def predict(self, X: np.ndarray): pass class FitMixin(ABC): @abstractmethod def fit(self, X: np.ndarray) -> None: pass class ThresholdMixin(ABC): @abstractmethod def infer_threshold(self, X: np.ndarray) -> None: pass
true
true
f73bdf294246507d49633746f6c969ffda3ef049
2,219
py
Python
ldbc_snb_datagen-dev/tools/sparkbench/distributions_extractor.py
netdb407/skh_project
8893132f898d21149f73cfb3a5f7806ce5518be5
[ "Apache-2.0" ]
null
null
null
ldbc_snb_datagen-dev/tools/sparkbench/distributions_extractor.py
netdb407/skh_project
8893132f898d21149f73cfb3a5f7806ce5518be5
[ "Apache-2.0" ]
9
2020-05-15T22:29:42.000Z
2021-12-14T21:44:49.000Z
ldbc_snb_datagen-dev/tools/sparkbench/distributions_extractor.py
netdb407/skh_project
8893132f898d21149f73cfb3a5f7806ce5518be5
[ "Apache-2.0" ]
null
null
null
import sys input_file = open(sys.argv[1]) def parse_line( input_file ): line = input_file.readline() if len(line) > 0: if line[0] == '(': output = line.replace("\n","").replace(" ","") while output.find(')') == -1: line = input_file.readline() output = output.replace("\n","").replace(" ","") + line.replace("\n","").replace(" ","") return output return line def parse_distribution(input_file, output_file): line = parse_line(input_file) accum = 0 entry_labels = [] entry_freq = [] while(len(line) > 0 and line[0] == "(" ): line = line.replace("(","") line = line.replace(")","") line = line.split(",") freq = int(line[0]) accum += freq prev = 0 if(len(entry_freq) > 0): prev = entry_freq[len(entry_freq)-1] entry_freq.append(prev + freq) entry_labels.append(line[1]) line = parse_line(input_file) index = 0 while(index < len(entry_labels)): output_file.write(str(entry_freq[index] / float(accum))+" "+entry_labels[index]+"\n") index += 1 return line words_file = open("words.csv", "w") hashtags_file = open("hashtags.csv", "w") sentence_count_file = open("sentence_count.csv", "w") sentence_lengths_file = open("sentence_lengths.csv", "w") line = parse_line(input_file) while(len(line) > 0): if line.find("Number of unique words",0,len("Number of unique words")) == 0: line = parse_distribution(input_file, words_file ) elif line.find("Number of unique hashtags",0,len("Number of unique hashtags")) == 0: line = parse_distribution(input_file, hashtags_file ) elif line.find("Number of sentence counts",0,len("Number of sentence counts")) == 0: line = parse_distribution(input_file, sentence_count_file ) elif line.find("Words per sentence count",0,len("Words per sentence count")) == 0: line = parse_distribution(input_file, sentence_lengths_file ) else: line = parse_line(input_file) input_file.close() words_file.close() hashtags_file.close() sentence_count_file.close() sentence_lengths_file.close()
34.138462
105
0.611987
import sys input_file = open(sys.argv[1]) def parse_line( input_file ): line = input_file.readline() if len(line) > 0: if line[0] == '(': output = line.replace("\n","").replace(" ","") while output.find(')') == -1: line = input_file.readline() output = output.replace("\n","").replace(" ","") + line.replace("\n","").replace(" ","") return output return line def parse_distribution(input_file, output_file): line = parse_line(input_file) accum = 0 entry_labels = [] entry_freq = [] while(len(line) > 0 and line[0] == "(" ): line = line.replace("(","") line = line.replace(")","") line = line.split(",") freq = int(line[0]) accum += freq prev = 0 if(len(entry_freq) > 0): prev = entry_freq[len(entry_freq)-1] entry_freq.append(prev + freq) entry_labels.append(line[1]) line = parse_line(input_file) index = 0 while(index < len(entry_labels)): output_file.write(str(entry_freq[index] / float(accum))+" "+entry_labels[index]+"\n") index += 1 return line words_file = open("words.csv", "w") hashtags_file = open("hashtags.csv", "w") sentence_count_file = open("sentence_count.csv", "w") sentence_lengths_file = open("sentence_lengths.csv", "w") line = parse_line(input_file) while(len(line) > 0): if line.find("Number of unique words",0,len("Number of unique words")) == 0: line = parse_distribution(input_file, words_file ) elif line.find("Number of unique hashtags",0,len("Number of unique hashtags")) == 0: line = parse_distribution(input_file, hashtags_file ) elif line.find("Number of sentence counts",0,len("Number of sentence counts")) == 0: line = parse_distribution(input_file, sentence_count_file ) elif line.find("Words per sentence count",0,len("Words per sentence count")) == 0: line = parse_distribution(input_file, sentence_lengths_file ) else: line = parse_line(input_file) input_file.close() words_file.close() hashtags_file.close() sentence_count_file.close() sentence_lengths_file.close()
true
true
f73be0ead8db0c9b2b9580b0fc630b9d96a79294
1,312
py
Python
setup.py
dlbrittain/celery-exporter
c7295bda6ce17c8a4602203ef26b8a9608aab6f1
[ "MIT" ]
null
null
null
setup.py
dlbrittain/celery-exporter
c7295bda6ce17c8a4602203ef26b8a9608aab6f1
[ "MIT" ]
null
null
null
setup.py
dlbrittain/celery-exporter
c7295bda6ce17c8a4602203ef26b8a9608aab6f1
[ "MIT" ]
null
null
null
import io from setuptools import setup from setuptools_rust import Binding, RustExtension long_description = "See https://github.com/OvalMoney/celery-exporter" with io.open("README.md", encoding="utf-8") as fp: long_description = fp.read() with open("README.md", "r") as fh: long_description = fh.read() setup( name="celery-exporter", description="Prometheus metrics exporter for Celery", long_description=long_description, long_description_content_type="text/markdown", version="1.4.0", author="Fabio Todaro, Nicola Martino", license="MIT", author_email="fbregist@gmail.com, mroci@bruttocarattere.org", url="https://github.com/OvalMoney/celery-exporter", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3 :: Only", "Operating System :: OS Independent", ], rust_extensions=[RustExtension("celery_state", binding=Binding.PyO3, debug=False)], packages=["celery_exporter"], install_requires=["click>=7", "celery>=4", "prometheus_client>=0.0.20"], entry_points={ "console_scripts": ["celery-exporter = celery_exporter.__main__:main"] }, )
34.526316
87
0.677591
import io from setuptools import setup from setuptools_rust import Binding, RustExtension long_description = "See https://github.com/OvalMoney/celery-exporter" with io.open("README.md", encoding="utf-8") as fp: long_description = fp.read() with open("README.md", "r") as fh: long_description = fh.read() setup( name="celery-exporter", description="Prometheus metrics exporter for Celery", long_description=long_description, long_description_content_type="text/markdown", version="1.4.0", author="Fabio Todaro, Nicola Martino", license="MIT", author_email="fbregist@gmail.com, mroci@bruttocarattere.org", url="https://github.com/OvalMoney/celery-exporter", classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3 :: Only", "Operating System :: OS Independent", ], rust_extensions=[RustExtension("celery_state", binding=Binding.PyO3, debug=False)], packages=["celery_exporter"], install_requires=["click>=7", "celery>=4", "prometheus_client>=0.0.20"], entry_points={ "console_scripts": ["celery-exporter = celery_exporter.__main__:main"] }, )
true
true
f73be0f3da49275a7dea5a705f01b7713d6fe45d
2,852
py
Python
utils/PullData.py
MaxineSun/Paper_Graph
a7e0350c80309b35152e4bce939f522236e8efe0
[ "MIT" ]
null
null
null
utils/PullData.py
MaxineSun/Paper_Graph
a7e0350c80309b35152e4bce939f522236e8efe0
[ "MIT" ]
null
null
null
utils/PullData.py
MaxineSun/Paper_Graph
a7e0350c80309b35152e4bce939f522236e8efe0
[ "MIT" ]
null
null
null
from crossref_commons.retrieval import get_entity from crossref_commons.types import EntityType, OutputType class PullData: #transform this function as a class def __init__(self, doi): self.doi = doi def getInitials(self, str): #Put getInitials brfore as a alone function instead of part of pullData allNames = str.split() initials = [names[0] for names in allNames] initialsProcessced = ". ".join(initials) + "." return initialsProcessced def pullData(self): try: self.art = get_entity(self.doi, EntityType.PUBLICATION, OutputType.JSON) if 'DOI' in self.art: doi1 = self.art[ 'DOI' ] else: doi1 = None if 'type' in self.art: typ = self.art[ 'type' ] #book-chapter else: typ = 'None' if 'volume' in self.art: vol = self.art[ 'volume' ] else: vol = None if 'issue' in self.art: iss = self.art[ 'issue' ] else: iss = None if 'page' in self.art: pgs = self.art[ 'page' ] else: pgs = None if 'author' in self.art: aut__ = self.art[ 'author' ] aut = [] for a in aut__: if 'given' in a: firstName = a[ 'given' ] aut.append(self.getInitials(firstName) + ' ' + a[ 'family' ]) elif 'family' in a: aut.append(a['family']) else: aut = None else: aut = None if 'title' in self.art: tit = self.art[ 'title' ][ 0 ] else: tit = None if 'publisher' in self.art: pub = self.art[ 'publisher' ] else: pub = None if 'reference-count' in self.art: cit = self.art[ 'reference-count' ] else: cit = 'None' if 'container-title' in self.art and not len(self.art['container-title']) == 0: #prevents empty lists jnl = self.art[ 'container-title' ][ 0 ] else: jnl = None if 'reference' in self.art: ref = self.art[ 'reference' ] else: ref = None if 'created' in self.art: ymd = self.art[ 'created' ][ 'date-time' ][ 0:10 ] else: ymd = None # if 'reference-count' in self.art: # rct = self.art[ 'reference-count' ] # else: # rct = None if 'ISSN' in self.art: issn = self.art[ 'ISSN' ] else: issn = None if 'ISBN' in self.art: isbn = self.art[ 'ISBN' ] else: isbn = None if 'publisher-location' in self.art: loc = self.art[ 
'publisher-location' ] else: loc = None if 'URL' in self.art: url = self.art[ 'URL' ] else: url = None if 'editor' in self.art: edt = self.art[ 'editor' ] else: edt = None if 'article-number' in self.art: anm = self.art[ 'article-number' ] else: anm = None return doi1, typ, vol, iss, pgs, aut, tit, pub, cit, jnl, ref, ymd, issn, isbn, loc, url, edt, anm except: pass
22.634921
105
0.56101
from crossref_commons.retrieval import get_entity from crossref_commons.types import EntityType, OutputType class PullData: def __init__(self, doi): self.doi = doi def getInitials(self, str): allNames = str.split() initials = [names[0] for names in allNames] initialsProcessced = ". ".join(initials) + "." return initialsProcessced def pullData(self): try: self.art = get_entity(self.doi, EntityType.PUBLICATION, OutputType.JSON) if 'DOI' in self.art: doi1 = self.art[ 'DOI' ] else: doi1 = None if 'type' in self.art: typ = self.art[ 'type' ] else: typ = 'None' if 'volume' in self.art: vol = self.art[ 'volume' ] else: vol = None if 'issue' in self.art: iss = self.art[ 'issue' ] else: iss = None if 'page' in self.art: pgs = self.art[ 'page' ] else: pgs = None if 'author' in self.art: aut__ = self.art[ 'author' ] aut = [] for a in aut__: if 'given' in a: firstName = a[ 'given' ] aut.append(self.getInitials(firstName) + ' ' + a[ 'family' ]) elif 'family' in a: aut.append(a['family']) else: aut = None else: aut = None if 'title' in self.art: tit = self.art[ 'title' ][ 0 ] else: tit = None if 'publisher' in self.art: pub = self.art[ 'publisher' ] else: pub = None if 'reference-count' in self.art: cit = self.art[ 'reference-count' ] else: cit = 'None' if 'container-title' in self.art and not len(self.art['container-title']) == 0: jnl = self.art[ 'container-title' ][ 0 ] else: jnl = None if 'reference' in self.art: ref = self.art[ 'reference' ] else: ref = None if 'created' in self.art: ymd = self.art[ 'created' ][ 'date-time' ][ 0:10 ] else: ymd = None if 'ISSN' in self.art: issn = self.art[ 'ISSN' ] else: issn = None if 'ISBN' in self.art: isbn = self.art[ 'ISBN' ] else: isbn = None if 'publisher-location' in self.art: loc = self.art[ 'publisher-location' ] else: loc = None if 'URL' in self.art: url = self.art[ 'URL' ] else: url = None if 'editor' in self.art: edt = self.art[ 'editor' ] else: edt = None if 'article-number' in self.art: anm = self.art[ 'article-number' ] else: 
anm = None return doi1, typ, vol, iss, pgs, aut, tit, pub, cit, jnl, ref, ymd, issn, isbn, loc, url, edt, anm except: pass
true
true
f73be1498618c64c0e1b596460a231018146a7a2
154,704
py
Python
tensorflow/python/framework/ops.py
ml-resources/tensorflow
4ecd72b68cd70c3930551aebbf0c80badc301d28
[ "Apache-2.0" ]
1
2019-06-19T08:43:26.000Z
2019-06-19T08:43:26.000Z
tensorflow/python/framework/ops.py
liudgit/tensorflow
4ecd72b68cd70c3930551aebbf0c80badc301d28
[ "Apache-2.0" ]
null
null
null
tensorflow/python/framework/ops.py
liudgit/tensorflow
4ecd72b68cd70c3930551aebbf0c80badc301d28
[ "Apache-2.0" ]
1
2019-06-19T08:43:23.000Z
2019-06-19T08:43:23.000Z
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Classes and functions used to construct graphs.""" # pylint: disable=g-bad-name from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import contextlib import copy import linecache import re import sys import threading import six from tensorflow.core.framework import attr_value_pb2 from tensorflow.core.framework import function_pb2 from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import node_def_pb2 from tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.framework import types_pb2 from tensorflow.core.framework import versions_pb2 from tensorflow.python.framework import device as pydev from tensorflow.python.framework import dtypes from tensorflow.python.framework import op_def_registry from tensorflow.python.framework import registry from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import versions from tensorflow.python.platform import tf_logging as logging from tensorflow.python.util import compat from tensorflow.python.util import decorator_utils def _override_helper(clazz_object, operator, func): """Overrides (string) operator on Tensors to call func. Args: clazz_object: the class to override for; either Tensor or SparseTensor. 
operator: the string name of the operator to override. func: the function that replaces the overridden operator. Raises: ValueError: If operator has already been overwritten, or if operator is not allowed to be overwritten. """ existing = getattr(clazz_object, operator, None) if existing is not None: # Check to see if this is a default method-wrapper or slot wrapper which # will be true for the comparison operators. if not isinstance(existing, type(object.__lt__)): raise ValueError("operator %s cannot be overwritten again on class %s." % (operator, clazz_object)) if operator not in Tensor.OVERLOADABLE_OPERATORS: raise ValueError("Overriding %s is disallowed" % operator) setattr(clazz_object, operator, func) def _convert_stack(stack): """Converts a stack extracted using _extract_stack() to a traceback stack. Args: stack: A list of n 4-tuples, (filename, lineno, name, frame_globals). Returns: A list of n 4-tuples (filename, lineno, name, code), where the code tuple element is calculated from the corresponding elements of the input tuple. """ ret = [] for filename, lineno, name, frame_globals in stack: linecache.checkcache(filename) line = linecache.getline(filename, lineno, frame_globals) if line: line = line.strip() else: line = None ret.append((filename, lineno, name, line)) return ret # pylint: disable=line-too-long def _extract_stack(): """A lightweight re-implementation of traceback.extract_stack. NOTE(mrry): traceback.extract_stack eagerly retrieves the line of code for each stack frame using linecache, which results in an abundance of stat() calls. This implementation does not retrieve the code, and any consumer should apply _convert_stack to the result to obtain a traceback that can be formatted etc. using traceback methods. Returns: A list of 4-tuples (filename, lineno, name, frame_globals) corresponding to the call stack of the current thread. 
""" # pylint: enable=line-too-long try: raise ZeroDivisionError except ZeroDivisionError: f = sys.exc_info()[2].tb_frame.f_back ret = [] while f is not None: lineno = f.f_lineno co = f.f_code filename = co.co_filename name = co.co_name frame_globals = f.f_globals ret.append((filename, lineno, name, frame_globals)) f = f.f_back ret.reverse() return ret def _as_graph_element(obj): """Convert `obj` to a graph element if possible, otherwise return `None`. Args: obj: Object to convert. Returns: The result of `obj._as_graph_element()` if that method is available; otherwise `None`. """ conv_fn = getattr(obj, "_as_graph_element", None) if conv_fn and callable(conv_fn): return conv_fn() return None _TENSOR_LIKE_TYPES = tuple() def is_dense_tensor_like(t): """EXPERIMENTAL: Returns true if `t` implements the tensor interface. See `register_dense_tensor_like_type()` for the current definition of a "tensor-like type". Args: t: An object. Returns: True iff `t` is an instance of one of the registered "tensor-like" types. """ return isinstance(t, _TENSOR_LIKE_TYPES) def register_dense_tensor_like_type(tensor_type): """EXPERIMENTAL: Registers `tensor_type` as implementing the tensor interface. A "tensor-like type" can represent a single dense tensor, and implements the `name` and `dtype` properties. Args: tensor_type: A type implementing the tensor interface. Raises: TypeError: If `tensor_type` does not implement the tensor interface. 
""" try: if not isinstance(tensor_type.name, property): raise TypeError("Type %s does not define a `name` property") except AttributeError: raise TypeError("Type %s does not define a `name` property") try: if not isinstance(tensor_type.dtype, property): raise TypeError("Type %s does not define a `dtype` property") except AttributeError: raise TypeError("Type %s does not define a `dtype` property") # We expect this list to be small, so choose quadratic complexity # for registration, so that we have a tuple that can be used for # more efficient `isinstance` checks later. global _TENSOR_LIKE_TYPES _TENSOR_LIKE_TYPES = tuple(list(_TENSOR_LIKE_TYPES) + [tensor_type]) # NOTE(ebrevdo): Do not subclass this. If you do, I will break you on purpose. class _TensorLike(object): """Internal cls for grouping Tensor, SparseTensor, ..., for is_instance.""" pass class Tensor(_TensorLike): """Represents one of the outputs of an `Operation`. A `Tensor` is a symbolic handle to one of the outputs of an `Operation`. It does not hold the values of that operation's output, but instead provides a means of computing those values in a TensorFlow [`Session`](../../api_docs/python/client.md#Session). This class has two primary purposes: 1. A `Tensor` can be passed as an input to another `Operation`. This builds a dataflow connection between operations, which enables TensorFlow to execute an entire `Graph` that represents a large, multi-step computation. 2. After the graph has been launched in a session, the value of the `Tensor` can be computed by passing it to [`Session.run()`](../../api_docs/python/client.md#Session.run). `t.eval()` is a shortcut for calling `tf.get_default_session().run(t)`. In the following example, `c`, `d`, and `e` are symbolic `Tensor` objects, whereas `result` is a numpy array that stores a concrete value: ```python # Build a dataflow graph. 
c = tf.constant([[1.0, 2.0], [3.0, 4.0]]) d = tf.constant([[1.0, 1.0], [0.0, 1.0]]) e = tf.matmul(c, d) # Construct a `Session` to execute the graph. sess = tf.Session() # Execute the graph and store the value that `e` represents in `result`. result = sess.run(e) ``` @@dtype @@name @@value_index @@graph @@op @@consumers @@eval @@get_shape @@shape @@set_shape """ # List of Python operators that we allow to override. OVERLOADABLE_OPERATORS = { # Binary. "__add__", "__radd__", "__sub__", "__rsub__", "__mul__", "__rmul__", "__div__", "__rdiv__", "__truediv__", "__rtruediv__", "__floordiv__", "__rfloordiv__", "__mod__", "__rmod__", "__lt__", "__le__", "__gt__", "__ge__", "__and__", "__rand__", "__or__", "__ror__", "__xor__", "__rxor__", "__getitem__", "__pow__", "__rpow__", # Unary. "__invert__", "__neg__", "__abs__" } def __init__(self, op, value_index, dtype): """Creates a new `Tensor`. Args: op: An `Operation`. `Operation` that computes this tensor. value_index: An `int`. Index of the operation's endpoint that produces this tensor. dtype: A `DType`. Type of elements stored in this tensor. Raises: TypeError: If the op is not an `Operation`. """ if not isinstance(op, Operation): raise TypeError("op needs to be an Operation: %s" % op) self._op = op self._value_index = value_index self._dtype = dtypes.as_dtype(dtype) self._shape = tensor_shape.unknown_shape() # List of operations that use this Tensor as input. We maintain this list # to easily navigate a computation graph. self._consumers = [] # Attributes used for C++ shape inference. Not inspected, only forwarded. 
self._handle_shape = tensor_shape_pb2.TensorShapeProto() self._handle_dtype = types_pb2.DT_INVALID @property def op(self): """The `Operation` that produces this tensor as an output.""" return self._op @property def dtype(self): """The `DType` of elements in this tensor.""" return self._dtype @property def graph(self): """The `Graph` that contains this tensor.""" return self._op.graph @property def name(self): """The string name of this tensor.""" if not self._op.name: raise ValueError("Operation was not named: %s" % self._op) return "%s:%d" % (self._op.name, self._value_index) @property def device(self): """The name of the device on which this tensor will be produced, or None.""" return self._op.device @property def shape(self): """Returns the `TensorShape` that represents the shape of this tensor. The shape is computed using shape inference functions that are registered in the Op for each `Operation`. See [`TensorShape`](../../api_docs/python/framework.md#TensorShape) for more details of what a shape represents. The inferred shape of a tensor is used to provide shape information without having to launch the graph in a session. This can be used for debugging, and providing early error messages. For example: ```python c = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]) print(c.shape) ==> TensorShape([Dimension(2), Dimension(3)]) d = tf.constant([[1.0, 0.0], [0.0, 1.0], [1.0, 0.0], [0.0, 1.0]]) print(d.shape) ==> TensorShape([Dimension(4), Dimension(2)]) # Raises a ValueError, because `c` and `d` do not have compatible # inner dimensions. e = tf.matmul(c, d) f = tf.matmul(c, d, transpose_a=True, transpose_b=True) print(f.shape) ==> TensorShape([Dimension(3), Dimension(4)]) ``` In some cases, the inferred shape may have unknown dimensions. If the caller has additional information about the values of these dimensions, `Tensor.set_shape()` can be used to augment the inferred shape. Returns: A `TensorShape` representing the shape of this tensor. 
""" return self._shape def _shape_as_list(self): if self._shape.ndims is not None: return [dim.value for dim in self._shape.dims] else: return None def get_shape(self): """Alias of Tensor.shape.""" return self.shape def set_shape(self, shape): """Updates the shape of this tensor. This method can be called multiple times, and will merge the given `shape` with the current shape of this tensor. It can be used to provide additional information about the shape of this tensor that cannot be inferred from the graph alone. For example, this can be used to provide additional information about the shapes of images: ```python _, image_data = tf.TFRecordReader(...).read(...) image = tf.image.decode_png(image_data, channels=3) # The height and width dimensions of `image` are data dependent, and # cannot be computed without executing the op. print(image.shape) ==> TensorShape([Dimension(None), Dimension(None), Dimension(3)]) # We know that each image in this dataset is 28 x 28 pixels. image.set_shape([28, 28, 3]) print(image.shape) ==> TensorShape([Dimension(28), Dimension(28), Dimension(3)]) ``` Args: shape: A `TensorShape` representing the shape of this tensor. Raises: ValueError: If `shape` is not compatible with the current shape of this tensor. """ self._shape = self._shape.merge_with(shape) @property def value_index(self): """The index of this tensor in the outputs of its `Operation`.""" return self._value_index def consumers(self): """Returns a list of `Operation`s that consume this tensor. Returns: A list of `Operation`s. """ return self._consumers def _add_consumer(self, consumer): """Add a consumer to this tensor. Args: consumer: an Operation. Raises: TypeError: if the consumer is not an Operation. """ if not isinstance(consumer, Operation): raise TypeError("Consumer must be an Operation: %s" % consumer) self._consumers.append(consumer) def _as_node_def_input(self): """Return a value to use for the NodeDef "input" attribute. 
The returned string can be used in a NodeDef "input" attribute to indicate that the NodeDef uses this Tensor as input. Raises: ValueError: if this Tensor's Operation does not have a name. Returns: a string. """ if not self._op.name: raise ValueError("Operation was not named: %s" % self._op) if self._value_index == 0: return self._op.name else: return "%s:%d" % (self._op.name, self._value_index) def __str__(self): return "Tensor(\"%s\"%s%s%s)" % ( self.name, (", shape=%s" % self.get_shape()) if self.get_shape().ndims is not None else "", (", dtype=%s" % self._dtype.name) if self._dtype else "", (", device=%s" % self.device) if self.device else "") def __repr__(self): return "<tf.Tensor '%s' shape=%s dtype=%s>" % ( self.name, self.get_shape(), self._dtype.name) def __hash__(self): # Necessary to support Python's collection membership operators return id(self) def __eq__(self, other): # Necessary to support Python's collection membership operators return id(self) == id(other) # NOTE(mrry): This enables the Tensor's overloaded "right" binary # operators to run when the left operand is an ndarray, because it # accords the Tensor class higher priority than an ndarray, or a # numpy matrix. # TODO(mrry): Convert this to using numpy's __numpy_ufunc__ # mechanism, which allows more control over how Tensors interact # with ndarrays. __array_priority__ = 100 @staticmethod def _override_operator(operator, func): _override_helper(Tensor, operator, func) def __iter__(self): """Dummy method to prevent iteration. Do not call. NOTE(mrry): If we register __getitem__ as an overloaded operator, Python will valiantly attempt to iterate over the Tensor from 0 to infinity. Declaring this method prevents this unintended behavior. Raises: TypeError: when invoked. """ raise TypeError("'Tensor' object is not iterable.") def __bool__(self): """Dummy method to prevent a tensor from being used as a Python `bool`. 
This overload raises a `TypeError` when the user inadvertently treats a `Tensor` as a boolean (e.g. in an `if` statement). For example: ```python if tf.constant(True): # Will raise. # ... if tf.constant(5) < tf.constant(7): # Will raise. # ... ``` This disallows ambiguities between testing the Python value vs testing the dynamic condition of the `Tensor`. Raises: `TypeError`. """ raise TypeError("Using a `tf.Tensor` as a Python `bool` is not allowed. " "Use `if t is not None:` instead of `if t:` to test if a " "tensor is defined, and use TensorFlow ops such as " "tf.cond to execute subgraphs conditioned on the value of " "a tensor.") def __nonzero__(self): """Dummy method to prevent a tensor from being used as a Python `bool`. This is the Python 2.x counterpart to `__bool__()` above. Raises: `TypeError`. """ raise TypeError("Using a `tf.Tensor` as a Python `bool` is not allowed. " "Use `if t is not None:` instead of `if t:` to test if a " "tensor is defined, and use TensorFlow ops such as " "tf.cond to execute subgraphs conditioned on the value of " "a tensor.") def eval(self, feed_dict=None, session=None): """Evaluates this tensor in a `Session`. Calling this method will execute all preceding operations that produce the inputs needed for the operation that produces this tensor. *N.B.* Before invoking `Tensor.eval()`, its graph must have been launched in a session, and either a default session must be available, or `session` must be specified explicitly. Args: feed_dict: A dictionary that maps `Tensor` objects to feed values. See [`Session.run()`](../../api_docs/python/client.md#Session.run) for a description of the valid feed values. session: (Optional.) The `Session` to be used to evaluate this tensor. If none, the default session will be used. Returns: A numpy array corresponding to the value of this tensor. 
""" return _eval_using_default_session(self, feed_dict, self.graph, session) def _TensorTensorConversionFunction(t, dtype=None, name=None, as_ref=False): _ = name, as_ref if dtype and not dtype.is_compatible_with(t.dtype): raise ValueError( "Tensor conversion requested dtype %s for Tensor with dtype %s: %r" % (dtype.name, t.dtype.name, str(t))) return t _tensor_conversion_func_registry = { 0: [(Tensor, _TensorTensorConversionFunction)]} register_dense_tensor_like_type(Tensor) def convert_to_tensor(value, dtype=None, name=None, preferred_dtype=None): """Converts the given `value` to a `Tensor`. This function converts Python objects of various types to `Tensor` objects. It accepts `Tensor` objects, numpy arrays, Python lists, and Python scalars. For example: ```python import numpy as np def my_func(arg): arg = tf.convert_to_tensor(arg, dtype=tf.float32) return tf.matmul(arg, arg) + arg # The following calls are equivalent. value_1 = my_func(tf.constant([[1.0, 2.0], [3.0, 4.0]])) value_2 = my_func([[1.0, 2.0], [3.0, 4.0]]) value_3 = my_func(np.array([[1.0, 2.0], [3.0, 4.0]], dtype=np.float32)) ``` This function can be useful when composing a new operation in Python (such as `my_func` in the example above). All standard Python op constructors apply this function to each of their Tensor-valued inputs, which allows those ops to accept numpy arrays, Python lists, and scalars in addition to `Tensor` objects. Args: value: An object whose type has a registered `Tensor` conversion function. dtype: Optional element type for the returned tensor. If missing, the type is inferred from the type of `value`. name: Optional name to use if a new `Tensor` is created. preferred_dtype: Optional element type for the returned tensor, used when dtype is None. In some cases, a caller may not have a dtype in mind when converting to a tensor, so preferred_dtype can be used as a soft preference. If the conversion to `preferred_dtype` is not possible, this argument has no effect. 
Returns: An `Output` based on `value`. Raises: TypeError: If no conversion function is registered for `value`. RuntimeError: If a registered conversion function returns an invalid value. """ return internal_convert_to_tensor( value=value, dtype=dtype, name=name, preferred_dtype=preferred_dtype, as_ref=False) def internal_convert_to_tensor(value, dtype=None, name=None, as_ref=False, preferred_dtype=None): """Converts the given `value` to an `Tensor`. This function converts Python objects of various types to `Tensor` objects. It accepts `Tensor` objects, numpy arrays, Python lists, and Python scalars. For example: This function can be useful when composing a new operation in Python All standard Python op constructors apply this function to each of their Tensor-valued inputs, which allows those ops to accept numpy arrays, Python lists, and scalars in addition to `Tensor` objects. Args: value: An object whose type has a registered `Tensor` conversion function. dtype: Optional element type for the returned tensor. If missing, the type is inferred from the type of `value`. name: Optional name to use if a new `Tensor` is created. as_ref: True if we want the mutable view of Variables, if applicable. preferred_dtype: Optional element type for the returned tensor, used when dtype is None. In some cases, a caller may not have a dtype in mind when converting to a tensor, so preferred_dtype can be used as a soft preference. If the conversion to `preferred_dtype` is not possible, this argument has no effect. Returns: A `Tensor` based on `value`. Raises: TypeError: If no conversion function is registered for `value`. RuntimeError: If a registered conversion function returns an invalid value. 
""" error_prefix = "" if name is None else "%s: " % name if dtype is not None: dtype = dtypes.as_dtype(dtype) for _, funcs_at_priority in sorted(_tensor_conversion_func_registry.items()): for base_type, conversion_func in funcs_at_priority: if isinstance(value, base_type): # If dtype is None but preferred_dtype is not None, we try to # cast to preferred_dtype first. ret = None if dtype is None and preferred_dtype is not None: try: ret = conversion_func( value, dtype=preferred_dtype, name=name, as_ref=as_ref) except (TypeError, ValueError): # Could not coerce the conversion to use the preferred dtype. ret = None if ret is not None and ret is not NotImplemented: if (ret.dtype.base_dtype != dtypes.as_dtype(preferred_dtype).base_dtype): raise TypeError("convert_to_tensor did not convert to " "the preferred dtype: %s vs %s " % (ret.dtype.base_dtype, dtypes.as_dtype(preferred_dtype).base_dtype)) if ret is None: ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref) if ret is NotImplemented: continue if not isinstance(ret, Tensor): raise RuntimeError( "%sConversion function %r for type %s returned non-Tensor: %r" % (error_prefix, conversion_func, base_type, ret)) if dtype and not dtype.is_compatible_with(ret.dtype): raise RuntimeError( "%sConversion function %r for type %s returned incompatible " "dtype: requested = %s, actual = %s" % (error_prefix, conversion_func, base_type, dtype.name, ret.dtype.name)) return ret raise TypeError("%sCannot convert %r with type %s to Tensor: " "no conversion function registered." % (error_prefix, value, type(value))) def internal_convert_n_to_tensor(values, dtype=None, name=None, as_ref=False, preferred_dtype=None): """Converts `values` to a list of `Tensor` objects. Args: values: A list of objects that can be consumed by `tf.convert_to_tensor()`. dtype: (Optional.) The required `DType` of the returned `Tensor` objects. name: (Optional.) 
A name prefix to used when a new `Tensor` is created, in which case element `i` will be given the name `name + '_' + i`. as_ref: True if the caller wants the results as ref tensors. preferred_dtype: Optional element type for the returned tensors, used when dtype is None. In some cases, a caller may not have a dtype in mind when converting to a tensor, so preferred_dtype can be used as a soft preference. If the conversion to `preferred_dtype` is not possible, this argument has no effect. Returns: A list of `Tensor` and/or `IndexedSlices` objects. Raises: TypeError: If no conversion function is registered for an element in `values`. RuntimeError: If a registered conversion function returns an invalid value. """ if not isinstance(values, collections.Sequence): raise TypeError("values must be a list.") ret = [] for i, value in enumerate(values): n = None if name is None else "%s_%d" % (name, i) ret.append( internal_convert_to_tensor( value, dtype=dtype, name=n, as_ref=as_ref, preferred_dtype=preferred_dtype)) return ret def convert_n_to_tensor(values, dtype=None, name=None, preferred_dtype=None): """Converts `values` to a list of `Tensor` objects. Args: values: A list of objects that can be consumed by `tf.convert_to_tensor()`. dtype: (Optional.) The required `DType` of the returned `Tensor` objects. name: (Optional.) A name prefix to used when a new `Tensor` is created, in which case element `i` will be given the name `name + '_' + i`. preferred_dtype: Optional element type for the returned tensors, used when dtype is None. In some cases, a caller may not have a dtype in mind when converting to a tensor, so preferred_dtype can be used as a soft preference. If the conversion to `preferred_dtype` is not possible, this argument has no effect. Returns: A list of `Tensor` and/or `IndexedSlices` objects. Raises: TypeError: If no conversion function is registered for an element in `values`. RuntimeError: If a registered conversion function returns an invalid value. 
""" return internal_convert_n_to_tensor(values=values, dtype=dtype, name=name, preferred_dtype=preferred_dtype, as_ref=False) def convert_to_tensor_or_indexed_slices(value, dtype=None, name=None): """Converts the given object to a `Tensor` or an `IndexedSlices`. If `value` is an `IndexedSlices` or `SparseTensor` it is returned unmodified. Otherwise, it is converted to a `Tensor` using `convert_to_tensor()`. Args: value: An `IndexedSlices`, `SparseTensor`, or an object that can be consumed by `convert_to_tensor()`. dtype: (Optional.) The required `DType` of the returned `Tensor` or `IndexedSlices`. name: (Optional.) A name to use if a new `Tensor` is created. Returns: An `Tensor`, `IndexedSlices`, or `SparseTensor` based on `value`. Raises: ValueError: If `dtype` does not match the element type of `value`. """ return internal_convert_to_tensor_or_indexed_slices( value=value, dtype=dtype, name=name, as_ref=False) def internal_convert_to_tensor_or_indexed_slices(value, dtype=None, name=None, as_ref=False): """Converts the given object to an `Tensor` or an `IndexedSlices`. If `value` is an `IndexedSlices` or `SparseTensor` it is returned unmodified. Otherwise, it is converted to a `Tensor` using `convert_to_tensor()`. Args: value: An `IndexedSlices`, `SparseTensor`, or an object that can be consumed by `convert_to_tensor()`. dtype: (Optional.) The required `DType` of the returned `Tensor` or `IndexedSlices`. name: (Optional.) A name to use if a new `Tensor` is created. as_ref: True if the caller wants the results as ref tensors. Returns: An `Tensor`, `IndexedSlices`, or `SparseTensor` based on `value`. Raises: ValueError: If `dtype` does not match the element type of `value`. 
  """
  if isinstance(value, _TensorLike):
    # Already a Tensor/IndexedSlices/SparseTensor: validate dtype (if any)
    # and pass it through untouched.
    if dtype and not dtypes.as_dtype(dtype).is_compatible_with(value.dtype):
      raise ValueError(
          "Tensor conversion requested dtype %s for Tensor with dtype %s: %r"
          % (dtypes.as_dtype(dtype).name, value.dtype.name, str(value)))
    return value
  else:
    return internal_convert_to_tensor(value, dtype=dtype, name=name,
                                      as_ref=as_ref)


def internal_convert_n_to_tensor_or_indexed_slices(values, dtype=None,
                                                   name=None, as_ref=False):
  """Converts `values` to a list of `Tensor` or `IndexedSlices` objects.

  Any `IndexedSlices` or `SparseTensor` objects in `values` are returned
  unmodified.

  Args:
    values: A list of `None`, `IndexedSlices`, `SparseTensor`, or objects that
      can be consumed by `convert_to_tensor()`.
    dtype: (Optional.) The required `DType` of the returned `Tensor`
      `IndexedSlices`.
    name: (Optional.) A name prefix to use when a new `Tensor` is created, in
      which case element `i` will be given the name `name + '_' + i`.
    as_ref: True if the caller wants the results as ref tensors.

  Returns:
    A list of `Tensor`, `IndexedSlices`, and/or `SparseTensor` objects.

  Raises:
    TypeError: If no conversion function is registered for an element in
      `values`.
    RuntimeError: If a registered conversion function returns an invalid
      value.
  """
  if not isinstance(values, collections.Sequence):
    raise TypeError("values must be a list.")
  ret = []
  for i, value in enumerate(values):
    # `None` entries are preserved as-is (unlike internal_convert_n_to_tensor).
    if value is None:
      ret.append(value)
    else:
      n = None if name is None else "%s_%d" % (name, i)
      ret.append(
          internal_convert_to_tensor_or_indexed_slices(
              value, dtype=dtype, name=n, as_ref=as_ref))
  return ret


def convert_n_to_tensor_or_indexed_slices(values, dtype=None, name=None):
  """Converts `values` to a list of `Tensor` or `IndexedSlices` objects.

  Any `IndexedSlices` or `SparseTensor` objects in `values` are returned
  unmodified.

  Args:
    values: A list of `None`, `IndexedSlices`, `SparseTensor`, or objects that
      can be consumed by `convert_to_tensor()`.
    dtype: (Optional.) The required `DType` of the returned `Tensor`
      `IndexedSlices`.
    name: (Optional.) A name prefix to use when a new `Tensor` is created, in
      which case element `i` will be given the name `name + '_' + i`.

  Returns:
    A list of `Tensor`, `IndexedSlices`, and/or `SparseTensor` objects.

  Raises:
    TypeError: If no conversion function is registered for an element in
      `values`.
    RuntimeError: If a registered conversion function returns an invalid
      value.
  """
  return internal_convert_n_to_tensor_or_indexed_slices(
      values=values, dtype=dtype, name=name, as_ref=False)


def register_tensor_conversion_function(base_type, conversion_func,
                                        priority=100):
  """Registers a function for converting objects of `base_type` to `Tensor`.

  The conversion function must have the following signature:

  ```python
  def conversion_func(value, dtype=None, name=None, as_ref=False):
    # ...
  ```

  It must return a `Tensor` with the given `dtype` if specified. If the
  conversion function creates a new `Tensor`, it should use the given `name`
  if specified. All exceptions will be propagated to the caller.

  The conversion function may return `NotImplemented` for some inputs. In
  this case, the conversion process will continue to try subsequent conversion
  functions.

  If `as_ref` is true, the function must return a `Tensor` reference, such as
  a `Variable`.

  NOTE: The conversion functions will execute in order of priority, followed
  by order of registration. To ensure that a conversion function `F` runs
  before another conversion function `G`, ensure that `F` is registered with
  a smaller priority than `G`.

  Args:
    base_type: The base type or tuple of base types for all objects that
      `conversion_func` accepts.
    conversion_func: A function that converts instances of `base_type` to
      `Tensor`.
    priority: Optional integer that indicates the priority for applying this
      conversion function. Conversion functions with smaller priority values
      run earlier than conversion functions with larger priority values.
      Defaults to 100.
Raises: TypeError: If the arguments do not have the appropriate type. """ if not (isinstance(base_type, type) or (isinstance(base_type, tuple) and all(isinstance(x, type) for x in base_type))): raise TypeError("base_type must be a type or a tuple of types.") if not callable(conversion_func): raise TypeError("conversion_func must be callable.") try: funcs_at_priority = _tensor_conversion_func_registry[priority] except KeyError: funcs_at_priority = [] _tensor_conversion_func_registry[priority] = funcs_at_priority funcs_at_priority.append((base_type, conversion_func)) class IndexedSlices(_TensorLike): """A sparse representation of a set of tensor slices at given indices. This class is a simple wrapper for a pair of `Tensor` objects: * `values`: A `Tensor` of any dtype with shape `[D0, D1, ..., Dn]`. * `indices`: A 1-D integer `Tensor` with shape `[D0]`. An `IndexedSlices` is typically used to represent a subset of a larger tensor `dense` of shape `[LARGE0, D1, .. , DN]` where `LARGE0 >> D0`. The values in `indices` are the indices in the first dimension of the slices that have been extracted from the larger tensor. The dense tensor `dense` represented by an `IndexedSlices` `slices` has ```python dense[slices.indices[i], :, :, :, ...] = slices.values[i, :, :, :, ...] ``` The `IndexedSlices` class is used principally in the definition of gradients for operations that have sparse gradients (e.g. [`tf.gather`](../../api_docs/python/array_ops.md#gather)). Contrast this representation with [`SparseTensor`](../../api_docs/python/sparse_ops.md#SparseTensor), which uses multi-dimensional indices and scalar values. 
@@__init__ @@values @@indices @@dense_shape @@name @@dtype @@device @@op """ def __init__(self, values, indices, dense_shape=None): """Creates an `IndexedSlices`.""" _get_graph_from_inputs([values, indices, dense_shape]) self._values = values self._indices = indices self._dense_shape = dense_shape @property def values(self): """A `Tensor` containing the values of the slices.""" return self._values @property def indices(self): """A 1-D `Tensor` containing the indices of the slices.""" return self._indices @property def dense_shape(self): """A 1-D `Tensor` containing the shape of the corresponding dense tensor.""" return self._dense_shape @property def name(self): """The name of this `IndexedSlices`.""" return self.values.name @property def device(self): """The name of the device on which `values` will be produced, or `None`.""" return self.values.device @property def op(self): """The `Operation` that produces `values` as an output.""" return self.values.op @property def dtype(self): """The `DType` of elements in this tensor.""" return self.values.dtype @property def graph(self): """The `Graph` that contains the values, indices, and shape tensors.""" return self._values.graph def __str__(self): return "IndexedSlices(indices=%s, values=%s%s)" % ( self._indices, self._values, (", dense_shape=%s" % self._dense_shape) if self._dense_shape is not None else "") def __neg__(self): return IndexedSlices(-self.values, self.indices, self.dense_shape) IndexedSlicesValue = collections.namedtuple( "IndexedSlicesValue", ["values", "indices", "dense_shape"]) def _device_string(dev_spec): if isinstance(dev_spec, pydev.DeviceSpec): return dev_spec.to_string() else: return dev_spec def _NodeDef(op_type, name, device=None, attrs=None): """Create a NodeDef proto. Args: op_type: Value for the "op" attribute of the NodeDef proto. name: Value for the "name" attribute of the NodeDef proto. device: string, device, or function from NodeDef to string. 
Value for the "device" attribute of the NodeDef proto. attrs: Optional dictionary where the key is the attribute name (a string) and the value is the respective "attr" attribute of the NodeDef proto (an AttrValue). Returns: A node_def_pb2.NodeDef protocol buffer. """ node_def = node_def_pb2.NodeDef() node_def.op = compat.as_bytes(op_type) node_def.name = compat.as_bytes(name) if attrs is not None: for k, v in six.iteritems(attrs): node_def.attr[k].CopyFrom(v) if device is not None: if callable(device): node_def.device = device(node_def) else: node_def.device = _device_string(device) return node_def # Copied from core/framework/node_def_util.cc # TODO(mrry,josh11b): Consolidate this validation in C++ code. _VALID_OP_NAME_REGEX = re.compile("^[A-Za-z0-9.][A-Za-z0-9_.\\-/]*$") _VALID_SCOPE_NAME_REGEX = re.compile("^[A-Za-z0-9_.\\-/]*$") class Operation(object): """Represents a graph node that performs computation on tensors. An `Operation` is a node in a TensorFlow `Graph` that takes zero or more `Tensor` objects as input, and produces zero or more `Tensor` objects as output. Objects of type `Operation` are created by calling a Python op constructor (such as [`tf.matmul()`](../../api_docs/python/math_ops.md#matmul)) or [`Graph.create_op()`](../../api_docs/python/framework.md#Graph.create_op). For example `c = tf.matmul(a, b)` creates an `Operation` of type "MatMul" that takes tensors `a` and `b` as input, and produces `c` as output. After the graph has been launched in a session, an `Operation` can be executed by passing it to [`Session.run()`](../../api_docs/python/client.md#Session.run). `op.run()` is a shortcut for calling `tf.get_default_session().run(op)`. @@name @@type @@inputs @@control_inputs @@outputs @@device @@graph @@run @@get_attr @@traceback """ def __init__(self, node_def, g, inputs=None, output_types=None, control_inputs=None, input_types=None, original_op=None, op_def=None): r"""Creates an `Operation`. 
    NOTE: This constructor validates the name of the `Operation` (passed
    as `node_def.name`). Valid `Operation` names match the following
    regular expression:

        [A-Za-z0-9.][A-Za-z0-9_.\\-/]*

    Args:
      node_def: `node_def_pb2.NodeDef`.  `NodeDef` for the `Operation`.
        Used for attributes of `node_def_pb2.NodeDef`, typically `name`,
        `op`, and `device`.  The `input` attribute is irrelevant here
        as it will be computed when generating the model.
      g: `Graph`. The parent graph.
      inputs: list of `Tensor` objects. The inputs to this `Operation`.
      output_types: list of `DType` objects.  List of the types of the
        `Tensors` computed by this operation.  The length of this list
        indicates the number of output endpoints of the `Operation`.
      control_inputs: list of operations or tensors from which to have a
        control dependency.
      input_types: List of `DType` objects representing the
        types of the tensors accepted by the `Operation`.  By default
        uses `[x.dtype.base_dtype for x in inputs]`.  Operations that expect
        reference-typed inputs must specify these explicitly.
      original_op: Optional. Used to associate the new `Operation` with an
        existing `Operation` (for example, a replica with the op that was
        replicated).
      op_def: Optional. The `op_def_pb2.OpDef` proto that describes the
        op type that this `Operation` represents.

    Raises:
      TypeError: if control inputs are not Operations or Tensors,
        or if `node_def` is not a `NodeDef`,
        or if `g` is not a `Graph`,
        or if `inputs` are not tensors,
        or if `inputs` and `input_types` are incompatible.
      ValueError: if the `node_def` name is not valid.
    """
    if not isinstance(node_def, node_def_pb2.NodeDef):
      raise TypeError("node_def needs to be a NodeDef: %s" % node_def)
    # Protobuf serialized size is a signed int32; reject anything at or over
    # 2GB (a negative ByteSize indicates overflow).
    if node_def.ByteSize() >= (1 << 31) or node_def.ByteSize() < 0:
      raise ValueError(
          "Cannot create a tensor proto whose content is larger than 2GB.")
    if not _VALID_OP_NAME_REGEX.match(node_def.name):
      raise ValueError("'%s' is not a valid node name" % node_def.name)
    if not isinstance(g, Graph):
      raise TypeError("g needs to be a Graph: %s" % g)
    self._node_def = copy.deepcopy(node_def)
    self._graph = g
    if inputs is None:
      inputs = []
    elif not isinstance(inputs, list):
      raise TypeError("inputs needs to be a list of Tensors: %s" % inputs)
    self._inputs = list(inputs)  # Defensive copy.
    for a in self._inputs:
      if not isinstance(a, Tensor):
        raise TypeError("input needs to be a Tensor: %s" % a)
      # Mark that we consume the inputs.
      a._add_consumer(self)  # pylint: disable=protected-access
    if output_types is None:
      output_types = []
    self._output_types = output_types
    self._outputs = [Tensor(self, i, output_type)
                     for i, output_type in enumerate(output_types)]
    if input_types is None:
      input_types = [i.dtype.base_dtype for i in self._inputs]
    else:
      if not all(x.is_compatible_with(i.dtype)
                 for i, x in zip(self._inputs, input_types)):
        raise TypeError("Inputs are not compatible with input types")
    self._input_types = input_types

    # Build the list of control inputs.
self._control_inputs = [] if control_inputs: for c in control_inputs: c_op = None if isinstance(c, Operation): c_op = c elif isinstance(c, (Tensor, IndexedSlices)): c_op = c.op else: raise TypeError("Control input must be an Operation, " "a Tensor, or IndexedSlices: %s" % c) self._control_inputs.append(c_op) self._original_op = original_op self._op_def = op_def self._traceback = _extract_stack() # Add this op to the current control flow context: self._control_flow_context = g._get_control_flow_context() if self._control_flow_context is not None: self._control_flow_context.AddOp(self) # NOTE(keveman): Control flow context's AddOp could be creating new ops and # setting op.inputs[index] = new_op. Thus the new ops' id could be larger # than this op's id even though this op depend on them. Therefore, delaying # assigning id to this op until all ops this could be dependent on are # created. self._id_value = self._graph._next_id() # pylint: disable=protected-access self._recompute_node_def() def colocation_groups(self): """Returns the list of colocation groups of the op.""" default_colocation_group = [compat.as_bytes("loc:@%s" % self._node_def.name)] if "_class" not in self._node_def.attr: # This op has no explicit colocation group, so it is itself its # own root of a colocation group. return default_colocation_group attr_groups = [class_name for class_name in self.get_attr("_class") if class_name.startswith(b"loc:@")] # If there are no colocation groups in the explicit _class field, # return the default colocation group. return attr_groups if attr_groups else default_colocation_group def values(self): """DEPRECATED: Use outputs.""" return tuple(self.outputs) def _get_control_flow_context(self): """Returns the control flow context of this op. Returns: A context object. """ return self._control_flow_context def _set_control_flow_context(self, context): """Sets the current control flow context of this op. Args: context: a context object. 
""" self._control_flow_context = context @property def name(self): """The full name of this operation.""" return self._node_def.name @property def _id(self): """The unique integer id of this operation.""" return self._id_value @property def device(self): """The name of the device to which this op has been assigned, if any. Returns: The string name of the device to which this op has been assigned, or an empty string if it has not been assigned to a device. """ return self._node_def.device def _set_device(self, device): """Set the device of this operation. Args: device: string or device.. The device to set. """ self._node_def.device = _device_string(device) def _add_input(self, tensor, dtype=None): """Add a new input to this operation. Args: tensor: the Tensor to add as an input. dtype: tf.DType: type of the input; defaults to the tensor's dtype. Raises: TypeError: if tensor is not a Tensor, or if input tensor type is not convertible to dtype. ValueError: if the Tensor is from a different graph. """ if not isinstance(tensor, Tensor): raise TypeError("tensor must be a Tensor: %s" % tensor) _assert_same_graph(self, tensor) if dtype is None: dtype = tensor.dtype else: dtype = dtypes.as_dtype(dtype) if not dtype.is_compatible_with(tensor.dtype): raise TypeError( "Cannot convert a tensor of type %s to an input of type %s" % (tensor.dtype.name, dtype.name)) self._inputs.append(tensor) self._input_types.append(dtype) tensor._add_consumer(self) # pylint: disable=protected-access self._recompute_node_def() def _update_input(self, index, tensor, dtype=None): """Update the input to this operation at the given index. NOTE: This is for TF internal use only. Please don't use it. Args: index: the index of the input to update. tensor: the Tensor to be used as the input at the given index. dtype: tf.DType: type of the input; defaults to the tensor's dtype. Raises: TypeError: if tensor is not a Tensor, or if input tensor type is not convertible to dtype. 
ValueError: if the Tensor is from a different graph. """ if not isinstance(tensor, Tensor): raise TypeError("tensor must be a Tensor: %s" % tensor) _assert_same_graph(self, tensor) if dtype is None: dtype = tensor.dtype else: dtype = dtypes.as_dtype(dtype) if not dtype.is_compatible_with(tensor.dtype): raise TypeError( "Cannot convert a tensor of type %s to an input of type %s" % (tensor.dtype.name, dtype.name)) self._inputs[index].consumers().remove(self) self._inputs[index] = tensor self._input_types[index] = dtype tensor._add_consumer(self) # pylint: disable=protected-access self._recompute_node_def() def _add_control_inputs(self, ops): """Add a list of new control inputs to this operation. Args: ops: the list of Operations to add as control input. Raises: TypeError: if ops is not a list of Operations. ValueError: if any op in ops is from a different graph. """ if ops: for op in ops: if not isinstance(op, Operation): raise TypeError("op must be an Operation: %s" % op) _assert_same_graph(self, op) self._control_inputs.append(op) self._recompute_node_def() def _add_control_input(self, op): """Add a new control input to this operation. Args: op: the Operation to add as control input. Raises: TypeError: if op is not an Operation. ValueError: if op is from a different graph. """ self._add_control_inputs([op]) # Methods below are used when building the NodeDef and Graph proto. 
def _recompute_node_def(self): del self._node_def.input[:] self._node_def.input.extend([t._as_node_def_input() for t in self._inputs]) if self._control_inputs: self._node_def.input.extend(["^%s" % op.name for op in self._control_inputs]) def __str__(self): return str(self._node_def) def __repr__(self): return "<tf.Operation '%s' type=%s>" % (self.name, self.type) @property def outputs(self): """The list of `Tensor` objects representing the outputs of this op.""" return self._outputs # pylint: disable=protected-access class _InputList(object): """Immutable input list wrapper.""" def __init__(self, op): self._op = op def __iter__(self): return iter(self._op._inputs) def __len__(self): return len(self._op._inputs) def __bool__(self): return bool(self._op._inputs) # Python 3 wants __bool__, Python 2.7 wants __nonzero__ __nonzero__ = __bool__ def __getitem__(self, i): return self._op._inputs[i] # pylint: enable=protected-access @property def inputs(self): """The list of `Tensor` objects representing the data inputs of this op.""" return Operation._InputList(self) @property def _input_dtypes(self): return self._input_types @property def control_inputs(self): """The `Operation` objects on which this op has a control dependency. Before this op is executed, TensorFlow will ensure that the operations in `self.control_inputs` have finished executing. This mechanism can be used to run ops sequentially for performance reasons, or to ensure that the side effects of an op are observed in the correct order. Returns: A list of `Operation` objects. """ return self._control_inputs @property def type(self): """The type of the op (e.g. `"MatMul"`).""" return self._node_def.op @property def graph(self): """The `Graph` that contains this operation.""" return self._graph @property def node_def(self): """Returns a serialized `NodeDef` representation of this operation. Returns: A [`NodeDef`](https://www.tensorflow.org/code/tensorflow/core/framework/node_def.proto) protocol buffer. 
""" return self._node_def @property def op_def(self): """Returns the `OpDef` proto that represents the type of this op. Returns: An [`OpDef`](https://www.tensorflow.org/code/tensorflow/core/framework/op_def.proto) protocol buffer. """ return self._op_def @property def traceback(self): """Returns the call stack from when this operation was constructed.""" return _convert_stack(self._traceback) def get_attr(self, name): """Returns the value of the attr of this op with the given `name`. Args: name: The name of the attr to fetch. Returns: The value of the attr, as a Python object. Raises: ValueError: If this op does not have an attr with the given `name`. """ fields = ["s", "i", "f", "b", "type", "shape", "tensor"] if name not in self._node_def.attr: raise ValueError("No attr named '" + name + "' in " + str(self._node_def)) x = self._node_def.attr[name] # Treat an empty oneof value as an empty list. if not x.WhichOneof("value"): return [] if x.HasField("list"): for f in fields: if getattr(x.list, f): return list(getattr(x.list, f)) return [] else: for f in fields: if x.HasField(f): return getattr(x, f) assert False, "Unsupported field type in " + str(x) def run(self, feed_dict=None, session=None): """Runs this operation in a `Session`. Calling this method will execute all preceding operations that produce the inputs needed for this operation. *N.B.* Before invoking `Operation.run()`, its graph must have been launched in a session, and either a default session must be available, or `session` must be specified explicitly. Args: feed_dict: A dictionary that maps `Tensor` objects to feed values. See [`Session.run()`](../../api_docs/python/client.md#Session.run) for a description of the valid feed values. session: (Optional.) The `Session` to be used to run to this operation. If none, the default session will be used. 
""" _run_using_default_session(self, feed_dict, self.graph, session) _gradient_registry = registry.Registry("gradient") class RegisterGradient(object): """A decorator for registering the gradient function for an op type. This decorator is only used when defining a new op type. For an op with `m` inputs and `n` outputs, the gradient function is a function that takes the original `Operation` and `n` `Tensor` objects (representing the gradients with respect to each output of the op), and returns `m` `Tensor` objects (representing the partial gradients with respect to each input of the op). For example, assuming that operations of type `"Sub"` take two inputs `x` and `y`, and return a single output `x - y`, the following gradient function would be registered: ```python @tf.RegisterGradient("Sub") def _sub_grad(unused_op, grad): return grad, tf.negative(grad) ``` The decorator argument `op_type` is the string type of an operation. This corresponds to the `OpDef.name` field for the proto that defines the operation. @@__init__ """ def __init__(self, op_type): """Creates a new decorator with `op_type` as the Operation type. Args: op_type: The string type of an operation. This corresponds to the `OpDef.name` field for the proto that defines the operation. """ if not isinstance(op_type, six.string_types): raise TypeError("op_type must be a string") self._op_type = op_type def __call__(self, f): """Registers the function `f` as gradient function for `op_type`.""" _gradient_registry.register(f, self._op_type) return f def NotDifferentiable(op_type): """Specifies that ops of type `op_type` is not differentiable. This function should *not* be used for operations that have a well-defined gradient that is not yet implemented. This function is only used when defining a new op type. It may be used for ops such as `tf.size()` that are not differentiable. For example: ```python tf.NotDifferentiable("Size") ``` The gradient computed for 'op_type' will then propagate zeros. 
  For ops that have a well-defined gradient but are not yet implemented,
  no declaration should be made, and an error *must* be thrown if
  an attempt to request its gradient is made.

  Args:
    op_type: The string type of an operation. This corresponds to the
      `OpDef.name` field for the proto that defines the operation.

  Raises:
    TypeError: If `op_type` is not a string.

  """
  if not isinstance(op_type, six.string_types):
    raise TypeError("op_type must be a string")
  # Registering None as the gradient function marks this op type as
  # deliberately non-differentiable; get_gradient_function will return None.
  _gradient_registry.register(None, op_type)


# Alias for the old name, will be eventually removed.
NoGradient = NotDifferentiable


def get_gradient_function(op):
  """Returns the function that computes gradients for "op"."""
  # An op with no inputs has nothing to differentiate with respect to.
  if not op.inputs:
    return None
  try:
    # A per-op override (the "_gradient_op_type" attr, set e.g. via
    # Graph.gradient_override_map) takes precedence over the op's type.
    op_type = op.get_attr("_gradient_op_type")
  except ValueError:
    # get_attr raises ValueError when the attr is absent; fall back to type.
    op_type = op.type
  return _gradient_registry.lookup(op_type)


# Registries mapping op type -> shape function.  The "default" registry holds
# weak registrations that explicit ones in _shape_registry may override.
_shape_registry = registry.Registry("shape functions")
_default_shape_function_registry = registry.Registry("default shape functions")

# These are set to common_shapes.call_cpp_shape_fn by op generated code
# (generated by python_op_gen.cc).
# It is set outside ops.py to avoid a circular dependency.
_call_cpp_shape_fn = None
_call_cpp_shape_fn_and_require_op = None


def _set_call_cpp_shape_fn(call_cpp_shape_fn):
  """Sets default shape fns from passed common_shapes.call_cpp_shape_fn."""
  global _call_cpp_shape_fn, _call_cpp_shape_fn_and_require_op
  if _call_cpp_shape_fn:
    return  # already registered

  # Wrapper used as the soft default: does not insist a C++ shape fn exists.
  def call_without_requiring(op):
    return call_cpp_shape_fn(op, require_shape_fn=False)

  _call_cpp_shape_fn = call_without_requiring

  # Wrapper used as the last resort: requires a C++ shape fn and errors if
  # none is registered for the op.
  def call_with_requiring(op):
    return call_cpp_shape_fn(op, require_shape_fn=True)

  _call_cpp_shape_fn_and_require_op = call_with_requiring


class RegisterShape(object):
  """No longer used. Was: A decorator for registering a shape function.

  Shape functions must now be registered via the SetShapeFn on the
  original Op specification in C++.
""" def __init__(self, op_type): """Saves the `op_type` as the `Operation` type.""" if not isinstance(op_type, six.string_types): raise TypeError("op_type must be a string") self._op_type = op_type def __call__(self, f): """Registers "f" as the shape function for "op_type".""" if f is None: assert _call_cpp_shape_fn # None is a special "weak" value that provides a default shape function, # and can be overridden by a non-None registration. try: _default_shape_function_registry.register(_call_cpp_shape_fn, self._op_type) except KeyError: # Ignore duplicate registrations of the weak value. This can # occur if the op library input to wrapper generation # inadvertently links in one or more of the standard op # libraries. pass else: _shape_registry.register(f, self._op_type) return f def set_shapes_for_outputs(op): """Uses the registered shape functions to set the shapes for op's outputs.""" try: shape_func = _shape_registry.lookup(op.type) except LookupError: try: shape_func = _default_shape_function_registry.lookup(op.type) except LookupError: shape_func = _call_cpp_shape_fn_and_require_op shapes = shape_func(op) if shapes is None: raise RuntimeError( "Shape function for op %s did not return any shapes" % op) elif isinstance(shapes, dict): # Returned by call_cpp_shape_fn shapes_dict = shapes shapes = shapes_dict["shapes"] handle_shapes = shapes_dict["handle_shapes"] handle_dtypes = shapes_dict["handle_dtypes"] for output, handle_shape, handle_dtype in zip(op.outputs, handle_shapes, handle_dtypes): # pylint: disable=protected-access output._handle_shape = handle_shape output._handle_dtype = handle_dtype # pylint: enable=protected-access if len(op.outputs) != len(shapes): raise RuntimeError( "Shape function for op %s returned %d shapes but expected %d %s %s" % (op, len(shapes), len(op.outputs), shape_func.__name__, str(shapes))) for output, s in zip(op.outputs, shapes): output.set_shape(s) class OpStats(object): """A holder for statistics about an operator. 
This class holds information about the resource requirements for an op, including the size of its weight parameters on-disk and how many FLOPS it requires to execute forward inference. If you define a new operation, you can create a function that will return a set of information about its usage of the CPU and disk space when serialized. The function itself takes a Graph object that's been set up so you can call methods like get_tensor_by_name to help calculate the results, and a NodeDef argument. """ def __init__(self, statistic_type, value=None): """Sets up the initial placeholders for the statistics.""" self.statistic_type = statistic_type self.value = value @property def statistic_type(self): return self._statistic_type @statistic_type.setter def statistic_type(self, statistic_type): self._statistic_type = statistic_type @property def value(self): return self._value @value.setter def value(self, value): self._value = value def __iadd__(self, other): if other.statistic_type != self.statistic_type: raise ValueError("Can't add an OpStat of type %s to one of %s.", self.statistic_type, other.statistic_type) if self.value is None: self.value = other.value elif other.value is not None: self._value += other.value return self _stats_registry = registry.Registry("statistical functions") class RegisterStatistics(object): """A decorator for registering the statistics function for an op type. This decorator can be defined for an op type so that it gives a report on the resources used by an instance of an operator, in the form of an OpStats object. Well-known types of statistics include these so far: - flops: When running a graph, the bulk of the computation happens doing numerical calculations like matrix multiplications. This type allows a node to return how many floating-point operations it takes to complete. The total number of FLOPs for a graph is a good guide to its expected latency. 
You can add your own statistics just by picking a new type string, registering functions for the ops you care about, and then calling get_stats_for_node_def. If a statistic for an op is registered multiple times, a KeyError will be raised. Since the statistics is counted on a per-op basis. It is not suitable for model parameters (capacity), which is expected to be counted only once, even if it is shared by multiple ops. (e.g. RNN) For example, you can define a new metric called doohickey for a Foo operation by placing this in your code: ```python @ops.RegisterStatistics("Foo", "doohickey") def _calc_foo_bojangles(unused_graph, unused_node_def): return ops.OpStats("doohickey", 20) ``` Then in client code you can retrieve the value by making this call: ```python doohickey = ops.get_stats_for_node_def(graph, node_def, "doohickey") ``` If the NodeDef is for an op with a registered doohickey function, you'll get back the calculated amount in doohickey.value, or None if it's not defined. """ def __init__(self, op_type, statistic_type): """Saves the `op_type` as the `Operation` type.""" if not isinstance(op_type, six.string_types): raise TypeError("op_type must be a string.") if "," in op_type: raise TypeError("op_type must not contain a comma.") self._op_type = op_type if not isinstance(statistic_type, six.string_types): raise TypeError("statistic_type must be a string.") if "," in statistic_type: raise TypeError("statistic_type must not contain a comma.") self._statistic_type = statistic_type def __call__(self, f): """Registers "f" as the statistics function for "op_type".""" _stats_registry.register(f, self._op_type + "," + self._statistic_type) return f def get_stats_for_node_def(graph, node, statistic_type): """Looks up the node's statistics function in the registry and calls it. This function takes a Graph object and a NodeDef from a GraphDef, and if there's an associated statistics method, calls it and returns a result. 
If no function has been registered for the particular node type, it returns an empty statistics object. Args: graph: A Graph object that's been set up with the node's graph. node: A NodeDef describing the operator. statistic_type: A string identifying the statistic we're interested in. Returns: An OpStats object containing information about resource usage. """ try: stats_func = _stats_registry.lookup(node.op + "," + statistic_type) result = stats_func(graph, node) except LookupError: result = OpStats(statistic_type) return result def _name_from_scope_name(name): """Returns the name of an op given the name of its scope. Args: name: the name of the scope. Returns: the name of the op (equal to scope name minus any trailing slash). """ return name[:-1] if name[-1] == "/" else name class Graph(object): """A TensorFlow computation, represented as a dataflow graph. A `Graph` contains a set of [`Operation`](../../api_docs/python/framework.md#Operation) objects, which represent units of computation; and [`Tensor`](../../api_docs/python/framework.md#Tensor) objects, which represent the units of data that flow between operations. A default `Graph` is always registered, and accessible by calling [`tf.get_default_graph()`](../../api_docs/python/framework.md#get_default_graph). To add an operation to the default graph, simply call one of the functions that defines a new `Operation`: ```python c = tf.constant(4.0) assert c.graph is tf.get_default_graph() ``` Another typical usage involves the [`Graph.as_default()`](../../api_docs/python/framework.md#Graph.as_default) context manager, which overrides the current default graph for the lifetime of the context: ```python g = tf.Graph() with g.as_default(): # Define operations and tensors in `g`. c = tf.constant(30.0) assert c.graph is g ``` Important note: This class *is not* thread-safe for graph construction. All operations should be created from a single thread, or external synchronization must be provided. 
Unless otherwise specified, all methods are not thread-safe. @@__init__ @@as_default @@as_graph_def @@finalize @@finalized @@control_dependencies @@device @@name_scope A `Graph` instance supports an arbitrary number of "collections" that are identified by name. For convenience when building a large graph, collections can store groups of related objects: for example, the `tf.Variable` uses a collection (named [`tf.GraphKeys.GLOBAL_VARIABLES`](../../api_docs/python/framework.md#GraphKeys)) for all variables that are created during the construction of a graph. The caller may define additional collections by specifying a new name. @@add_to_collection @@add_to_collections @@get_collection @@get_collection_ref @@as_graph_element @@get_operation_by_name @@get_tensor_by_name @@get_operations @@seed @@unique_name @@version @@graph_def_versions @@create_op @@gradient_override_map """ def __init__(self): """Creates a new, empty Graph.""" # Protects the core state that may be accessed by multiple readers. # Only state that can be returned via public accessors (`as_graph_def()`, # `get_operations()`, `as_graph_element()`, `get_collection()`, and # `get_collection_ref()`) is by the lock. Thread-safety is provided on a # best-effort basis to support buggy programs, and is not guaranteed by the # public `tf.Graph` API. # NOTE(mrry): This does not protect the various stacks. A warning will # be reported if these are used from multiple threads self._lock = threading.Lock() self._nodes_by_id = dict() # GUARDED_BY(self._lock) self._next_id_counter = 0 # GUARDED_BY(self._lock) self._nodes_by_name = dict() # GUARDED_BY(self._lock) self._version = 0 # GUARDED_BY(self._lock) # Current name stack: uniquified names self._name_stack = "" # Maps a name used in the graph to the next id to use for that name. self._names_in_use = {} # Functions that will be applied to choose a device if none is specified. self._device_function_stack = [] # Default original_op applied to new ops. 
self._default_original_op = None # Current control flow context. It could be either CondContext or # WhileContext defined in ops/control_flow_ops.py self._control_flow_context = None # A new node will depend of the union of all of the nodes in the stack. self._control_dependencies_stack = [] # Arbritrary collections of objects. self._collections = {} # The graph-level random seed self._seed = None # A dictionary of attributes that should be applied to all ops. self._attr_scope_map = {} # A map from op type to the kernel label that should be used. self._op_to_kernel_label_map = {} # A map from op type to an alternative op type that should be used when # computing gradients. self._gradient_override_map = {} # True if the graph is considered "finalized". In that case no # new operations can be added. self._finalized = False # Functions defined in the graph self._functions = collections.OrderedDict() # Default GraphDef versions self._graph_def_versions = versions_pb2.VersionDef( producer=versions.GRAPH_DEF_VERSION, min_consumer=versions.GRAPH_DEF_VERSION_MIN_CONSUMER) self._building_function = False # Stack of colocate_with ops self._colocation_stack = [] # Set of tensors that are dangerous to feed! self._unfeedable_tensors = set() # Set of operations that are dangerous to fetch! self._unfetchable_ops = set() # A map of tensor handle placeholder to tensor dtype. self._handle_feeders = {} # A map from tensor handle to its read op. self._handle_readers = {} # A map from tensor handle to its move op. self._handle_movers = {} # A map from tensor handle to its delete op. self._handle_deleters = {} # Resource container. self._container = "" self._registered_ops = op_def_registry.get_registered_ops() def _check_not_finalized(self): """Check if the graph is finalized. Raises: RuntimeError: If the graph finalized. """ if self._finalized: raise RuntimeError("Graph is finalized and cannot be modified.") def _add_op(self, op): """Adds 'op' to the graph. 
Args: op: the Operator or Tensor to add. Raises: TypeError: if op is not an Operation or Tensor. ValueError: if the op.name or op._id are already used. """ self._check_not_finalized() if not isinstance(op, (Tensor, Operation)): raise TypeError("op must be a Tensor or Operation: %s" % op) with self._lock: # pylint: disable=protected-access if op._id in self._nodes_by_id: raise ValueError("cannot add an op with id %d as it already " "exists in the graph" % op._id) if op.name in self._nodes_by_name: raise ValueError("cannot add op with name %s as that name " "is already used" % op.name) self._nodes_by_id[op._id] = op self._nodes_by_name[op.name] = op self._version = max(self._version, op._id) # pylint: enable=protected-access @property def version(self): """Returns a version number that increases as ops are added to the graph. Note that this is unrelated to the [GraphDef version](#Graph.graph_def_version). """ if self._finalized: return self._version with self._lock: return self._version @property def graph_def_versions(self): """The GraphDef version information of this graph. For details on the meaning of each version, see [`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto). Returns: A `VersionDef`. """ return self._graph_def_versions @property def seed(self): """The graph-level random seed of this graph.""" return self._seed @seed.setter def seed(self, seed): self._seed = seed @property def finalized(self): """True if this graph has been finalized.""" return self._finalized def finalize(self): """Finalizes this graph, making it read-only. After calling `g.finalize()`, no new operations can be added to `g`. This method is used to ensure that no operations are added to a graph when it is shared between multiple threads, for example when using a [`QueueRunner`](../../api_docs/python/train.md#QueueRunner). """ self._finalized = True def _unsafe_unfinalize(self): """Opposite of `finalize`. Internal interface. 
NOTE: Unfinalizing a graph could have negative impact on performance, especially in a multi-threaded environment. Unfinalizing a graph when it is in use by a Session may lead to undefined behavior. Ensure that all sessions using a graph are closed before calling this method. """ self._finalized = False def _get_control_flow_context(self): """Returns the current control flow context. Returns: A context object. """ return self._control_flow_context def _set_control_flow_context(self, context): """Sets the current control flow context. Args: context: a context object. """ self._control_flow_context = context def _as_graph_def(self, from_version=None, add_shapes=False): """Returns a serialized `GraphDef` representation of this graph. The serialized `GraphDef` can be imported into another `Graph` (using [`import_graph_def()`](#import_graph_def)) or used with the [C++ Session API](../../api_docs/cc/index.md). This method is thread-safe. Args: from_version: Optional. If this is set, returns a `GraphDef` containing only the nodes that were added to this graph since its `version` property had the given value. add_shapes: If true, adds an "_output_shapes" list attr to each node with the inferred shapes of each of its outputs. Returns: A tuple containing a [`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto) protocol buffer, and the version of the graph to which that `GraphDef` corresponds. Raises: ValueError: If the `graph_def` would be too large. 
""" with self._lock: graph = graph_pb2.GraphDef() graph.versions.CopyFrom(self._graph_def_versions) bytesize = 0 for op_id in sorted(self._nodes_by_id): op = self._nodes_by_id[op_id] if from_version is None or op_id > from_version: graph.node.extend([op.node_def]) if op.outputs and add_shapes: assert "_output_shapes" not in graph.node[-1].attr graph.node[-1].attr["_output_shapes"].list.shape.extend([ output.get_shape().as_proto() for output in op.outputs]) bytesize += op.node_def.ByteSize() if bytesize >= (1 << 31) or bytesize < 0: raise ValueError("GraphDef cannot be larger than 2GB.") if self._functions: for f in self._functions.values(): bytesize += f.definition.ByteSize() if bytesize >= (1 << 31) or bytesize < 0: raise ValueError("GraphDef cannot be larger than 2GB.") graph.library.function.extend([f.definition]) if f.grad_func_name: grad_def = function_pb2.GradientDef() grad_def.function_name = f.name grad_def.gradient_func = f.grad_func_name graph.library.gradient.extend([grad_def]) return graph, self._version def as_graph_def(self, from_version=None, add_shapes=False): """Returns a serialized `GraphDef` representation of this graph. The serialized `GraphDef` can be imported into another `Graph` (using [`import_graph_def()`](#import_graph_def)) or used with the [C++ Session API](../../api_docs/cc/index.md). This method is thread-safe. Args: from_version: Optional. If this is set, returns a `GraphDef` containing only the nodes that were added to this graph since its `version` property had the given value. add_shapes: If true, adds an "_output_shapes" list attr to each node with the inferred shapes of each of its outputs. Returns: A [`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto) protocol buffer. Raises: ValueError: If the `graph_def` would be too large. 
""" result, _ = self._as_graph_def(from_version, add_shapes) return result def _is_function(self, name): """Tests whether 'name' is registered in this graph's function library. Args: name: string op name. Returns: bool indicating whether or not 'name' is registered in function library. """ return name in self._functions def _get_function(self, name): """Returns the function definition for 'name'. Args: name: string function name. Returns: The function def proto. """ return self._functions.get(name, None) def _add_function(self, function): """Adds a function to the graph. After the function has been added, you can call to the function by passing the function name in place of an op name to `Graph.create_op()`. Args: function: A `_DefinedFunction` object. Raises: ValueError: if another function is defined with the same name. """ name = function.name previous = self._functions.get(name, None) if previous: raise ValueError("Another function is already defined with that name") # Sanity checks on gradient definition. if (function.grad_func_name is not None) and ( function.python_grad_func is not None): raise ValueError("Gradient defined twice for function %s" % name) # Need a new-enough consumer to support the functions we add to the graph. if self._graph_def_versions.min_consumer < 12: self._graph_def_versions.min_consumer = 12 self._functions[name] = function @property def building_function(self): """Returns True iff this graph represents a function.""" return self._building_function # Helper functions to create operations. def create_op(self, op_type, inputs, dtypes, input_types=None, name=None, attrs=None, op_def=None, compute_shapes=True, compute_device=True): """Creates an `Operation` in this graph. This is a low-level interface for creating an `Operation`. Most programs will not call this method directly, and instead use the Python op constructors, such as `tf.constant()`, which add ops to the default graph. Args: op_type: The `Operation` type to create. 
This corresponds to the `OpDef.name` field for the proto that defines the operation. inputs: A list of `Tensor` objects that will be inputs to the `Operation`. dtypes: A list of `DType` objects that will be the types of the tensors that the operation produces. input_types: (Optional.) A list of `DType`s that will be the types of the tensors that the operation consumes. By default, uses the base `DType` of each input in `inputs`. Operations that expect reference-typed inputs must specify `input_types` explicitly. name: (Optional.) A string name for the operation. If not specified, a name is generated based on `op_type`. attrs: (Optional.) A dictionary where the key is the attribute name (a string) and the value is the respective `attr` attribute of the `NodeDef` proto that will represent the operation (an `AttrValue` proto). op_def: (Optional.) The `OpDef` proto that describes the `op_type` that the operation will have. compute_shapes: (Optional.) If True, shape inference will be performed to compute the shapes of the outputs. compute_device: (Optional.) If True, device functions will be executed to compute the device property of the Operation. Raises: TypeError: if any of the inputs is not a `Tensor`. ValueError: if colocation conflicts with existing device assignment. Returns: An `Operation` object. """ self._check_not_finalized() for idx, a in enumerate(inputs): if not isinstance(a, Tensor): raise TypeError("Input #%d is not a tensor: %s" % (idx, a)) if name is None: name = op_type # If a names ends with a '/' it is a "name scope" and we use it as-is, # after removing the trailing '/'. if name and name[-1] == "/": name = _name_from_scope_name(name) else: name = self.unique_name(name) node_def = _NodeDef(op_type, name, device=None, attrs=attrs) # Apply any additional attributes requested. Do not overwrite any existing # attributes. 
for key, value in self._attr_scope_map.items(): if key not in node_def.attr: if callable(value): value = value(node_def) if not isinstance(value, (type(None), attr_value_pb2.AttrValue)): raise TypeError( "Callable for scope map key '%s' must return either None or " "an AttrValue protocol buffer; but it returned: %s" % (key, value)) node_def.attr[key].CopyFrom(value) # Apply a kernel label if one has been specified for this op_type. try: kernel_label = self._op_to_kernel_label_map[op_type] node_def.attr["_kernel"].CopyFrom( attr_value_pb2.AttrValue(s=compat.as_bytes(kernel_label))) except KeyError: pass # Apply the overriding op_type for gradients if one has been # specified for this op_type. try: mapped_op_type = self._gradient_override_map[op_type] node_def.attr["_gradient_op_type"].CopyFrom( attr_value_pb2.AttrValue(s=compat.as_bytes(mapped_op_type))) except KeyError: pass control_inputs = self._control_dependencies_for_inputs(inputs) ret = Operation(node_def, self, inputs=inputs, output_types=dtypes, control_inputs=control_inputs, input_types=input_types, original_op=self._default_original_op, op_def=op_def) if compute_shapes: set_shapes_for_outputs(ret) self._add_op(ret) self._record_op_seen_by_control_dependencies(ret) if compute_device: self._apply_device_functions(ret) if self._colocation_stack: all_colocation_groups = [] for colocation_op in self._colocation_stack: all_colocation_groups.extend(colocation_op.colocation_groups()) if colocation_op.device: # Make this device match the device of the colocated op, to # provide consistency between the device and the colocation # property. if ret.device and ret.device != colocation_op.device: logging.warning("Tried to colocate %s with an op %s that had " "a different device: %s vs %s. 
" "Ignoring colocation property.", name, colocation_op.name, ret.device, colocation_op.device) else: ret._set_device(colocation_op.device) all_colocation_groups = sorted(set(all_colocation_groups)) ret.node_def.attr["_class"].CopyFrom(attr_value_pb2.AttrValue( list=attr_value_pb2.AttrValue.ListValue(s=all_colocation_groups))) # Sets "container" attribute if # (1) self._container is not None # (2) "is_stateful" is set in OpDef # (3) "container" attribute is in OpDef # (4) "container" attribute is None if (self._container and op_type in self._registered_ops and self._registered_ops[op_type].is_stateful and "container" in ret.node_def.attr and not ret.node_def.attr["container"].s): ret.node_def.attr["container"].CopyFrom( attr_value_pb2.AttrValue(s=compat.as_bytes(self._container))) return ret def as_graph_element(self, obj, allow_tensor=True, allow_operation=True): """Returns the object referred to by `obj`, as an `Operation` or `Tensor`. This function validates that `obj` represents an element of this graph, and gives an informative error message if it is not. This function is the canonical way to get/validate an object of one of the allowed types from an external argument reference in the Session API. This method may be called concurrently from multiple threads. Args: obj: A `Tensor`, an `Operation`, or the name of a tensor or operation. Can also be any object with an `_as_graph_element()` method that returns a value of one of these types. allow_tensor: If true, `obj` may refer to a `Tensor`. allow_operation: If true, `obj` may refer to an `Operation`. Returns: The `Tensor` or `Operation` in the Graph corresponding to `obj`. Raises: TypeError: If `obj` is not a type we support attempting to convert to types. ValueError: If `obj` is of an appropriate type but invalid. For example, an invalid string. KeyError: If `obj` is not an object in the graph. 
""" if self._finalized: return self._as_graph_element_locked(obj, allow_tensor, allow_operation) with self._lock: return self._as_graph_element_locked(obj, allow_tensor, allow_operation) def _as_graph_element_locked(self, obj, allow_tensor, allow_operation): """See `Graph.as_graph_element()` for details.""" # The vast majority of this function is figuring # out what an API user might be doing wrong, so # that we can give helpful error messages. # # Ideally, it would be nice to split it up, but we # need context to generate nice error messages. if allow_tensor and allow_operation: types_str = "Tensor or Operation" elif allow_tensor: types_str = "Tensor" elif allow_operation: types_str = "Operation" else: raise ValueError("allow_tensor and allow_operation can't both be False.") temp_obj = _as_graph_element(obj) if temp_obj is not None: obj = temp_obj # If obj appears to be a name... if isinstance(obj, compat.bytes_or_text_types): name = compat.as_str(obj) if ":" in name and allow_tensor: # Looks like a Tensor name and can be a Tensor. try: op_name, out_n = name.split(":") out_n = int(out_n) except: raise ValueError("The name %s looks a like a Tensor name, but is " "not a valid one. Tensor names must be of the " "form \"<op_name>:<output_index>\"." % repr(name)) if op_name in self._nodes_by_name: op = self._nodes_by_name[op_name] else: raise KeyError("The name %s refers to a Tensor which does not " "exist. The operation, %s, does not exist in the " "graph." % (repr(name), repr(op_name))) try: return op.outputs[out_n] except: raise KeyError("The name %s refers to a Tensor which does not " "exist. The operation, %s, exists but only has " "%s outputs." % (repr(name), repr(op_name), len(op.outputs))) elif ":" in name and not allow_tensor: # Looks like a Tensor name but can't be a Tensor. raise ValueError("Name %s appears to refer to a Tensor, not a %s." 
% (repr(name), types_str)) elif ":" not in name and allow_operation: # Looks like an Operation name and can be an Operation. if name not in self._nodes_by_name: raise KeyError("The name %s refers to an Operation not in the " "graph." % repr(name)) return self._nodes_by_name[name] elif ":" not in name and not allow_operation: # Looks like an Operation name but can't be an Operation. if name in self._nodes_by_name: # Yep, it's an Operation name err_msg = ("The name %s refers to an Operation, not a %s." % (repr(name), types_str)) else: err_msg = ("The name %s looks like an (invalid) Operation name, " "not a %s." % (repr(name), types_str)) err_msg += (" Tensor names must be of the form " "\"<op_name>:<output_index>\".") raise ValueError(err_msg) elif isinstance(obj, Tensor) and allow_tensor: # Actually obj is just the object it's referring to. if obj.graph is not self: raise ValueError("Tensor %s is not an element of this graph." % obj) return obj elif isinstance(obj, Operation) and allow_operation: # Actually obj is just the object it's referring to. if obj.graph is not self: raise ValueError("Operation %s is not an element of this graph." % obj) return obj else: # We give up! raise TypeError("Can not convert a %s into a %s." % (type(obj).__name__, types_str)) def get_operations(self): """Return the list of operations in the graph. You can modify the operations in place, but modifications to the list such as inserts/delete have no effect on the list of operations known to the graph. This method may be called concurrently from multiple threads. Returns: A list of Operations. """ if self._finalized: return list(self._nodes_by_id.values()) with self._lock: return list(self._nodes_by_id.values()) def get_operation_by_name(self, name): """Returns the `Operation` with the given `name`. This method may be called concurrently from multiple threads. Args: name: The name of the `Operation` to return. Returns: The `Operation` with the given `name`. 
Raises: TypeError: If `name` is not a string. KeyError: If `name` does not correspond to an operation in this graph. """ if not isinstance(name, six.string_types): raise TypeError("Operation names are strings (or similar), not %s." % type(name).__name__) return self.as_graph_element(name, allow_tensor=False, allow_operation=True) def get_tensor_by_name(self, name): """Returns the `Tensor` with the given `name`. This method may be called concurrently from multiple threads. Args: name: The name of the `Tensor` to return. Returns: The `Tensor` with the given `name`. Raises: TypeError: If `name` is not a string. KeyError: If `name` does not correspond to a tensor in this graph. """ # Names should be strings. if not isinstance(name, six.string_types): raise TypeError("Tensor names are strings (or similar), not %s." % type(name).__name__) return self.as_graph_element(name, allow_tensor=True, allow_operation=False) def _next_id(self): """Id for next Operation instance. Also increments the internal id.""" self._check_not_finalized() with self._lock: self._next_id_counter += 1 return self._next_id_counter @property def _last_id(self): return self._next_id_counter def as_default(self): """Returns a context manager that makes this `Graph` the default graph. This method should be used if you want to create multiple graphs in the same process. For convenience, a global default graph is provided, and all ops will be added to this graph if you do not create a new graph explicitly. Use this method with the `with` keyword to specify that ops created within the scope of a block should be added to this graph. The default graph is a property of the current thread. If you create a new thread, and wish to use the default graph in that thread, you must explicitly add a `with g.as_default():` in that thread's function. The following code examples are equivalent: ```python # 1. Using Graph.as_default(): g = tf.Graph() with g.as_default(): c = tf.constant(5.0) assert c.graph is g # 2. 
Constructing and making default: with tf.Graph().as_default() as g: c = tf.constant(5.0) assert c.graph is g ``` Returns: A context manager for using this graph as the default graph. """ return _default_graph_stack.get_controller(self) def add_to_collection(self, name, value): """Stores `value` in the collection with the given `name`. Note that collections are not sets, so it is possible to add a value to a collection several times. Args: name: The key for the collection. The `GraphKeys` class contains many standard names for collections. value: The value to add to the collection. """ self._check_not_finalized() with self._lock: if name not in self._collections: self._collections[name] = [value] else: self._collections[name].append(value) def add_to_collections(self, names, value): """Stores `value` in the collections given by `names`. Note that collections are not sets, so it is possible to add a value to a collection several times. This function makes sure that duplicates in `names` are ignored, but it will not check for pre-existing membership of `value` in any of the collections in `names`. `names` can be any iterable, but if `names` is a string, it is treated as a single collection name. Args: names: The keys for the collections to add to. The `GraphKeys` class contains many standard names for collections. value: The value to add to the collections. """ # Make sure names are unique, but treat strings as a single collection name names = (names,) if isinstance(names, six.string_types) else set(names) for name in names: self.add_to_collection(name, value) def get_collection_ref(self, name): """Returns a list of values in the collection with the given `name`. If the collection exists, this returns the list itself, which can be modified in place to change the collection. If the collection does not exist, it is created as an empty list and the list is returned. 
This is different from `get_collection()` which always returns a copy of the collection list if it exists and never creates an empty collection. Args: name: The key for the collection. For example, the `GraphKeys` class contains many standard names for collections. Returns: The list of values in the collection with the given `name`, or an empty list if no value has been added to that collection. """ with self._lock: coll_list = self._collections.get(name, None) if coll_list is None: coll_list = [] self._collections[name] = coll_list return coll_list def get_collection(self, name, scope=None): """Returns a list of values in the collection with the given `name`. This is different from `get_collection_ref()` which always returns the actual collection list if it exists in that it returns a new list each time it is called. Args: name: The key for the collection. For example, the `GraphKeys` class contains many standard names for collections. scope: (Optional.) If supplied, the resulting list is filtered to include only items whose `name` attribute matches using `re.match`. Items without a `name` attribute are never returned if a scope is supplied and the choice or `re.match` means that a `scope` without special tokens filters by prefix. Returns: The list of values in the collection with the given `name`, or an empty list if no value has been added to that collection. The list contains the values in the order under which they were collected. """ with self._lock: coll_list = self._collections.get(name, None) if coll_list is None: return [] if scope is None: return list(coll_list) else: c = [] regex = re.compile(scope) for item in coll_list: if hasattr(item, "name") and regex.match(item.name): c.append(item) return c def get_all_collection_keys(self): """Returns a list of collections used in this graph.""" with self._lock: return [x for x in self._collections if isinstance(x, six.string_types)] def clear_collection(self, name): """Clears all values in a collection. 
Args: name: The key for the collection. The `GraphKeys` class contains many standard names for collections. """ self._check_not_finalized() with self._lock: if name in self._collections: del self._collections[name] @contextlib.contextmanager def _original_op(self, op): """Python 'with' handler to help annotate ops with their originator. An op may have an 'original_op' property that indicates the op on which it was based. For example a replica op is based on the op that was replicated and a gradient op is based on the op that was differentiated. All ops created in the scope of this 'with' handler will have the given 'op' as their original op. Args: op: The Operation that all ops created in this scope will have as their original op. Yields: Nothing. """ old_original_op = self._default_original_op try: self._default_original_op = op yield finally: self._default_original_op = old_original_op # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def name_scope(self, name): r"""Returns a context manager that creates hierarchical names for operations. A graph maintains a stack of name scopes. A `with name_scope(...):` statement pushes a new name onto the stack for the lifetime of the context. The `name` argument will be interpreted as follows: * A string (not ending with '/') will create a new name scope, in which `name` is appended to the prefix of all operations created in the context. If `name` has been used before, it will be made unique by calling `self.unique_name(name)`. * A scope previously captured from a `with g.name_scope(...) as scope:` statement will be treated as an "absolute" name scope, which makes it possible to re-enter existing scopes. * A value of `None` or the empty string will reset the current name scope to the top-level (empty) name scope. 
For example: ```python with tf.Graph().as_default() as g: c = tf.constant(5.0, name="c") assert c.op.name == "c" c_1 = tf.constant(6.0, name="c") assert c_1.op.name == "c_1" # Creates a scope called "nested" with g.name_scope("nested") as scope: nested_c = tf.constant(10.0, name="c") assert nested_c.op.name == "nested/c" # Creates a nested scope called "inner". with g.name_scope("inner"): nested_inner_c = tf.constant(20.0, name="c") assert nested_inner_c.op.name == "nested/inner/c" # Create a nested scope called "inner_1". with g.name_scope("inner"): nested_inner_1_c = tf.constant(30.0, name="c") assert nested_inner_1_c.op.name == "nested/inner_1/c" # Treats `scope` as an absolute name scope, and # switches to the "nested/" scope. with g.name_scope(scope): nested_d = tf.constant(40.0, name="d") assert nested_d.op.name == "nested/d" with g.name_scope(""): e = tf.constant(50.0, name="e") assert e.op.name == "e" ``` The name of the scope itself can be captured by `with g.name_scope(...) as scope:`, which stores the name of the scope in the variable `scope`. This value can be used to name an operation that represents the overall result of executing the ops in a scope. For example: ```python inputs = tf.constant(...) with g.name_scope('my_layer') as scope: weights = tf.Variable(..., name="weights") biases = tf.Variable(..., name="biases") affine = tf.matmul(inputs, weights) + biases output = tf.nn.relu(affine, name=scope) ``` NOTE: This constructor validates the given `name`. Valid scope names match one of the following regular expressions: [A-Za-z0-9.][A-Za-z0-9_.\\-/]* (for scopes at the root) [A-Za-z0-9_.\\-/]* (for other scopes) Args: name: A name for the scope. Returns: A context manager that installs `name` as a new name scope. Raises: ValueError: If `name` is not a valid scope name, according to the rules above. 
""" if name: if self._name_stack: # Scopes created in a nested scope may have initial characters # that are illegal as the initial character of an op name # (viz. '-', '\', '/', and '_'). if not _VALID_SCOPE_NAME_REGEX.match(name): raise ValueError("'%s' is not a valid scope name" % name) else: # Scopes created in the root must match the more restrictive # op name regex, which constrains the initial character. if not _VALID_OP_NAME_REGEX.match(name): raise ValueError("'%s' is not a valid scope name" % name) try: old_stack = self._name_stack if not name: # Both for name=None and name="" we re-set to empty scope. new_stack = None elif name and name[-1] == "/": new_stack = _name_from_scope_name(name) else: new_stack = self.unique_name(name) self._name_stack = new_stack yield "" if new_stack is None else new_stack + "/" finally: self._name_stack = old_stack # pylint: enable=g-doc-return-or-yield def unique_name(self, name, mark_as_used=True): """Return a unique operation name for `name`. Note: You rarely need to call `unique_name()` directly. Most of the time you just need to create `with g.name_scope()` blocks to generate structured names. `unique_name` is used to generate structured names, separated by `"/"`, to help identify operations when debugging a graph. Operation names are displayed in error messages reported by the TensorFlow runtime, and in various visualization tools such as TensorBoard. If `mark_as_used` is set to `True`, which is the default, a new unique name is created and marked as in use. If it's set to `False`, the unique name is returned without actually being marked as used. This is useful when the caller simply wants to know what the name to be created will be. Args: name: The name for an operation. mark_as_used: Whether to mark this name as being used. Returns: A string to be passed to `create_op()` that will be used to name the operation being created. 
""" if self._name_stack: name = self._name_stack + "/" + name i = self._names_in_use.get(name, 0) # Increment the number for "name". if mark_as_used: self._names_in_use[name] = i + 1 if i > 0: base_name = name # Make sure the composed name is not already used. while name in self._names_in_use: name = "%s_%d" % (base_name, i) i += 1 # Mark the composed name as used in case someone wants # to call unique_name("name_1"). if mark_as_used: self._names_in_use[name] = 1 return name @contextlib.contextmanager def colocate_with(self, op, ignore_existing=False): """Returns a context manager that specifies an op to colocate with. Note: this function is not for public use, only for internal libraries. For example: ```python a = tf.Variable([1.0]) with g.colocate_with(a): b = tf.constant(1.0) c = tf.add(a, b) ``` `b` and `c` will always be colocated with `a`, no matter where `a` is eventually placed. **NOTE** Using a colocation scope resets any existing device constraints. If `op` is `None` then `ignore_existing` must be `True` and the new scope resets all colocation and device constraints. Args: op: The op to colocate all created ops with, or `None`. ignore_existing: If true, only applies colocation of this op within the context, rather than applying all colocation properties on the stack. If `op` is `None`, this value must be `True`. Raises: ValueError: if op is None but ignore_existing is False. Yields: A context manager that specifies the op with which to colocate newly created ops. """ if op is None and not ignore_existing: raise ValueError( "Trying to reset colocation (op is None) but " "ignore_existing is not True") if op is not None and not isinstance(op, Operation): # We always want to colocate with the reference op. 
op = internal_convert_to_tensor_or_indexed_slices(op, as_ref=True).op # By default, colocate_with resets the device function stack, # since colocate_with is typically used in specific internal # library functions where colocation is intended to be "stronger" # than device functions. # # In the future, a caller may specify that device_functions win # over colocation, in which case we can add support. device_fn_tmp = self._device_function_stack self._device_function_stack = [] if ignore_existing: current_stack = self._colocation_stack self._colocation_stack = [] if op is not None: self._colocation_stack.append(op) try: yield finally: # Restore device function stack self._device_function_stack = device_fn_tmp if op is not None: self._colocation_stack.pop() # Reset the colocation stack if requested. if ignore_existing: self._colocation_stack = current_stack @contextlib.contextmanager def device(self, device_name_or_function): """Returns a context manager that specifies the default device to use. The `device_name_or_function` argument may either be a device name string, a device function, or None: * If it is a device name string, all operations constructed in this context will be assigned to the device with that name, unless overridden by a nested `device()` context. * If it is a function, it will be treated as a function from Operation objects to device name strings, and invoked each time a new Operation is created. The Operation will be assigned to the device with the returned name. * If it is None, all `device()` invocations from the enclosing context will be ignored. For information about the valid syntax of device name strings, see the documentation in [`DeviceNameUtils`](https://www.tensorflow.org/code/tensorflow/core/util/device_name_utils.h). For example: ```python with g.device('/gpu:0'): # All operations constructed in this context will be placed # on GPU 0. with g.device(None): # All operations constructed in this context will have no # assigned device. 
# Defines a function from `Operation` to device string. def matmul_on_gpu(n): if n.type == "MatMul": return "/gpu:0" else: return "/cpu:0" with g.device(matmul_on_gpu): # All operations of type "MatMul" constructed in this context # will be placed on GPU 0; all other operations will be placed # on CPU 0. ``` **N.B.** The device scope may be overridden by op wrappers or other library code. For example, a variable assignment op `v.assign()` must be colocated with the `tf.Variable` `v`, and incompatible device scopes will be ignored. Args: device_name_or_function: The device name or function to use in the context. Returns: A context manager that specifies the default device to use for newly created ops. """ if (device_name_or_function is not None and not callable(device_name_or_function)): device_function = pydev.merge_device(device_name_or_function) else: device_function = device_name_or_function try: self._device_function_stack.append(device_function) yield finally: self._device_function_stack.pop() def _apply_device_functions(self, op): """Applies the current device function stack to the given operation.""" # Apply any device functions in reverse order, so that the most recently # pushed function has the first chance to apply a device to the op. # We apply here because the result can depend on the Operation's # signature, which is computed in the Operation constructor. for device_function in reversed(self._device_function_stack): if device_function is None: break op._set_device(device_function(op)) # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def container(self, container_name): """Returns a context manager that specifies the resource container to use. Stateful operations, such as variables and queues, can maintain their states on devices so that they can be shared by multiple processes. A resource container is a string name under which these stateful operations are tracked. These resources can be released or cleared with `tf.Session.reset()`. 
For example: ```python with g.container('experiment0'): # All stateful Operations constructed in this context will be placed # in resource container "experiment0". v1 = tf.Variable([1.0]) v2 = tf.Variable([2.0]) with g.container("experiment1"): # All stateful Operations constructed in this context will be # placed in resource container "experiment1". v3 = tf.Variable([3.0]) q1 = tf.FIFOQueue(10, tf.float32) # All stateful Operations constructed in this context will be # be created in the "experiment0". v4 = tf.Variable([4.0]) q1 = tf.FIFOQueue(20, tf.float32) with g.container(""): # All stateful Operations constructed in this context will be # be placed in the default resource container. v5 = tf.Variable([5.0]) q3 = tf.FIFOQueue(30, tf.float32) # Resets container "experiment0", after which the state of v1, v2, v4, q1 # will become undefined (such as uninitialized). tf.Session.reset(target, ["experiment0"]) ``` Args: container_name: container name string. Returns: A context manager for defining resource containers for stateful ops, yields the container name. """ original_container = self._container try: self._container = container_name yield self._container finally: self._container = original_container # pylint: enable=g-doc-return-or-yield class _ControlDependenciesController(object): """Context manager for `control_dependencies()`.""" def __init__(self, graph, control_inputs): """Create a new `_ControlDependenciesController`. A `_ControlDependenciesController` is the context manager for `with tf.control_dependencies()` blocks. These normally nest, as described in the documentation for `control_dependencies()`. The `control_inputs` argument list control dependencies that must be added to the current set of control dependencies. Because of uniquification the set can be empty even if the caller passed a list of ops. The special value `None` indicates that we want to start a new empty set of control dependencies instead of extending the current set. 
In that case we also clear the current control flow context, which is an additional mechanism to add control dependencies. Args: graph: The graph that this controller is managing. control_inputs: List of ops to use as control inputs in addition to the current control dependencies. None to indicate that the dependencies should be cleared. """ self._graph = graph if control_inputs is None: self._control_inputs = [] self._new_stack = True else: self._control_inputs = control_inputs self._new_stack = False self._seen_nodes = set() self._old_stack = None self._old_control_flow_context = None # pylint: disable=protected-access def __enter__(self): if self._new_stack: # Clear the control_dependencies graph. self._old_stack = self._graph._control_dependencies_stack self._graph._control_dependencies_stack = [] # Clear the control_flow_context too. self._old_control_flow_context = self._graph._get_control_flow_context() self._graph._set_control_flow_context(None) self._graph._push_control_dependencies_controller(self) def __exit__(self, unused_type, unused_value, unused_traceback): self._graph._pop_control_dependencies_controller(self) if self._new_stack: self._graph._control_dependencies_stack = self._old_stack self._graph._set_control_flow_context(self._old_control_flow_context) # pylint: enable=protected-access @property def control_inputs(self): return self._control_inputs def add_op(self, op): self._seen_nodes.add(op) def op_in_group(self, op): return op in self._seen_nodes def _push_control_dependencies_controller(self, controller): self._control_dependencies_stack.append(controller) def _pop_control_dependencies_controller(self, controller): assert self._control_dependencies_stack[-1] is controller self._control_dependencies_stack.pop() def _current_control_dependencies(self): ret = set() for controller in self._control_dependencies_stack: for op in controller.control_inputs: ret.add(op) return ret def _control_dependencies_for_inputs(self, input_tensors): """For an 
op that takes `input_tensors` as inputs, compute control inputs. The returned control dependencies should yield an execution that is equivalent to adding all control inputs in self._control_dependencies_stack to a newly created op. However, this function attempts to prune the returned control dependencies by observing that nodes created within the same `with control_dependencies(...):` block may have data dependencies that make the explicit approach redundant. Args: input_tensors: The direct data dependencies for an op to be created. Returns: A list of control inputs for the op to be created. """ ret = [] input_ops = set([t.op for t in input_tensors]) for controller in self._control_dependencies_stack: # If any of the input_ops already depends on the inputs from controller, # we say that the new op is dominated (by that input), and we therefore # do not need to add control dependencies for this controller's inputs. dominated = False for op in input_ops: if controller.op_in_group(op): dominated = True break if not dominated: # Don't add a control input if we already have a data dependency on i. # NOTE(mrry): We do not currently track transitive data dependencies, # so we may add redundant control inputs. ret.extend([c for c in controller.control_inputs if c not in input_ops]) return ret def _record_op_seen_by_control_dependencies(self, op): """Record that the given op depends on all registered control dependencies. Args: op: An Operation. """ for controller in self._control_dependencies_stack: controller.add_op(op) def control_dependencies(self, control_inputs): """Returns a context manager that specifies control dependencies. Use with the `with` keyword to specify that all operations constructed within the context should have control dependencies on `control_inputs`. For example: ```python with g.control_dependencies([a, b, c]): # `d` and `e` will only run after `a`, `b`, and `c` have executed. d = ... e = ... 
``` Multiple calls to `control_dependencies()` can be nested, and in that case a new `Operation` will have control dependencies on the union of `control_inputs` from all active contexts. ```python with g.control_dependencies([a, b]): # Ops constructed here run after `a` and `b`. with g.control_dependencies([c, d]): # Ops constructed here run after `a`, `b`, `c`, and `d`. ``` You can pass None to clear the control dependencies: ```python with g.control_dependencies([a, b]): # Ops constructed here run after `a` and `b`. with g.control_dependencies(None): # Ops constructed here run normally, not waiting for either `a` or `b`. with g.control_dependencies([c, d]): # Ops constructed here run after `c` and `d`, also not waiting # for either `a` or `b`. ``` *N.B.* The control dependencies context applies *only* to ops that are constructed within the context. Merely using an op or tensor in the context does not add a control dependency. The following example illustrates this point: ```python # WRONG def my_func(pred, tensor): t = tf.matmul(tensor, tensor) with tf.control_dependencies([pred]): # The matmul op is created outside the context, so no control # dependency will be added. return t # RIGHT def my_func(pred, tensor): with tf.control_dependencies([pred]): # The matmul op is created in the context, so a control dependency # will be added. return tf.matmul(tensor, tensor) ``` Args: control_inputs: A list of `Operation` or `Tensor` objects which must be executed or computed before running the operations defined in the context. Can also be `None` to clear the control dependencies. Returns: A context manager that specifies control dependencies for all operations constructed within the context. Raises: TypeError: If `control_inputs` is not a list of `Operation` or `Tensor` objects. """ if control_inputs is None: return self._ControlDependenciesController(self, None) # First convert the inputs to ops, and deduplicate them. 
# NOTE(mrry): Other than deduplication, we do not currently track direct # or indirect dependencies between control_inputs, which may result in # redundant control inputs. control_ops = [] current = self._current_control_dependencies() for c in control_inputs: c = self.as_graph_element(c) if isinstance(c, Tensor): c = c.op elif not isinstance(c, Operation): raise TypeError("Control input must be Operation or Tensor: %s" % c) if c not in current: control_ops.append(c) current.add(c) return self._ControlDependenciesController(self, control_ops) # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def _attr_scope(self, attr_map): """EXPERIMENTAL: A context manager for setting attributes on operators. This context manager can be used to add additional attributes to operators within the scope of the context. For example: with ops.Graph().as_default() as g: f_1 = Foo() # No extra attributes with g._attr_scope({"_a": tf.attr_value_pb2.AttrValue(b=False)}): f_2 = Foo() # Additional attribute _a=False with g._attr_scope({"_a": tf.attr_value_pb2.AttrValue(b=True)}): f_3 = Foo() # Additional attribute _a=False with g._attr_scope({"_a": None}): f_4 = Foo() # No additional attributes. Args: attr_map: A dictionary mapping attr name strings to AttrValue protocol buffers or None. Returns: A context manager that sets the kernel label to be used for one or more ops created in that context. Raises: TypeError: If attr_map is not a dictionary mapping strings to AttrValue protobufs. """ if not isinstance(attr_map, dict): raise TypeError("attr_map must be a dictionary mapping " "strings to AttrValue protocol buffers") # The saved_attrs dictionary stores any currently-set labels that # will be overridden by this context manager. 
saved_attrs = {} # Install the given attribute for name, attr in attr_map.items(): if not (isinstance(name, six.string_types) and (isinstance(attr, (type(None), attr_value_pb2.AttrValue)) or callable(attr))): raise TypeError("attr_map must be a dictionary mapping " "strings to AttrValue protocol buffers or " "callables that emit AttrValue protocol buffers") try: saved_attrs[name] = self._attr_scope_map[name] except KeyError: pass if attr is None: del self._attr_scope_map[name] else: self._attr_scope_map[name] = attr try: yield # The code within the context runs here. finally: # Remove the attributes set for this context, and restore any saved # attributes. for name, attr in attr_map.items(): try: self._attr_scope_map[name] = saved_attrs[name] except KeyError: del self._attr_scope_map[name] # pylint: enable=g-doc-return-or-yield # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def _kernel_label_map(self, op_to_kernel_label_map): """EXPERIMENTAL: A context manager for setting kernel labels. This context manager can be used to select particular implementations of kernels within the scope of the context. For example: with ops.Graph().as_default() as g: f_1 = Foo() # Uses the default registered kernel for the Foo op. with g.kernel_label_map({"Foo": "v_2"}): f_2 = Foo() # Uses the registered kernel with label "v_2" # for the Foo op. with g.kernel_label_map({"Foo": "v_3"}): f_3 = Foo() # Uses the registered kernel with label "v_3" # for the Foo op. with g.kernel_label_map({"Foo": ""}): f_4 = Foo() # Uses the default registered kernel # for the Foo op. Args: op_to_kernel_label_map: A dictionary mapping op type strings to kernel label strings. Returns: A context manager that sets the kernel label to be used for one or more ops created in that context. Raises: TypeError: If op_to_kernel_label_map is not a dictionary mapping strings to strings. 
""" if not isinstance(op_to_kernel_label_map, dict): raise TypeError("op_to_kernel_label_map must be a dictionary mapping " "strings to strings") # The saved_labels dictionary stores any currently-set labels that # will be overridden by this context manager. saved_labels = {} # Install the given label for op_type, label in op_to_kernel_label_map.items(): if not (isinstance(op_type, six.string_types) and isinstance(label, six.string_types)): raise TypeError("op_to_kernel_label_map must be a dictionary mapping " "strings to strings") try: saved_labels[op_type] = self._op_to_kernel_label_map[op_type] except KeyError: pass self._op_to_kernel_label_map[op_type] = label try: yield # The code within the context runs here. finally: # Remove the labels set for this context, and restore any saved labels. for op_type, label in op_to_kernel_label_map.items(): try: self._op_to_kernel_label_map[op_type] = saved_labels[op_type] except KeyError: del self._op_to_kernel_label_map[op_type] # pylint: enable=g-doc-return-or-yield # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def gradient_override_map(self, op_type_map): """EXPERIMENTAL: A context manager for overriding gradient functions. This context manager can be used to override the gradient function that will be used for ops within the scope of the context. For example: ```python @tf.RegisterGradient("CustomSquare") def _custom_square_grad(op, grad): # ... with tf.Graph().as_default() as g: c = tf.constant(5.0) s_1 = tf.square(c) # Uses the default gradient for tf.square. with g.gradient_override_map({"Square": "CustomSquare"}): s_2 = tf.square(s_2) # Uses _custom_square_grad to compute the # gradient of s_2. ``` Args: op_type_map: A dictionary mapping op type strings to alternative op type strings. Returns: A context manager that sets the alternative op type to be used for one or more ops created in that context. Raises: TypeError: If `op_type_map` is not a dictionary mapping strings to strings. 
""" if not isinstance(op_type_map, dict): raise TypeError("op_type_map must be a dictionary mapping " "strings to strings") # The saved_mappings dictionary stores any currently-set mappings that # will be overridden by this context manager. saved_mappings = {} # Install the given label for op_type, mapped_op_type in op_type_map.items(): if not (isinstance(op_type, six.string_types) and isinstance(mapped_op_type, six.string_types)): raise TypeError("op_type_map must be a dictionary mapping " "strings to strings") try: saved_mappings[op_type] = self._gradient_override_map[op_type] except KeyError: pass self._gradient_override_map[op_type] = mapped_op_type try: yield # The code within the context runs here. finally: # Remove the labels set for this context, and restore any saved labels. for op_type, mapped_op_type in op_type_map.items(): try: self._gradient_override_map[op_type] = saved_mappings[op_type] except KeyError: del self._gradient_override_map[op_type] # pylint: enable=g-doc-return-or-yield def prevent_feeding(self, tensor): """Marks the given `tensor` as unfeedable in this graph.""" self._unfeedable_tensors.add(tensor) def is_feedable(self, tensor): """Returns `True` if and only if `tensor` is feedable.""" return tensor not in self._unfeedable_tensors def prevent_fetching(self, op): """Marks the given `op` as unfetchable in this graph.""" self._unfetchable_ops.add(op) def is_fetchable(self, tensor_or_op): """Returns `True` if and only if `tensor_or_op` is fetchable.""" if isinstance(tensor_or_op, Tensor): return tensor_or_op.op not in self._unfetchable_ops else: return tensor_or_op not in self._unfetchable_ops def device(device_name_or_function): """Wrapper for `Graph.device()` using the default graph. See [`Graph.device()`](../../api_docs/python/framework.md#Graph.device) for more details. Args: device_name_or_function: The device name or function to use in the context. 
Returns: A context manager that specifies the default device to use for newly created ops. """ return get_default_graph().device(device_name_or_function) def container(container_name): """Wrapper for `Graph.container()` using the default graph. Args: container_name: The container string to use in the context. Returns: A context manager that specifies the default container to use for newly created stateful ops. """ return get_default_graph().container(container_name) def colocate_with(op, ignore_existing=False): return get_default_graph().colocate_with(op, ignore_existing) def control_dependencies(control_inputs): """Wrapper for `Graph.control_dependencies()` using the default graph. See [`Graph.control_dependencies()`](../../api_docs/python/framework.md#Graph.control_dependencies) for more details. Args: control_inputs: A list of `Operation` or `Tensor` objects which must be executed or computed before running the operations defined in the context. Can also be `None` to clear the control dependencies. Returns: A context manager that specifies control dependencies for all operations constructed within the context. 
""" return get_default_graph().control_dependencies(control_inputs) class _DefaultStack(threading.local): """A thread-local stack of objects for providing implicit defaults.""" def __init__(self): super(_DefaultStack, self).__init__() self._enforce_nesting = True self.stack = [] def get_default(self): return self.stack[-1] if len(self.stack) >= 1 else None def reset(self): self.stack = [] @property def enforce_nesting(self): return self._enforce_nesting @enforce_nesting.setter def enforce_nesting(self, value): self._enforce_nesting = value @contextlib.contextmanager def get_controller(self, default): """A context manager for manipulating a default stack.""" try: self.stack.append(default) yield default finally: if self._enforce_nesting: if self.stack[-1] is not default: raise AssertionError( "Nesting violated for default stack of %s objects" % type(default)) self.stack.pop() else: self.stack.remove(default) _default_session_stack = _DefaultStack() def default_session(session): """Python "with" handler for defining a default session. This function provides a means of registering a session for handling Tensor.eval() and Operation.run() calls. It is primarily intended for use by session.Session, but can be used with any object that implements the Session.run() interface. Use with the "with" keyword to specify that Tensor.eval() and Operation.run() invocations within the scope of a block should be executed by a particular session. The default session applies to the current thread only, so it is always possible to inspect the call stack and determine the scope of a default session. If you create a new thread, and wish to use the default session in that thread, you must explicitly add a "with ops.default_session(sess):" block in that thread's function. Example: The following code examples are equivalent: # 1. Using the Session object directly: sess = ... c = tf.constant(5.0) sess.run(c) # 2. Using default_session(): sess = ... 
with ops.default_session(sess): c = tf.constant(5.0) result = c.eval() # 3. Overriding default_session(): sess = ... with ops.default_session(sess): c = tf.constant(5.0) with ops.default_session(...): c.eval(session=sess) Args: session: The session to be installed as the default session. Returns: A context manager for the default session. """ return _default_session_stack.get_controller(session) def get_default_session(): """Returns the default session for the current thread. The returned `Session` will be the innermost session on which a `Session` or `Session.as_default()` context has been entered. NOTE: The default session is a property of the current thread. If you create a new thread, and wish to use the default session in that thread, you must explicitly add a `with sess.as_default():` in that thread's function. Returns: The default `Session` being used in the current thread. """ return _default_session_stack.get_default() def _eval_using_default_session(tensors, feed_dict, graph, session=None): """Uses the default session to evaluate one or more tensors. Args: tensors: A single Tensor, or a list of Tensor objects. feed_dict: A dictionary that maps Tensor objects (or tensor names) to lists, numpy ndarrays, TensorProtos, or strings. graph: The graph in which the tensors are defined. session: (Optional) A different session to use to evaluate "tensors". Returns: Either a single numpy ndarray if "tensors" is a single tensor; or a list of numpy ndarrays that each correspond to the respective element in "tensors". Raises: ValueError: If no default session is available; the default session does not have "graph" as its graph; or if "session" is specified, and it does not have "graph" as its graph. """ if session is None: session = get_default_session() if session is None: raise ValueError("Cannot evaluate tensor using `eval()`: No default " "session is registered. 
Use `with " "sess.as_default()` or pass an explicit session to " "`eval(session=sess)`") if session.graph is not graph: raise ValueError("Cannot use the default session to evaluate tensor: " "the tensor's graph is different from the session's " "graph. Pass an explicit session to " "`eval(session=sess)`.") else: if session.graph is not graph: raise ValueError("Cannot use the given session to evaluate tensor: " "the tensor's graph is different from the session's " "graph.") return session.run(tensors, feed_dict) def _run_using_default_session(operation, feed_dict, graph, session=None): """Uses the default session to run "operation". Args: operation: The Operation to be run. feed_dict: A dictionary that maps Tensor objects (or tensor names) to lists, numpy ndarrays, TensorProtos, or strings. graph: The graph in which "operation" is defined. session: (Optional) A different session to use to run "operation". Raises: ValueError: If no default session is available; the default session does not have "graph" as its graph; or if "session" is specified, and it does not have "graph" as its graph. """ if session is None: session = get_default_session() if session is None: raise ValueError("Cannot execute operation using `run()`: No default " "session is registered. Use `with " "sess.as_default():` or pass an explicit session to " "`run(session=sess)`") if session.graph is not graph: raise ValueError("Cannot use the default session to execute operation: " "the operation's graph is different from the " "session's graph. 
Pass an explicit session to " "run(session=sess).") else: if session.graph is not graph: raise ValueError("Cannot use the given session to execute operation: " "the operation's graph is different from the session's " "graph.") session.run(operation, feed_dict) class _DefaultGraphStack(_DefaultStack): """A thread-local stack of objects for providing an implicit default graph.""" def __init__(self): super(_DefaultGraphStack, self).__init__() self._global_default_graph = None def get_default(self): """Override that returns a global default if the stack is empty.""" ret = super(_DefaultGraphStack, self).get_default() if ret is None: ret = self._GetGlobalDefaultGraph() return ret def _GetGlobalDefaultGraph(self): if self._global_default_graph is None: # TODO(mrry): Perhaps log that the default graph is being used, or set # provide some other feedback to prevent confusion when a mixture of # the global default graph and an explicit graph are combined in the # same process. self._global_default_graph = Graph() return self._global_default_graph def reset(self): super(_DefaultGraphStack, self).reset() self._global_default_graph = None _default_graph_stack = _DefaultGraphStack() def reset_default_graph(): """Clears the default graph stack and resets the global default graph. NOTE: The default graph is a property of the current thread. This function applies only to the current thread. Calling this function while a `tf.Session` or `tf.InteractiveSession` is active will result in undefined behavior. Using any previously created `tf.Operation` or `tf.Tensor` objects after calling this function will result in undefined behavior. """ _default_graph_stack.reset() def get_default_graph(): """Returns the default graph for the current thread. The returned graph will be the innermost graph on which a `Graph.as_default()` context has been entered, or a global default graph if none has been explicitly created. NOTE: The default graph is a property of the current thread. 
If you create a new thread, and wish to use the default graph in that thread, you must explicitly add a `with g.as_default():` in that thread's function. Returns: The default `Graph` being used in the current thread. """ return _default_graph_stack.get_default() def _assert_same_graph(original_item, item): """Fail if the 2 items are from different graphs. Args: original_item: Original item to check against. item: Item to check. Raises: ValueError: if graphs do not match. """ if original_item.graph is not item.graph: raise ValueError( "%s must be from the same graph as %s." % (item, original_item)) def _get_graph_from_inputs(op_input_list, graph=None): """Returns the appropriate graph to use for the given inputs. This library method provides a consistent algorithm for choosing the graph in which an Operation should be constructed: 1. If the default graph is being used to construct a function, we use the default graph. 2. If the "graph" is specified explicitly, we validate that all of the inputs in "op_input_list" are compatible with that graph. 3. Otherwise, we attempt to select a graph from the first Operation- or Tensor-valued input in "op_input_list", and validate that all other such inputs are in the same graph. 4. If the graph was not specified and it could not be inferred from "op_input_list", we attempt to use the default graph. Args: op_input_list: A list of inputs to an operation, which may include `Tensor`, `Operation`, and other objects that may be converted to a graph element. graph: (Optional) The explicit graph to use. Raises: TypeError: If op_input_list is not a list or tuple, or if graph is not a Graph. ValueError: If a graph is explicitly passed and not all inputs are from it, or if the inputs are from multiple graphs, or we could not find a graph and there was no default graph. Returns: The appropriate graph to use for the given inputs. 
""" if get_default_graph().building_function: return get_default_graph() op_input_list = tuple(op_input_list) # Handle generators correctly if graph and not isinstance(graph, Graph): raise TypeError("Input graph needs to be a Graph: %s" % graph) # 1. We validate that all of the inputs are from the same graph. This is # either the supplied graph parameter, or the first one selected from one # the graph-element-valued inputs. In the latter case, we hold onto # that input in original_graph_element so we can provide a more # informative error if a mismatch is found. original_graph_element = None for op_input in op_input_list: # Determine if this is a valid graph_element. graph_element = None if isinstance(op_input, (Operation, _TensorLike)): graph_element = op_input else: graph_element = _as_graph_element(op_input) if graph_element is not None: if not graph: original_graph_element = graph_element graph = graph_element.graph elif original_graph_element is not None: _assert_same_graph(original_graph_element, graph_element) elif graph_element.graph is not graph: raise ValueError( "%s is not from the passed-in graph." % graph_element) # 2. If all else fails, we use the default graph, which is always there. return graph or get_default_graph() class GraphKeys(object): """Standard names to use for graph collections. The standard library uses various well-known names to collect and retrieve values associated with a graph. For example, the `tf.Optimizer` subclasses default to optimizing the variables collected under `tf.GraphKeys.TRAINABLE_VARIABLES` if none is specified, but it is also possible to pass an explicit list of variables. The following standard keys are defined: * `GLOBAL_VARIABLES`: the default collection of `Variable` objects, shared across distributed environment (model variables are subset of these). See [`tf.global_variables()`](../../api_docs/python/state_ops.md#global_variables) for more details. 
Commonly, all `TRAINABLE_VARIABLES` variables will be in `MODEL_VARIABLES`, and all `MODEL_VARIABLES` variables will be in `GLOBAL_VARIABLES`. * `LOCAL_VARIABLES`: the subset of `Variable` objects that are local to each machine. Usually used for temporarily variables, like counters. Note: use `tf.contrib.framework.local_variable` to add to this collection. * `MODEL_VARIABLES`: the subset of `Variable` objects that are used in the model for inference (feed forward). Note: use `tf.contrib.framework.model_variable` to add to this collection. * `TRAINABLE_VARIABLES`: the subset of `Variable` objects that will be trained by an optimizer. See [`tf.trainable_variables()`](../../api_docs/python/state_ops.md#trainable_variables) for more details. * `SUMMARIES`: the summary `Tensor` objects that have been created in the graph. See [`tf.summary.merge_all()`](../../api_docs/python/summary.md#merge_all) for more details. * `QUEUE_RUNNERS`: the `QueueRunner` objects that are used to produce input for a computation. See [`tf.start_queue_runners()`](../../api_docs/python/train.md#start_queue_runners) for more details. * `MOVING_AVERAGE_VARIABLES`: the subset of `Variable` objects that will also keep moving averages. See [`tf.moving_average_variables()`](../../api_docs/python/state_ops.md#moving_average_variables) for more details. * `REGULARIZATION_LOSSES`: regularization losses collected during graph construction. * `WEIGHTS`: weights inside neural network layers * `BIASES`: biases inside neural network layers * `ACTIVATIONS`: activations of neural network layers """ # Key to collect Variable objects that are global (shared across machines). # Default collection for all variables, except local ones. GLOBAL_VARIABLES = "variables" # Key to collect local variables that are local to the machine and are not # saved/restored. LOCAL_VARIABLES = "local_variables" # Key to collect model variables defined by layers. 
MODEL_VARIABLES = "model_variables" # Key to collect Variable objects that will be trained by the # optimizers. TRAINABLE_VARIABLES = "trainable_variables" # Key to collect summaries. SUMMARIES = "summaries" # Key to collect QueueRunners. QUEUE_RUNNERS = "queue_runners" # Key to collect table initializers. TABLE_INITIALIZERS = "table_initializer" # Key to collect asset filepaths. An asset represents an external resource # like a vocabulary file. ASSET_FILEPATHS = "asset_filepaths" # Key to collect Variable objects that keep moving averages. MOVING_AVERAGE_VARIABLES = "moving_average_variables" # Key to collect regularization losses at graph construction. REGULARIZATION_LOSSES = "regularization_losses" # Key to collect concatenated sharded variables. CONCATENATED_VARIABLES = "concatenated_variables" # Key to collect savers. SAVERS = "savers" # Key to collect weights WEIGHTS = "weights" # Key to collect biases BIASES = "biases" # Key to collect activations ACTIVATIONS = "activations" # Key to collect update_ops UPDATE_OPS = "update_ops" # Key to collect losses LOSSES = "losses" # Key to collect BaseSaverBuilder.SaveableObject instances for checkpointing. SAVEABLE_OBJECTS = "saveable_objects" # Key to collect all shared resources used by the graph which need to be # initialized once per cluster. RESOURCES = "resources" # Key to collect all shared resources used in this graph which need to be # initialized once per session. LOCAL_RESOURCES = "local_resources" # Trainable resource-style variables. TRAINABLE_RESOURCE_VARIABLES = "trainable_resource_variables" # Key to indicate various ops. INIT_OP = "init_op" LOCAL_INIT_OP = "local_init_op" READY_OP = "ready_op" READY_FOR_LOCAL_INIT_OP = "ready_for_local_init_op" SUMMARY_OP = "summary_op" GLOBAL_STEP = "global_step" # Used to count the number of evaluations performed during a single evaluation # run. EVAL_STEP = "eval_step" TRAIN_OP = "train_op" # Key for control flow context. 
COND_CONTEXT = "cond_context" WHILE_CONTEXT = "while_context" @decorator_utils.classproperty def VARIABLES(cls): # pylint: disable=no-self-argument logging.warning("VARIABLES collection name is deprecated, " "please use GLOBAL_VARIABLES instead; " "VARIABLES will be removed after 2017-03-02.") return cls.GLOBAL_VARIABLES def add_to_collection(name, value): """Wrapper for `Graph.add_to_collection()` using the default graph. See [`Graph.add_to_collection()`](../../api_docs/python/framework.md#Graph.add_to_collection) for more details. Args: name: The key for the collection. For example, the `GraphKeys` class contains many standard names for collections. value: The value to add to the collection. """ get_default_graph().add_to_collection(name, value) def add_to_collections(names, value): """Wrapper for `Graph.add_to_collections()` using the default graph. See [`Graph.add_to_collections()`](../../api_docs/python/framework.md#Graph.add_to_collections) for more details. Args: names: The key for the collections. The `GraphKeys` class contains many standard names for collections. value: The value to add to the collections. """ get_default_graph().add_to_collections(names, value) def get_collection_ref(key): """Wrapper for `Graph.get_collection_ref()` using the default graph. See [`Graph.get_collection_ref()`](../../api_docs/python/framework.md#Graph.get_collection_ref) for more details. Args: key: The key for the collection. For example, the `GraphKeys` class contains many standard names for collections. Returns: The list of values in the collection with the given `name`, or an empty list if no value has been added to that collection. Note that this returns the collection list itself, which can be modified in place to change the collection. """ return get_default_graph().get_collection_ref(key) def get_collection(key, scope=None): """Wrapper for `Graph.get_collection()` using the default graph. 
See [`Graph.get_collection()`](../../api_docs/python/framework.md#Graph.get_collection) for more details. Args: key: The key for the collection. For example, the `GraphKeys` class contains many standard names for collections. scope: (Optional.) If supplied, the resulting list is filtered to include only items whose `name` attribute matches using `re.match`. Items without a `name` attribute are never returned if a scope is supplied and the choice or `re.match` means that a `scope` without special tokens filters by prefix. Returns: The list of values in the collection with the given `name`, or an empty list if no value has been added to that collection. The list contains the values in the order under which they were collected. """ return get_default_graph().get_collection(key, scope) def get_all_collection_keys(): """Returns a list of collections used in the default graph.""" return get_default_graph().get_all_collection_keys() # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def name_scope(name, default_name=None, values=None): """Returns a context manager for use when defining a Python op. This context manager validates that the given `values` are from the same graph, makes that graph the default graph, and pushes a name scope in that graph (see [`Graph.name_scope()`](../../api_docs/python/framework.md#Graph.name_scope) for more details on that). For example, to define a new Python op called `my_op`: ```python def my_op(a, b, c, name=None): with tf.name_scope(name, "MyOp", [a, b, c]) as scope: a = tf.convert_to_tensor(a, name="a") b = tf.convert_to_tensor(b, name="b") c = tf.convert_to_tensor(c, name="c") # Define some computation that uses `a`, `b`, and `c`. return foo_op(..., name=scope) ``` Args: name: The name argument that is passed to the op function. default_name: The default name to use if the `name` argument is `None`. values: The list of `Tensor` arguments that are passed to the op function. 
Returns: A context manager for use in defining Python ops. Yields the name scope. Raises: ValueError: if neither `name` nor `default_name` is provided but `values` are. """ n = default_name if name is None else name if n is None and values is not None: # We only raise an error if values is not None (provided) because currently # tf.name_scope(None) (values=None then) is sometimes used as an idiom # to reset to top scope. raise ValueError( "At least one of name (%s) and default_name (%s) must be provided." % ( name, default_name)) if values is None: values = [] g = _get_graph_from_inputs(values) with g.as_default(), g.name_scope(n) as scope: yield scope # pylint: enable=g-doc-return-or-yield def strip_name_scope(name, export_scope): """Removes name scope from a name. Args: name: A `string` name. export_scope: Optional `string`. Name scope to remove. Returns: Name with name scope removed, or the original name if export_scope is None. """ if export_scope: # Strips export_scope/, export_scope///, # ^export_scope/, loc:@export_scope/. str_to_replace = r"([\^]|loc:@|^)" + export_scope + r"[\/]+(.*)" return re.sub(str_to_replace, r"\1\2", compat.as_str(name), count=1) else: return name def prepend_name_scope(name, import_scope): """Prepends name scope to a name. Args: name: A `string` name. import_scope: Optional `string`. Name scope to add. Returns: Name with name scope added, or the original name if import_scope is None. """ if import_scope: str_to_replace = r"([\^]|loc:@|^)(.*)" return re.sub(str_to_replace, r"\1" + import_scope + r"/\2", compat.as_str(name)) else: return name # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def op_scope(values, name, default_name=None): """DEPRECATED. 
Same as name_scope above, just different argument order.""" logging.warn("tf.op_scope(values, name, default_name) is deprecated," " use tf.name_scope(name, default_name, values)") with name_scope(name, default_name=default_name, values=values) as scope: yield scope _proto_function_registry = registry.Registry("proto functions") def register_proto_function(collection_name, proto_type=None, to_proto=None, from_proto=None): """Registers `to_proto` and `from_proto` functions for collection_name. `to_proto` function converts a Python object to the corresponding protocol buffer, and returns the protocol buffer. `from_proto` function converts protocol buffer into a Python object, and returns the object.. Args: collection_name: Name of the collection. proto_type: Protobuf type, such as `saver_pb2.SaverDef`, `variable_pb2.VariableDef`, `queue_runner_pb2.QueueRunnerDef`.. to_proto: Function that implements Python object to protobuf conversion. from_proto: Function that implements protobuf to Python object conversion. 
""" if to_proto and not callable(to_proto): raise TypeError("to_proto must be callable.") if from_proto and not callable(from_proto): raise TypeError("from_proto must be callable.") _proto_function_registry.register((proto_type, to_proto, from_proto), collection_name) def get_collection_proto_type(collection_name): """Returns the proto_type for collection_name.""" try: return _proto_function_registry.lookup(collection_name)[0] except LookupError: return None def get_to_proto_function(collection_name): """Returns the to_proto function for collection_name.""" try: return _proto_function_registry.lookup(collection_name)[1] except LookupError: return None def get_from_proto_function(collection_name): """Returns the from_proto function for collection_name.""" try: return _proto_function_registry.lookup(collection_name)[2] except LookupError: return None def _operation_conversion_error(op, dtype=None, name=None, as_ref=False): """Produce a nice error if someone converts an Operation to a Tensor.""" raise TypeError( ("Can't convert Operation '%s' to Tensor " "(target dtype=%r, name=%r, as_ref=%r)") % (op.name, dtype, name, as_ref)) register_tensor_conversion_function(Operation, _operation_conversion_error)
35.596871
101
0.679944
from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import contextlib import copy import linecache import re import sys import threading import six from tensorflow.core.framework import attr_value_pb2 from tensorflow.core.framework import function_pb2 from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import node_def_pb2 from tensorflow.core.framework import tensor_shape_pb2 from tensorflow.core.framework import types_pb2 from tensorflow.core.framework import versions_pb2 from tensorflow.python.framework import device as pydev from tensorflow.python.framework import dtypes from tensorflow.python.framework import op_def_registry from tensorflow.python.framework import registry from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import versions from tensorflow.python.platform import tf_logging as logging from tensorflow.python.util import compat from tensorflow.python.util import decorator_utils def _override_helper(clazz_object, operator, func): existing = getattr(clazz_object, operator, None) if existing is not None: if not isinstance(existing, type(object.__lt__)): raise ValueError("operator %s cannot be overwritten again on class %s." 
% (operator, clazz_object)) if operator not in Tensor.OVERLOADABLE_OPERATORS: raise ValueError("Overriding %s is disallowed" % operator) setattr(clazz_object, operator, func) def _convert_stack(stack): ret = [] for filename, lineno, name, frame_globals in stack: linecache.checkcache(filename) line = linecache.getline(filename, lineno, frame_globals) if line: line = line.strip() else: line = None ret.append((filename, lineno, name, line)) return ret def _extract_stack(): try: raise ZeroDivisionError except ZeroDivisionError: f = sys.exc_info()[2].tb_frame.f_back ret = [] while f is not None: lineno = f.f_lineno co = f.f_code filename = co.co_filename name = co.co_name frame_globals = f.f_globals ret.append((filename, lineno, name, frame_globals)) f = f.f_back ret.reverse() return ret def _as_graph_element(obj): conv_fn = getattr(obj, "_as_graph_element", None) if conv_fn and callable(conv_fn): return conv_fn() return None _TENSOR_LIKE_TYPES = tuple() def is_dense_tensor_like(t): return isinstance(t, _TENSOR_LIKE_TYPES) def register_dense_tensor_like_type(tensor_type): try: if not isinstance(tensor_type.name, property): raise TypeError("Type %s does not define a `name` property") except AttributeError: raise TypeError("Type %s does not define a `name` property") try: if not isinstance(tensor_type.dtype, property): raise TypeError("Type %s does not define a `dtype` property") except AttributeError: raise TypeError("Type %s does not define a `dtype` property") global _TENSOR_LIKE_TYPES _TENSOR_LIKE_TYPES = tuple(list(_TENSOR_LIKE_TYPES) + [tensor_type]) class _TensorLike(object): pass class Tensor(_TensorLike): OVERLOADABLE_OPERATORS = { "__add__", "__radd__", "__sub__", "__rsub__", "__mul__", "__rmul__", "__div__", "__rdiv__", "__truediv__", "__rtruediv__", "__floordiv__", "__rfloordiv__", "__mod__", "__rmod__", "__lt__", "__le__", "__gt__", "__ge__", "__and__", "__rand__", "__or__", "__ror__", "__xor__", "__rxor__", "__getitem__", "__pow__", "__rpow__", "__invert__", 
"__neg__", "__abs__" } def __init__(self, op, value_index, dtype): if not isinstance(op, Operation): raise TypeError("op needs to be an Operation: %s" % op) self._op = op self._value_index = value_index self._dtype = dtypes.as_dtype(dtype) self._shape = tensor_shape.unknown_shape() self._consumers = [] self._handle_shape = tensor_shape_pb2.TensorShapeProto() self._handle_dtype = types_pb2.DT_INVALID @property def op(self): return self._op @property def dtype(self): return self._dtype @property def graph(self): return self._op.graph @property def name(self): if not self._op.name: raise ValueError("Operation was not named: %s" % self._op) return "%s:%d" % (self._op.name, self._value_index) @property def device(self): return self._op.device @property def shape(self): return self._shape def _shape_as_list(self): if self._shape.ndims is not None: return [dim.value for dim in self._shape.dims] else: return None def get_shape(self): return self.shape def set_shape(self, shape): self._shape = self._shape.merge_with(shape) @property def value_index(self): return self._value_index def consumers(self): return self._consumers def _add_consumer(self, consumer): if not isinstance(consumer, Operation): raise TypeError("Consumer must be an Operation: %s" % consumer) self._consumers.append(consumer) def _as_node_def_input(self): if not self._op.name: raise ValueError("Operation was not named: %s" % self._op) if self._value_index == 0: return self._op.name else: return "%s:%d" % (self._op.name, self._value_index) def __str__(self): return "Tensor(\"%s\"%s%s%s)" % ( self.name, (", shape=%s" % self.get_shape()) if self.get_shape().ndims is not None else "", (", dtype=%s" % self._dtype.name) if self._dtype else "", (", device=%s" % self.device) if self.device else "") def __repr__(self): return "<tf.Tensor '%s' shape=%s dtype=%s>" % ( self.name, self.get_shape(), self._dtype.name) def __hash__(self): return id(self) def __eq__(self, other): # Necessary to support Python's collection 
membership operators return id(self) == id(other) # operators to run when the left operand is an ndarray, because it # accords the Tensor class higher priority than an ndarray, or a # numpy matrix. # TODO(mrry): Convert this to using numpy's __numpy_ufunc__ __array_priority__ = 100 @staticmethod def _override_operator(operator, func): _override_helper(Tensor, operator, func) def __iter__(self): raise TypeError("'Tensor' object is not iterable.") def __bool__(self): raise TypeError("Using a `tf.Tensor` as a Python `bool` is not allowed. " "Use `if t is not None:` instead of `if t:` to test if a " "tensor is defined, and use TensorFlow ops such as " "tf.cond to execute subgraphs conditioned on the value of " "a tensor.") def __nonzero__(self): raise TypeError("Using a `tf.Tensor` as a Python `bool` is not allowed. " "Use `if t is not None:` instead of `if t:` to test if a " "tensor is defined, and use TensorFlow ops such as " "tf.cond to execute subgraphs conditioned on the value of " "a tensor.") def eval(self, feed_dict=None, session=None): return _eval_using_default_session(self, feed_dict, self.graph, session) def _TensorTensorConversionFunction(t, dtype=None, name=None, as_ref=False): _ = name, as_ref if dtype and not dtype.is_compatible_with(t.dtype): raise ValueError( "Tensor conversion requested dtype %s for Tensor with dtype %s: %r" % (dtype.name, t.dtype.name, str(t))) return t _tensor_conversion_func_registry = { 0: [(Tensor, _TensorTensorConversionFunction)]} register_dense_tensor_like_type(Tensor) def convert_to_tensor(value, dtype=None, name=None, preferred_dtype=None): return internal_convert_to_tensor( value=value, dtype=dtype, name=name, preferred_dtype=preferred_dtype, as_ref=False) def internal_convert_to_tensor(value, dtype=None, name=None, as_ref=False, preferred_dtype=None): error_prefix = "" if name is None else "%s: " % name if dtype is not None: dtype = dtypes.as_dtype(dtype) for _, funcs_at_priority in 
sorted(_tensor_conversion_func_registry.items()): for base_type, conversion_func in funcs_at_priority: if isinstance(value, base_type): ret = None if dtype is None and preferred_dtype is not None: try: ret = conversion_func( value, dtype=preferred_dtype, name=name, as_ref=as_ref) except (TypeError, ValueError): ret = None if ret is not None and ret is not NotImplemented: if (ret.dtype.base_dtype != dtypes.as_dtype(preferred_dtype).base_dtype): raise TypeError("convert_to_tensor did not convert to " "the preferred dtype: %s vs %s " % (ret.dtype.base_dtype, dtypes.as_dtype(preferred_dtype).base_dtype)) if ret is None: ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref) if ret is NotImplemented: continue if not isinstance(ret, Tensor): raise RuntimeError( "%sConversion function %r for type %s returned non-Tensor: %r" % (error_prefix, conversion_func, base_type, ret)) if dtype and not dtype.is_compatible_with(ret.dtype): raise RuntimeError( "%sConversion function %r for type %s returned incompatible " "dtype: requested = %s, actual = %s" % (error_prefix, conversion_func, base_type, dtype.name, ret.dtype.name)) return ret raise TypeError("%sCannot convert %r with type %s to Tensor: " "no conversion function registered." 
% (error_prefix, value, type(value))) def internal_convert_n_to_tensor(values, dtype=None, name=None, as_ref=False, preferred_dtype=None): if not isinstance(values, collections.Sequence): raise TypeError("values must be a list.") ret = [] for i, value in enumerate(values): n = None if name is None else "%s_%d" % (name, i) ret.append( internal_convert_to_tensor( value, dtype=dtype, name=n, as_ref=as_ref, preferred_dtype=preferred_dtype)) return ret def convert_n_to_tensor(values, dtype=None, name=None, preferred_dtype=None): return internal_convert_n_to_tensor(values=values, dtype=dtype, name=name, preferred_dtype=preferred_dtype, as_ref=False) def convert_to_tensor_or_indexed_slices(value, dtype=None, name=None): return internal_convert_to_tensor_or_indexed_slices( value=value, dtype=dtype, name=name, as_ref=False) def internal_convert_to_tensor_or_indexed_slices(value, dtype=None, name=None, as_ref=False): if isinstance(value, _TensorLike): if dtype and not dtypes.as_dtype(dtype).is_compatible_with(value.dtype): raise ValueError( "Tensor conversion requested dtype %s for Tensor with dtype %s: %r" % (dtypes.as_dtype(dtype).name, value.dtype.name, str(value))) return value else: return internal_convert_to_tensor(value, dtype=dtype, name=name, as_ref=as_ref) def internal_convert_n_to_tensor_or_indexed_slices(values, dtype=None, name=None, as_ref=False): if not isinstance(values, collections.Sequence): raise TypeError("values must be a list.") ret = [] for i, value in enumerate(values): if value is None: ret.append(value) else: n = None if name is None else "%s_%d" % (name, i) ret.append( internal_convert_to_tensor_or_indexed_slices( value, dtype=dtype, name=n, as_ref=as_ref)) return ret def convert_n_to_tensor_or_indexed_slices(values, dtype=None, name=None): return internal_convert_n_to_tensor_or_indexed_slices( values=values, dtype=dtype, name=name, as_ref=False) def register_tensor_conversion_function(base_type, conversion_func, priority=100): if not 
(isinstance(base_type, type) or (isinstance(base_type, tuple) and all(isinstance(x, type) for x in base_type))): raise TypeError("base_type must be a type or a tuple of types.") if not callable(conversion_func): raise TypeError("conversion_func must be callable.") try: funcs_at_priority = _tensor_conversion_func_registry[priority] except KeyError: funcs_at_priority = [] _tensor_conversion_func_registry[priority] = funcs_at_priority funcs_at_priority.append((base_type, conversion_func)) class IndexedSlices(_TensorLike): def __init__(self, values, indices, dense_shape=None): _get_graph_from_inputs([values, indices, dense_shape]) self._values = values self._indices = indices self._dense_shape = dense_shape @property def values(self): return self._values @property def indices(self): return self._indices @property def dense_shape(self): return self._dense_shape @property def name(self): return self.values.name @property def device(self): return self.values.device @property def op(self): return self.values.op @property def dtype(self): return self.values.dtype @property def graph(self): return self._values.graph def __str__(self): return "IndexedSlices(indices=%s, values=%s%s)" % ( self._indices, self._values, (", dense_shape=%s" % self._dense_shape) if self._dense_shape is not None else "") def __neg__(self): return IndexedSlices(-self.values, self.indices, self.dense_shape) IndexedSlicesValue = collections.namedtuple( "IndexedSlicesValue", ["values", "indices", "dense_shape"]) def _device_string(dev_spec): if isinstance(dev_spec, pydev.DeviceSpec): return dev_spec.to_string() else: return dev_spec def _NodeDef(op_type, name, device=None, attrs=None): node_def = node_def_pb2.NodeDef() node_def.op = compat.as_bytes(op_type) node_def.name = compat.as_bytes(name) if attrs is not None: for k, v in six.iteritems(attrs): node_def.attr[k].CopyFrom(v) if device is not None: if callable(device): node_def.device = device(node_def) else: node_def.device = _device_string(device) 
return node_def _VALID_OP_NAME_REGEX = re.compile("^[A-Za-z0-9.][A-Za-z0-9_.\\-/]*$") _VALID_SCOPE_NAME_REGEX = re.compile("^[A-Za-z0-9_.\\-/]*$") class Operation(object): def __init__(self, node_def, g, inputs=None, output_types=None, control_inputs=None, input_types=None, original_op=None, op_def=None): if not isinstance(node_def, node_def_pb2.NodeDef): raise TypeError("node_def needs to be a NodeDef: %s" % node_def) if node_def.ByteSize() >= (1 << 31) or node_def.ByteSize() < 0: raise ValueError( "Cannot create a tensor proto whose content is larger than 2GB.") if not _VALID_OP_NAME_REGEX.match(node_def.name): raise ValueError("'%s' is not a valid node name" % node_def.name) if not isinstance(g, Graph): raise TypeError("g needs to be a Graph: %s" % g) self._node_def = copy.deepcopy(node_def) self._graph = g if inputs is None: inputs = [] elif not isinstance(inputs, list): raise TypeError("inputs needs to be a list of Tensors: %s" % inputs) self._inputs = list(inputs) for a in self._inputs: if not isinstance(a, Tensor): raise TypeError("input needs to be a Tensor: %s" % a) a._add_consumer(self) if output_types is None: output_types = [] self._output_types = output_types self._outputs = [Tensor(self, i, output_type) for i, output_type in enumerate(output_types)] if input_types is None: input_types = [i.dtype.base_dtype for i in self._inputs] else: if not all(x.is_compatible_with(i.dtype) for i, x in zip(self._inputs, input_types)): raise TypeError("Inputs are not compatible with input types") self._input_types = input_types self._control_inputs = [] if control_inputs: for c in control_inputs: c_op = None if isinstance(c, Operation): c_op = c elif isinstance(c, (Tensor, IndexedSlices)): c_op = c.op else: raise TypeError("Control input must be an Operation, " "a Tensor, or IndexedSlices: %s" % c) self._control_inputs.append(c_op) self._original_op = original_op self._op_def = op_def self._traceback = _extract_stack() self._control_flow_context = 
g._get_control_flow_context() if self._control_flow_context is not None: self._control_flow_context.AddOp(self) # setting op.inputs[index] = new_op. Thus the new ops' id could be larger # assigning id to this op until all ops this could be dependent on are # created. self._id_value = self._graph._next_id() # pylint: disable=protected-access self._recompute_node_def() def colocation_groups(self): default_colocation_group = [compat.as_bytes("loc:@%s" % self._node_def.name)] if "_class" not in self._node_def.attr: # This op has no explicit colocation group, so it is itself its # own root of a colocation group. return default_colocation_group attr_groups = [class_name for class_name in self.get_attr("_class") if class_name.startswith(b"loc:@")] # If there are no colocation groups in the explicit _class field, # return the default colocation group. return attr_groups if attr_groups else default_colocation_group def values(self): return tuple(self.outputs) def _get_control_flow_context(self): return self._control_flow_context def _set_control_flow_context(self, context): self._control_flow_context = context @property def name(self): return self._node_def.name @property def _id(self): return self._id_value @property def device(self): return self._node_def.device def _set_device(self, device): self._node_def.device = _device_string(device) def _add_input(self, tensor, dtype=None): if not isinstance(tensor, Tensor): raise TypeError("tensor must be a Tensor: %s" % tensor) _assert_same_graph(self, tensor) if dtype is None: dtype = tensor.dtype else: dtype = dtypes.as_dtype(dtype) if not dtype.is_compatible_with(tensor.dtype): raise TypeError( "Cannot convert a tensor of type %s to an input of type %s" % (tensor.dtype.name, dtype.name)) self._inputs.append(tensor) self._input_types.append(dtype) tensor._add_consumer(self) # pylint: disable=protected-access self._recompute_node_def() def _update_input(self, index, tensor, dtype=None): if not isinstance(tensor, Tensor): raise 
TypeError("tensor must be a Tensor: %s" % tensor) _assert_same_graph(self, tensor) if dtype is None: dtype = tensor.dtype else: dtype = dtypes.as_dtype(dtype) if not dtype.is_compatible_with(tensor.dtype): raise TypeError( "Cannot convert a tensor of type %s to an input of type %s" % (tensor.dtype.name, dtype.name)) self._inputs[index].consumers().remove(self) self._inputs[index] = tensor self._input_types[index] = dtype tensor._add_consumer(self) # pylint: disable=protected-access self._recompute_node_def() def _add_control_inputs(self, ops): if ops: for op in ops: if not isinstance(op, Operation): raise TypeError("op must be an Operation: %s" % op) _assert_same_graph(self, op) self._control_inputs.append(op) self._recompute_node_def() def _add_control_input(self, op): self._add_control_inputs([op]) # Methods below are used when building the NodeDef and Graph proto. def _recompute_node_def(self): del self._node_def.input[:] self._node_def.input.extend([t._as_node_def_input() for t in self._inputs]) if self._control_inputs: self._node_def.input.extend(["^%s" % op.name for op in self._control_inputs]) def __str__(self): return str(self._node_def) def __repr__(self): return "<tf.Operation '%s' type=%s>" % (self.name, self.type) @property def outputs(self): return self._outputs # pylint: disable=protected-access class _InputList(object): def __init__(self, op): self._op = op def __iter__(self): return iter(self._op._inputs) def __len__(self): return len(self._op._inputs) def __bool__(self): return bool(self._op._inputs) # Python 3 wants __bool__, Python 2.7 wants __nonzero__ __nonzero__ = __bool__ def __getitem__(self, i): return self._op._inputs[i] # pylint: enable=protected-access @property def inputs(self): return Operation._InputList(self) @property def _input_dtypes(self): return self._input_types @property def control_inputs(self): return self._control_inputs @property def type(self): return self._node_def.op @property def graph(self): return self._graph 
@property def node_def(self): return self._node_def @property def op_def(self): return self._op_def @property def traceback(self): return _convert_stack(self._traceback) def get_attr(self, name): fields = ["s", "i", "f", "b", "type", "shape", "tensor"] if name not in self._node_def.attr: raise ValueError("No attr named '" + name + "' in " + str(self._node_def)) x = self._node_def.attr[name] # Treat an empty oneof value as an empty list. if not x.WhichOneof("value"): return [] if x.HasField("list"): for f in fields: if getattr(x.list, f): return list(getattr(x.list, f)) return [] else: for f in fields: if x.HasField(f): return getattr(x, f) assert False, "Unsupported field type in " + str(x) def run(self, feed_dict=None, session=None): _run_using_default_session(self, feed_dict, self.graph, session) _gradient_registry = registry.Registry("gradient") class RegisterGradient(object): def __init__(self, op_type): if not isinstance(op_type, six.string_types): raise TypeError("op_type must be a string") self._op_type = op_type def __call__(self, f): _gradient_registry.register(f, self._op_type) return f def NotDifferentiable(op_type): if not isinstance(op_type, six.string_types): raise TypeError("op_type must be a string") _gradient_registry.register(None, op_type) # Alias for the old name, will be eventually removed. NoGradient = NotDifferentiable def get_gradient_function(op): if not op.inputs: return None try: op_type = op.get_attr("_gradient_op_type") except ValueError: op_type = op.type return _gradient_registry.lookup(op_type) _shape_registry = registry.Registry("shape functions") _default_shape_function_registry = registry.Registry("default shape functions") # These are set to common_shapes.call_cpp_shape_fn by op generated code # (generated by python_op_gen.cc). # It is set outside ops.py to avoid a circular dependency. 
# Lazily installed C++ shape-function entry points (see comment above).
_call_cpp_shape_fn = None
_call_cpp_shape_fn_and_require_op = None


def _set_call_cpp_shape_fn(call_cpp_shape_fn):
  """Installs the C++ shape-inference entry points, once per process."""
  global _call_cpp_shape_fn, _call_cpp_shape_fn_and_require_op
  if _call_cpp_shape_fn:
    return  # already registered

  def call_without_requiring(op):
    return call_cpp_shape_fn(op, require_shape_fn=False)

  _call_cpp_shape_fn = call_without_requiring

  def call_with_requiring(op):
    return call_cpp_shape_fn(op, require_shape_fn=True)

  _call_cpp_shape_fn_and_require_op = call_with_requiring


class RegisterShape(object):
  """Decorator registering a Python shape function for an op type.

  Passing ``None`` registers the weak C++-backed default instead.
  """

  def __init__(self, op_type):
    if not isinstance(op_type, six.string_types):
      raise TypeError("op_type must be a string")
    self._op_type = op_type

  def __call__(self, f):
    """Registers ``f`` (or the C++ default when ``f`` is None)."""
    if f is None:
      assert _call_cpp_shape_fn
      # None is a special "weak" value that provides a default shape function,
      # and can be overridden by a non-None registration.
      try:
        _default_shape_function_registry.register(_call_cpp_shape_fn,
                                                  self._op_type)
      except KeyError:
        # Ignore duplicate registrations of the weak value. This can
        # occur if the op library input to wrapper generation
        # inadvertently links in one or more of the standard op
        # libraries.
        pass
    else:
      _shape_registry.register(f, self._op_type)
    return f


def set_shapes_for_outputs(op):
  """Runs the shape function for ``op`` and sets shapes on its outputs.

  Lookup order: explicit registry, then per-op-type default, then the
  required C++ shape function.

  Raises:
    RuntimeError: if the shape function returns None or the wrong number
      of shapes.
  """
  try:
    shape_func = _shape_registry.lookup(op.type)
  except LookupError:
    try:
      shape_func = _default_shape_function_registry.lookup(op.type)
    except LookupError:
      shape_func = _call_cpp_shape_fn_and_require_op

  shapes = shape_func(op)
  if shapes is None:
    raise RuntimeError(
        "Shape function for op %s did not return any shapes" % op)
  elif isinstance(shapes, dict):
    # Returned by call_cpp_shape_fn
    shapes_dict = shapes
    shapes = shapes_dict["shapes"]
    handle_shapes = shapes_dict["handle_shapes"]
    handle_dtypes = shapes_dict["handle_dtypes"]
    for output, handle_shape, handle_dtype in zip(op.outputs, handle_shapes,
                                                  handle_dtypes):
      # pylint: disable=protected-access
      output._handle_shape = handle_shape
      output._handle_dtype = handle_dtype
      # pylint: enable=protected-access

  if len(op.outputs) != len(shapes):
    raise RuntimeError(
        "Shape function for op %s returned %d shapes but expected %d %s %s" %
        (op, len(shapes), len(op.outputs), shape_func.__name__,
         str(shapes)))
  for output, s in zip(op.outputs, shapes):
    output.set_shape(s)


class OpStats(object):
  """A single statistic (e.g. flops) about an op, mergeable via ``+=``."""

  def __init__(self, statistic_type, value=None):
    # statistic_type: label naming the statistic; value: its numeric value
    # (None means "unknown / not yet accumulated").
    self.statistic_type = statistic_type
    self.value = value

  @property
  def statistic_type(self):
    return self._statistic_type

  @statistic_type.setter
  def statistic_type(self, statistic_type):
    self._statistic_type = statistic_type

  @property
  def value(self):
    return self._value

  @value.setter
  def value(self, value):
    self._value = value

  def __iadd__(self, other):
    """Accumulates ``other`` into self; types must match; None is identity."""
    if other.statistic_type != self.statistic_type:
      raise ValueError("Can't add an OpStat of type %s to one of %s.",
                       self.statistic_type, other.statistic_type)
    if self.value is None:
      self.value = other.value
    elif other.value is not None:
      self._value += other.value
    return self


# Maps "op_type,statistic_type" keys to statistics functions.
_stats_registry = registry.Registry("statistical functions")


class RegisterStatistics(object):
  """Decorator registering a statistics function for (op type, stat type).

  Commas are forbidden in both parts because the registry key is the
  comma-joined pair.
  """

  def __init__(self, op_type, statistic_type):
    if not isinstance(op_type, six.string_types):
      raise TypeError("op_type must be a string.")
    if "," in op_type:
      raise TypeError("op_type must not contain a comma.")
    self._op_type = op_type
    if not isinstance(statistic_type, six.string_types):
      raise TypeError("statistic_type must be a string.")
    if "," in statistic_type:
      raise TypeError("statistic_type must not contain a comma.")
    self._statistic_type = statistic_type

  def __call__(self, f):
    """Registers ``f`` under "op_type,statistic_type" and returns it."""
    _stats_registry.register(f, self._op_type + "," + self._statistic_type)
    return f


def get_stats_for_node_def(graph, node, statistic_type):
  """Computes ``statistic_type`` for ``node``; empty OpStats if unregistered."""
  try:
    stats_func = _stats_registry.lookup(node.op + "," + statistic_type)
    result = stats_func(graph, node)
  except LookupError:
    result = OpStats(statistic_type)
  return result


def _name_from_scope_name(name):
  """Strips one trailing "/" from a scope name to get an op name."""
  # NOTE(review): raises IndexError when name == "" -- callers in this file
  # appear to only pass non-empty names; confirm before reusing elsewhere.
  return name[:-1] if name[-1] == "/" else name


class Graph(object):
  """A TensorFlow computation graph: ops, collections, and scoping state.

  Thread-safe for op creation via ``self._lock``; most mutating entry
  points call ``_check_not_finalized`` first.
  """

  def __init__(self):
    """Creates a new, empty Graph."""
    # Protects core state below; ops may be created from multiple threads.
    self._lock = threading.Lock()
    self._nodes_by_id = dict()
    self._next_id_counter = 0
    self._nodes_by_name = dict()
    # Incremented (to the op id) whenever an op is added.
    self._version = 0
    # Current name stack: a string like "scope1/scope2".
    self._name_stack = ""
    # Maps a name used in the graph to the next id to use for that name.
    self._names_in_use = {}
    # Functions that will be applied to choose a device if none is specified.
    self._device_function_stack = []
    # Default original_op applied to new ops.
    self._default_original_op = None
    # Current control flow context.
    self._control_flow_context = None
    # Stack of _ControlDependenciesController objects.
    self._control_dependencies_stack = []
    # Arbitrary collections of objects keyed by name.
    self._collections = {}
    # Graph-level random seed.
    self._seed = None
    # Attr scopes, kernel labels and gradient overrides installed by the
    # corresponding context managers below.
    self._attr_scope_map = {}
    self._op_to_kernel_label_map = {}
    self._gradient_override_map = {}
    # True if the graph is considered "finalized" (immutable).
    self._finalized = False
    # Functions defined in the graph, keyed by name.
    self._functions = collections.OrderedDict()
    self._graph_def_versions = versions_pb2.VersionDef(
        producer=versions.GRAPH_DEF_VERSION,
        min_consumer=versions.GRAPH_DEF_VERSION_MIN_CONSUMER)
    self._building_function = False
    # Stack of colocation ops.
    self._colocation_stack = []
    # Tensors/ops excluded from feeding/fetching.
    self._unfeedable_tensors = set()
    self._unfetchable_ops = set()
    # Handle-related bookkeeping for persistent tensors.
    self._handle_feeders = {}
    self._handle_readers = {}
    self._handle_movers = {}
    self._handle_deleters = {}
    # Default resource container.
    self._container = ""
    self._registered_ops = op_def_registry.get_registered_ops()

  def _check_not_finalized(self):
    """Raises if the graph has been finalized."""
    if self._finalized:
      raise RuntimeError("Graph is finalized and cannot be modified.")

  def _add_op(self, op):
    """Adds ``op`` to the id and name indexes, bumping the graph version.

    Raises:
      TypeError: if ``op`` is not a Tensor or Operation.
      ValueError: on duplicate id or name.
    """
    self._check_not_finalized()
    if not isinstance(op, (Tensor, Operation)):
      raise TypeError("op must be a Tensor or Operation: %s" % op)
    with self._lock:
      if op._id in self._nodes_by_id:
        raise ValueError("cannot add an op with id %d as it already "
                         "exists in the graph" % op._id)
      if op.name in self._nodes_by_name:
        raise ValueError("cannot add op with name %s as that name "
                         "is already used" % op.name)
      self._nodes_by_id[op._id] = op
      self._nodes_by_name[op.name] = op
      self._version = max(self._version, op._id)

  @property
  def version(self):
    # Highest op id added so far; no lock needed once finalized.
    if self._finalized:
      return self._version
    with self._lock:
      return self._version

  @property
  def graph_def_versions(self):
    # The GraphDef producer/min_consumer version info.
    return self._graph_def_versions

  @property
  def seed(self):
    # Graph-level random seed (may be None).
    return self._seed

  @seed.setter
  def seed(self, seed):
    self._seed = seed

  @property
  def finalized(self):
    return self._finalized

  def finalize(self):
    """Makes the graph read-only; subsequent mutations raise RuntimeError."""
    self._finalized = True

  def _unsafe_unfinalize(self):
    # Reverses finalize(); "unsafe" because other threads may rely on
    # immutability.
    self._finalized = False

  def _get_control_flow_context(self):
    """Returns the current control-flow context (or None)."""
    return self._control_flow_context

  def _set_control_flow_context(self, context):
    """Sets the current control-flow context."""
    self._control_flow_context = context

  def _as_graph_def(self, from_version=None, add_shapes=False):
    """Serializes the graph; returns (GraphDef, version at serialization).

    Args:
      from_version: if set, only include ops with id greater than this.
      add_shapes: if True, attach an "_output_shapes" attr to each node.

    Raises:
      ValueError: if the serialized GraphDef would exceed 2GB.
    """
    with self._lock:
      graph = graph_pb2.GraphDef()
      graph.versions.CopyFrom(self._graph_def_versions)
      bytesize = 0
      for op_id in sorted(self._nodes_by_id):
        op = self._nodes_by_id[op_id]
        if from_version is None or op_id > from_version:
          graph.node.extend([op.node_def])
          if op.outputs and add_shapes:
            assert "_output_shapes" not in graph.node[-1].attr
            graph.node[-1].attr["_output_shapes"].list.shape.extend([
                output.get_shape().as_proto() for output in op.outputs])
          bytesize += op.node_def.ByteSize()
          if bytesize >= (1 << 31) or bytesize < 0:
            raise ValueError("GraphDef cannot be larger than 2GB.")
      if self._functions:
        for f in self._functions.values():
          bytesize += f.definition.ByteSize()
          if bytesize >= (1 << 31) or bytesize < 0:
            raise ValueError("GraphDef cannot be larger than 2GB.")
          graph.library.function.extend([f.definition])
          if f.grad_func_name:
            grad_def = function_pb2.GradientDef()
            grad_def.function_name = f.name
            grad_def.gradient_func = f.grad_func_name
            graph.library.gradient.extend([grad_def])
    return graph, self._version

  def as_graph_def(self, from_version=None, add_shapes=False):
    """Public wrapper over ``_as_graph_def`` discarding the version."""
    result, _ = self._as_graph_def(from_version, add_shapes)
    return result

  def _is_function(self, name):
    """Returns True iff a function named ``name`` is defined in this graph."""
    return name in self._functions

  def _get_function(self, name):
    """Returns the function named ``name``, or None."""
    return self._functions.get(name, None)

  def _add_function(self, function):
    """Adds a function definition to the graph.

    Raises:
      ValueError: on a duplicate name or a doubly-defined gradient.
    """
    name = function.name
    previous = self._functions.get(name, None)
    if previous:
      raise ValueError("Another function is already defined with that name")
    if (function.grad_func_name is not None) and (
        function.python_grad_func is not None):
      raise ValueError("Gradient defined twice for function %s" % name)
    # Functions require GraphDef consumers of at least version 12.
    if self._graph_def_versions.min_consumer < 12:
      self._graph_def_versions.min_consumer = 12
    self._functions[name] = function

  @property
  def building_function(self):
    # True while this graph is being used to define a function body.
    return self._building_function

  def create_op(self, op_type, inputs, dtypes,
                input_types=None, name=None, attrs=None, op_def=None,
                compute_shapes=True, compute_device=True):
    """Creates an `Operation` in this graph and returns it.

    Applies the current attr scopes, kernel labels, gradient overrides,
    control dependencies, device functions, colocation constraints, and
    (for stateful ops) the default container.

    Args:
      op_type: op type string.
      inputs: list of input `Tensor`s.
      dtypes: output types.
      input_types: optional expected input types.
      name: optional op name; defaults to ``op_type``.
      attrs: optional NodeDef attrs.
      op_def: optional OpDef.
      compute_shapes: if True, run shape inference on the outputs.
      compute_device: if True, apply the device function stack.

    Raises:
      TypeError: if any input is not a Tensor, or a scope-map callable
        returns an invalid value.
    """
    self._check_not_finalized()
    for idx, a in enumerate(inputs):
      if not isinstance(a, Tensor):
        raise TypeError("Input #%d is not a tensor: %s" % (idx, a))
    if name is None:
      name = op_type
    # A trailing "/" means a caller-supplied, already-unique scope name.
    if name and name[-1] == "/":
      name = _name_from_scope_name(name)
    else:
      name = self.unique_name(name)

    node_def = _NodeDef(op_type, name, device=None, attrs=attrs)

    # Apply attr scopes (explicit attrs win over scope-mapped ones).
    for key, value in self._attr_scope_map.items():
      if key not in node_def.attr:
        if callable(value):
          value = value(node_def)
          if not isinstance(value, (type(None), attr_value_pb2.AttrValue)):
            raise TypeError(
                "Callable for scope map key '%s' must return either None or "
                "an AttrValue protocol buffer; but it returned: %s" %
                (key, value))
        node_def.attr[key].CopyFrom(value)

    # Apply a kernel label if one has been specified for this op type.
    try:
      kernel_label = self._op_to_kernel_label_map[op_type]
      node_def.attr["_kernel"].CopyFrom(
          attr_value_pb2.AttrValue(s=compat.as_bytes(kernel_label)))
    except KeyError:
      pass

    # Apply the overriding op type for gradients if one has been specified.
    try:
      mapped_op_type = self._gradient_override_map[op_type]
      node_def.attr["_gradient_op_type"].CopyFrom(
          attr_value_pb2.AttrValue(s=compat.as_bytes(mapped_op_type)))
    except KeyError:
      pass

    control_inputs = self._control_dependencies_for_inputs(inputs)
    ret = Operation(node_def, self, inputs=inputs, output_types=dtypes,
                    control_inputs=control_inputs, input_types=input_types,
                    original_op=self._default_original_op, op_def=op_def)
    if compute_shapes:
      set_shapes_for_outputs(ret)
    self._add_op(ret)
    self._record_op_seen_by_control_dependencies(ret)

    if compute_device:
      self._apply_device_functions(ret)

    if self._colocation_stack:
      all_colocation_groups = []
      for colocation_op in self._colocation_stack:
        all_colocation_groups.extend(colocation_op.colocation_groups())
        if colocation_op.device:
          # Inherit the colocation op's device, unless it conflicts with a
          # device already chosen for this op.
          if ret.device and ret.device != colocation_op.device:
            logging.warning("Tried to colocate %s with an op %s that had "
                            "a different device: %s vs %s. "
                            "Ignoring colocation property.",
                            name, colocation_op.name, ret.device,
                            colocation_op.device)
          else:
            ret._set_device(colocation_op.device)

      all_colocation_groups = sorted(set(all_colocation_groups))
      ret.node_def.attr["_class"].CopyFrom(attr_value_pb2.AttrValue(
          list=attr_value_pb2.AttrValue.ListValue(s=all_colocation_groups)))

    # Stateful ops with an empty "container" attr inherit the graph default.
    if (self._container and
        op_type in self._registered_ops and
        self._registered_ops[op_type].is_stateful and
        "container" in ret.node_def.attr and
        not ret.node_def.attr["container"].s):
      ret.node_def.attr["container"].CopyFrom(
          attr_value_pb2.AttrValue(s=compat.as_bytes(self._container)))

    return ret

  def as_graph_element(self, obj, allow_tensor=True, allow_operation=True):
    """Resolves ``obj`` (name, Tensor, or Operation) to a graph element."""
    if self._finalized:
      return self._as_graph_element_locked(obj, allow_tensor, allow_operation)
    with self._lock:
      return self._as_graph_element_locked(obj, allow_tensor, allow_operation)

  def _as_graph_element_locked(self, obj, allow_tensor, allow_operation):
    """Implementation of ``as_graph_element``; caller holds the lock.

    Raises:
      ValueError: on malformed names or elements from a different graph.
      KeyError: when a named element does not exist in the graph.
      TypeError: when ``obj`` cannot be converted at all.
    """
    if allow_tensor and allow_operation:
      types_str = "Tensor or Operation"
    elif allow_tensor:
      types_str = "Tensor"
    elif allow_operation:
      types_str = "Operation"
    else:
      raise ValueError("allow_tensor and allow_operation can't both be False.")

    temp_obj = _as_graph_element(obj)
    if temp_obj is not None:
      obj = temp_obj

    # If obj appears to be a name...
    if isinstance(obj, compat.bytes_or_text_types):
      name = compat.as_str(obj)

      if ":" in name and allow_tensor:
        # Looks like a Tensor name and can be a Tensor.
        try:
          op_name, out_n = name.split(":")
          out_n = int(out_n)
        except:
          raise ValueError("The name %s looks a like a Tensor name, but is "
                           "not a valid one. Tensor names must be of the "
                           "form \"<op_name>:<output_index>\"." % repr(name))
        if op_name in self._nodes_by_name:
          op = self._nodes_by_name[op_name]
        else:
          raise KeyError("The name %s refers to a Tensor which does not "
                         "exist. The operation, %s, does not exist in the "
                         "graph." % (repr(name), repr(op_name)))
        try:
          return op.outputs[out_n]
        except:
          raise KeyError("The name %s refers to a Tensor which does not "
                         "exist. The operation, %s, exists but only has "
                         "%s outputs." % (repr(name), repr(op_name),
                                          len(op.outputs)))

      elif ":" in name and not allow_tensor:
        # Looks like a Tensor name but can't be a Tensor.
        raise ValueError("Name %s appears to refer to a Tensor, not a %s." %
                         (repr(name), types_str))

      elif ":" not in name and allow_operation:
        if name not in self._nodes_by_name:
          raise KeyError("The name %s refers to an Operation not in the "
                         "graph." % repr(name))
        return self._nodes_by_name[name]

      elif ":" not in name and not allow_operation:
        if name in self._nodes_by_name:
          # Yep, it's an Operation name
          err_msg = ("The name %s refers to an Operation, not a %s." %
                     (repr(name), types_str))
        else:
          err_msg = ("The name %s looks like an (invalid) Operation name, "
                     "not a %s." % (repr(name), types_str))
        err_msg += (" Tensor names must be of the form "
                    "\"<op_name>:<output_index>\".")
        raise ValueError(err_msg)

    elif isinstance(obj, Tensor) and allow_tensor:
      if obj.graph is not self:
        raise ValueError("Tensor %s is not an element of this graph." % obj)
      return obj
    elif isinstance(obj, Operation) and allow_operation:
      # Actually obj is just the object it's referring to.
      if obj.graph is not self:
        raise ValueError("Operation %s is not an element of this graph." % obj)
      return obj
    else:
      raise TypeError("Can not convert a %s into a %s."
                      % (type(obj).__name__, types_str))

  def get_operations(self):
    """Returns all operations in the graph, in id order."""
    if self._finalized:
      return list(self._nodes_by_id.values())
    with self._lock:
      return list(self._nodes_by_id.values())

  def get_operation_by_name(self, name):
    """Returns the `Operation` with the given name.

    Raises:
      TypeError: if ``name`` is not a string.
    """
    if not isinstance(name, six.string_types):
      raise TypeError("Operation names are strings (or similar), not %s."
                      % type(name).__name__)
    return self.as_graph_element(name, allow_tensor=False,
                                 allow_operation=True)

  def get_tensor_by_name(self, name):
    """Returns the `Tensor` with the given "op:index" name.

    Raises:
      TypeError: if ``name`` is not a string.
    """
    if not isinstance(name, six.string_types):
      raise TypeError("Tensor names are strings (or similar), not %s."
                      % type(name).__name__)
    return self.as_graph_element(name, allow_tensor=True,
                                 allow_operation=False)

  def _next_id(self):
    """Returns the next monotonically-increasing op id."""
    self._check_not_finalized()
    with self._lock:
      self._next_id_counter += 1
      return self._next_id_counter

  @property
  def _last_id(self):
    return self._next_id_counter

  def as_default(self):
    """Returns a context manager making this the default graph."""
    return _default_graph_stack.get_controller(self)

  def add_to_collection(self, name, value):
    """Appends ``value`` to the collection named ``name``."""
    self._check_not_finalized()
    with self._lock:
      if name not in self._collections:
        self._collections[name] = [value]
      else:
        self._collections[name].append(value)

  def add_to_collections(self, names, value):
    """Adds ``value`` to each named collection (a string or iterable)."""
    # Make sure names are unique, but treat strings as a single collection
    # name.
    names = (names,) if isinstance(names, six.string_types) else set(names)
    for name in names:
      self.add_to_collection(name, value)

  def get_collection_ref(self, name):
    """Returns the live (mutable) list backing the named collection."""
    with self._lock:
      coll_list = self._collections.get(name, None)
      if coll_list is None:
        coll_list = []
        self._collections[name] = coll_list
      return coll_list

  def get_collection(self, name, scope=None):
    """Returns a copy of the named collection, optionally regex-filtered.

    Args:
      name: collection key.
      scope: optional regex; only items whose ``.name`` matches are kept.
    """
    with self._lock:
      coll_list = self._collections.get(name, None)
      if coll_list is None:
        return []
      if scope is None:
        return list(coll_list)
      else:
        c = []
        regex = re.compile(scope)
        for item in coll_list:
          if hasattr(item, "name") and regex.match(item.name):
            c.append(item)
        return c

  def get_all_collection_keys(self):
    """Returns all string-keyed collection names in use."""
    with self._lock:
      return [x for x in self._collections if isinstance(x, six.string_types)]

  def clear_collection(self, name):
    """Removes the named collection entirely (no-op if absent)."""
    self._check_not_finalized()
    with self._lock:
      if name in self._collections:
        del self._collections[name]

  @contextlib.contextmanager
  def _original_op(self, op):
    """Context manager: new ops created inside record ``op`` as original."""
    old_original_op = self._default_original_op
    try:
      self._default_original_op = op
      yield
    finally:
      self._default_original_op = old_original_op

  @contextlib.contextmanager
  def name_scope(self, name):
    """Context manager pushing ``name`` onto the graph's name stack.

    Yields the scope prefix (possibly ""). A trailing "/" reuses an
    existing scope verbatim; an empty/None name resets to the top scope.

    Raises:
      ValueError: if ``name`` is not a valid scope name.
    """
    if name:
      if self._name_stack:
        # Scopes created in a nested scope may have initial characters
        # that are illegal as the initial character of an op name.
        if not _VALID_SCOPE_NAME_REGEX.match(name):
          raise ValueError("'%s' is not a valid scope name" % name)
      else:
        # Scopes created in the root must match the more restrictive
        # op name regex.
        if not _VALID_OP_NAME_REGEX.match(name):
          raise ValueError("'%s' is not a valid scope name" % name)
    try:
      old_stack = self._name_stack
      if not name:  # Both for name=None and name="".
        new_stack = None
      elif name and name[-1] == "/":
        new_stack = _name_from_scope_name(name)
      else:
        new_stack = self.unique_name(name)
      self._name_stack = new_stack
      yield "" if new_stack is None else new_stack + "/"
    finally:
      self._name_stack = old_stack

  def unique_name(self, name, mark_as_used=True):
    """Returns a graph-unique version of ``name`` under the current scope.

    Appends "_<i>" suffixes until the name is unused; when
    ``mark_as_used`` is False the name is only previewed, not reserved.
    """
    if self._name_stack:
      name = self._name_stack + "/" + name
    i = self._names_in_use.get(name, 0)
    # Increment the number for "name".
    if mark_as_used:
      self._names_in_use[name] = i + 1
    if i > 0:
      base_name = name
      # Make sure the composed name is not already used.
      while name in self._names_in_use:
        name = "%s_%d" % (base_name, i)
        i += 1
      # Mark the composed name as used in case someone wants
      # to call unique_name("name_1").
      if mark_as_used:
        self._names_in_use[name] = 1
    return name

  @contextlib.contextmanager
  def colocate_with(self, op, ignore_existing=False):
    """Context manager colocating new ops with ``op``.

    Device functions are suspended inside the scope; with
    ``ignore_existing`` the current colocation stack is cleared too.

    Raises:
      ValueError: if ``op`` is None and ``ignore_existing`` is False.
    """
    if op is None and not ignore_existing:
      raise ValueError(
          "Trying to reset colocation (op is None) but "
          "ignore_existing is not True")

    if op is not None and not isinstance(op, Operation):
      # We always want to colocate with the reference op.
      op = internal_convert_to_tensor_or_indexed_slices(op, as_ref=True).op

    # By default, colocate_with resets the device function stack,
    # since colocate_with is typically used in specific internal
    # library functions where colocation is intended to be "stronger"
    # than device functions.
    device_fn_tmp = self._device_function_stack
    self._device_function_stack = []

    if ignore_existing:
      current_stack = self._colocation_stack
      self._colocation_stack = []

    if op is not None:
      self._colocation_stack.append(op)

    try:
      yield
    finally:
      # Restore device function stack.
      self._device_function_stack = device_fn_tmp
      if op is not None:
        self._colocation_stack.pop()

      # Reset the colocation stack if requested.
      if ignore_existing:
        self._colocation_stack = current_stack

  @contextlib.contextmanager
  def device(self, device_name_or_function):
    """Context manager pushing a device string or device function."""
    if (device_name_or_function is not None
        and not callable(device_name_or_function)):
      device_function = pydev.merge_device(device_name_or_function)
    else:
      device_function = device_name_or_function

    try:
      self._device_function_stack.append(device_function)
      yield
    finally:
      self._device_function_stack.pop()

  def _apply_device_functions(self, op):
    """Applies the stacked device functions to ``op``, innermost first."""
    # Apply any device functions in reverse order, so that the most recently
    # pushed function has the first chance to apply a device to the op.
    # We apply here because the result can depend on the Operation's
    # signature, which is computed in the Operation constructor.
    for device_function in reversed(self._device_function_stack):
      if device_function is None:
        break
      op._set_device(device_function(op))

  # pylint: disable=g-doc-return-or-yield
  @contextlib.contextmanager
  def container(self, container_name):
    """Context manager setting the default resource container name."""
    original_container = self._container
    try:
      self._container = container_name
      yield self._container
    finally:
      self._container = original_container
  # pylint: enable=g-doc-return-or-yield

  class _ControlDependenciesController(object):
    """Context manager for a single control_dependencies() scope.

    ``control_inputs=None`` creates a "new stack" scope that clears all
    current control dependencies and the control-flow context.
    """

    def __init__(self, graph, control_inputs):
      self._graph = graph
      if control_inputs is None:
        self._control_inputs = []
        self._new_stack = True
      else:
        self._control_inputs = control_inputs
        self._new_stack = False
      self._seen_nodes = set()
      self._old_stack = None
      self._old_control_flow_context = None

    # pylint: disable=protected-access
    def __enter__(self):
      if self._new_stack:
        # Clear the control_dependencies graph.
        self._old_stack = self._graph._control_dependencies_stack
        self._graph._control_dependencies_stack = []
        # Clear the control_flow_context too.
        self._old_control_flow_context = \
            self._graph._get_control_flow_context()
        self._graph._set_control_flow_context(None)
      self._graph._push_control_dependencies_controller(self)

    def __exit__(self, unused_type, unused_value, unused_traceback):
      self._graph._pop_control_dependencies_controller(self)
      if self._new_stack:
        self._graph._control_dependencies_stack = self._old_stack
        self._graph._set_control_flow_context(self._old_control_flow_context)
    # pylint: enable=protected-access

    @property
    def control_inputs(self):
      return self._control_inputs

    def add_op(self, op):
      # Record an op created while this controller was active.
      self._seen_nodes.add(op)

    def op_in_group(self, op):
      return op in self._seen_nodes

  def _push_control_dependencies_controller(self, controller):
    self._control_dependencies_stack.append(controller)

  def _pop_control_dependencies_controller(self, controller):
    assert self._control_dependencies_stack[-1] is controller
    self._control_dependencies_stack.pop()

  def _current_control_dependencies(self):
    """Returns the union of control inputs across all active controllers."""
    ret = set()
    for controller in self._control_dependencies_stack:
      for op in controller.control_inputs:
        ret.add(op)
    return ret

  def _control_dependencies_for_inputs(self, input_tensors):
    """Returns the control inputs a new op with ``input_tensors`` needs.

    Skips controllers whose dependencies are already implied by a data
    input created inside that controller's scope.
    """
    ret = []
    input_ops = set([t.op for t in input_tensors])
    for controller in self._control_dependencies_stack:
      # If any of the input_ops already depends on the inputs from controller,
      # we say that the new op is dominated (by that input), and we therefore
      # do not need to add control dependencies for this controller's inputs.
      dominated = False
      for op in input_ops:
        if controller.op_in_group(op):
          dominated = True
          break
      if not dominated:
        # NOTE(mrry): We do not currently track transitive data dependencies,
        #   so we may add redundant control inputs.
        ret.extend([c for c in controller.control_inputs if c not in
                    input_ops])
    return ret

  def _record_op_seen_by_control_dependencies(self, op):
    """Registers a newly created op with every active controller."""
    for controller in self._control_dependencies_stack:
      controller.add_op(op)

  def control_dependencies(self, control_inputs):
    """Returns a context manager adding control dependencies on new ops.

    ``None`` clears all control dependencies inside the scope.

    Raises:
      TypeError: if an input is neither Operation nor Tensor.
    """
    if control_inputs is None:
      return self._ControlDependenciesController(self, None)
    # First convert the inputs to ops, and deduplicate them.
    # NOTE(mrry): Other than deduplication, we do not currently track direct
    #   or indirect dependencies between control_inputs, which may result in
    #   redundant control inputs.
    control_ops = []
    current = self._current_control_dependencies()
    for c in control_inputs:
      c = self.as_graph_element(c)
      if isinstance(c, Tensor):
        c = c.op
      elif not isinstance(c, Operation):
        raise TypeError("Control input must be Operation or Tensor: %s" % c)
      if c not in current:
        control_ops.append(c)
        current.add(c)
    return self._ControlDependenciesController(self, control_ops)

  # pylint: disable=g-doc-return-or-yield
  @contextlib.contextmanager
  def _attr_scope(self, attr_map):
    """Context manager overlaying default NodeDef attrs for new ops.

    A ``None`` value deletes the attr from the scope map; values may be
    AttrValue protos or callables producing them.

    Raises:
      TypeError: on a non-dict argument or invalid key/value types.
    """
    if not isinstance(attr_map, dict):
      raise TypeError("attr_map must be a dictionary mapping "
                      "strings to AttrValue protocol buffers")
    # The saved_attrs dictionary stores any currently-set labels that
    # will be overridden by this context manager.
    saved_attrs = {}
    # Install the given attribute
    for name, attr in attr_map.items():
      if not (isinstance(name, six.string_types) and
              (isinstance(attr, (type(None), attr_value_pb2.AttrValue)) or
               callable(attr))):
        raise TypeError("attr_map must be a dictionary mapping "
                        "strings to AttrValue protocol buffers or "
                        "callables that emit AttrValue protocol buffers")
      try:
        saved_attrs[name] = self._attr_scope_map[name]
      except KeyError:
        pass
      if attr is None:
        del self._attr_scope_map[name]
      else:
        self._attr_scope_map[name] = attr
    try:
      yield  # The code within the context runs here.
    finally:
      # Remove the attributes set for this context, and restore any saved
      # attributes.
      for name, attr in attr_map.items():
        try:
          self._attr_scope_map[name] = saved_attrs[name]
        except KeyError:
          del self._attr_scope_map[name]
  # pylint: enable=g-doc-return-or-yield

  # pylint: disable=g-doc-return-or-yield
  @contextlib.contextmanager
  def _kernel_label_map(self, op_to_kernel_label_map):
    """Context manager overriding kernel labels for the given op types.

    Raises:
      TypeError: on a non-dict argument or non-string keys/values.
    """
    if not isinstance(op_to_kernel_label_map, dict):
      raise TypeError("op_to_kernel_label_map must be a dictionary mapping "
                      "strings to strings")
    # The saved_labels dictionary stores any currently-set labels that
    # will be overridden by this context manager.
    saved_labels = {}
    # Install the given label
    for op_type, label in op_to_kernel_label_map.items():
      if not (isinstance(op_type, six.string_types) and
              isinstance(label, six.string_types)):
        raise TypeError("op_to_kernel_label_map must be a dictionary mapping "
                        "strings to strings")
      try:
        saved_labels[op_type] = self._op_to_kernel_label_map[op_type]
      except KeyError:
        pass
      self._op_to_kernel_label_map[op_type] = label
    try:
      yield  # The code within the context runs here.
    finally:
      # Remove the labels set for this context, and restore any saved labels.
      for op_type, label in op_to_kernel_label_map.items():
        try:
          self._op_to_kernel_label_map[op_type] = saved_labels[op_type]
        except KeyError:
          del self._op_to_kernel_label_map[op_type]
  # pylint: enable=g-doc-return-or-yield

  # pylint: disable=g-doc-return-or-yield
  @contextlib.contextmanager
  def gradient_override_map(self, op_type_map):
    """Context manager remapping gradient functions for the given op types.

    Raises:
      TypeError: on a non-dict argument or non-string keys/values.
    """
    if not isinstance(op_type_map, dict):
      raise TypeError("op_type_map must be a dictionary mapping "
                      "strings to strings")
    # The saved_mappings dictionary stores any currently-set mappings that
    # will be overridden by this context manager.
    saved_mappings = {}
    # Install the given label
    for op_type, mapped_op_type in op_type_map.items():
      if not (isinstance(op_type, six.string_types) and
              isinstance(mapped_op_type, six.string_types)):
        raise TypeError("op_type_map must be a dictionary mapping "
                        "strings to strings")
      try:
        saved_mappings[op_type] = self._gradient_override_map[op_type]
      except KeyError:
        pass
      self._gradient_override_map[op_type] = mapped_op_type
    try:
      yield  # The code within the context runs here.
    finally:
      # Remove the labels set for this context, and restore any saved labels.
      for op_type, mapped_op_type in op_type_map.items():
        try:
          self._gradient_override_map[op_type] = saved_mappings[op_type]
        except KeyError:
          del self._gradient_override_map[op_type]
  # pylint: enable=g-doc-return-or-yield

  def prevent_feeding(self, tensor):
    """Marks ``tensor`` as unfeedable in this graph."""
    self._unfeedable_tensors.add(tensor)

  def is_feedable(self, tensor):
    """Returns True iff ``tensor`` may be fed in this graph."""
    return tensor not in self._unfeedable_tensors

  def prevent_fetching(self, op):
    """Marks ``op`` as unfetchable in this graph."""
    self._unfetchable_ops.add(op)

  def is_fetchable(self, tensor_or_op):
    """Returns True iff ``tensor_or_op`` may be fetched in this graph."""
    if isinstance(tensor_or_op, Tensor):
      return tensor_or_op.op not in self._unfetchable_ops
    else:
      return tensor_or_op not in self._unfetchable_ops


def device(device_name_or_function):
  """Wraps Graph.device() on the default graph."""
  return get_default_graph().device(device_name_or_function)


def container(container_name):
  """Wraps Graph.container() on the default graph."""
  return get_default_graph().container(container_name)


def colocate_with(op, ignore_existing=False):
  """Wraps Graph.colocate_with() on the default graph."""
  return get_default_graph().colocate_with(op, ignore_existing)


def control_dependencies(control_inputs):
  """Wraps Graph.control_dependencies() on the default graph."""
  return get_default_graph().control_dependencies(control_inputs)


class _DefaultStack(threading.local):
  """A thread-local stack of "default" objects with a context manager API."""

  def __init__(self):
    super(_DefaultStack, self).__init__()
    self._enforce_nesting = True
    self.stack = []

  def get_default(self):
    """Returns the current default (top of stack), or None."""
    return self.stack[-1] if len(self.stack) >= 1 else None

  def reset(self):
    self.stack = []

  @property
  def enforce_nesting(self):
    # When True, contexts must exit in strict LIFO order.
    return self._enforce_nesting

  @enforce_nesting.setter
  def enforce_nesting(self, value):
    self._enforce_nesting = value

  @contextlib.contextmanager
  def get_controller(self, default):
    """A context manager making ``default`` the stack's current default."""
    try:
      self.stack.append(default)
      yield default
    finally:
      if self._enforce_nesting:
        if self.stack[-1] is not default:
          raise AssertionError(
              "Nesting violated for default stack of %s objects"
              % type(default))
        self.stack.pop()
      else:
        self.stack.remove(default)


_default_session_stack = _DefaultStack()


def default_session(session):
  """Returns a context manager making ``session`` the default session."""
  return _default_session_stack.get_controller(session)


def get_default_session():
  """Returns the current thread's default session, or None."""
  return _default_session_stack.get_default()


def _eval_using_default_session(tensors, feed_dict, graph, session=None):
  """Evaluates ``tensors`` with the given or default session.

  Raises:
    ValueError: if no usable session exists or the session's graph does
      not match ``graph``.
  """
  if session is None:
    session = get_default_session()
    if session is None:
      raise ValueError("Cannot evaluate tensor using `eval()`: No default "
                       "session is registered. Use `with "
                       "sess.as_default()` or pass an explicit session to "
                       "`eval(session=sess)`")
    if session.graph is not graph:
      raise ValueError("Cannot use the default session to evaluate tensor: "
                       "the tensor's graph is different from the session's "
                       "graph. Pass an explicit session to "
                       "`eval(session=sess)`.")
  else:
    if session.graph is not graph:
      raise ValueError("Cannot use the given session to evaluate tensor: "
                       "the tensor's graph is different from the session's "
                       "graph.")
  return session.run(tensors, feed_dict)


def _run_using_default_session(operation, feed_dict, graph, session=None):
  """Runs ``operation`` with the given or default session.

  Raises:
    ValueError: if no usable session exists or the session's graph does
      not match ``graph``.
  """
  if session is None:
    session = get_default_session()
    if session is None:
      raise ValueError("Cannot execute operation using `run()`: No default "
                       "session is registered. Use `with "
                       "sess.as_default():` or pass an explicit session to "
                       "`run(session=sess)`")
    if session.graph is not graph:
      raise ValueError("Cannot use the default session to execute operation: "
                       "the operation's graph is different from the "
                       "session's graph. Pass an explicit session to "
                       "run(session=sess).")
  else:
    if session.graph is not graph:
      raise ValueError("Cannot use the given session to execute operation: "
                       "the operation's graph is different from the session's "
                       "graph.")
  session.run(operation, feed_dict)


class _DefaultGraphStack(_DefaultStack):
  """Default-graph stack that falls back to a lazily created global graph."""

  def __init__(self):
    super(_DefaultGraphStack, self).__init__()
    self._global_default_graph = None

  def get_default(self):
    """Returns the thread default graph, or the global default graph."""
    ret = super(_DefaultGraphStack, self).get_default()
    if ret is None:
      ret = self._GetGlobalDefaultGraph()
    return ret

  def _GetGlobalDefaultGraph(self):
    if self._global_default_graph is None:
      # TODO(mrry): Perhaps log that the default graph is being used, or set
      #   provide some other feedback to prevent confusion when a mixture of
      #   the global default graph and an explicit graph are combined in the
      #   same process.
      self._global_default_graph = Graph()
    return self._global_default_graph

  def reset(self):
    super(_DefaultGraphStack, self).reset()
    self._global_default_graph = None


_default_graph_stack = _DefaultGraphStack()


def reset_default_graph():
  """Discards the global default graph (a fresh one is created on demand)."""
  _default_graph_stack.reset()


def get_default_graph():
  """Returns the default graph for the current thread."""
  return _default_graph_stack.get_default()


def _assert_same_graph(original_item, item):
  """Raises ValueError if the two graph elements live in different graphs."""
  if original_item.graph is not item.graph:
    raise ValueError(
        "%s must be from the same graph as %s." % (item, original_item))


def _get_graph_from_inputs(op_input_list, graph=None):
  """Returns the graph implied by ``op_input_list`` (or ``graph``).

  While building a function the function's graph always wins. All
  graph-valued inputs must agree with each other and with ``graph``;
  otherwise ValueError is raised. Falls back to the default graph.
  """
  if get_default_graph().building_function:
    return get_default_graph()

  op_input_list = tuple(op_input_list)  # Handle generators correctly
  if graph and not isinstance(graph, Graph):
    raise TypeError("Input graph needs to be a Graph: %s" % graph)

  # 1. We validate that all of the inputs are from the same graph. This is
  #    either the supplied graph parameter, or the first one selected from one
  #    the graph-element-valued inputs. In the latter case, we hold onto
  #    that input in original_graph_element so we can provide a more
  #    informative error if a mismatch is found.
  original_graph_element = None
  for op_input in op_input_list:
    # Determine if this is a valid graph_element.
    graph_element = None
    if isinstance(op_input, (Operation, _TensorLike)):
      graph_element = op_input
    else:
      graph_element = _as_graph_element(op_input)

    if graph_element is not None:
      if not graph:
        original_graph_element = graph_element
        graph = graph_element.graph
      elif original_graph_element is not None:
        _assert_same_graph(original_graph_element, graph_element)
      elif graph_element.graph is not graph:
        raise ValueError(
            "%s is not from the passed-in graph." % graph_element)

  # 2. If all else fails, we use the default graph, which is always there.
  return graph or get_default_graph()


class GraphKeys(object):
  """Standard names for graph collections."""

  # Key to collect Variable objects that are global (shared across machines).
  # Default collection for all variables, except local ones.
  GLOBAL_VARIABLES = "variables"
  # Key to collect local variables that are local to the machine and are not
  # saved/restored.
  LOCAL_VARIABLES = "local_variables"
  # Key to collect model variables defined by layers.
  MODEL_VARIABLES = "model_variables"
  # Key to collect Variable objects that will be trained by the
  # optimizers.
  TRAINABLE_VARIABLES = "trainable_variables"
  # Key to collect summaries.
  SUMMARIES = "summaries"
  # Key to collect QueueRunners.
  QUEUE_RUNNERS = "queue_runners"
  # Key to collect table initializers.
  TABLE_INITIALIZERS = "table_initializer"
  # Key to collect asset filepaths. An asset represents an external resource
  # like a vocabulary file.
  ASSET_FILEPATHS = "asset_filepaths"
  # Key to collect Variable objects that keep moving averages.
  MOVING_AVERAGE_VARIABLES = "moving_average_variables"
  # Key to collect regularization losses at graph construction.
  REGULARIZATION_LOSSES = "regularization_losses"
  # Key to collect concatenated sharded variables.
  CONCATENATED_VARIABLES = "concatenated_variables"
  # Key to collect savers.
  SAVERS = "savers"
  # Key to collect weights
  WEIGHTS = "weights"
  # Key to collect biases
  BIASES = "biases"
  # Key to collect activations
  ACTIVATIONS = "activations"
  # Key to collect update_ops
  UPDATE_OPS = "update_ops"
  # Key to collect losses
  LOSSES = "losses"
  # Key to collect BaseSaverBuilder.SaveableObject instances for checkpointing.
  SAVEABLE_OBJECTS = "saveable_objects"
  # Key to collect all shared resources used by the graph which need to be
  # initialized once per cluster.
  RESOURCES = "resources"
  # Key to collect all shared resources used in this graph which need to be
  # initialized once per session.
  LOCAL_RESOURCES = "local_resources"
  # Trainable resource-style variables.
  TRAINABLE_RESOURCE_VARIABLES = "trainable_resource_variables"

  # Key to indicate various ops.
  INIT_OP = "init_op"
  LOCAL_INIT_OP = "local_init_op"
  READY_OP = "ready_op"
  READY_FOR_LOCAL_INIT_OP = "ready_for_local_init_op"
  SUMMARY_OP = "summary_op"
  GLOBAL_STEP = "global_step"

  # Used to count the number of evaluations performed during a single
  # evaluation run.
  EVAL_STEP = "eval_step"
  TRAIN_OP = "train_op"

  # Key for control flow context.
  COND_CONTEXT = "cond_context"
  WHILE_CONTEXT = "while_context"

  @decorator_utils.classproperty
  def VARIABLES(cls):  # pylint: disable=no-self-argument
    # Deprecated alias for GLOBAL_VARIABLES; warns on every access.
    logging.warning("VARIABLES collection name is deprecated, "
                    "please use GLOBAL_VARIABLES instead; "
                    "VARIABLES will be removed after 2017-03-02.")
    return cls.GLOBAL_VARIABLES


def add_to_collection(name, value):
  """Wraps Graph.add_to_collection() on the default graph."""
  get_default_graph().add_to_collection(name, value)


def add_to_collections(names, value):
  """Wraps Graph.add_to_collections() on the default graph."""
  get_default_graph().add_to_collections(names, value)


def get_collection_ref(key):
  """Wraps Graph.get_collection_ref() on the default graph."""
  return get_default_graph().get_collection_ref(key)


def get_collection(key, scope=None):
  """Wraps Graph.get_collection() on the default graph."""
  return get_default_graph().get_collection(key, scope)


def get_all_collection_keys():
  """Wraps Graph.get_all_collection_keys() on the default graph."""
  return get_default_graph().get_all_collection_keys()


# pylint: disable=g-doc-return-or-yield
@contextlib.contextmanager
def name_scope(name, default_name=None, values=None):
  """Returns a context manager for a name scope on the implied graph.

  The graph is inferred from ``values`` (see _get_graph_from_inputs).

  Raises:
    ValueError: if neither ``name`` nor ``default_name`` is provided while
      ``values`` is.
  """
  n = default_name if name is None else name
  if n is None and values is not None:
    # We only raise an error if values is not None (provided) because
    # currently tf.name_scope(None) (values=None then) is sometimes used as
    # an idiom to reset to top scope.
    raise ValueError(
        "At least one of name (%s) and default_name (%s) must be provided." % (
            name, default_name))
  if values is None:
    values = []
  g = _get_graph_from_inputs(values)
  with g.as_default(), g.name_scope(n) as scope:
    yield scope
# pylint: enable=g-doc-return-or-yield


# NOTE(review): strip_name_scope is truncated at the end of this chunk; its
# body continues beyond the visible source and must not be modified here.
def strip_name_scope(name, export_scope):
  if export_scope:
    # Strips export_scope/, export_scope///,
    # ^export_scope/, loc:@export_scope/.
str_to_replace = r"([\^]|loc:@|^)" + export_scope + r"[\/]+(.*)" return re.sub(str_to_replace, r"\1\2", compat.as_str(name), count=1) else: return name def prepend_name_scope(name, import_scope): if import_scope: str_to_replace = r"([\^]|loc:@|^)(.*)" return re.sub(str_to_replace, r"\1" + import_scope + r"/\2", compat.as_str(name)) else: return name # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def op_scope(values, name, default_name=None): logging.warn("tf.op_scope(values, name, default_name) is deprecated," " use tf.name_scope(name, default_name, values)") with name_scope(name, default_name=default_name, values=values) as scope: yield scope _proto_function_registry = registry.Registry("proto functions") def register_proto_function(collection_name, proto_type=None, to_proto=None, from_proto=None): if to_proto and not callable(to_proto): raise TypeError("to_proto must be callable.") if from_proto and not callable(from_proto): raise TypeError("from_proto must be callable.") _proto_function_registry.register((proto_type, to_proto, from_proto), collection_name) def get_collection_proto_type(collection_name): try: return _proto_function_registry.lookup(collection_name)[0] except LookupError: return None def get_to_proto_function(collection_name): try: return _proto_function_registry.lookup(collection_name)[1] except LookupError: return None def get_from_proto_function(collection_name): try: return _proto_function_registry.lookup(collection_name)[2] except LookupError: return None def _operation_conversion_error(op, dtype=None, name=None, as_ref=False): raise TypeError( ("Can't convert Operation '%s' to Tensor " "(target dtype=%r, name=%r, as_ref=%r)") % (op.name, dtype, name, as_ref)) register_tensor_conversion_function(Operation, _operation_conversion_error)
true
true
f73be1b188ec3aa0232075720e09d33b10e6ae07
5,793
py
Python
src/qfit/vdw_radii.py
ExcitedStates/qfit-3.0
8ed8e8f44015e4eb30fed7a5da65819a586c2bbf
[ "Artistic-2.0", "MIT" ]
13
2019-05-22T00:07:39.000Z
2022-03-09T18:12:47.000Z
src/qfit/vdw_radii.py
ExcitedStates/qfit-3.0
8ed8e8f44015e4eb30fed7a5da65819a586c2bbf
[ "Artistic-2.0", "MIT" ]
84
2019-02-14T00:46:57.000Z
2022-03-03T21:53:15.000Z
src/qfit/vdw_radii.py
ExcitedStates/qfit-3.0
8ed8e8f44015e4eb30fed7a5da65819a586c2bbf
[ "Artistic-2.0", "MIT" ]
7
2019-03-08T01:08:05.000Z
2021-10-02T14:40:45.000Z
''' Excited States software: qFit 3.0 Contributors: Saulo H. P. de Oliveira, Gydo van Zundert, and Henry van den Bedem. Contact: vdbedem@stanford.edu Copyright (C) 2009-2019 Stanford University Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: This entire text, including the above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS, CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
''' # ElementList=[ # " H", "HE", # "LI", "BE", " B", " C", " N", " O", " F", "NE", # "NA", "MG", "AL", "SI", " P", " S", "CL", "AR", # " K", "CA", # "SC", "TI", " V", "CR", "MN", "FE", # "CO", "NI", "CU", "ZN", # "GA", "GE", "AS", "SE", "BR", "KR", # "RB", "SR", # " Y", "ZR", "NB", "MO", "TC", "RU", # "RH", "PD", "AG", "CD", # "IN", "SN", "SB", "TE", " I", "XE", # "CS", "BA", # "LA", "CE", "PR", "ND", "PM", "SM", "EU", # "GD", "TB", "DY", "HO", "ER", "TM", "YB", # "LU", "HF", "TA", " W", "RE", "OS", # "IR", "PT", "AU", "HG", # "TL", "PB", "BI", "PO", "AT", "RN", # "FR", "RA", # "AC", "TH", "PA", " U", "NP", "PU", "AM", # "CM", "BK", "CF", "ES", "FM", "MD", "NO", # "LR", "RF", "DB", "SG", "BH", "HS", # "MT", "UN", "UU", "UB", # "UQ", "UH", "UO", # " D", "AN" # ] # # # VanderWaalsRadiiList = [ # 1.20, 1.40, # 1.82, 1.78, 1.74, 1.70, 1.55, 1.52, 1.47, 1.54, # 2.27, 1.73, 1.80, 2.10, 1.80, 1.80, 1.75, 1.88, # 2.75, 2.65, # 2.55, 2.45, 2.35, 2.20, 1.73, 1.90, # 1.75, 1.63, 1.40, 1.39, # 1.87, 1.86, 1.85, 1.90, 1.85, 2.02, # 2.75, 2.65, # 2.55, 2.45, 2.35, 2.20, 2.05, 1.90, # 1.75, 1.63, 1.72, 1.58, # 1.93, 2.17, 2.10, 2.06, 1.98, 2.16, # 2.75, 2.75, # 2.75, 2.75, 2.75, 2.75, 2.75, 2.75, 2.75, # 2.75, 2.75, 2.75, 2.75, 2.75, 2.65, 2.55, # 2.45, 2.35, 2.25, 2.15, 2.05, 1.95, # 1.85, 1.75, 1.66, 1.55, # 1.96, 2.02, 2.00, 2.00, 2.00, 2.00, # 2.75, 2.75, # 2.50, 2.25, 1.95, 1.86, 1.80, 1.80, 1.80, # 1.80, 1.80, 1.80, 1.80, 1.80, 1.80, 1.80, # 1.80, 1.80, 1.80, 1.80, 1.80, 1.80, # 1.80, 1.80, 1.80, 1.80, # 1.80, 1.80, 1.80, # 1.30, 1.50 # ] # print("vdwRadiiTable={") # for ele,radius in zip(ElementList,VanderWaalsRadiiList): # print(f"\'{ele}\':{radius},") # print("}") vdwRadiiTable={ 'H':1.2, 'HE':1.4, 'LI':1.82, 'BE':1.78, 'B':1.74, 'C':1.7, 'N':1.55, 'O':1.52, 'F':1.47, 'NE':1.54, 'NA':2.27, 'MG':1.73, 'AL':1.8, 'SI':2.1, 'P':1.8, 'S':1.8, 'CL':1.75, 'AR':1.88, 'K':2.75, 'CA':2.65, 'SC':2.55, 'TI':2.45, 'V':2.35, 'CR':2.2, 'MN':1.73, 'FE':1.9, 'CO':1.75, 'NI':1.63, 
'CU':1.4, 'ZN':1.39, 'GA':1.87, 'GE':1.86, 'AS':1.85, 'SE':1.9, 'BR':1.85, 'KR':2.02, 'RB':2.75, 'SR':2.65, 'Y':2.55, 'ZR':2.45, 'NB':2.35, 'MO':2.2, 'TC':2.05, 'RU':1.9, 'RH':1.75, 'PD':1.63, 'AG':1.72, 'CD':1.58, 'IN':1.93, 'SN':2.17, 'SB':2.1, 'TE':2.06, 'I':1.98, 'XE':2.16, 'CS':2.75, 'BA':2.75, 'LA':2.75, 'CE':2.75, 'PR':2.75, 'ND':2.75, 'PM':2.75, 'SM':2.75, 'EU':2.75, 'GD':2.75, 'TB':2.75, 'DY':2.75, 'HO':2.75, 'ER':2.75, 'TM':2.65, 'YB':2.55, 'LU':2.45, 'HF':2.35, 'TA':2.25, 'W':2.15, 'RE':2.05, 'OS':1.95, 'IR':1.85, 'PT':1.75, 'AU':1.66, 'HG':1.55, 'TL':1.96, 'PB':2.02, 'BI':2.0, 'PO':2.0, 'AT':2.0, 'RN':2.0, 'FR':2.75, 'RA':2.75, 'AC':2.5, 'TH':2.25, 'PA':1.95, 'U':1.86, 'NP':1.8, 'PU':1.8, 'AM':1.8, 'CM':1.8, 'BK':1.8, 'CF':1.8, 'ES':1.8, 'FM':1.8, 'MD':1.8, 'NO':1.8, 'LR':1.8, 'RF':1.8, 'DB':1.8, 'SG':1.8, 'BH':1.8, 'HS':1.8, 'MT':1.8, 'UN':1.8, 'UU':1.8, 'UB':1.8, 'UQ':1.8, 'UH':1.8, 'UO':1.8, 'D':1.3, 'AN':1.5, } # from xml.dom import minidom # # # parse an xml file by name # mydoc = minidom.parse('/data/sauloho/qfit/qfit-3.0/qfit/epsilon.xml') # # firsts = mydoc.getElementsByTagName('first') # seconds = mydoc.getElementsByTagName('second') # epsilons = mydoc.getElementsByTagName('epsilon') # # print("EpsilonTable={") # for first,second,epsilon in zip(firsts,seconds,epsilons): # print(f"\'{first.firstChild.data}\':{{\'{second.firstChild.data}\':{epsilon.firstChild.data}}},") # print("}") EpsilonTable={ 'C':{'C':0.150, 'N':0.155, 'O':0.173, 'S':0.173, 'H':0.055}, 'N':{'C':0.155, 'N':0.160, 'O':0.179, 'S':0.179, 'H':0.057}, 'O':{'C':0.173, 'N':0.179, 'O':0.200, 'S':0.200, 'H':0.063}, 'S':{'C':0.173, 'N':0.179, 'O':0.200, 'S':0.200, 'H':0.063}, 'H':{'C':0.055, 'N':0.057, 'O':0.063, 'S':0.063, 'H':0.020}, } EpsilonIndex = ["H", "C", "N", "O", "S"] EpsilonArray = [[0.020, 0.055, 0.057, 0.063, 0.063], # H [0.055, 0.150, 0.155, 0.173, 0.173], # C [0.057, 0.155, 0.160, 0.179, 0.179], # N [0.063, 0.173, 0.179, 0.200, 0.200], # O [0.063, 0.173, 0.179, 0.200, 
0.200]] # S
24.969828
103
0.493181
vdwRadiiTable={ 'H':1.2, 'HE':1.4, 'LI':1.82, 'BE':1.78, 'B':1.74, 'C':1.7, 'N':1.55, 'O':1.52, 'F':1.47, 'NE':1.54, 'NA':2.27, 'MG':1.73, 'AL':1.8, 'SI':2.1, 'P':1.8, 'S':1.8, 'CL':1.75, 'AR':1.88, 'K':2.75, 'CA':2.65, 'SC':2.55, 'TI':2.45, 'V':2.35, 'CR':2.2, 'MN':1.73, 'FE':1.9, 'CO':1.75, 'NI':1.63, 'CU':1.4, 'ZN':1.39, 'GA':1.87, 'GE':1.86, 'AS':1.85, 'SE':1.9, 'BR':1.85, 'KR':2.02, 'RB':2.75, 'SR':2.65, 'Y':2.55, 'ZR':2.45, 'NB':2.35, 'MO':2.2, 'TC':2.05, 'RU':1.9, 'RH':1.75, 'PD':1.63, 'AG':1.72, 'CD':1.58, 'IN':1.93, 'SN':2.17, 'SB':2.1, 'TE':2.06, 'I':1.98, 'XE':2.16, 'CS':2.75, 'BA':2.75, 'LA':2.75, 'CE':2.75, 'PR':2.75, 'ND':2.75, 'PM':2.75, 'SM':2.75, 'EU':2.75, 'GD':2.75, 'TB':2.75, 'DY':2.75, 'HO':2.75, 'ER':2.75, 'TM':2.65, 'YB':2.55, 'LU':2.45, 'HF':2.35, 'TA':2.25, 'W':2.15, 'RE':2.05, 'OS':1.95, 'IR':1.85, 'PT':1.75, 'AU':1.66, 'HG':1.55, 'TL':1.96, 'PB':2.02, 'BI':2.0, 'PO':2.0, 'AT':2.0, 'RN':2.0, 'FR':2.75, 'RA':2.75, 'AC':2.5, 'TH':2.25, 'PA':1.95, 'U':1.86, 'NP':1.8, 'PU':1.8, 'AM':1.8, 'CM':1.8, 'BK':1.8, 'CF':1.8, 'ES':1.8, 'FM':1.8, 'MD':1.8, 'NO':1.8, 'LR':1.8, 'RF':1.8, 'DB':1.8, 'SG':1.8, 'BH':1.8, 'HS':1.8, 'MT':1.8, 'UN':1.8, 'UU':1.8, 'UB':1.8, 'UQ':1.8, 'UH':1.8, 'UO':1.8, 'D':1.3, 'AN':1.5, } 'C':{'C':0.150, 'N':0.155, 'O':0.173, 'S':0.173, 'H':0.055}, 'N':{'C':0.155, 'N':0.160, 'O':0.179, 'S':0.179, 'H':0.057}, 'O':{'C':0.173, 'N':0.179, 'O':0.200, 'S':0.200, 'H':0.063}, 'S':{'C':0.173, 'N':0.179, 'O':0.200, 'S':0.200, 'H':0.063}, 'H':{'C':0.055, 'N':0.057, 'O':0.063, 'S':0.063, 'H':0.020}, } EpsilonIndex = ["H", "C", "N", "O", "S"] EpsilonArray = [[0.020, 0.055, 0.057, 0.063, 0.063], [0.055, 0.150, 0.155, 0.173, 0.173], [0.057, 0.155, 0.160, 0.179, 0.179], [0.063, 0.173, 0.179, 0.200, 0.200], [0.063, 0.173, 0.179, 0.200, 0.200]]
true
true
f73be2df781224e713a842b5feab5f7542120fad
3,683
py
Python
selfdrive/loggerd/tests/test_uploader.py
919bot/Tessa
9b48ff9020e8fb6992fc78271f2720fd19e01093
[ "MIT" ]
null
null
null
selfdrive/loggerd/tests/test_uploader.py
919bot/Tessa
9b48ff9020e8fb6992fc78271f2720fd19e01093
[ "MIT" ]
null
null
null
selfdrive/loggerd/tests/test_uploader.py
919bot/Tessa
9b48ff9020e8fb6992fc78271f2720fd19e01093
[ "MIT" ]
null
null
null
import os import time import threading import logging import json from selfdrive.swaglog import cloudlog import selfdrive.loggerd.uploader as uploader from common.timeout import Timeout from selfdrive.loggerd.tests.loggerd_tests_common import UploaderTestCase class TestLogHandler(logging.Handler): def __init__(self): logging.Handler.__init__(self) self.reset() def reset(self): self.upload_order = list() def emit(self, record): try: j = json.loads(record.message) if j["event"] == "upload_success": self.upload_order.append(j["key"]) except BaseException: pass log_handler = TestLogHandler() cloudlog.addHandler(log_handler) class TestUploader(UploaderTestCase): def setUp(self): super(TestUploader, self).setUp() log_handler.reset() def tearDown(self): super(TestUploader, self).tearDown() def start_thread(self): self.end_event = threading.Event() self.up_thread = threading.Thread(target=uploader.uploader_fn, args=[self.end_event]) self.up_thread.daemon = True self.up_thread.start() def join_thread(self): self.end_event.set() self.up_thread.join() def gen_files(self, lock=False): f_paths = list() for t in ["bootlog.bz2", "qlog.bz2", "rlog.bz2", "dcamera.hevc", "fcamera.hevc"]: f_paths.append(self.make_file_with_data(self.seg_dir, t, 1, lock=lock)) return f_paths def gen_order(self, seg1, seg2): keys = [f"{self.seg_format.format(i)}/qlog.bz2" for i in seg1] keys += [f"{self.seg_format.format(i)}/rlog.bz2" for i in seg1] keys += [f"{self.seg_format2.format(i)}/qlog.bz2" for i in seg2] keys += [f"{self.seg_format2.format(i)}/rlog.bz2" for i in seg2] for i in seg1: keys += [f"{self.seg_format.format(i)}/{f}" for f in ['fcamera.hevc','dcamera.hevc']] for i in seg2: keys += [f"{self.seg_format2.format(i)}/{f}" for f in ['fcamera.hevc','dcamera.hevc']] keys += [f"{self.seg_format.format(i)}/bootlog.bz2" for i in seg1] keys += [f"{self.seg_format2.format(i)}/bootlog.bz2" for i in seg2] return keys def test_upload(self): f_paths = self.gen_files(lock=False) self.start_thread() 
with Timeout(5, "Timeout waiting for file to be uploaded"): while len(os.listdir(self.root)): time.sleep(0.01) self.join_thread() for f_path in f_paths: self.assertFalse(os.path.exists(f_path), "All files not uploaded") exp_order = self.gen_order([self.seg_num], []) self.assertTrue(log_handler.upload_order == exp_order, "Files uploaded in wrong order") def test_upload_files_in_create_order(self): f_paths = list() seg1_nums = [0,1,2,10,20] for i in seg1_nums: self.seg_dir = self.seg_format.format(i) f_paths += self.gen_files() seg2_nums = [5,50,51] for i in seg2_nums: self.seg_dir = self.seg_format2.format(i) f_paths += self.gen_files() self.start_thread() with Timeout(5, "Timeout waiting for file to be upload"): while len(os.listdir(self.root)): time.sleep(0.01) self.join_thread() for f_path in f_paths: self.assertFalse(os.path.exists(f_path), "All files not uploaded") #exp_order = self.gen_order(seg1_nums, seg2_nums) #self.assertTrue(log_handler.upload_order == exp_order, "Files uploaded in wrong order") def test_no_upload_with_lock_file(self): f_paths = self.gen_files(lock=True) self.start_thread() # allow enough time that files should have been uploaded if they would be uploaded time.sleep(5) self.join_thread() for f_path in f_paths: self.assertTrue(os.path.exists(f_path), "File upload when locked")
30.94958
92
0.685039
import os import time import threading import logging import json from selfdrive.swaglog import cloudlog import selfdrive.loggerd.uploader as uploader from common.timeout import Timeout from selfdrive.loggerd.tests.loggerd_tests_common import UploaderTestCase class TestLogHandler(logging.Handler): def __init__(self): logging.Handler.__init__(self) self.reset() def reset(self): self.upload_order = list() def emit(self, record): try: j = json.loads(record.message) if j["event"] == "upload_success": self.upload_order.append(j["key"]) except BaseException: pass log_handler = TestLogHandler() cloudlog.addHandler(log_handler) class TestUploader(UploaderTestCase): def setUp(self): super(TestUploader, self).setUp() log_handler.reset() def tearDown(self): super(TestUploader, self).tearDown() def start_thread(self): self.end_event = threading.Event() self.up_thread = threading.Thread(target=uploader.uploader_fn, args=[self.end_event]) self.up_thread.daemon = True self.up_thread.start() def join_thread(self): self.end_event.set() self.up_thread.join() def gen_files(self, lock=False): f_paths = list() for t in ["bootlog.bz2", "qlog.bz2", "rlog.bz2", "dcamera.hevc", "fcamera.hevc"]: f_paths.append(self.make_file_with_data(self.seg_dir, t, 1, lock=lock)) return f_paths def gen_order(self, seg1, seg2): keys = [f"{self.seg_format.format(i)}/qlog.bz2" for i in seg1] keys += [f"{self.seg_format.format(i)}/rlog.bz2" for i in seg1] keys += [f"{self.seg_format2.format(i)}/qlog.bz2" for i in seg2] keys += [f"{self.seg_format2.format(i)}/rlog.bz2" for i in seg2] for i in seg1: keys += [f"{self.seg_format.format(i)}/{f}" for f in ['fcamera.hevc','dcamera.hevc']] for i in seg2: keys += [f"{self.seg_format2.format(i)}/{f}" for f in ['fcamera.hevc','dcamera.hevc']] keys += [f"{self.seg_format.format(i)}/bootlog.bz2" for i in seg1] keys += [f"{self.seg_format2.format(i)}/bootlog.bz2" for i in seg2] return keys def test_upload(self): f_paths = self.gen_files(lock=False) self.start_thread() 
with Timeout(5, "Timeout waiting for file to be uploaded"): while len(os.listdir(self.root)): time.sleep(0.01) self.join_thread() for f_path in f_paths: self.assertFalse(os.path.exists(f_path), "All files not uploaded") exp_order = self.gen_order([self.seg_num], []) self.assertTrue(log_handler.upload_order == exp_order, "Files uploaded in wrong order") def test_upload_files_in_create_order(self): f_paths = list() seg1_nums = [0,1,2,10,20] for i in seg1_nums: self.seg_dir = self.seg_format.format(i) f_paths += self.gen_files() seg2_nums = [5,50,51] for i in seg2_nums: self.seg_dir = self.seg_format2.format(i) f_paths += self.gen_files() self.start_thread() with Timeout(5, "Timeout waiting for file to be upload"): while len(os.listdir(self.root)): time.sleep(0.01) self.join_thread() for f_path in f_paths: self.assertFalse(os.path.exists(f_path), "All files not uploaded") def test_no_upload_with_lock_file(self): f_paths = self.gen_files(lock=True) self.start_thread() time.sleep(5) self.join_thread() for f_path in f_paths: self.assertTrue(os.path.exists(f_path), "File upload when locked")
true
true
f73be3a7b3130605aab47153b280e3d8ce8398c6
111
py
Python
back/db/gustafdb.py
rkohser/gustaf2
b9f4dc0a9b5adca94161f9c59fa9907e1842b091
[ "MIT" ]
null
null
null
back/db/gustafdb.py
rkohser/gustaf2
b9f4dc0a9b5adca94161f9c59fa9907e1842b091
[ "MIT" ]
null
null
null
back/db/gustafdb.py
rkohser/gustaf2
b9f4dc0a9b5adca94161f9c59fa9907e1842b091
[ "MIT" ]
null
null
null
from eve import Eve app = Eve() if __name__ == '__main__': app.run(host='0.0.0.0', port=5001, debug=True)
18.5
50
0.63964
from eve import Eve app = Eve() if __name__ == '__main__': app.run(host='0.0.0.0', port=5001, debug=True)
true
true
f73be3ae374fd3d931826c70906f0dcb8897a992
3,374
py
Python
main.py
charlespcamargo/pycococreator
764ea9055dca173012c36569369315e57fee4682
[ "Apache-2.0" ]
null
null
null
main.py
charlespcamargo/pycococreator
764ea9055dca173012c36569369315e57fee4682
[ "Apache-2.0" ]
1
2020-11-21T00:12:49.000Z
2020-11-21T00:12:49.000Z
main.py
charlespcamargo/pycococreator
764ea9055dca173012c36569369315e57fee4682
[ "Apache-2.0" ]
null
null
null
from pycococreator import PyCocoCreator from pycococreatortools import PyCocoCreatorTools from coco_dataset import CocoDataset from coco_json_utils import CocoJsonCreator from args import Args import os if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description="Generate") parser.add_argument("-dn", "--database_name", dest="database_name", default="hedychium_coronarium", help="path to root of datasets") parser.add_argument("-b", "--base_path", dest="base_path", default="../images/train/", help="base path to images") parser.add_argument("-i", "--images_path", dest="images_path", default="images/", help="path to images") parser.add_argument("-m", "--masks_path", dest="masks_path", default="annotations/", help="path to masks") parser.add_argument("-j", "--instances_json", dest="instances_json", default="coco_instances.json", help="path to JSON path of coco instances") parser.add_argument("-mw", "--max_width", dest="max_width", default=920, type=int, help="max width to show images") parser.add_argument("-id", "--image_id", dest="image_id", default=10, type=int, help="image to open/generate HTML") # Generate COCO JSON - coco_json_utils parser.add_argument("-md", "--mask_definition", dest="mask_definition", default="mask_definition.json", help="path to a mask definition JSON file, generated by MaskJsonUtils module") parser.add_argument("-di", "--dataset_info", dest="dataset_info", help="path to a dataset info JSON file") parser.add_argument("-at", "--generate_automatic_info", dest="generate_automatic_info", default=1, type=int, help="to generate automatic info: 0 or 1") parser.add_argument("-rw", "--width", dest="width", default=4000, type=int, help="width to resize images") parser.add_argument("-rh", "--height", dest="height", default=3000, type=int, help="height to resize images") #args = parser.parse_args() args = Args() args.show() coco_dataset = CocoDataset() pycococreator = PyCocoCreator() pycococreatortools = PyCocoCreatorTools() 
cocojsoncreator = CocoJsonCreator() pycococreator.main(args, pycococreatortools) cocojsoncreator.main(args) try: # just to show and check coco_dataset.main(args) except: print("In File('coco_instances.json') not found Error, please generate before run") # In error # File not found "coco_instances.json", please generate before run. #if(args.generate_automatic_info == 1): #cocojsoncreator.main(args) if(not os.path.exists(args.annotation_path)): raise Exception(f'File not found {args.annotation_path}') # all loaded images images_ids = coco_dataset.images # take just some of all # if(True): # n = 10 # images_ids = list(images_ids)[0:n] # #images_ids = list(images_ids) #filteredList = filter('DJI_0594', images_ids) coco_dataset.display_categories() coco_dataset.save_images_to_html(images_ids, max_width=900)
43.25641
112
0.635448
from pycococreator import PyCocoCreator from pycococreatortools import PyCocoCreatorTools from coco_dataset import CocoDataset from coco_json_utils import CocoJsonCreator from args import Args import os if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description="Generate") parser.add_argument("-dn", "--database_name", dest="database_name", default="hedychium_coronarium", help="path to root of datasets") parser.add_argument("-b", "--base_path", dest="base_path", default="../images/train/", help="base path to images") parser.add_argument("-i", "--images_path", dest="images_path", default="images/", help="path to images") parser.add_argument("-m", "--masks_path", dest="masks_path", default="annotations/", help="path to masks") parser.add_argument("-j", "--instances_json", dest="instances_json", default="coco_instances.json", help="path to JSON path of coco instances") parser.add_argument("-mw", "--max_width", dest="max_width", default=920, type=int, help="max width to show images") parser.add_argument("-id", "--image_id", dest="image_id", default=10, type=int, help="image to open/generate HTML") parser.add_argument("-md", "--mask_definition", dest="mask_definition", default="mask_definition.json", help="path to a mask definition JSON file, generated by MaskJsonUtils module") parser.add_argument("-di", "--dataset_info", dest="dataset_info", help="path to a dataset info JSON file") parser.add_argument("-at", "--generate_automatic_info", dest="generate_automatic_info", default=1, type=int, help="to generate automatic info: 0 or 1") parser.add_argument("-rw", "--width", dest="width", default=4000, type=int, help="width to resize images") parser.add_argument("-rh", "--height", dest="height", default=3000, type=int, help="height to resize images") args = Args() args.show() coco_dataset = CocoDataset() pycococreator = PyCocoCreator() pycococreatortools = PyCocoCreatorTools() cocojsoncreator = CocoJsonCreator() pycococreator.main(args, 
pycococreatortools) cocojsoncreator.main(args) try: coco_dataset.main(args) except: print("In File('coco_instances.json') not found Error, please generate before run") if(not os.path.exists(args.annotation_path)): raise Exception(f'File not found {args.annotation_path}') images_ids = coco_dataset.images coco_dataset.display_categories() coco_dataset.save_images_to_html(images_ids, max_width=900)
true
true
f73be43cfc5eaa1bca66f8efa2cb926b32ef81d2
11,979
py
Python
django_cyverse_auth/protocol/cas.py
simpsonw/django-cyverse-auth
969ff75f0068a116dce2aaf92c78e1f1675eccbe
[ "BSD-3-Clause" ]
1
2017-04-27T20:06:39.000Z
2017-04-27T20:06:39.000Z
django_cyverse_auth/protocol/cas.py
simpsonw/django-cyverse-auth
969ff75f0068a116dce2aaf92c78e1f1675eccbe
[ "BSD-3-Clause" ]
14
2021-04-09T01:09:39.000Z
2021-10-22T01:14:28.000Z
django_cyverse_auth/protocol/cas.py
benlazarine/django-cyverse-auth
1664a11c5d9ab223d4e476d78e0513adf8eb27e0
[ "BSD-3-Clause" ]
10
2017-01-05T16:18:33.000Z
2020-04-20T20:47:53.000Z
""" CAS authentication protocol Contact: Steven Gregory <sgregory@iplantcollaborative.org> """ from datetime import timedelta import time from django.conf import settings from django.contrib.auth import get_user_model from django.core.urlresolvers import reverse from django.http import HttpResponse, HttpResponseRedirect from django.utils import timezone from caslib import CASClient, SAMLClient, OAuthClient import logging logger = logging.getLogger(__name__) from django_cyverse_auth.session import create_session_token from django_cyverse_auth.settings import auth_settings from django_cyverse_auth.models import UserProxy ########################### # CAS-SPECIFIC SSO METHODS ########################### PROXY_TICKET_EXPIRY = timedelta(days=1) User = get_user_model() def get_cas_client(): """ This is how you initialize a CAS Client """ return CASClient(auth_settings.CAS_SERVER, settings.SERVICE_URL, proxy_url=settings.PROXY_URL, proxy_callback=settings.PROXY_CALLBACK_URL, auth_prefix=auth_settings.CAS_AUTH_PREFIX, self_signed_cert=auth_settings.SELF_SIGNED_CERT) def cas_logoutRedirect(): return HttpResponseRedirect(auth_settings.CAS_SERVER + "/cas/logout?service=" + settings.SERVER_URL) def cas_loginRedirect(request, redirect=None, gateway=False): if not redirect: redirect = request.get_full_path() redirect_to = "%s/CAS_serviceValidater?sendback=%s" \ % (settings.SERVER_URL, redirect) login_url = "%s%s/login?service=%s" \ % (auth_settings.CAS_SERVER, auth_settings.CAS_AUTH_PREFIX, redirect_to) if gateway: login_url += '&gateway=true' return HttpResponseRedirect(login_url) def cas_validateUser(username): """ Because this is a programmatic request and CAS requires user input when expired, We MUST use CAS Proxy Service, and see if we can reauthenticate the user. 
""" try: userProxy = UserProxy.objects.filter(username=username).order_by('pk').first() logger.debug("[CAS] Validation Test - %s" % username) if userProxy is None: logger.debug("User %s does not have a proxy" % username) return (False, None) proxyTicket = userProxy.proxyTicket caslib = get_cas_client() (validUser, cas_response) =\ caslib.reauthenticate(proxyTicket, username=username) logger.debug("Valid User: %s Proxy response: %s" % (validUser, cas_response)) return (validUser, cas_response) except Exception: logger.exception('Error validating user %s' % username) return (False, None) def cas_updateUserProxy(user, pgtIou, max_try=3): attempts = 0 while attempts < max_try: try: # If PGTIOU exists, a UserProxy object was created # match the user to this ticket. userProxy = UserProxy.objects.get(proxyIOU=pgtIou) userProxy.username = user userProxy.expiresOn = timezone.now() + PROXY_TICKET_EXPIRY logger.debug("Found a matching proxy IOU for %s" % userProxy.username) userProxy.save() return True except UserProxy.DoesNotExist: logger.error("Could not find UserProxy object!" "ProxyIOU & ID was not saved " "at proxy url endpoint.") time.sleep(min(2**attempts, 8)) attempts += 1 return False def cas_set_redirect_url(sendback, request): absolute_url = request.build_absolute_uri( reverse('django_cyverse_auth:cas-service-validate-link')) return "%s?sendback=%s" % (absolute_url, sendback) def cas_validateTicket(request): """ Method expects 2 GET parameters: 'ticket' & 'sendback' After a CAS Login: Redirects the request based on the GET param 'ticket' Unauthorized Users are redirected to '/' In the event of failure. 
Authorized Users are redirected to the GET param 'sendback' """ redirect_logout_url = settings.REDIRECT_URL + "/login/" no_user_url = settings.REDIRECT_URL + "/no_user/" logger.debug('GET Variables:%s' % request.GET) ticket = request.GET.get('ticket', None) sendback = request.GET.get('sendback', None) if not ticket: logger.warn("No Ticket received in GET string " "-- Logout user: %s" % redirect_logout_url) return HttpResponseRedirect(redirect_logout_url) logger.debug("ServiceValidate endpoint includes a ticket." " Ticket must now be validated with CAS") # ReturnLocation set, apply on successful authentication caslib = get_cas_client() caslib.service_url = cas_set_redirect_url(sendback, request) cas_response = caslib.cas_serviceValidate(ticket) if not cas_response.success: logger.debug("CAS Server did NOT validate ticket:%s" " and included this response:%s (Err:%s)" % (ticket, cas_response.object, cas_response.error_str)) return HttpResponseRedirect(redirect_logout_url) if not cas_response.user: logger.debug("User attribute missing from cas response!" "This may require a fix to caslib.py") return HttpResponseRedirect(redirect_logout_url) if not cas_response.proxy_granting_ticket: logger.error("""Proxy Granting Ticket missing! Atmosphere requires CAS proxy as a service to authenticate users. Possible Causes: * ServerName variable is wrong in /etc/apache2/apache2.conf * Proxy URL does not exist * Proxy URL is not a valid RSA-2/VeriSigned SSL certificate * /etc/host and hostname do not match machine.""") return HttpResponseRedirect(redirect_logout_url) updated = cas_updateUserProxy( cas_response.user, cas_response.proxy_granting_ticket) if not updated: return HttpResponseRedirect(redirect_logout_url) logger.info("Updated proxy for <%s> -- Auth success!" 
% cas_response.user) try: user = User.objects.get(username=cas_response.user) except User.DoesNotExist: return HttpResponseRedirect(no_user_url) auth_token = create_session_token(None, user, request, issuer="CAS") if auth_token is None: logger.warn("Failed to create AuthToken") HttpResponseRedirect(redirect_logout_url) return_to = request.GET['sendback'] logger.info("Session token created, User logged in, return to: %s" % return_to) return HttpResponseRedirect(return_to) """ CAS as a proxy service is a useful feature to renew a users token/authentication without having to explicitly redirect the browser. These two functions will be called if caslib has been configured for proxy usage. (See #settings.py) """ def cas_storeProxyIOU_ID(request): """ Any request to the proxy url will contain the PROXY-TICKET IOU and ID IOU and ID are mapped to a DB so they can be used later """ if "pgtIou" in request.GET and "pgtId" in request.GET: iou_token = request.GET["pgtIou"] proxy_ticket = request.GET["pgtId"] logger.debug("PROXY HIT 2 - CAS server sends two IDs: " "1.ProxyIOU (%s) 2. ProxyGrantingTicket (%s)" % (iou_token, proxy_ticket)) proxy = UserProxy( proxyIOU=iou_token, proxyTicket=proxy_ticket ) proxy.save() logger.debug("Proxy ID has been saved, match ProxyIOU(%s) " "from the proxyIOU returned in service validate." % (proxy.proxyIOU,)) else: logger.debug("Proxy HIT 1 - CAS server tests that this link is HTTPS") return HttpResponse("Received proxy request. Thank you.") def cas_proxyCallback(request): """ This is a placeholder for a proxyCallback service needed for CAS authentication """ logger.debug("Incoming request to CASPROXY (Proxy Callback):") return HttpResponse("I am at a RSA-2 or VeriSigned SSL Cert. 
website.") ########################### # CAS-SPECIFIC SAML METHODS ########################### def get_saml_client(): s_client = SAMLClient(auth_settings.CAS_SERVER, settings.SERVER_URL, auth_prefix=auth_settings.CAS_AUTH_PREFIX) return s_client def saml_loginRedirect(request, redirect=None, gateway=False): login_url = "%s%s/login?service=%s/s_serviceValidater%s" %\ (auth_settings.CAS_SERVER, auth_settings.CAS_AUTH_PREFIX, settings.SERVER_URL, "?sendback=%s" % redirect if redirect else "") if gateway: login_url += '&gateway=true' return HttpResponseRedirect(login_url) def saml_validateTicket(request): """ Method expects 2 GET parameters: 'ticket' & 'sendback' After a CAS Login: Redirects the request based on the GET param 'ticket' Unauthorized Users are redirected to '/' In the event of failure. Authorized Users are redirected to the GET param 'sendback' """ redirect_logout_url = settings.REDIRECT_URL + "/login/" no_user_url = settings.REDIRECT_URL + "/no_user/" logger.debug('GET Variables:%s' % request.GET) ticket = request.GET.get('ticket', None) if not ticket: logger.warn("No Ticket received in GET string " "-- Logout user: %s" % redirect_logout_url) return HttpResponseRedirect(redirect_logout_url) logger.debug("ServiceValidate endpoint includes a ticket." 
" Ticket must now be validated with SAML") # ReturnLocation set, apply on successful authentication saml_client = get_saml_client() saml_response = saml_client.saml_serviceValidate(ticket) if not saml_response.success: logger.debug("CAS Server did NOT validate ticket:%s" " and included this response:%s" % (ticket, saml_response.xml)) return HttpResponseRedirect(redirect_logout_url) try: user = User.objects.get(username=saml_response.user) except User.DoesNotExist: return HttpResponseRedirect(no_user_url) auth_token = create_session_token(None, user, request, issuer="CAS+SAML") if auth_token is None: logger.warn("Failed to create AuthToken") HttpResponseRedirect(redirect_logout_url) return_to = request.GET.get('sendback') if not return_to: return HttpResponse(saml_response.response, content_type="text/xml; charset=utf-8") return_to += "?token=%s" % auth_token logger.info("Session token created, return to: %s" % return_to) return HttpResponseRedirect(return_to) ########################### # CAS-SPECIFIC OAUTH METHODS ########################### def get_cas_oauth_client(): o_client = OAuthClient(auth_settings.CAS_SERVER, auth_settings.OAUTH_CLIENT_CALLBACK, auth_settings.OAUTH_CLIENT_KEY, auth_settings.OAUTH_CLIENT_SECRET, auth_prefix=auth_settings.CAS_AUTH_PREFIX) return o_client def cas_profile_contains(attrs, test_value): # Two basic types of 'values' # Lists: e.g. attrs['entitlement'] = ['group1','group2','group3'] # Objects: e.g. attrs['email'] = 'test@email.com' for attr in attrs: for (key, value) in attr.items(): if isinstance(value, list) and test_value in value: return True elif value == test_value: return True return False def cas_profile_for_token(access_token): oauth_client = get_cas_oauth_client() profile = oauth_client.get_profile(access_token) return profile
36.745399
86
0.652642
from datetime import timedelta import time from django.conf import settings from django.contrib.auth import get_user_model from django.core.urlresolvers import reverse from django.http import HttpResponse, HttpResponseRedirect from django.utils import timezone from caslib import CASClient, SAMLClient, OAuthClient import logging logger = logging.getLogger(__name__) from django_cyverse_auth.session import create_session_token from django_cyverse_auth.settings import auth_settings from django_cyverse_auth.models import UserProxy request.get_full_path() redirect_to = "%s/CAS_serviceValidater?sendback=%s" \ % (settings.SERVER_URL, redirect) login_url = "%s%s/login?service=%s" \ % (auth_settings.CAS_SERVER, auth_settings.CAS_AUTH_PREFIX, redirect_to) if gateway: login_url += '&gateway=true' return HttpResponseRedirect(login_url) def cas_validateUser(username): try: userProxy = UserProxy.objects.filter(username=username).order_by('pk').first() logger.debug("[CAS] Validation Test - %s" % username) if userProxy is None: logger.debug("User %s does not have a proxy" % username) return (False, None) proxyTicket = userProxy.proxyTicket caslib = get_cas_client() (validUser, cas_response) =\ caslib.reauthenticate(proxyTicket, username=username) logger.debug("Valid User: %s Proxy response: %s" % (validUser, cas_response)) return (validUser, cas_response) except Exception: logger.exception('Error validating user %s' % username) return (False, None) def cas_updateUserProxy(user, pgtIou, max_try=3): attempts = 0 while attempts < max_try: try: userProxy = UserProxy.objects.get(proxyIOU=pgtIou) userProxy.username = user userProxy.expiresOn = timezone.now() + PROXY_TICKET_EXPIRY logger.debug("Found a matching proxy IOU for %s" % userProxy.username) userProxy.save() return True except UserProxy.DoesNotExist: logger.error("Could not find UserProxy object!" 
"ProxyIOU & ID was not saved " "at proxy url endpoint.") time.sleep(min(2**attempts, 8)) attempts += 1 return False def cas_set_redirect_url(sendback, request): absolute_url = request.build_absolute_uri( reverse('django_cyverse_auth:cas-service-validate-link')) return "%s?sendback=%s" % (absolute_url, sendback) def cas_validateTicket(request): redirect_logout_url = settings.REDIRECT_URL + "/login/" no_user_url = settings.REDIRECT_URL + "/no_user/" logger.debug('GET Variables:%s' % request.GET) ticket = request.GET.get('ticket', None) sendback = request.GET.get('sendback', None) if not ticket: logger.warn("No Ticket received in GET string " "-- Logout user: %s" % redirect_logout_url) return HttpResponseRedirect(redirect_logout_url) logger.debug("ServiceValidate endpoint includes a ticket." " Ticket must now be validated with CAS") caslib = get_cas_client() caslib.service_url = cas_set_redirect_url(sendback, request) cas_response = caslib.cas_serviceValidate(ticket) if not cas_response.success: logger.debug("CAS Server did NOT validate ticket:%s" " and included this response:%s (Err:%s)" % (ticket, cas_response.object, cas_response.error_str)) return HttpResponseRedirect(redirect_logout_url) if not cas_response.user: logger.debug("User attribute missing from cas response!" "This may require a fix to caslib.py") return HttpResponseRedirect(redirect_logout_url) if not cas_response.proxy_granting_ticket: logger.error("""Proxy Granting Ticket missing! Atmosphere requires CAS proxy as a service to authenticate users. 
Possible Causes: * ServerName variable is wrong in /etc/apache2/apache2.conf * Proxy URL does not exist * Proxy URL is not a valid RSA-2/VeriSigned SSL certificate * /etc/host and hostname do not match machine.""") return HttpResponseRedirect(redirect_logout_url) updated = cas_updateUserProxy( cas_response.user, cas_response.proxy_granting_ticket) if not updated: return HttpResponseRedirect(redirect_logout_url) logger.info("Updated proxy for <%s> -- Auth success!" % cas_response.user) try: user = User.objects.get(username=cas_response.user) except User.DoesNotExist: return HttpResponseRedirect(no_user_url) auth_token = create_session_token(None, user, request, issuer="CAS") if auth_token is None: logger.warn("Failed to create AuthToken") HttpResponseRedirect(redirect_logout_url) return_to = request.GET['sendback'] logger.info("Session token created, User logged in, return to: %s" % return_to) return HttpResponseRedirect(return_to) def cas_storeProxyIOU_ID(request): if "pgtIou" in request.GET and "pgtId" in request.GET: iou_token = request.GET["pgtIou"] proxy_ticket = request.GET["pgtId"] logger.debug("PROXY HIT 2 - CAS server sends two IDs: " "1.ProxyIOU (%s) 2. ProxyGrantingTicket (%s)" % (iou_token, proxy_ticket)) proxy = UserProxy( proxyIOU=iou_token, proxyTicket=proxy_ticket ) proxy.save() logger.debug("Proxy ID has been saved, match ProxyIOU(%s) " "from the proxyIOU returned in service validate." % (proxy.proxyIOU,)) else: logger.debug("Proxy HIT 1 - CAS server tests that this link is HTTPS") return HttpResponse("Received proxy request. Thank you.") def cas_proxyCallback(request): logger.debug("Incoming request to CASPROXY (Proxy Callback):") return HttpResponse("I am at a RSA-2 or VeriSigned SSL Cert. 
website.") + "/login/" no_user_url = settings.REDIRECT_URL + "/no_user/" logger.debug('GET Variables:%s' % request.GET) ticket = request.GET.get('ticket', None) if not ticket: logger.warn("No Ticket received in GET string " "-- Logout user: %s" % redirect_logout_url) return HttpResponseRedirect(redirect_logout_url) logger.debug("ServiceValidate endpoint includes a ticket." " Ticket must now be validated with SAML") saml_client = get_saml_client() saml_response = saml_client.saml_serviceValidate(ticket) if not saml_response.success: logger.debug("CAS Server did NOT validate ticket:%s" " and included this response:%s" % (ticket, saml_response.xml)) return HttpResponseRedirect(redirect_logout_url) try: user = User.objects.get(username=saml_response.user) except User.DoesNotExist: return HttpResponseRedirect(no_user_url) auth_token = create_session_token(None, user, request, issuer="CAS+SAML") if auth_token is None: logger.warn("Failed to create AuthToken") HttpResponseRedirect(redirect_logout_url) return_to = request.GET.get('sendback') if not return_to: return HttpResponse(saml_response.response, content_type="text/xml; charset=utf-8") return_to += "?token=%s" % auth_token logger.info("Session token created, return to: %s" % return_to) return HttpResponseRedirect(return_to) : oauth_client = get_cas_oauth_client() profile = oauth_client.get_profile(access_token) return profile
true
true
f73be4a4ef5b959c7ff8ded4c4dafe4cdd762858
7,640
py
Python
conans/tools.py
johnmcfarlane/conan
725bd0cee4e53f35521aef7eeb61d4772c460d5e
[ "MIT" ]
null
null
null
conans/tools.py
johnmcfarlane/conan
725bd0cee4e53f35521aef7eeb61d4772c460d5e
[ "MIT" ]
null
null
null
conans/tools.py
johnmcfarlane/conan
725bd0cee4e53f35521aef7eeb61d4772c460d5e
[ "MIT" ]
null
null
null
""" Conan tools: classes and function in this module are intended to be used out of the box with the Conan configuration already currified into them. This configuration refers mainly to two items: - requester: on network calls, this will include proxy definition. - output: the output configuration Here in this module there should be no logic, all functions and classes must be implemented elsewhere (mainly in conans.util or conans.client.tools) and ready to be used without the currification. """ import requests from conans.client.output import ConanOutput # Tools from conans.client.tools from conans.client.tools import files as tools_files, net as tools_net, oss as tools_oss, \ system_pm as tools_system_pm, win as tools_win from conans.client.tools.env import * # pylint: disable=unused-import from conans.client.tools.pkg_config import * # pylint: disable=unused-import from conans.client.tools.scm import * # pylint: disable=unused-import from conans.client.tools.settings import * # pylint: disable=unused-import from conans.client.tools.apple import * from conans.client.tools.android import * # Tools form conans.util from conans.util.env_reader import get_env from conans.util.files import _generic_algorithm_sum, load, md5, md5sum, mkdir, relative_dirs, \ rmdir, save as files_save, save_append, sha1sum, sha256sum, to_file_bytes, touch from conans.util.log import logger from conans.client.tools.version import Version from conans.client.build.cppstd_flags import cppstd_flag_new as cppstd_flag # pylint: disable=unused-import # This global variables are intended to store the configuration of the running Conan application _global_output = None _global_requester = None _global_config = None def set_global_instances(the_output, the_requester, config): global _global_output global _global_requester global _global_config # TODO: pass here the configuration file, and make the work here (explicit!) 
_global_output = the_output _global_requester = the_requester _global_config = config def get_global_instances(): return _global_output, _global_requester # Assign a default, will be overwritten in the factory of the ConanAPI set_global_instances(the_output=ConanOutput(sys.stdout, sys.stderr, True), the_requester=requests, config=None) """ From here onwards only currification is expected, no logic """ def save(path, content, append=False): # TODO: All this three functions: save, save_append and this one should be merged into one. if append: save_append(path=path, content=content) else: files_save(path=path, content=content, only_if_modified=False) # From conans.client.tools.net ftp_download = tools_net.ftp_download def download(*args, **kwargs): return tools_net.download(out=_global_output, requester=_global_requester, *args, **kwargs) def get(*args, **kwargs): return tools_net.get(output=_global_output, requester=_global_requester, *args, **kwargs) # from conans.client.tools.files chdir = tools_files.chdir human_size = tools_files.human_size untargz = tools_files.untargz check_with_algorithm_sum = tools_files.check_with_algorithm_sum check_sha1 = tools_files.check_sha1 check_md5 = tools_files.check_md5 check_sha256 = tools_files.check_sha256 patch = tools_files.patch replace_prefix_in_pc_file = tools_files.replace_prefix_in_pc_file collect_libs = tools_files.collect_libs which = tools_files.which unix2dos = tools_files.unix2dos dos2unix = tools_files.dos2unix def unzip(*args, **kwargs): return tools_files.unzip(output=_global_output, *args, **kwargs) def replace_in_file(*args, **kwargs): return tools_files.replace_in_file(output=_global_output, *args, **kwargs) def replace_path_in_file(*args, **kwargs): return tools_files.replace_path_in_file(output=_global_output, *args, **kwargs) # from conans.client.tools.oss args_to_string = tools_oss.args_to_string detected_architecture = tools_oss.detected_architecture detected_os = tools_oss.detected_os OSInfo = 
tools_oss.OSInfo cross_building = tools_oss.cross_building get_cross_building_settings = tools_oss.get_cross_building_settings get_gnu_triplet = tools_oss.get_gnu_triplet def cpu_count(*args, **kwargs): return tools_oss.cpu_count(output=_global_output, *args, **kwargs) # from conans.client.tools.system_pm class SystemPackageTool(tools_system_pm.SystemPackageTool): def __init__(self, *args, **kwargs): super(SystemPackageTool, self).__init__(output=_global_output, *args, **kwargs) class NullTool(tools_system_pm.NullTool): def __init__(self, *args, **kwargs): super(NullTool, self).__init__(output=_global_output, *args, **kwargs) class AptTool(tools_system_pm.AptTool): def __init__(self, *args, **kwargs): super(AptTool, self).__init__(output=_global_output, *args, **kwargs) class DnfTool(tools_system_pm.DnfTool): def __init__(self, *args, **kwargs): super(DnfTool, self).__init__(output=_global_output, *args, **kwargs) class YumTool(tools_system_pm.YumTool): def __init__(self, *args, **kwargs): super(YumTool, self).__init__(output=_global_output, *args, **kwargs) class BrewTool(tools_system_pm.BrewTool): def __init__(self, *args, **kwargs): super(BrewTool, self).__init__(output=_global_output, *args, **kwargs) class PkgTool(tools_system_pm.PkgTool): def __init__(self, *args, **kwargs): super(PkgTool, self).__init__(output=_global_output, *args, **kwargs) class ChocolateyTool(tools_system_pm.ChocolateyTool): def __init__(self, *args, **kwargs): super(ChocolateyTool, self).__init__(output=_global_output, *args, **kwargs) class PkgUtilTool(tools_system_pm.PkgUtilTool): def __init__(self, *args, **kwargs): super(PkgUtilTool, self).__init__(output=_global_output, *args, **kwargs) class PacManTool(tools_system_pm.PacManTool): def __init__(self, *args, **kwargs): super(PacManTool, self).__init__(output=_global_output, *args, **kwargs) class ZypperTool(tools_system_pm.ZypperTool): def __init__(self, *args, **kwargs): super(ZypperTool, self).__init__(output=_global_output, 
*args, **kwargs) # from conans.client.tools.win vs_installation_path = tools_win.vs_installation_path vswhere = tools_win.vswhere vs_comntools = tools_win.vs_comntools find_windows_10_sdk = tools_win.find_windows_10_sdk escape_windows_cmd = tools_win.escape_windows_cmd get_cased_path = tools_win.get_cased_path MSYS2 = tools_win.MSYS2 MSYS = tools_win.MSYS CYGWIN = tools_win.CYGWIN WSL = tools_win.WSL SFU = tools_win.SFU unix_path = tools_win.unix_path run_in_windows_bash = tools_win.run_in_windows_bash @contextmanager def vcvars(*args, **kwargs): with tools_win.vcvars(output=_global_output, *args, **kwargs): yield def msvc_build_command(*args, **kwargs): return tools_win.msvc_build_command(output=_global_output, *args, **kwargs) def build_sln_command(*args, **kwargs): return tools_win.build_sln_command(output=_global_output, *args, **kwargs) def vcvars_command(*args, **kwargs): return tools_win.vcvars_command(output=_global_output, *args, **kwargs) def vcvars_dict(*args, **kwargs): return tools_win.vcvars_dict(output=_global_output, *args, **kwargs) def latest_vs_version_installed(*args, **kwargs): return tools_win.latest_vs_version_installed(output=_global_output, *args, **kwargs) # Ready to use objects. try: os_info = OSInfo() except Exception as exc: logger.error(exc) _global_output.error("Error detecting os_info")
32.931034
108
0.767277
import requests from conans.client.output import ConanOutput from conans.client.tools import files as tools_files, net as tools_net, oss as tools_oss, \ system_pm as tools_system_pm, win as tools_win from conans.client.tools.env import * from conans.client.tools.pkg_config import * from conans.client.tools.scm import * from conans.client.tools.settings import * from conans.client.tools.apple import * from conans.client.tools.android import * from conans.util.env_reader import get_env from conans.util.files import _generic_algorithm_sum, load, md5, md5sum, mkdir, relative_dirs, \ rmdir, save as files_save, save_append, sha1sum, sha256sum, to_file_bytes, touch from conans.util.log import logger from conans.client.tools.version import Version from conans.client.build.cppstd_flags import cppstd_flag_new as cppstd_flag _global_output = None _global_requester = None _global_config = None def set_global_instances(the_output, the_requester, config): global _global_output global _global_requester global _global_config _global_output = the_output _global_requester = the_requester _global_config = config def get_global_instances(): return _global_output, _global_requester set_global_instances(the_output=ConanOutput(sys.stdout, sys.stderr, True), the_requester=requests, config=None) def save(path, content, append=False): if append: save_append(path=path, content=content) else: files_save(path=path, content=content, only_if_modified=False) ftp_download = tools_net.ftp_download def download(*args, **kwargs): return tools_net.download(out=_global_output, requester=_global_requester, *args, **kwargs) def get(*args, **kwargs): return tools_net.get(output=_global_output, requester=_global_requester, *args, **kwargs) chdir = tools_files.chdir human_size = tools_files.human_size untargz = tools_files.untargz check_with_algorithm_sum = tools_files.check_with_algorithm_sum check_sha1 = tools_files.check_sha1 check_md5 = tools_files.check_md5 check_sha256 = tools_files.check_sha256 patch 
= tools_files.patch replace_prefix_in_pc_file = tools_files.replace_prefix_in_pc_file collect_libs = tools_files.collect_libs which = tools_files.which unix2dos = tools_files.unix2dos dos2unix = tools_files.dos2unix def unzip(*args, **kwargs): return tools_files.unzip(output=_global_output, *args, **kwargs) def replace_in_file(*args, **kwargs): return tools_files.replace_in_file(output=_global_output, *args, **kwargs) def replace_path_in_file(*args, **kwargs): return tools_files.replace_path_in_file(output=_global_output, *args, **kwargs) args_to_string = tools_oss.args_to_string detected_architecture = tools_oss.detected_architecture detected_os = tools_oss.detected_os OSInfo = tools_oss.OSInfo cross_building = tools_oss.cross_building get_cross_building_settings = tools_oss.get_cross_building_settings get_gnu_triplet = tools_oss.get_gnu_triplet def cpu_count(*args, **kwargs): return tools_oss.cpu_count(output=_global_output, *args, **kwargs) class SystemPackageTool(tools_system_pm.SystemPackageTool): def __init__(self, *args, **kwargs): super(SystemPackageTool, self).__init__(output=_global_output, *args, **kwargs) class NullTool(tools_system_pm.NullTool): def __init__(self, *args, **kwargs): super(NullTool, self).__init__(output=_global_output, *args, **kwargs) class AptTool(tools_system_pm.AptTool): def __init__(self, *args, **kwargs): super(AptTool, self).__init__(output=_global_output, *args, **kwargs) class DnfTool(tools_system_pm.DnfTool): def __init__(self, *args, **kwargs): super(DnfTool, self).__init__(output=_global_output, *args, **kwargs) class YumTool(tools_system_pm.YumTool): def __init__(self, *args, **kwargs): super(YumTool, self).__init__(output=_global_output, *args, **kwargs) class BrewTool(tools_system_pm.BrewTool): def __init__(self, *args, **kwargs): super(BrewTool, self).__init__(output=_global_output, *args, **kwargs) class PkgTool(tools_system_pm.PkgTool): def __init__(self, *args, **kwargs): super(PkgTool, 
self).__init__(output=_global_output, *args, **kwargs) class ChocolateyTool(tools_system_pm.ChocolateyTool): def __init__(self, *args, **kwargs): super(ChocolateyTool, self).__init__(output=_global_output, *args, **kwargs) class PkgUtilTool(tools_system_pm.PkgUtilTool): def __init__(self, *args, **kwargs): super(PkgUtilTool, self).__init__(output=_global_output, *args, **kwargs) class PacManTool(tools_system_pm.PacManTool): def __init__(self, *args, **kwargs): super(PacManTool, self).__init__(output=_global_output, *args, **kwargs) class ZypperTool(tools_system_pm.ZypperTool): def __init__(self, *args, **kwargs): super(ZypperTool, self).__init__(output=_global_output, *args, **kwargs) vs_installation_path = tools_win.vs_installation_path vswhere = tools_win.vswhere vs_comntools = tools_win.vs_comntools find_windows_10_sdk = tools_win.find_windows_10_sdk escape_windows_cmd = tools_win.escape_windows_cmd get_cased_path = tools_win.get_cased_path MSYS2 = tools_win.MSYS2 MSYS = tools_win.MSYS CYGWIN = tools_win.CYGWIN WSL = tools_win.WSL SFU = tools_win.SFU unix_path = tools_win.unix_path run_in_windows_bash = tools_win.run_in_windows_bash @contextmanager def vcvars(*args, **kwargs): with tools_win.vcvars(output=_global_output, *args, **kwargs): yield def msvc_build_command(*args, **kwargs): return tools_win.msvc_build_command(output=_global_output, *args, **kwargs) def build_sln_command(*args, **kwargs): return tools_win.build_sln_command(output=_global_output, *args, **kwargs) def vcvars_command(*args, **kwargs): return tools_win.vcvars_command(output=_global_output, *args, **kwargs) def vcvars_dict(*args, **kwargs): return tools_win.vcvars_dict(output=_global_output, *args, **kwargs) def latest_vs_version_installed(*args, **kwargs): return tools_win.latest_vs_version_installed(output=_global_output, *args, **kwargs) try: os_info = OSInfo() except Exception as exc: logger.error(exc) _global_output.error("Error detecting os_info")
true
true
f73be4b323b6f527fad757910c441cb65ddf9c62
116,467
py
Python
tools/machine_learning/dataset/dataset_2000.py
emiliopomares/flow-separation-prediction
ca4585a8263cd5933889fbd763154e2f3851969a
[ "MIT" ]
null
null
null
tools/machine_learning/dataset/dataset_2000.py
emiliopomares/flow-separation-prediction
ca4585a8263cd5933889fbd763154e2f3851969a
[ "MIT" ]
null
null
null
tools/machine_learning/dataset/dataset_2000.py
emiliopomares/flow-separation-prediction
ca4585a8263cd5933889fbd763154e2f3851969a
[ "MIT" ]
null
null
null
dataset = [[575, 1.8478238142818033, 335.84132856507614, 4.0, 40.0, 18.0, 1.0], [576, 15.77686287348893, 42.441254859835766, 4.0, 40.0, 18.0, 1.0], [577, 2.5651959697179394, 460.4148384334217, 4.0, 40.0, 18.0, 0.796539971089987], [578, 17.085286455903624, 1722.0569227642409, 4.0, 40.0, 18.0, 0.20868652310905142], [579, 0.8922404456295938, 544.2232815722436, 4.0, 40.0, 18.0, 0.8391968483418055], [580, 1.855625614157908, 324.0611877697628, 4.0, 40.0, 18.0, 1.0], [581, 13.887583805638116, 370.66355345479735, 4.0, 40.0, 18.0, 0.4156268407842482], [582, 4.574319040915774, 1916.680593529377, 4.0, 40.0, 18.0, 0.4731431536067498], [583, 11.620584764397769, 1765.727157030207, 4.0, 40.0, 18.0, 0.3125490112539554], [584, 2.6235186150459104, 1833.1693569752063, 4.0, 40.0, 18.0, 0.5374621823117757], [585, 13.484073962894696, 660.3699928897363, 4.0, 40.0, 18.0, 0.3539297235585189], [586, 14.039143301499495, 1194.4759299020527, 4.0, 40.0, 18.0, 0.2909060179303773], [587, 13.912217642029711, 1554.008389226998, 4.0, 40.0, 18.0, 0.2743234627435817], [588, 13.99433118037543, 1828.6039911998955, 4.0, 40.0, 18.0, 0.26137292500800485], [589, 13.97985812120516, 1457.7115189147835, 4.0, 40.0, 18.0, 0.277698127963447], [590, 14.235082927476668, 1533.990342569619, 4.0, 40.0, 18.0, 0.2684974619970663], [591, 0.14438911944825494, 1552.4111707538157, 4.0, 40.0, 18.0, 0.6646778218759786], [592, 12.838639587489782, 897.1618056405601, 4.0, 40.0, 18.0, 0.3404965898150903], [593, 11.149353897963573, 598.125634720337, 4.0, 40.0, 18.0, 0.42099275208739695], [594, 2.6770942046494586, 1985.9408533826909, 4.0, 40.0, 18.0, 0.5310687434251105], [595, 2.9997563631283897, 1040.9042861817666, 4.0, 40.0, 18.0, 0.6086804760399638], [596, 2.3022674475226914, 307.9928831694318, 4.0, 40.0, 18.0, 1.0], [597, 16.291429852780407, 1256.6722690719946, 4.0, 40.0, 18.0, 0.24371063020372327], [598, 15.922124691331389, 507.48840748529494, 4.0, 40.0, 18.0, 0.32681641236328557], [599, 1.9421270372564932, 296.11422429306833, 
4.0, 40.0, 18.0, 1.0], [600, 17.158011255218895, 1370.1534109726924, 4.0, 40.0, 18.0, 0.22156305405117283], [601, 3.7932553942867377, 1121.772643508161, 4.0, 40.0, 18.0, 0.5662626026818638], [602, 14.10521268070536, 34.78839785558823, 4.0, 40.0, 18.0, 1.0], [603, 9.73869075013141, 1581.5507122013316, 4.0, 40.0, 18.0, 0.3625765570093685], [604, 4.274494253604736, 235.24380920742726, 4.0, 40.0, 18.0, 1.0], [605, 3.2295643670867213, 393.2709171003013, 4.0, 40.0, 18.0, 0.8062809231893029], [606, 12.475115053078682, 1915.9055265343711, 4.0, 40.0, 18.0, 0.2878838514964685], [607, 1.6886028928130217, 270.23903604128816, 4.0, 40.0, 18.0, 1.0], [608, 11.690517878266208, 1486.0998001794405, 4.0, 40.0, 18.0, 0.3237148945091826], [609, 15.926532949205589, 1131.5673840607374, 4.0, 40.0, 18.0, 0.25796741296596415], [610, 1.8446899105333436, 1224.3210865622684, 4.0, 40.0, 18.0, 0.6298416489939084], [611, 2.6566105295944635, 731.8422724963048, 4.0, 40.0, 18.0, 0.6871636918077176], [612, 15.125019204779058, 297.8907653170386, 4.0, 40.0, 18.0, 0.4185181467542303], [613, 3.8530850542593447, 1361.0001015077783, 4.0, 40.0, 18.0, 0.5373958378712685], [614, 12.598205126437756, 1717.5123950190948, 4.0, 40.0, 18.0, 0.2939356075449356], [615, 12.334492974283485, 623.3359259430543, 4.0, 40.0, 18.0, 0.3876830117045339], [616, 11.192915263658763, 1966.1376533177634, 4.0, 40.0, 18.0, 0.31352700344854745], [617, 13.997928305359197, 93.70706217274409, 4.0, 40.0, 18.0, 0.7141709148966205], [618, 16.366113492225136, 1236.5923416320818, 4.0, 40.0, 18.0, 0.2435035777641013], [619, 2.9775330120959294, 1632.2202941746937, 4.0, 40.0, 18.0, 0.5446685841376463], [620, 12.375848772835731, 1454.8269704665809, 4.0, 40.0, 18.0, 0.31088421330658206], [621, 10.573273354771072, 809.9502345850427, 4.0, 40.0, 18.0, 0.4036679768086213], [622, 0.658017470275974, 1174.2198110845593, 4.0, 40.0, 18.0, 0.6885758157602853], [623, 15.388174373590935, 1558.7329300836798, 4.0, 40.0, 18.0, 0.24506860672627045], [624, 
11.47959751919403, 495.72811032418923, 4.0, 40.0, 18.0, 0.43667527053115335], [625, 1.0249692741560694, 1714.5459100597297, 4.0, 40.0, 18.0, 0.6123934325550479], [626, 3.3490318260816085, 1587.88311412887, 4.0, 40.0, 18.0, 0.5351478832625405], [627, 13.621056051126965, 375.77630844629044, 4.0, 40.0, 18.0, 0.42020410107137113], [628, 15.612933200029865, 1869.1874865785246, 4.0, 40.0, 18.0, 0.23012146482050813], [629, 14.50967460452426, 81.04786425254812, 4.0, 40.0, 18.0, 0.7461201670586656], [630, 2.5263164804706095, 105.6280113180625, 4.0, 40.0, 18.0, 1.0], [631, 1.8029490922238407, 1029.5067606548166, 4.0, 40.0, 18.0, 0.6610687775529165], [632, 0.7319623297551434, 701.8330982972551, 4.0, 40.0, 18.0, 0.7869646649087125], [633, 0.4386533254903302, 794.8526073599669, 4.0, 40.0, 18.0, 0.7738976400870887], [634, 14.652220310971902, 952.1451985012426, 4.0, 40.0, 18.0, 0.29583094230337575], [635, 0.20246156655845526, 1804.2365401166314, 4.0, 40.0, 18.0, 0.6391260749168856], [636, 2.8725533409172863, 1981.9648539155141, 4.0, 40.0, 18.0, 0.5245258107130394], [637, 12.689631024428062, 1311.634645014224, 4.0, 40.0, 18.0, 0.31189270816357884], [638, 0.9614768236763129, 558.3493409470752, 4.0, 40.0, 18.0, 0.8292573515295004], [639, 12.935603226152619, 1333.9059870755948, 4.0, 40.0, 18.0, 0.30569596499996965], [640, 13.7979345626314, 779.3753407587446, 4.0, 40.0, 18.0, 0.331003864216317], [641, 0.9849375805474148, 825.9890492627081, 4.0, 40.0, 18.0, 0.7400968512949635], [642, 1.4105991726699643, 1700.8898630020053, 4.0, 40.0, 18.0, 0.5979771432431052], [643, 2.8503538554202015, 1064.3038789087652, 4.0, 40.0, 18.0, 0.6111854691231061], [644, 16.04834549540297, 1707.5748299191628, 4.0, 40.0, 18.0, 0.22738107076450115], [645, 16.783303066854362, 471.84431061561446, 4.0, 40.0, 18.0, 0.31643778862826194], [646, 0.39848871808779607, 1240.3359247723351, 4.0, 40.0, 18.0, 0.6908161553575081], [647, 2.4778883843398987, 1787.162704442942, 4.0, 40.0, 18.0, 0.5509850654318513], [648, 
13.25885996232254, 1698.8026832869486, 4.0, 40.0, 18.0, 0.2808466520575083], [649, 15.986135974030557, 224.86348916148867, 4.0, 40.0, 18.0, 0.44441779528106345], [650, 13.675228038330282, 473.8153271538389, 4.0, 40.0, 18.0, 0.3878574026483963], [651, 2.5952162624842563, 981.5417910601017, 4.0, 40.0, 18.0, 0.6353139849691855], [652, 2.0463400011399635, 613.7276199815243, 4.0, 40.0, 18.0, 0.7530953898704728], [653, 9.649425541309405, 105.74257917879993, 4.0, 40.0, 18.0, 1.0], [654, 11.838679710747154, 1217.4544316048584, 4.0, 40.0, 18.0, 0.33626602314339615], [655, 2.098502180533995, 1251.1532820311488, 4.0, 40.0, 18.0, 0.6159690017531594], [656, 2.1025685084517787, 98.89633668206652, 4.0, 40.0, 18.0, 1.0], [657, 12.857819799391866, 1359.358143001064, 4.0, 40.0, 18.0, 0.3059279506262172], [658, 2.233221352723336, 906.0579887922141, 4.0, 40.0, 18.0, 0.6649669558186118], [659, 1.7811199402272135, 1448.3686916556367, 4.0, 40.0, 18.0, 0.6063111556787022], [660, 14.448901654879998, 167.34241426955228, 4.0, 40.0, 18.0, 0.5463919600053861], [661, 10.562823930958594, 1297.580763882867, 4.0, 40.0, 18.0, 0.35997957233633354], [662, 3.272269298565971, 1067.0431698105351, 4.0, 40.0, 18.0, 0.593765244196822], [663, 1.0343862600092883, 667.017705876655, 4.0, 40.0, 18.0, 0.7836515967272167], [664, 0.16856950051587072, 720.6567295694282, 4.0, 40.0, 18.0, 0.8084805065903237], [665, 3.9177560361585195, 66.91061929546387, 4.0, 40.0, 18.0, 1.0], [666, 2.3780276261967783, 1668.1749819231613, 4.0, 40.0, 18.0, 0.5635900242240913], [667, 1.8813117598740123, 150.91226392259642, 4.0, 40.0, 18.0, 1.0], [668, 0.8447262583017403, 386.5987088286651, 4.0, 40.0, 18.0, 1.0], [669, 13.736506161491794, 889.6250282419106, 4.0, 40.0, 18.0, 0.3211736700709351], [670, 2.0090109477892426, 561.9522843904172, 4.0, 40.0, 18.0, 0.7749304621492978], [671, 3.038100630719427, 679.1589007105449, 4.0, 40.0, 18.0, 0.6851159203011229], [672, 3.9884559940760846, 833.3855871576827, 4.0, 40.0, 18.0, 0.6057434054729105], 
[673, 4.814399039171513, 30.558155481419423, 4.0, 40.0, 18.0, 1.0], [674, 0.7751917829450046, 1577.5845369091087, 4.0, 40.0, 18.0, 0.6351594260173516], [675, 14.960053050776954, 832.9983160485076, 4.0, 40.0, 18.0, 0.30016266965397775], [676, 17.771656535063176, 848.7911707008597, 4.0, 40.0, 18.0, 0.24429667759683385], [677, 1.401096753301573, 1702.1164509374134, 4.0, 40.0, 18.0, 0.5982530514668655], [678, 0.4236798333082352, 317.63818523938863, 4.0, 40.0, 18.0, 1.0], [679, 15.223642065748285, 933.4098161116106, 4.0, 40.0, 18.0, 0.2857079409908941], [680, 9.012045439273502, 1116.6022833618213, 4.0, 40.0, 18.0, 0.41056611918224784], [681, 1.2355168150259568, 1420.4346939390018, 4.0, 40.0, 18.0, 0.6319092021335166], [682, 0.3865424177831063, 473.2484023940222, 4.0, 40.0, 18.0, 0.904518741016756], [683, 1.2963805536393012, 1694.6108822988597, 4.0, 40.0, 18.0, 0.6029557100837556], [684, 1.9197251745615858, 1694.91862221521, 4.0, 40.0, 18.0, 0.5786066164285447], [685, 10.089674593087276, 544.0253517370464, 4.0, 40.0, 18.0, 0.46211496683321573], [686, 0.6334401188915719, 1844.853761865411, 4.0, 40.0, 18.0, 0.6178520401415883], [687, 13.326970265837408, 1834.0964338819465, 4.0, 40.0, 18.0, 0.2738377055667499], [688, 14.666304604439595, 467.8448461972652, 4.0, 40.0, 18.0, 0.3655591708614097], [689, 0.844304414255183, 1076.3594020117616, 4.0, 40.0, 18.0, 0.6959151451407178], [690, 3.6789640910162786, 626.4312648360602, 4.0, 40.0, 18.0, 0.6727236618293195], [691, 2.0051344211178366, 1007.3458777711303, 4.0, 40.0, 18.0, 0.6561383111692533], [692, 2.3481552984134613, 1471.3713178729508, 4.0, 40.0, 18.0, 0.5817223165277476], [693, 4.427135327036443, 1243.094343499476, 4.0, 40.0, 18.0, 0.5295634840686351], [694, 2.190869281670545, 554.5439904875483, 4.0, 40.0, 18.0, 0.7691339438293971], [695, 10.501562239820775, 250.29321404168425, 4.0, 40.0, 18.0, 0.5972936037623792], [696, 1.0579076190132626, 264.2901377145358, 4.0, 40.0, 18.0, 1.0], [697, 14.595094635291549, 
1352.0935667481695, 4.0, 40.0, 18.0, 0.27090889639433213], [698, 16.80898248691349, 217.61917192979868, 4.0, 40.0, 18.0, 0.4291036505392953], [699, 0.4008588925910539, 496.7708158830859, 4.0, 40.0, 18.0, 0.8892724306754946], [700, 16.673778109326502, 763.6732093014514, 4.0, 40.0, 18.0, 0.27343576698147964], [701, 4.049172829085666, 878.2315631702593, 4.0, 40.0, 18.0, 0.5945659270380028], [702, 3.0942338736710977, 1363.9170030493724, 4.0, 40.0, 18.0, 0.5643689491583638], [703, 2.126979268729393, 1774.8561400512385, 4.0, 40.0, 18.0, 0.5647131880217934], [704, 3.237293211206092, 191.81932534132739, 4.0, 40.0, 18.0, 1.0], [705, 3.4937049106334515, 484.70155391287017, 4.0, 40.0, 18.0, 0.7375686883019803], [706, 11.959072857883736, 42.238399809480285, 4.0, 40.0, 18.0, 1.0], [707, 9.832122271157074, 1013.5444884346197, 4.0, 40.0, 18.0, 0.400658804102444], [708, 14.257261734318867, 1290.167101491819, 4.0, 40.0, 18.0, 0.2811251610127445], [709, 13.584067151959738, 1617.345767469841, 4.0, 40.0, 18.0, 0.27790841405159983], [710, 2.86939610720086, 1528.0120869387833, 4.0, 40.0, 18.0, 0.5571594643640032], [711, 15.247977292503393, 78.47662178969892, 4.0, 40.0, 18.0, 0.7221566962049696], [712, 15.117226234056941, 1500.6389271531489, 4.0, 40.0, 18.0, 0.2529794077617841], [713, 0.8943924071003173, 930.034122187956, 4.0, 40.0, 18.0, 0.720893590508253], [714, 13.984266724862009, 248.39513882779175, 4.0, 40.0, 18.0, 0.482014990063243], [715, 2.223386857337172, 146.40521301654854, 4.0, 40.0, 18.0, 1.0], [716, 3.6592465325879995, 575.4090431647485, 4.0, 40.0, 18.0, 0.6909654634158583], [717, 11.527160608092421, 470.6255362849969, 4.0, 40.0, 18.0, 0.44251677112794247], [718, 1.5345921160899505, 933.6775403022946, 4.0, 40.0, 18.0, 0.6907209763934317], [719, 13.495474661190109, 448.9712669713969, 4.0, 40.0, 18.0, 0.3995449302285382], [720, 2.8143152440490398, 754.914262233562, 4.0, 40.0, 18.0, 0.6740759572520959], [721, 12.386140618404264, 617.4674074358072, 4.0, 40.0, 18.0, 
0.3874563897442213], [722, 2.489191894483384, 1858.0378326075042, 4.0, 40.0, 18.0, 0.5455838604250699], [723, 14.945166788553095, 977.6845992440178, 4.0, 40.0, 18.0, 0.287696335767216], [724, 15.062033439978197, 586.7926249125157, 4.0, 40.0, 18.0, 0.3301358785168809], [725, 13.315486637462875, 1276.1039521680034, 4.0, 40.0, 18.0, 0.30085902284906213], [726, 5.5250731877212775, 1510.3496624936267, 4.0, 40.0, 18.0, 0.47230268898558975], [727, 2.4576833472407893, 1678.2310828766163, 4.0, 40.0, 18.0, 0.5598075789527722], [728, 15.369419811333646, 1910.4883745286518, 4.0, 40.0, 18.0, 0.23338937847894647], [729, 5.214285816910424, 956.0268177113105, 4.0, 40.0, 18.0, 0.5381743176242986], [730, 1.3077047948296436, 1976.678926789535, 4.0, 40.0, 18.0, 0.581328676181729], [731, 1.3390940699166374, 1338.4119742971081, 4.0, 40.0, 18.0, 0.6369868698126483], [732, 0.9447404261407903, 68.99171372172654, 4.0, 40.0, 18.0, 1.0], [733, 4.371945005856279, 1890.1522564525435, 4.0, 40.0, 18.0, 0.4821625964777501], [734, 13.618513853095315, 1646.6200277329588, 4.0, 40.0, 18.0, 0.27590590005053567], [735, 10.775468892185003, 1976.3410362610828, 4.0, 40.0, 18.0, 0.32235853074822723], [736, 1.0811406407288038, 1443.2954720813782, 4.0, 40.0, 18.0, 0.6359822206273342], [737, 1.0995547545955375, 376.7519579456747, 4.0, 40.0, 18.0, 1.0], [738, 4.301003721146503, 77.81431231638504, 4.0, 40.0, 18.0, 1.0], [739, 15.080118223773836, 76.48070671447462, 4.0, 40.0, 18.0, 0.7425229728210355], [740, 3.574588871061194, 1433.6517065601122, 4.0, 40.0, 18.0, 0.5404133911667138], [741, 3.266803748137959, 768.0183481079831, 4.0, 40.0, 18.0, 0.6508517372826103], [742, 11.43373389005566, 1204.420414885311, 4.0, 40.0, 18.0, 0.3463125292925278], [743, 2.35173742155411, 489.22149464059817, 4.0, 40.0, 18.0, 0.7917123610736267], [744, 12.905201804335201, 1152.2035380398245, 4.0, 40.0, 18.0, 0.3175083258225495], [745, 0.4524338972995281, 1136.9338973099766, 4.0, 40.0, 18.0, 0.7038516116029139], [746, 
4.3175697121860726, 650.7705528177835, 4.0, 40.0, 18.0, 0.6374002296428714], [747, 0.39510622901542636, 426.23806126288514, 4.0, 40.0, 18.0, 0.9455163973623548], [748, 14.67896127887815, 1456.1132115988173, 4.0, 40.0, 18.0, 0.26381913942493096], [749, 0.3555654643300439, 1560.595803351052, 4.0, 40.0, 18.0, 0.6548400085242511], [750, 10.055333964164149, 1696.7235455170785, 4.0, 40.0, 18.0, 0.34975385610025994], [751, 3.585948608898914, 1420.1272427420804, 4.0, 40.0, 18.0, 0.5412431767198689], [752, 13.239128463560627, 1768.3159811873886, 4.0, 40.0, 18.0, 0.27813109147915616], [753, 16.581173790050137, 1774.982634205434, 4.0, 40.0, 18.0, 0.21572823709191316], [754, 12.405257599909454, 1035.7129388087144, 4.0, 40.0, 18.0, 0.3375681394414063], [755, 14.025105691317485, 525.9186634463808, 4.0, 40.0, 18.0, 0.3664283779805332], [756, 2.2627180677654173, 1610.1215579758955, 4.0, 40.0, 18.0, 0.5725099766756114], [757, 3.8456742854216834, 974.6240117342162, 4.0, 40.0, 18.0, 0.5857546073747673], [758, 16.197110055633342, 811.1043442328842, 4.0, 40.0, 18.0, 0.27696357422853746], [759, 2.665567780129208, 1336.0363928181405, 4.0, 40.0, 18.0, 0.5834163594521643], [760, 15.878764458243273, 462.27250530519643, 4.0, 40.0, 18.0, 0.33885961769632117], [761, 16.657701804149767, 624.3021245972809, 4.0, 40.0, 18.0, 0.28958737547857455], [762, 12.4319797348128, 27.981780002591723, 4.0, 40.0, 18.0, 1.0], [763, 2.012088284960268, 203.25557099559074, 4.0, 40.0, 18.0, 1.0], [764, 12.07149921493987, 676.2404177502207, 4.0, 40.0, 18.0, 0.3852280102935016], [765, 3.2847123226633084, 480.74450340340275, 4.0, 40.0, 18.0, 0.7496224546070713], [766, 1.015608512384193, 989.0806757950718, 4.0, 40.0, 18.0, 0.7037137312266106], [767, 4.360325936889063, 1164.2479267295978, 4.0, 40.0, 18.0, 0.5405340479152868], [768, 18.3553886093106, 812.878232633861, 4.0, 40.0, 18.0, 0.23685367650290476], [769, 12.819641028845409, 99.85617473371678, 4.0, 40.0, 18.0, 0.7470961521980516], [770, 14.887398175547531, 
955.1368509262782, 4.0, 40.0, 18.0, 0.29053086854876525], [771, 0.3049978880800497, 240.7835315879952, 4.0, 40.0, 18.0, 1.0], [772, 17.127897067130593, 128.52248922310935, 4.0, 40.0, 18.0, 0.5179783036052341], [773, 14.531605080634762, 1792.6363312400038, 4.0, 40.0, 18.0, 0.25257580205051355], [774, 18.277613851490095, 323.99960113230503, 4.0, 40.0, 18.0, 0.33223349252333434], [775, 14.093652353671214, 834.2167547538646, 4.0, 40.0, 18.0, 0.3187851943897908], [776, 13.976500995865905, 1971.4792814333991, 4.0, 40.0, 18.0, 0.25760437353957355], [777, 1.952474272573163, 295.44785041285417, 4.0, 40.0, 18.0, 1.0], [778, 0.08806775032605585, 113.73233728362332, 4.0, 40.0, 18.0, 1.0], [779, 2.453415081795997, 910.7242479224352, 4.0, 40.0, 18.0, 0.6545424458355116], [780, 3.6143013216859554, 1413.6379225118915, 4.0, 40.0, 18.0, 0.5408465932926981], [781, 13.999693357590361, 1563.0900153598675, 4.0, 40.0, 18.0, 0.2720721321312241], [782, 1.3264017323236081, 593.4245618091892, 4.0, 40.0, 18.0, 0.7960560666824285], [783, 10.191735427999252, 1600.8013150448471, 4.0, 40.0, 18.0, 0.3513108574021523], [784, 4.051250394046126, 985.8585314218301, 4.0, 40.0, 18.0, 0.5760435104642214], [785, 14.807307671854748, 320.66778786870094, 4.0, 40.0, 18.0, 0.4147264064944534], [786, 4.268574548409049, 743.3937271179888, 4.0, 40.0, 18.0, 0.6147561722735536], [787, 0.8542702954421335, 1638.3696405059818, 4.0, 40.0, 18.0, 0.6260693567962147], [788, 12.390226678120298, 35.71317038290776, 4.0, 40.0, 18.0, 1.0], [789, 1.3639243293316943, 1868.2981482102077, 4.0, 40.0, 18.0, 0.5868582703677531], [790, 2.188831794594288, 1711.7797448804686, 4.0, 40.0, 18.0, 0.5671193765550071], [791, 15.758645055648472, 530.8267105297234, 4.0, 40.0, 18.0, 0.32530622487733385], [792, 13.059184046144315, 546.4123943138928, 4.0, 40.0, 18.0, 0.38516009047684285], [793, 0.020813953740415947, 1879.7985181999034, 4.0, 40.0, 18.0, 0.6407069486576307], [794, 13.976962090956986, 618.0348813826838, 4.0, 40.0, 18.0, 
0.34917404485508285], [795, 0.14640206387335253, 1894.464504292166, 4.0, 40.0, 18.0, 0.6343405003196935], [796, 2.5511425850656826, 601.9232883752466, 4.0, 40.0, 18.0, 0.7331806458577551], [797, 12.847672292440175, 1261.598687370912, 4.0, 40.0, 18.0, 0.3115043396222614], [798, 12.344080026363585, 622.9195855768322, 4.0, 40.0, 18.0, 0.38750786086516037], [799, 0.9140585736086912, 1918.1368313365947, 4.0, 40.0, 18.0, 0.6009262777088463], [800, 0.35235189836048986, 1200.7010354775734, 4.0, 40.0, 18.0, 0.6986593717380764], [801, 0.42408892485570027, 775.8273006039032, 4.0, 40.0, 18.0, 0.7799235039007982], [802, 12.149370594834728, 49.7844001872184, 4.0, 40.0, 18.0, 1.0], [803, 2.9452057386165458, 675.5148540411165, 4.0, 40.0, 18.0, 0.6903658838768196], [804, 0.8650275146096837, 490.3551333869768, 4.0, 40.0, 18.0, 0.8678914535667861], [805, 11.264888723035527, 279.3339740753467, 4.0, 40.0, 18.0, 0.5452519060100363], [806, 0.9395446287184084, 268.10969545806984, 4.0, 40.0, 18.0, 1.0], [807, 12.320807236805747, 1300.6221934228442, 4.0, 40.0, 18.0, 0.3204935123878617], [808, 11.70632945413205, 1441.745445288523, 4.0, 40.0, 18.0, 0.32579640997107984], [809, 0.35145916095542207, 585.0453061453562, 4.0, 40.0, 18.0, 0.8485895124139439], [810, 4.445773728023266, 1258.4893180743315, 4.0, 40.0, 18.0, 0.5273167945622266], [811, 13.157817112034227, 1302.4115111415156, 4.0, 40.0, 18.0, 0.3027236223450973], [812, 2.6038670669485415, 575.2770357029218, 4.0, 40.0, 18.0, 0.7406538623606801], [813, 14.494374239199553, 950.3095667625149, 4.0, 40.0, 18.0, 0.29917920893048755], [814, 2.678555621471801, 875.2773613567432, 4.0, 40.0, 18.0, 0.6519207852377271], [815, 5.462519171772081, 1565.7578258848418, 4.0, 40.0, 18.0, 0.470256327574106], [816, 3.6925405136339666, 83.83399528926779, 4.0, 40.0, 18.0, 1.0], [817, 0.2115348826045702, 111.75885341485372, 4.0, 40.0, 18.0, 1.0], [818, 14.402890184387994, 692.8296053618269, 4.0, 40.0, 18.0, 0.32817588075576654], [819, 14.053683308306466, 
554.168037466839, 4.0, 40.0, 18.0, 0.3594465509401986], [820, 9.822034405531422, 398.14312654946264, 4.0, 40.0, 18.0, 0.5215505855665795], [821, 10.80218553706231, 566.8990284356657, 4.0, 40.0, 18.0, 0.43702722233564545], [822, 4.226574534404808, 1895.5571571711712, 4.0, 40.0, 18.0, 0.4862284241682766], [823, 15.949911231789805, 328.6047615035785, 4.0, 40.0, 18.0, 0.38379127533171353], [824, 1.7421386127462293, 575.514140642602, 4.0, 40.0, 18.0, 0.7826957935291143], [825, 13.812861891077372, 530.6744036266848, 4.0, 40.0, 18.0, 0.37037799072499694], [826, 15.396540461138336, 1005.8720769068121, 4.0, 40.0, 18.0, 0.27668394182518674], [827, 3.7434074240616857, 1624.9930754111701, 4.0, 40.0, 18.0, 0.519149602832212], [828, 0.07647515233241453, 1077.467134469636, 4.0, 40.0, 18.0, 0.7309306554616624], [829, 16.768563736003642, 1401.9601662982361, 4.0, 40.0, 18.0, 0.2269830652049164], [830, 15.636220365831498, 1472.3493807122168, 4.0, 40.0, 18.0, 0.24437477012394507], [831, 1.4410911226772205, 1016.3433066720726, 4.0, 40.0, 18.0, 0.6794864004396505], [832, 13.429268731173906, 1636.0324925040434, 4.0, 40.0, 18.0, 0.2802160985454364], [833, 1.7001405956665383, 497.6687682538203, 4.0, 40.0, 18.0, 0.8204325318854587], [834, 2.2801805994410946, 1982.1214434171384, 4.0, 40.0, 18.0, 0.5450168923984778], [835, 3.6254909244031404, 851.6421890102177, 4.0, 40.0, 18.0, 0.6168636797014945], [836, 2.4674618747481962, 25.072612129755036, 4.0, 40.0, 18.0, 1.0], [837, 11.309361392384687, 74.83038114989517, 4.0, 40.0, 18.0, 1.0], [838, 2.755439076643698, 517.1786134321785, 4.0, 40.0, 18.0, 0.757905306884278], [839, 2.877880558773736, 513.3027576117254, 4.0, 40.0, 18.0, 0.7536759982981875], [840, 0.12078775050444213, 507.1880300488068, 4.0, 40.0, 18.0, 0.8972672699498528], [841, 1.2444480307310486, 850.347297144776, 4.0, 40.0, 18.0, 0.7220547696064394], [842, 12.346750089330126, 432.5882287162059, 4.0, 40.0, 18.0, 0.4326930156506844], [843, 1.5889424632997833, 1340.7825445181215, 4.0, 40.0, 
18.0, 0.6261528541397215], [844, 3.222459948811766, 127.94951038145253, 4.0, 40.0, 18.0, 1.0], [845, 13.84976520946138, 1860.6028498991764, 4.0, 40.0, 18.0, 0.2630875407495844], [846, 10.809450602607129, 697.7146030768455, 4.0, 40.0, 18.0, 0.4122864658711165], [847, 2.6517370985662945, 524.882015156666, 4.0, 40.0, 18.0, 0.7594546559239661], [848, 0.33151114416620175, 148.02595643232797, 4.0, 40.0, 18.0, 1.0], [849, 12.561384381668459, 1707.4417184851957, 4.0, 40.0, 18.0, 0.2952058934400186], [850, 0.2911780698329245, 1984.1116309569888, 4.0, 40.0, 18.0, 0.6216635522127714], [851, 15.034769662755576, 506.00476943493436, 4.0, 40.0, 18.0, 0.3471045303079199], [852, 13.613211001049788, 1621.8526579690747, 4.0, 40.0, 18.0, 0.27712957684978695], [853, 14.40687818096237, 123.64644020573355, 4.0, 40.0, 18.0, 0.6169858411816644], [854, 15.294320483647374, 1024.5299577076414, 4.0, 40.0, 18.0, 0.27731194025122224], [855, 11.771252185889919, 792.1702398240161, 4.0, 40.0, 18.0, 0.3765789395910771], [856, 12.712316898436608, 875.0992355058326, 4.0, 40.0, 18.0, 0.3454188902225476], [857, 1.6596262198930225, 1514.836482990111, 4.0, 40.0, 18.0, 0.6045344845451512], [858, 9.922203260880732, 1763.5137529481794, 4.0, 40.0, 18.0, 0.34982745088830114], [859, 13.988993685467582, 44.92657644876597, 4.0, 40.0, 18.0, 1.0], [860, 11.361187941096702, 949.3578953048755, 4.0, 40.0, 18.0, 0.3697459466883972], [861, 4.7679721273197755, 270.8999790374226, 4.0, 40.0, 18.0, 0.8368747879513082], [862, 13.261838815599832, 511.2917526138542, 4.0, 40.0, 18.0, 0.3883826771684269], [863, 0.3471091839154674, 493.5988467910215, 4.0, 40.0, 18.0, 0.8933765780267001], [864, 0.8367956247061774, 721.331726759914, 4.0, 40.0, 18.0, 0.7757173979973006], [865, 15.587056868126549, 577.3829475502033, 4.0, 40.0, 18.0, 0.3215678688285604], [866, 5.558999515029088, 1219.3662366634514, 4.0, 40.0, 18.0, 0.4952216968075769], [867, 3.501123798345038, 36.39878879439331, 4.0, 40.0, 18.0, 1.0], [868, 12.478975729494595, 
552.9496226672502, 4.0, 40.0, 18.0, 0.3983166754960929], [869, 2.191668262788809, 212.63346482514075, 4.0, 40.0, 18.0, 1.0], [870, 0.6651462967474171, 332.0085086621616, 4.0, 40.0, 18.0, 1.0], [871, 0.6520694894744892, 1525.5004930335242, 4.0, 40.0, 18.0, 0.6456586439354453], [872, 3.2854463912199674, 1529.7019305477327, 4.0, 40.0, 18.0, 0.5421830391732159], [873, 4.26479061271794, 1650.8828736240553, 4.0, 40.0, 18.0, 0.5004064865608115], [874, 1.397417393603562, 520.5374198976365, 4.0, 40.0, 18.0, 0.8244495034316459], [875, 17.4020075595963, 1638.5497268464483, 4.0, 40.0, 18.0, 0.2063952702935425], [876, 17.266867430259012, 517.7366361387418, 4.0, 40.0, 18.0, 0.29733518877058607], [877, 4.651958853541073, 1991.7687195777119, 4.0, 40.0, 18.0, 0.4671262272005749], [878, 4.6773427238229655, 1887.7061658877649, 4.0, 40.0, 18.0, 0.47336803999065985], [879, 2.3367373636181195, 26.395310323466973, 4.0, 40.0, 18.0, 1.0], [880, 1.0425516720644656, 1583.9473784885345, 4.0, 40.0, 18.0, 0.6233641218149952], [881, 10.801450726991098, 515.3670963596301, 4.0, 40.0, 18.0, 0.449610086321201], [882, 3.6448084496851765, 1330.078454856822, 4.0, 40.0, 18.0, 0.5476344589935176], [883, 0.8186557151880045, 1751.4627850640395, 4.0, 40.0, 18.0, 0.6177246429711354], [884, 3.8456552890447084, 484.5436617996804, 4.0, 40.0, 18.0, 0.7208386596494314], [885, 14.179261861894211, 45.97764684225157, 4.0, 40.0, 18.0, 1.0], [886, 12.469770353975969, 1396.0546701729043, 4.0, 40.0, 18.0, 0.31188432431254715], [887, 0.9979036808746822, 482.5891193944024, 4.0, 40.0, 18.0, 0.8653929817154723], [888, 3.6543410591156675, 1511.589633483546, 4.0, 40.0, 18.0, 0.5309946902593086], [889, 2.3109790245019717, 1486.5918912322754, 4.0, 40.0, 18.0, 0.5816964312441228], [890, 1.5279860050850842, 1179.8792252646228, 4.0, 40.0, 18.0, 0.6495763551188402], [891, 16.009572858609232, 1527.8415977201078, 4.0, 40.0, 18.0, 0.2349395566333903], [892, 0.625553047297132, 1375.382578491901, 4.0, 40.0, 18.0, 0.6632309139206402], 
[893, 1.8700549420915475, 1459.3607464749566, 4.0, 40.0, 18.0, 0.6017235731715382], [894, 4.7203670143848075, 1233.955835525839, 4.0, 40.0, 18.0, 0.5206216359174135], [895, 2.3469275787028274, 143.68895877079933, 4.0, 40.0, 18.0, 1.0], [896, 13.293778841935117, 932.4615150655806, 4.0, 40.0, 18.0, 0.3269208744678481], [897, 2.2745822436563925, 1770.8350103208168, 4.0, 40.0, 18.0, 0.5595061389646725], [898, 13.666554850823916, 1580.8125987337846, 4.0, 40.0, 18.0, 0.2779646088948095], [899, 12.817842657558726, 1103.2719645390746, 4.0, 40.0, 18.0, 0.32279005191624505], [900, 12.625966008465072, 1081.9735094386326, 4.0, 40.0, 18.0, 0.32889863628161914], [901, 13.723473591368872, 934.2190141641606, 4.0, 40.0, 18.0, 0.3174030111597273], [902, 2.1392136415686838, 1777.8941529681579, 4.0, 40.0, 18.0, 0.5640434960950943], [903, 13.969768661699815, 456.11253120090186, 4.0, 40.0, 18.0, 0.3855585390092459], [904, 13.492296797506846, 823.6488026751222, 4.0, 40.0, 18.0, 0.33304210963356434], [905, 2.4006379347625755, 1208.4337139265797, 4.0, 40.0, 18.0, 0.6090798255363604], [906, 3.2996878926064643, 1337.1106049529992, 4.0, 40.0, 18.0, 0.5594818899137194], [907, 0.6492383211452042, 729.2503406045738, 4.0, 40.0, 18.0, 0.7824979378024204], [908, 17.346096572563948, 1116.1596925144515, 4.0, 40.0, 18.0, 0.23304827148437396], [909, 11.76715542138179, 231.2640104783427, 4.0, 40.0, 18.0, 0.5685466033812662], [910, 3.460305294910314, 463.3425516518295, 4.0, 40.0, 18.0, 0.7503076720840286], [911, 9.905165123946508, 1623.4591525304995, 4.0, 40.0, 18.0, 0.35674684537427404], [912, 12.519429777919818, 1087.8976955000057, 4.0, 40.0, 18.0, 0.3308106440616603], [913, 12.319860344214563, 1170.7922649261436, 4.0, 40.0, 18.0, 0.32899488387402576], [914, 2.2228893559090825, 392.33817468647754, 4.0, 40.0, 18.0, 0.8602564526557024], [915, 3.8067427291656584, 1404.3857152789735, 4.0, 40.0, 18.0, 0.5349146406180674], [916, 2.845307779535298, 1224.5948480262102, 4.0, 40.0, 18.0, 0.5894087300981499], 
[917, 1.5978325186990006, 733.7915602697259, 4.0, 40.0, 18.0, 0.7356911118556473], [918, 12.375250922216058, 1094.1405838746987, 4.0, 40.0, 18.0, 0.333446679789058], [919, 12.664249513564506, 1338.942009483894, 4.0, 40.0, 18.0, 0.31096660152908345], [920, 4.730057276230532, 707.6882882087757, 4.0, 40.0, 18.0, 0.6047021685017591], [921, 1.34200905881488, 445.7578236962116, 4.0, 40.0, 18.0, 0.869228677565014], [922, 19.189200552837196, 1271.8341781178929, 4.0, 40.0, 18.0, 0.19257084213389974], [923, 0.20754297998301063, 1975.762382694505, 4.0, 40.0, 18.0, 0.6256652310084061], [924, 1.5847315139362266, 1653.4738687150048, 4.0, 40.0, 18.0, 0.5950294363507569], [925, 0.9029107915704686, 1677.1085267452029, 4.0, 40.0, 18.0, 0.6206617133297019], [926, 1.9910239556107405, 1127.1444321415468, 4.0, 40.0, 18.0, 0.6373936481003256], [927, 1.6339303208191573, 868.9373037304671, 4.0, 40.0, 18.0, 0.6998676675829426], [928, 11.786132320694488, 357.5789828613401, 4.0, 40.0, 18.0, 0.47899034894400444], [929, 2.572490815590662, 572.5060453942956, 4.0, 40.0, 18.0, 0.7432340613970874], [930, 11.324076732318105, 1717.409772456833, 4.0, 40.0, 18.0, 0.32109029940016276], [931, 3.2033824826704946, 1381.766083288084, 4.0, 40.0, 18.0, 0.5584738695440018], [932, 0.29935076673756944, 346.4940759742787, 4.0, 40.0, 18.0, 1.0], [933, 3.3149473633066835, 369.4040760496109, 4.0, 40.0, 18.0, 0.8199639826820321], [934, 14.957461489742554, 392.1766853209864, 4.0, 40.0, 18.0, 0.38210275992583703], [935, 10.252068671560986, 878.8502929934763, 4.0, 40.0, 18.0, 0.4036874990181745], [936, 2.3298973235475184, 192.02823441675403, 4.0, 40.0, 18.0, 1.0], [937, 16.133257692608076, 68.96615475196143, 4.0, 40.0, 18.0, 0.7361223643796446], [938, 15.384049651537264, 674.7480437355557, 4.0, 40.0, 18.0, 0.3092093102129602], [939, 11.811459137224123, 1172.9652532967907, 4.0, 40.0, 18.0, 0.34018306472707666], [940, 0.25360697410958943, 394.65344289624267, 4.0, 40.0, 18.0, 1.0], [941, 1.0153955963041492, 
1405.8976942428362, 4.0, 40.0, 18.0, 0.6429231137926095], [942, 15.023097872524138, 1612.1185311349077, 4.0, 40.0, 18.0, 0.24963010031947305], [943, 12.674138874068834, 1529.2419566313745, 4.0, 40.0, 18.0, 0.30084850061216206], [944, 14.802983843618815, 164.936432500173, 4.0, 40.0, 18.0, 0.5380645237956828], [945, 1.7974469089941434, 780.51386038894, 4.0, 40.0, 18.0, 0.7134924720574082], [946, 4.917161606969317, 1062.7610082561107, 4.0, 40.0, 18.0, 0.5336437867991229], [947, 12.4743031269495, 929.6669079224592, 4.0, 40.0, 18.0, 0.34558765008358927], [948, 11.621068659871398, 1118.90290438318, 4.0, 40.0, 18.0, 0.34840901764913146], [949, 16.280946670743294, 410.62405010321527, 4.0, 40.0, 18.0, 0.34469277018725086], [950, 3.0558105663569095, 175.23515739061907, 4.0, 40.0, 18.0, 1.0], [951, 11.340051238252313, 180.51325888810882, 4.0, 40.0, 18.0, 0.6411003699273948], [952, 14.277484175725977, 826.0645908403268, 4.0, 40.0, 18.0, 0.3155862057747023], [953, 1.3169760846316896, 1007.6922592236383, 4.0, 40.0, 18.0, 0.6864551690008719], [954, 15.98081088902735, 1312.1921207392581, 4.0, 40.0, 18.0, 0.24634828994169444], [955, 2.7038090341911643, 1070.423555481202, 4.0, 40.0, 18.0, 0.6162027689566242], [956, 0.13162317514869004, 968.7399315897476, 4.0, 40.0, 18.0, 0.7484223591940241], [957, 0.8895438034073955, 394.7437938171988, 4.0, 40.0, 18.0, 0.9472596588449149], [958, 17.6054051883661, 1164.4526642461376, 4.0, 40.0, 18.0, 0.22543712181505488], [959, 0.9803329696935392, 966.5219069922134, 4.0, 40.0, 18.0, 0.7095950885633576], [960, 12.40450221251569, 40.73367000616375, 4.0, 40.0, 18.0, 1.0], [961, 14.38877907140237, 1945.7004454607559, 4.0, 40.0, 18.0, 0.2506475381119266], [962, 13.839385771164153, 1721.227435947506, 4.0, 40.0, 18.0, 0.2678203713392895], [963, 11.300003819707515, 899.41463455827, 4.0, 40.0, 18.0, 0.3760826184598671], [964, 12.621759878623221, 1509.09256699155, 4.0, 40.0, 18.0, 0.3030571471492336], [965, 4.446005752769037, 1215.6389911853855, 4.0, 40.0, 
18.0, 0.531810221189528], [966, 14.12027533741313, 470.96365260444, 4.0, 40.0, 18.0, 0.37783846710475805], [967, 1.2335924052287428, 668.8036679063714, 4.0, 40.0, 18.0, 0.7731438909319109], [968, 13.891702310379722, 1433.5430891509743, 4.0, 40.0, 18.0, 0.28073620473004], [969, 13.07691706050377, 1990.0562638873407, 4.0, 40.0, 18.0, 0.27428850854309267], [970, 0.3741296303842674, 1992.3569398652019, 4.0, 40.0, 18.0, 0.6175748266261021], [971, 1.7737879178494667, 259.0842415595515, 4.0, 40.0, 18.0, 1.0], [972, 11.844469223850624, 685.3142445680993, 4.0, 40.0, 18.0, 0.3895263456632979], [973, 15.994151464447118, 195.3837928601646, 4.0, 40.0, 18.0, 0.46969256643285157], [974, 15.07415742392848, 1188.7953327292678, 4.0, 40.0, 18.0, 0.27083084409540814], [975, 11.51631080098148, 990.392849343815, 4.0, 40.0, 18.0, 0.3621093095794676], [976, 3.7324910274297363, 548.3082993159786, 4.0, 40.0, 18.0, 0.6979905116968695], [977, 10.126931622487524, 1311.0628779016936, 4.0, 40.0, 18.0, 0.3695539493716479], [978, 14.384868256664273, 1801.2232282031146, 4.0, 40.0, 18.0, 0.2550010957197734], [979, 1.5941497499646031, 21.42087368419864, 4.0, 40.0, 18.0, 1.0], [980, 14.311725315503883, 1641.4679331877837, 4.0, 40.0, 18.0, 0.26209851444974125], [981, 11.702490712594281, 1608.85589034457, 4.0, 40.0, 18.0, 0.3178295943147355], [982, 1.6831440624308462, 1937.773598507266, 4.0, 40.0, 18.0, 0.569613400952179], [983, 10.04449510773711, 1337.8640599219123, 4.0, 40.0, 18.0, 0.3697144533402944], [984, 12.265338523875279, 685.4790451609148, 4.0, 40.0, 18.0, 0.3791954762658907], [985, 14.047504992714842, 907.1842125697408, 4.0, 40.0, 18.0, 0.3126033510357102], [986, 0.0812992206315577, 237.74215754091662, 4.0, 40.0, 18.0, 1.0], [987, 16.240518744406902, 383.0087836084393, 4.0, 40.0, 18.0, 0.3549175791697821], [988, 0.06883842704250531, 1404.314806666811, 4.0, 40.0, 18.0, 0.6845397642348533], [989, 13.787439703636503, 497.45265536719654, 4.0, 40.0, 18.0, 0.3790056810984859], [990, 
0.27157705024292866, 548.9587537120956, 4.0, 40.0, 18.0, 0.8688478453937651], [991, 15.892046387870389, 284.8597712136106, 4.0, 40.0, 18.0, 0.40718189317192766], [992, 9.848824144688235, 1846.5677455100385, 4.0, 40.0, 18.0, 0.34791635672450527], [993, 2.9839837324850187, 628.7377084119697, 4.0, 40.0, 18.0, 0.7035413612230239], [994, 9.743893770133019, 872.7922343672578, 4.0, 40.0, 18.0, 0.4160575897456665], [995, 3.1725132243825644, 1719.0748664697376, 4.0, 40.0, 18.0, 0.5315448268622537], [996, 14.16220465364361, 1380.1146019027651, 4.0, 40.0, 18.0, 0.27806357499245316], [997, 12.047099646751901, 808.0087418067287, 4.0, 40.0, 18.0, 0.3681542492764767], [998, 0.5304190367814037, 1655.4241578426804, 4.0, 40.0, 18.0, 0.6381701152771689], [999, 14.549367192037938, 1884.0702403816829, 4.0, 40.0, 18.0, 0.24933330781290222], [1000, 10.969079225753841, 51.07680155924689, 4.0, 40.0, 18.0, 1.0], [1001, 14.379505126968896, 1550.2450716680714, 4.0, 40.0, 18.0, 0.2650038234454316], [1002, 12.565066512565895, 208.72732881755675, 4.0, 40.0, 18.0, 0.5628847986029153], [1003, 16.709875990586724, 1443.2650546498917, 4.0, 40.0, 18.0, 0.2261108268887247], [1004, 13.627628051755496, 1244.7532834905965, 4.0, 40.0, 18.0, 0.2964599992301991], [1005, 2.5697766087191978, 280.23877982048634, 4.0, 40.0, 18.0, 1.0], [1006, 2.8154781428106443, 1941.9545128347193, 4.0, 40.0, 18.0, 0.5290188622609497], [1007, 2.6333417921411346, 1076.5628624835515, 4.0, 40.0, 18.0, 0.6181720182582942], [1008, 17.28877762619281, 1874.0882382622772, 4.0, 40.0, 18.0, 0.20020956318710967], [1009, 11.25986242885082, 1152.8672697173233, 4.0, 40.0, 18.0, 0.35436490976918505], [1010, 2.0208419061258196, 1515.6593149763987, 4.0, 40.0, 18.0, 0.5903298092886061], [1011, 13.814286645733162, 983.7863318379204, 4.0, 40.0, 18.0, 0.31074136158799803], [1012, 12.444316410401754, 982.2880542798202, 4.0, 40.0, 18.0, 0.3415192818676255], [1013, 0.24952696616418857, 1036.7053004115012, 4.0, 40.0, 18.0, 0.7301535922653354], [1014, 
13.639792484664886, 295.6283233387311, 4.0, 40.0, 18.0, 0.4594129236510011], [1015, 13.996314281774813, 549.8992191379771, 4.0, 40.0, 18.0, 0.3616955234209215], [1016, 15.921205656186562, 1729.2266149159716, 4.0, 40.0, 18.0, 0.22883476115101697], [1017, 14.134947206063751, 393.084954980827, 4.0, 40.0, 18.0, 0.4019043302681516], [1018, 8.66595361238791, 282.97653182782375, 4.0, 40.0, 18.0, 0.6408100008094858], [1019, 14.71138262666598, 60.97269071044567, 4.0, 40.0, 18.0, 1.0], [1020, 2.382268438061506, 917.076536395473, 4.0, 40.0, 18.0, 0.6564118004625531], [1021, 1.3609303603948204, 1001.8006758262095, 4.0, 40.0, 18.0, 0.6855880264411957], [1022, 3.0509720820353223, 1130.0439406595988, 4.0, 40.0, 18.0, 0.593504458976072], [1023, 2.5284957625784505, 1702.6925102059365, 4.0, 40.0, 18.0, 0.5553988908104479], [1024, 15.292318778395282, 164.5565070065693, 4.0, 40.0, 18.0, 0.5231762056992927], [1025, 12.709092524239063, 1337.838808917731, 4.0, 40.0, 18.0, 0.31013542004721883], [1026, 0.8664897269266063, 942.9426248090579, 4.0, 40.0, 18.0, 0.7195565723760619], [1027, 1.5536892902461885, 389.97253600178186, 4.0, 40.0, 18.0, 0.8975768383165542], [1028, 11.839748995092563, 35.17181476816696, 4.0, 40.0, 18.0, 1.0], [1029, 1.4292313804494934, 1740.6119702955277, 4.0, 40.0, 18.0, 0.5939137749467036], [1030, 1.3363135462155014, 1954.668669092828, 4.0, 40.0, 18.0, 0.5816961163661599], [1031, 2.8060572164743784, 1511.4491807486315, 4.0, 40.0, 18.0, 0.5609479158655598], [1032, 1.6806437079890457, 1155.2749326757967, 4.0, 40.0, 18.0, 0.6465713835704293], [1033, 13.284582033277403, 882.454433596463, 4.0, 40.0, 18.0, 0.33188616968173273], [1034, 12.994307863493844, 697.5910800347838, 4.0, 40.0, 18.0, 0.3597134973980741], [1035, 2.885707761520376, 1620.154783493823, 4.0, 40.0, 18.0, 0.5488468743484286], [1036, 1.7882389506939496, 1287.3707719696315, 4.0, 40.0, 18.0, 0.6243143714742815], [1037, 3.080079185483843, 493.3275516580027, 4.0, 40.0, 18.0, 0.7533814280844582], [1038, 
2.028637955841206, 945.993717340121, 4.0, 40.0, 18.0, 0.6662129702129556], [1039, 1.905818935924439, 1285.8710428640877, 4.0, 40.0, 18.0, 0.6196402258174992], [1040, 11.599243904922254, 502.6315839756573, 4.0, 40.0, 18.0, 0.43158195615645856], [1041, 13.790439405291108, 103.64426432372672, 4.0, 40.0, 18.0, 0.6904458550013394], [1042, 4.004757399049852, 67.01069464719315, 4.0, 40.0, 18.0, 1.0], [1043, 14.052258983357287, 222.10776228882835, 4.0, 40.0, 18.0, 0.501652903766701], [1044, 3.227388132069464, 1720.8643397860187, 4.0, 40.0, 18.0, 0.529568658548787], [1045, 10.959534497399321, 996.1713102210934, 4.0, 40.0, 18.0, 0.3747722821016364], [1046, 2.60867208302761, 1007.3720736381559, 4.0, 40.0, 18.0, 0.6301983748350348], [1047, 10.34723334800158, 1855.8283279760872, 4.0, 40.0, 18.0, 0.33661924254169556], [1048, 13.663771853028694, 416.4327323283204, 4.0, 40.0, 18.0, 0.40522574751884216], [1049, 2.926733647466812, 789.2318756014898, 4.0, 40.0, 18.0, 0.6604279212323652], [1050, 3.916952249795078, 1446.2509437736092, 4.0, 40.0, 18.0, 0.527588212328101], [1051, 1.5906441069726882, 227.17592299329092, 4.0, 40.0, 18.0, 1.0], [1052, 14.751478209741343, 1848.0800359979974, 4.0, 40.0, 18.0, 0.2466063624687294], [1053, 1.7180325519710022, 1784.2677435651497, 4.0, 40.0, 18.0, 0.5793416188712317], [1054, 1.770786282639183, 621.5628077582616, 4.0, 40.0, 18.0, 0.7636640570909402], [1055, 1.7634453515540762, 375.3213031714331, 4.0, 40.0, 18.0, 0.8985040082586395], [1056, 3.289665948419221, 197.39330994853793, 4.0, 40.0, 18.0, 1.0], [1057, 4.321995443049678, 739.2499295139535, 4.0, 40.0, 18.0, 0.613594208215663], [1058, 15.02796313304617, 684.1786932402887, 4.0, 40.0, 18.0, 0.31558062235784945], [1059, 1.7022514916524445, 132.8195324775657, 4.0, 40.0, 18.0, 1.0], [1060, 11.861196214382316, 1726.5552035226012, 4.0, 40.0, 18.0, 0.3092657884234668], [1061, 18.53219405604401, 1345.7697364706196, 4.0, 40.0, 18.0, 0.19962274578731354], [1062, 2.2018003860809126, 500.1605357192023, 4.0, 
40.0, 18.0, 0.7936425960912976], [1063, 3.4308501723109535, 1040.2294758666994, 4.0, 40.0, 18.0, 0.5916129904815498], [1064, 3.131734440587331, 355.50835691379723, 4.0, 40.0, 18.0, 0.8411527242435897], [1065, 2.9134224241220155, 229.23854805670973, 4.0, 40.0, 18.0, 1.0], [1066, 14.182680593516672, 1376.92496836616, 4.0, 40.0, 18.0, 0.27783514085000444], [1067, 1.0786995201198697, 822.2766104186211, 4.0, 40.0, 18.0, 0.7365328861340361], [1068, 2.013553879196522, 1400.2014508042894, 4.0, 40.0, 18.0, 0.602115791885369], [1069, 2.7438534237015824, 129.28535173944977, 4.0, 40.0, 18.0, 1.0], [1070, 1.042107562388861, 524.6764616151083, 4.0, 40.0, 18.0, 0.8407905441604646], [1071, 1.0363398012755658, 1090.763973585378, 4.0, 40.0, 18.0, 0.6848039829600742], [1072, 3.237176891741856, 1216.7050270741079, 4.0, 40.0, 18.0, 0.5752419675428667], [1073, 3.6846385922453555, 1022.0347809340302, 4.0, 40.0, 18.0, 0.5844967780427568], [1074, 16.48237661016666, 230.3628634944973, 4.0, 40.0, 18.0, 0.42753973279031254], [1075, 11.887183915844156, 1532.8909206881513, 4.0, 40.0, 18.0, 0.31745651977986195], [1076, 9.795987619152806, 368.4240752495669, 4.0, 40.0, 18.0, 0.5377963414991482], [1077, 1.5725362031766448, 613.1519482784821, 4.0, 40.0, 18.0, 0.7763502871890664], [1078, 16.14442837863233, 1789.6274344911128, 4.0, 40.0, 18.0, 0.22301517771401855], [1079, 3.010681177382583, 525.0947325429249, 4.0, 40.0, 18.0, 0.7418565423427339], [1080, 2.626050049856609, 87.26077739123517, 4.0, 40.0, 18.0, 1.0], [1081, 3.5461655770160334, 721.0241535448812, 4.0, 40.0, 18.0, 0.6508579971217162], [1082, 0.43942912734418327, 1903.2370431310198, 4.0, 40.0, 18.0, 0.6214608856436676], [1083, 14.201089097245724, 1418.6458888791833, 4.0, 40.0, 18.0, 0.27532909801989186], [1084, 15.697789759021756, 71.49820317665981, 4.0, 40.0, 18.0, 0.7420874614185295], [1085, 3.894179835326802, 788.2217364023721, 4.0, 40.0, 18.0, 0.619585289675786], [1086, 1.9379248240751918, 622.0804084562546, 4.0, 40.0, 18.0, 
0.7553662922444764], [1087, 2.9280871415937564, 368.8052447090669, 4.0, 40.0, 18.0, 0.8408900945428005], [1088, 14.192762025559434, 394.2730494780759, 4.0, 40.0, 18.0, 0.4001357721309345], [1089, 12.615397903867029, 1710.6393537745012, 4.0, 40.0, 18.0, 0.29387052050302004], [1090, 13.388229013777496, 1019.0095809250246, 4.0, 40.0, 18.0, 0.31715968686528295], [1091, 13.936394137395718, 494.0804879432962, 4.0, 40.0, 18.0, 0.3761526138501655], [1092, 2.6626983703910425, 1331.096613064419, 4.0, 40.0, 18.0, 0.5840920102088519], [1093, 1.6606063674304958, 1330.8185094517041, 4.0, 40.0, 18.0, 0.6243939242790345], [1094, 11.782712083760497, 1672.6850788957563, 4.0, 40.0, 18.0, 0.3131053486626985], [1095, 15.184319320772518, 1755.46332324165, 4.0, 40.0, 18.0, 0.2414660477031268], [1096, 12.919814450210065, 1136.5904190118838, 4.0, 40.0, 18.0, 0.3183027222015717], [1097, 13.397585541158913, 1420.02894350462, 4.0, 40.0, 18.0, 0.29137754426213663], [1098, 16.039296411196265, 49.81395132343184, 4.0, 40.0, 18.0, 1.0], [1099, 0.03223062208415195, 564.5007444947587, 4.0, 40.0, 18.0, 0.8738943923720012], [1100, 0.3965324674328574, 208.67144682210144, 4.0, 40.0, 18.0, 1.0], [1101, 0.2587454158897944, 492.9092213386541, 4.0, 40.0, 18.0, 0.8978433478561145], [1102, 2.4512282270355894, 969.5733077750301, 4.0, 40.0, 18.0, 0.6435722255483033], [1103, 13.037110622507559, 1978.7926611925182, 4.0, 40.0, 18.0, 0.2752066659479918], [1104, 0.09702801470292499, 1034.568683688643, 4.0, 40.0, 18.0, 0.737549579804514], [1105, 3.7243141512925186, 1252.7413042473172, 4.0, 40.0, 18.0, 0.5531106760686401], [1106, 3.7300823221064467, 1840.6662082327387, 4.0, 40.0, 18.0, 0.5050181829485517], [1107, 0.7390472827273027, 951.1868047048595, 4.0, 40.0, 18.0, 0.7237118480285332], [1108, 14.699203480418248, 790.876583285864, 4.0, 40.0, 18.0, 0.3100833375715713], [1109, 13.744642637595122, 295.211022577771, 4.0, 40.0, 18.0, 0.4565803091908225], [1110, 1.3234921726933306, 1411.1810785263465, 4.0, 40.0, 18.0, 
0.6291183294059258], [1111, 10.733819827658278, 1594.8047339872448, 4.0, 40.0, 18.0, 0.3394986221586111], [1112, 15.08396030415058, 1199.8789231506944, 4.0, 40.0, 18.0, 0.2699211489375776], [1113, 14.151848809304024, 1869.221704700846, 4.0, 40.0, 18.0, 0.2572297358093538], [1114, 15.425082152374571, 1928.9469914362653, 4.0, 40.0, 18.0, 0.2320335107672802], [1115, 3.1846824091441563, 56.284039786743236, 4.0, 40.0, 18.0, 1.0], [1116, 15.023770557348968, 89.36796050368335, 4.0, 40.0, 18.0, 0.6852249853431339], [1117, 1.9043719116248894, 810.3152033779872, 4.0, 40.0, 18.0, 0.701132851442026], [1118, 2.476744419555659, 512.9939632842418, 4.0, 40.0, 18.0, 0.7734321965867296], [1119, 5.638224412403431, 947.4189031293905, 4.0, 40.0, 18.0, 0.5248719760731467], [1120, 12.582768429433104, 889.0572248684971, 4.0, 40.0, 18.0, 0.34691433189565624], [1121, 13.209996807812674, 1703.2990868663117, 4.0, 40.0, 18.0, 0.2816933934759444], [1122, 2.571196462356841, 965.5725176028748, 4.0, 40.0, 18.0, 0.6391147897940703], [1123, 15.549839824671203, 860.7425022913329, 4.0, 40.0, 18.0, 0.28533522764891855], [1124, 2.290197107034295, 37.718444738812885, 4.0, 40.0, 18.0, 1.0], [1125, 15.296633491654081, 1631.6983714902794, 4.0, 40.0, 18.0, 0.24385832761706278], [1126, 1.9773314267862319, 824.5442653048218, 4.0, 40.0, 18.0, 0.6943966812207562], [1127, 13.585137721344376, 1533.1725756544176, 4.0, 40.0, 18.0, 0.282034008235188], [1128, 15.724474189120706, 1222.4003815287786, 4.0, 40.0, 18.0, 0.25636618186995946], [1129, 15.054773013057204, 28.788618774487205, 4.0, 40.0, 18.0, 1.0], [1130, 3.935755879936649, 451.2790557553616, 4.0, 40.0, 18.0, 0.7340605450519974], [1131, 12.060374322802431, 1695.8775129845455, 4.0, 40.0, 18.0, 0.3064044663551058], [1132, 0.5699100026349455, 257.01071505759273, 4.0, 40.0, 18.0, 1.0], [1133, 2.337056732561778, 1332.1933599946979, 4.0, 40.0, 18.0, 0.5967274659049205], [1134, 14.929161291553678, 1858.4160649572784, 4.0, 40.0, 18.0, 0.24298573172591478], [1135, 
16.37830189256581, 735.8850746398455, 4.0, 40.0, 18.0, 0.28103652066356616], [1136, 3.008408854260301, 732.031875140908, 4.0, 40.0, 18.0, 0.6715223796687633], [1137, 1.9402546562278868, 260.90790268695594, 4.0, 40.0, 18.0, 1.0], [1138, 1.7561927137871818, 1729.9272434246395, 4.0, 40.0, 18.0, 0.5819820807182222], [1139, 3.3182210403211245, 350.9610525818323, 4.0, 40.0, 18.0, 0.8352300172435078], [1140, 12.010770887643613, 1248.6746024116271, 4.0, 40.0, 18.0, 0.3305460939325732], [1141, 10.85941168088137, 71.47059309401556, 4.0, 40.0, 18.0, 1.0], [1142, 10.980415534436846, 166.37596074320402, 4.0, 40.0, 18.0, 0.6761774118115749], [1143, 1.5617875088281348, 675.6474977311742, 4.0, 40.0, 18.0, 0.7552429145001589], [1144, 13.165972382410674, 1899.8163826372215, 4.0, 40.0, 18.0, 0.2748329138010692], [1145, 12.190708636744622, 751.015848145742, 4.0, 40.0, 18.0, 0.3715866481019394], [1146, 1.3287748158002197, 1381.2224180495753, 4.0, 40.0, 18.0, 0.6323922963778769], [1147, 14.349986748903923, 662.8479716854886, 4.0, 40.0, 18.0, 0.33361783156616204], [1148, 0.8117701028446691, 1330.9808890399793, 4.0, 40.0, 18.0, 0.6605914629692309], [1149, 10.694801028864255, 1180.9791185972229, 4.0, 40.0, 18.0, 0.3654725380201102], [1150, 12.522745003700294, 1288.6646026503458, 4.0, 40.0, 18.0, 0.3169286746814777], [1151, 14.125243173427773, 1704.1461229143479, 4.0, 40.0, 18.0, 0.2631058145280849], [1152, 12.640158288260164, 1325.0179824092363, 4.0, 40.0, 18.0, 0.31216247638923234], [1153, 2.996767138831382, 1340.973855832597, 4.0, 40.0, 18.0, 0.5701560739249362], [1154, 1.0103158459050823, 149.82453054600853, 4.0, 40.0, 18.0, 1.0], [1155, 15.45792707994535, 490.6957143049323, 4.0, 40.0, 18.0, 0.34124500808218117], [1156, 12.632808993338807, 60.51819786436858, 4.0, 40.0, 18.0, 1.0], [1157, 9.995708285889641, 1053.1451951133604, 4.0, 40.0, 18.0, 0.3931658299507808], [1158, 2.3665670302411397, 619.0489234318621, 4.0, 40.0, 18.0, 0.7357972231848412], [1159, 1.992289451710359, 
542.7269335929699, 4.0, 40.0, 18.0, 0.7841200540845771], [1160, 1.0581205167158352, 1797.386027870636, 4.0, 40.0, 18.0, 0.60410395312093], [1161, 4.506478622211751, 1922.4010309402897, 4.0, 40.0, 18.0, 0.4763789694653317], [1162, 1.230245115157081, 862.4269173835855, 4.0, 40.0, 18.0, 0.7199665502138691], [1163, 12.27136767117742, 1886.5720539480553, 4.0, 40.0, 18.0, 0.2933905468259819], [1164, 0.25313841335976406, 741.2614845795835, 4.0, 40.0, 18.0, 0.7979299627443198], [1165, 1.478970185735261, 1564.1201914549476, 4.0, 40.0, 18.0, 0.6071810359443801], [1166, 16.09605943976552, 919.6453315499255, 4.0, 40.0, 18.0, 0.2693965632365857], [1167, 4.037566215031811, 66.79749823781658, 4.0, 40.0, 18.0, 1.0], [1168, 19.78364305035553, 1938.2673473014622, 4.0, 40.0, 18.0, 0.16089148757518487], [1169, 2.4027005670192043, 41.94031467342457, 4.0, 40.0, 18.0, 1.0], [1170, 0.6820141827833761, 1399.187879991399, 4.0, 40.0, 18.0, 0.639718489507238], [1171, 19.036076524320162, 123.42767966478047, 4.0, 40.0, 18.0, 0.449049620257609], [1172, 3.1399276566344376, 369.9001366174945, 4.0, 40.0, 18.0, 0.7666244215791429], [1173, 3.236058640430873, 283.40322513670935, 4.0, 40.0, 18.0, 0.8203188141646067], [1174, 1.9691454086109306, 961.7632204257682, 4.0, 40.0, 18.0, 0.6449613351258549], [1175, 2.715182514964546, 1150.6740337312187, 4.0, 40.0, 18.0, 0.5920930222828198], [1176, 27.21311553963702, 1756.2584035565546, 4.0, 40.0, 18.0, 0.0903216643796868], [1177, 16.332278679348473, 1929.1325653020458, 4.0, 40.0, 18.0, 0.21548233630350994], [1178, 18.851358241448246, 164.25249141088568, 4.0, 40.0, 18.0, 0.40659466958377105], [1179, 1.2645877257120057, 128.72759562109485, 4.0, 40.0, 18.0, 1.0], [1180, 2.174052221206166, 1871.2330478166857, 4.0, 40.0, 18.0, 0.5500519158934457], [1181, 0.369264383329146, 79.06962927973483, 4.0, 40.0, 18.0, 1.0], [1182, 5.015590388923191, 1829.5542832770025, 4.0, 40.0, 18.0, 0.46517364188444893], [1183, 7.9641432612560354, 1659.8180695861129, 4.0, 40.0, 18.0, 
0.4003670474723286], [1184, 6.3462868073769085, 1397.883291575914, 4.0, 40.0, 18.0, 0.45477617599031894], [1185, 7.355755954655896, 1807.5327951701765, 4.0, 40.0, 18.0, 0.4074628472567552], [1186, 8.64442960873242, 1303.9973933918432, 4.0, 40.0, 18.0, 0.40467635360271276], [1187, 8.269376490066012, 1475.3787626067367, 4.0, 40.0, 18.0, 0.4029830158676198], [1188, 5.295471215691508, 961.476795656042, 4.0, 40.0, 18.0, 0.5283487182528002], [1189, 7.186759498414793, 798.0421632859379, 4.0, 40.0, 18.0, 0.49340469296409467], [1190, 1.8033674141831544, 1555.069034300783, 4.0, 40.0, 18.0, 0.5852046358304841], [1191, 5.178531140767847, 1602.1361815824275, 4.0, 40.0, 18.0, 0.4737608456775778], [1192, 17.499316946265807, 620.0053180513956, 4.0, 40.0, 18.0, 0.2756750300803742], [1193, 1.9355781932515268, 381.00787564845757, 4.0, 40.0, 18.0, 0.8101138400526966], [1194, 7.336185562775143, 423.59185827148286, 4.0, 40.0, 18.0, 0.5803122245928203], [1195, 18.33887179153779, 1359.1806943668778, 4.0, 40.0, 18.0, 0.2023491071916834], [1196, 7.130302768984083, 1340.6693328992794, 4.0, 40.0, 18.0, 0.43880829257870607], [1197, 13.414444529516082, 128.79726388648123, 4.0, 40.0, 18.0, 0.6071991537058363], [1198, 8.267772881612357, 294.9600472733411, 4.0, 40.0, 18.0, 0.6126383197909909], [1199, 7.337264528588566, 1634.5111683840066, 4.0, 40.0, 18.0, 0.4158109062971177], [1200, 6.9139864083816285, 141.39951824680566, 4.0, 40.0, 18.0, 0.8421334519140697], [1201, 8.175265131631265, 103.74759390533359, 4.0, 40.0, 18.0, 1.0], [1202, 6.315423840438488, 105.9531581358207, 4.0, 40.0, 18.0, 1.0], [1203, 7.977137727503396, 398.44340273995226, 4.0, 40.0, 18.0, 0.5686245839943815], [1204, 8.332407119903376, 1121.741332798518, 4.0, 40.0, 18.0, 0.42516968726602145], [1205, 6.569668765469224, 1981.7682164139594, 4.0, 40.0, 18.0, 0.4182336957011782], [1206, 6.92982025249804, 836.6303225546189, 4.0, 40.0, 18.0, 0.4952506114546419], [1207, 4.724765881516024, 66.19126710533727, 4.0, 40.0, 18.0, 1.0], [1208, 
5.496455450181003, 1020.5655269761479, 4.0, 40.0, 18.0, 0.5144030695496474], [1209, 12.23978705101208, 124.71125716357164, 4.0, 40.0, 18.0, 0.6573088266938368], [1210, 16.712612730021203, 926.4867825338386, 4.0, 40.0, 18.0, 0.2572257334400859], [1211, 20.454886096365193, 186.34483810853808, 4.0, 40.0, 18.0, 0.35263237159788147], [1212, 9.077300632255346, 1543.3696335480215, 4.0, 40.0, 18.0, 0.37983852792403305], [1213, 10.002309398630995, 167.45928656638495, 4.0, 40.0, 18.0, 0.6693986801311453], [1214, 5.041531638213447, 539.451421243852, 4.0, 40.0, 18.0, 0.6216735892997367], [1215, 17.295354635393192, 1258.190830900827, 4.0, 40.0, 18.0, 0.22503256583007158], [1216, 7.529610314398463, 81.18094844993291, 4.0, 40.0, 18.0, 1.0], [1217, 8.716881519353677, 522.0598522107705, 4.0, 40.0, 18.0, 0.5034524537641513], [1218, 23.207488562506025, 1183.9987562749156, 4.0, 40.0, 18.0, 0.14273251949545332], [1219, 15.53041038553193, 1653.9244284660335, 4.0, 40.0, 18.0, 0.23876833803890174], [1220, 7.581987642793694, 126.5490051137025, 4.0, 40.0, 18.0, 0.8471405292924625], [1221, 18.100579927708925, 78.56611237387193, 4.0, 40.0, 18.0, 0.5723543398356996], [1222, 10.032954048928659, 1583.0188619122825, 4.0, 40.0, 18.0, 0.3552885933537411], [1223, 3.928618864946684, 1172.098169381809, 4.0, 40.0, 18.0, 0.5476817583031351], [1224, 8.878072541422867, 1052.1277030255112, 4.0, 40.0, 18.0, 0.41794991221434724], [1225, 7.538354231026863, 1150.5325398662783, 4.0, 40.0, 18.0, 0.4430023174977232], [1226, 3.86430927339438, 159.5061944069398, 4.0, 40.0, 18.0, 1.0], [1227, 6.271635001441852, 714.0497554992917, 4.0, 40.0, 18.0, 0.5362393576330298], [1228, 5.284787910798242, 934.5347648863686, 4.0, 40.0, 18.0, 0.5323052224051645], [1229, 9.357768871113777, 983.1155550614441, 4.0, 40.0, 18.0, 0.4128187349829811], [1230, 1.9260645051823582, 1352.833337687172, 4.0, 40.0, 18.0, 0.5987624743799103], [1231, 2.964067273683936, 559.9733735227916, 4.0, 40.0, 18.0, 0.6943021026302088], [1232, 
8.987943269243626, 87.84460272548935, 4.0, 40.0, 18.0, 1.0], [1233, 8.892778007256128, 1879.2127971726704, 4.0, 40.0, 18.0, 0.3681950952851235], [1234, 7.776003883481518, 1269.5906241773212, 4.0, 40.0, 18.0, 0.42744039552339286], [1235, 2.801001159610994, 327.06479995276214, 4.0, 40.0, 18.0, 0.806736806048399], [1236, 5.7188470581733775, 711.1044171570837, 4.0, 40.0, 18.0, 0.5551491587240777], [1237, 7.898725291077167, 917.4608916170924, 4.0, 40.0, 18.0, 0.45666826071995703], [1238, 10.606889008599666, 160.0841741182571, 4.0, 40.0, 18.0, 0.6572256724301155], [1239, 7.432590392373277, 187.00049815458232, 4.0, 40.0, 18.0, 0.7443421818814376], [1240, 13.774078721690827, 1266.0431727037835, 4.0, 40.0, 18.0, 0.2913974431720982], [1241, 6.282444921870322, 38.019598628753926, 4.0, 40.0, 18.0, 1.0], [1242, 21.07774803192213, 554.8461810902066, 4.0, 40.0, 18.0, 0.2219080730434804], [1243, 6.886359730237494, 127.40059719552774, 4.0, 40.0, 18.0, 0.8964373692149963], [1244, 13.42167230468698, 801.5509709291521, 4.0, 40.0, 18.0, 0.3373220817679418], [1245, 23.092273369478555, 50.99786504059964, 4.0, 40.0, 18.0, 0.5337052832409289], [1246, 4.548593126766056, 1453.1644361071328, 4.0, 40.0, 18.0, 0.5025293218844562], [1247, 2.3629948907309637, 922.9084452025695, 4.0, 40.0, 18.0, 0.6362891669369165], [1248, 6.0197603572237455, 156.37500238824265, 4.0, 40.0, 18.0, 0.852839393860757], [1249, 12.755982625323353, 56.05131895169758, 4.0, 40.0, 18.0, 1.0], [1250, 7.6034813542348285, 62.955048796187185, 4.0, 40.0, 18.0, 1.0], [1251, 17.919673159104054, 699.2242419395886, 4.0, 40.0, 18.0, 0.25715009688587154], [1252, 6.555400094542579, 415.27269071849696, 4.0, 40.0, 18.0, 0.6113975663260102], [1253, 0.4683803992454405, 766.3019646419847, 4.0, 40.0, 18.0, 0.7391491333636743], [1254, 24.065839967024313, 56.13669337316097, 4.0, 40.0, 18.0, 0.48409256999326084], [1255, 6.188304545031862, 87.83946702881296, 4.0, 40.0, 18.0, 1.0], [1256, 5.6545761135112595, 409.4800516606527, 4.0, 40.0, 18.0, 
0.64720109692411], [1257, 8.875924397475725, 1885.198280422778, 4.0, 40.0, 18.0, 0.36834073945473816], [1258, 5.9812605938000765, 1627.8956165996356, 4.0, 40.0, 18.0, 0.45019048409168305], [1259, 7.563385596100861, 1668.4160934378785, 4.0, 40.0, 18.0, 0.40893350760687874], [1260, 6.767522146498627, 1892.654600288721, 4.0, 40.0, 18.0, 0.41727179473952486], [1261, 9.003768343888353, 924.3897737315632, 4.0, 40.0, 18.0, 0.4275660496560671], [1262, 10.11415998228599, 92.72362753509303, 4.0, 40.0, 18.0, 0.8420548802855079], [1263, 3.1676425521769103, 24.25115770596276, 4.0, 40.0, 18.0, 1.0], [1264, 8.60709620270132, 176.3005333520283, 4.0, 40.0, 18.0, 0.7113186351743177], [1265, 8.574862833918065, 1867.2280891759276, 4.0, 40.0, 18.0, 0.37604187015770435], [1266, 22.568124982996913, 447.49349422921375, 4.0, 40.0, 18.0, 0.21648344682626164], [1267, 7.442154491451388, 1048.0093148569185, 4.0, 40.0, 18.0, 0.45473073539560094], [1268, 6.380765099104327, 809.1409989911083, 4.0, 40.0, 18.0, 0.5161337468286761], [1269, 6.2703930117405635, 100.32388964712213, 4.0, 40.0, 18.0, 1.0], [1270, 10.891774882555897, 1079.3078057471048, 4.0, 40.0, 18.0, 0.36813708960246255], [1271, 8.95396269186028, 1627.7744256533495, 4.0, 40.0, 18.0, 0.3783056440799314], [1272, 18.44610600865544, 1757.4927772048977, 4.0, 40.0, 18.0, 0.18551712146322075], [1273, 4.6786716661006835, 547.769122013201, 4.0, 40.0, 18.0, 0.632447593403183], [1274, 2.1251800097959395, 892.7901026143173, 4.0, 40.0, 18.0, 0.6502236632149607], [1275, 8.741687911881119, 145.72296449208903, 4.0, 40.0, 18.0, 0.7542550861397427], [1276, 6.0317976118249925, 1525.2067264516947, 4.0, 40.0, 18.0, 0.4547038137911362], [1277, 4.8260706205210875, 996.9702392265958, 4.0, 40.0, 18.0, 0.5388373173621561], [1278, 8.978237387083567, 1320.1616961970205, 4.0, 40.0, 18.0, 0.3958794698591554], [1279, 6.6378293436968, 1161.2106690573914, 4.0, 40.0, 18.0, 0.4660687658328459], [1280, 8.458132265868903, 158.34178015920972, 4.0, 40.0, 18.0, 
0.744202484161588], [1281, 6.512084835891488, 1365.227444970359, 4.0, 40.0, 18.0, 0.45284650512938746], [1282, 18.946150935904775, 836.8908630333353, 4.0, 40.0, 18.0, 0.22517478630818474], [1283, 4.197391059439028, 157.4806314253832, 4.0, 40.0, 18.0, 1.0], [1284, 15.704640944044323, 922.7576841594541, 4.0, 40.0, 18.0, 0.27692127673445993], [1285, 8.993572378756085, 1523.1643034307276, 4.0, 40.0, 18.0, 0.38288870048956913], [1286, 9.551762373963772, 137.37838587082774, 4.0, 40.0, 18.0, 0.7367472147108782], [1287, 6.836120334993654, 108.57200969134549, 4.0, 40.0, 18.0, 1.0], [1288, 18.623187829706257, 129.6522516712035, 4.0, 40.0, 18.0, 0.4505472862340692], [1289, 7.711400972402812, 177.20270732729372, 4.0, 40.0, 18.0, 0.7462122889683686], [1290, 26.761811418085376, 118.05657923867592, 4.0, 40.0, 18.0, 0.29554905956925015], [1291, 7.662669097791084, 56.90296588565029, 4.0, 40.0, 18.0, 1.0], [1292, 0.10870203577803483, 1741.7558595401888, 4.0, 40.0, 18.0, 0.6317376280654958], [1293, 1.9473160729516499, 145.14083380839472, 4.0, 40.0, 18.0, 1.0], [1294, 6.737575277744637, 608.2291946195155, 4.0, 40.0, 18.0, 0.5438704744699925], [1295, 19.682639589441887, 198.74232015310795, 4.0, 40.0, 18.0, 0.36004526049807617], [1296, 2.473123880916147, 1716.4999207626365, 4.0, 40.0, 18.0, 0.5500478185539265], [1297, 5.193408794676076, 1418.6348634687708, 4.0, 40.0, 18.0, 0.4859055759995014], [1298, 7.183575667393996, 417.07295184014845, 4.0, 40.0, 18.0, 0.5883269378654972], [1299, 21.150832560894557, 558.1605411763966, 4.0, 40.0, 18.0, 0.22015338400895257], [1300, 6.807668500434356, 22.774253873927528, 4.0, 40.0, 18.0, 1.0], [1301, 14.825811837772338, 1815.779515961615, 4.0, 40.0, 18.0, 0.24641851346536917], [1302, 19.61257903220836, 174.76549651402712, 4.0, 40.0, 18.0, 0.38045717360338593], [1303, 9.44597038411877, 1460.8613774570842, 4.0, 40.0, 18.0, 0.375590569182475], [1304, 8.363350677255537, 108.24812024854873, 4.0, 40.0, 18.0, 1.0], [1305, 11.627619932330308, 462.2438699082355, 
4.0, 40.0, 18.0, 0.4386236246487028], [1306, 4.25375068601658, 44.737240506673565, 4.0, 40.0, 18.0, 1.0], [1307, 15.323723976306244, 193.13690043569127, 4.0, 40.0, 18.0, 0.4672625452083837], [1308, 7.67840565342198, 1221.622652501602, 4.0, 40.0, 18.0, 0.4335147827423309], [1309, 10.161472753743686, 637.9307505211873, 4.0, 40.0, 18.0, 0.4380512233028316], [1310, 5.352103221972685, 769.5004046201453, 4.0, 40.0, 18.0, 0.5562587887761353], [1311, 5.94396778460326, 734.3183615782542, 4.0, 40.0, 18.0, 0.5431304519220624], [1312, 4.329894397767422, 198.44607248145576, 4.0, 40.0, 18.0, 0.8611783341871606], [1313, 7.793884550435181, 195.9457843165826, 4.0, 40.0, 18.0, 0.7184431228756457], [1314, 1.6458396071403951, 1543.8211216833088, 4.0, 40.0, 18.0, 0.5916827782308387], [1315, 5.2397810323720115, 773.2872602366556, 4.0, 40.0, 18.0, 0.5592971078305247], [1316, 5.071334677576081, 1264.1841390372872, 4.0, 40.0, 18.0, 0.5022489125175499], [1317, 4.915897698343463, 190.8311847495432, 4.0, 40.0, 18.0, 0.8452452350984642], [1318, 18.131443878584584, 1355.144172984478, 4.0, 40.0, 18.0, 0.20603050675229675], [1319, 5.045425961883796, 1263.3857271786644, 4.0, 40.0, 18.0, 0.5031427613586562], [1320, 25.69668842680312, 25.471367110249652, 4.0, 40.0, 18.0, 1.0], [1321, 13.64121552667359, 282.1824957504032, 4.0, 40.0, 18.0, 0.4526475419564154], [1322, 7.894733259432112, 1686.619810566747, 4.0, 40.0, 18.0, 0.4006909265458889], [1323, 3.0672278084490934, 65.44236325424095, 4.0, 40.0, 18.0, 1.0], [1324, 8.26341516439206, 1780.6937209454986, 4.0, 40.0, 18.0, 0.3871309731675182], [1325, 12.025004855539096, 1169.499783664861, 4.0, 40.0, 18.0, 0.3348465579061486], [1326, 4.47725367246014, 61.237256127318005, 4.0, 40.0, 18.0, 1.0], [1327, 7.246862918086097, 573.9746077988173, 4.0, 40.0, 18.0, 0.5356513270883977], [1328, 18.144359786541134, 1899.8874592795703, 4.0, 40.0, 18.0, 0.18598501949557647], [1329, 6.013099458010077, 1658.5314414190225, 4.0, 40.0, 18.0, 0.44767210184258555], [1330, 
0.8940771986066971, 1582.2689871819275, 4.0, 40.0, 18.0, 0.6156049967055883], [1331, 4.907385422705094, 729.179555570033, 4.0, 40.0, 18.0, 0.5791881555367441], [1332, 16.99250656770565, 174.90756645855836, 4.0, 40.0, 18.0, 0.44054102068717704], [1333, 5.068817012991641, 127.23313653932355, 4.0, 40.0, 18.0, 1.0], [1334, 25.195788921777797, 199.22631283422874, 4.0, 40.0, 18.0, 0.25507330373329373], [1335, 21.544753706441554, 555.1440084697524, 4.0, 40.0, 18.0, 0.21454677378346265], [1336, 9.916165122980521, 1541.0613415556109, 4.0, 40.0, 18.0, 0.35997462896430976], [1337, 6.526742349405956, 1807.7032492032918, 4.0, 40.0, 18.0, 0.42692317849711736], [1338, 8.563157796132174, 1546.4066817804764, 4.0, 40.0, 18.0, 0.392069891200638], [1339, 0.13033089636797346, 192.62951101195787, 4.0, 40.0, 18.0, 1.0], [1340, 6.99974565403118, 700.5548064422012, 4.0, 40.0, 18.0, 0.5157984088004443], [1341, 7.930998747820943, 1924.7987871999992, 4.0, 40.0, 18.0, 0.388928666683442], [1342, 6.50218765952285, 103.3118977811145, 4.0, 40.0, 18.0, 1.0], [1343, 6.719471840101348, 524.3954111611961, 4.0, 40.0, 18.0, 0.566853611351499], [1344, 9.751484178237913, 1675.8590959706237, 4.0, 40.0, 18.0, 0.3572178883988987], [1345, 0.10556604256910118, 1988.3050719028138, 4.0, 40.0, 18.0, 0.6150523684283697], [1346, 7.465932691532797, 183.1350873155065, 4.0, 40.0, 18.0, 0.7480246811260532], [1347, 7.265061941242257, 1125.8668851620005, 4.0, 40.0, 18.0, 0.45221003013275557], [1348, 6.122643015680119, 505.9987529538203, 4.0, 40.0, 18.0, 0.5931976591557482], [1349, 14.052223142753279, 1646.8249220778218, 4.0, 40.0, 18.0, 0.26745803017459796], [1350, 2.285564024387261, 1736.8013731136873, 4.0, 40.0, 18.0, 0.555062623731482], [1351, 6.739586680709102, 1344.7777862749588, 4.0, 40.0, 18.0, 0.4484823896795065], [1352, 7.164191395642688, 1117.1305572109643, 4.0, 40.0, 18.0, 0.4554428400556505], [1353, 14.849823545415909, 60.51695642447782, 4.0, 40.0, 18.0, 0.7833688915537796], [1354, 1.0194288663691398, 
643.6275149273292, 4.0, 40.0, 18.0, 0.7469355692128384], [1355, 4.224066021987014, 1850.6837171253924, 4.0, 40.0, 18.0, 0.4866177807894939], [1356, 5.28716431248343, 1198.5718831747179, 4.0, 40.0, 18.0, 0.501774866899628], [1357, 8.340162678424349, 21.32494338213747, 4.0, 40.0, 18.0, 1.0], [1358, 9.719696774086742, 1841.7448205686303, 4.0, 40.0, 18.0, 0.35093579334605834], [1359, 16.814467905302354, 436.53341151347615, 4.0, 40.0, 18.0, 0.3242747967897373], [1360, 7.136016058138387, 1805.174997613296, 4.0, 40.0, 18.0, 0.41233348649200013], [1361, 25.680360170614822, 1916.2654263793163, 4.0, 40.0, 18.0, 0.09868157245353541], [1362, 23.099160932893476, 53.988658358854174, 4.0, 40.0, 18.0, 0.5190200093701248], [1363, 8.287422700055004, 984.1446901483798, 4.0, 40.0, 18.0, 0.43950859004598525], [1364, 12.058783954039447, 133.8844636343489, 4.0, 40.0, 18.0, 0.6463895822376701], [1365, 9.570013135856426, 661.2203894418175, 4.0, 40.0, 18.0, 0.44918698955353176], [1366, 17.539737847664163, 1569.7905336636911, 4.0, 40.0, 18.0, 0.20686584129155425], [1367, 16.375565215042805, 57.32755223864203, 4.0, 40.0, 18.0, 0.7305089329251796], [1368, 20.460560887932456, 855.8125840564995, 4.0, 40.0, 18.0, 0.1991373075193943], [1369, 4.55623263295522, 1282.14661520889, 4.0, 40.0, 18.0, 0.5165644340953469], [1370, 7.150778970106229, 881.760361028266, 4.0, 40.0, 18.0, 0.482389076448841], [1371, 6.420536299167115, 128.6662795596822, 4.0, 40.0, 18.0, 1.0], [1372, 9.09740701735269, 1965.287890119186, 4.0, 40.0, 18.0, 0.36004562132144996], [1373, 10.218863171498334, 505.9085800974442, 4.0, 40.0, 18.0, 0.464425772046037], [1374, 14.65441945974307, 252.30961098180072, 4.0, 40.0, 18.0, 0.44288392155964335], [1375, 16.795703707285128, 226.3918446941288, 4.0, 40.0, 18.0, 0.4068243099239856], [1376, 5.823036947728013, 849.3043870869254, 4.0, 40.0, 18.0, 0.5274785131092828], [1377, 24.154257832958088, 387.69631372576947, 4.0, 40.0, 18.0, 0.2053197251258709], [1378, 9.31706074414023, 123.89462226580088, 
4.0, 40.0, 18.0, 0.7748116244169757], [1379, 8.065688030040926, 653.1942686991022, 4.0, 40.0, 18.0, 0.4927263221995867], [1380, 6.668595913838589, 186.8616931993778, 4.0, 40.0, 18.0, 0.7760405056050108], [1381, 3.533939419568749, 1290.0786579273663, 4.0, 40.0, 18.0, 0.5489046860897507], [1382, 1.7876579280540317, 646.4495812386665, 4.0, 40.0, 18.0, 0.7155867567216679], [1383, 14.54602461532443, 104.6725867910299, 4.0, 40.0, 18.0, 0.6183518069423478], [1384, 9.720111922566346, 1204.4440584728006, 4.0, 40.0, 18.0, 0.3858636006926133], [1385, 4.2663777942188705, 1708.2784897037623, 4.0, 40.0, 18.0, 0.493654176496372], [1386, 5.912671009190981, 125.81832702215486, 4.0, 40.0, 18.0, 1.0], [1387, 2.864431626963559, 88.6105652125078, 4.0, 40.0, 18.0, 1.0], [1388, 8.828281335803887, 1660.0172449378802, 4.0, 40.0, 18.0, 0.3796882638638138], [1389, 16.976929032103577, 38.75175542146202, 4.0, 40.0, 18.0, 1.0], [1390, 6.173275727101157, 21.81769529618452, 4.0, 40.0, 18.0, 1.0], [1391, 6.394661678869747, 1060.9792765034206, 4.0, 40.0, 18.0, 0.4829041861212237], [1392, 8.091634846899952, 452.38127858847275, 4.0, 40.0, 18.0, 0.5442641075440612], [1393, 16.905222691580484, 1451.3963081595855, 4.0, 40.0, 18.0, 0.22252343435978125], [1394, 5.147401899172413, 481.46956022354743, 4.0, 40.0, 18.0, 0.6371341839088344], [1395, 6.327297979245694, 127.24141945635041, 4.0, 40.0, 18.0, 1.0], [1396, 7.632596554337456, 921.2564358208746, 4.0, 40.0, 18.0, 0.4637740331925716], [1397, 25.825657942170633, 205.80766295754094, 4.0, 40.0, 18.0, 0.24137489469123433], [1398, 6.0584440628799365, 500.5116098297718, 4.0, 40.0, 18.0, 0.5973426126154691], [1399, 7.9131059479424115, 1597.757707464418, 4.0, 40.0, 18.0, 0.4046379655980164], [1400, 7.286783127722787, 127.77346965847144, 4.0, 40.0, 18.0, 0.8609159256657956], [1401, 3.113734526985728, 642.4370082728271, 4.0, 40.0, 18.0, 0.6647394288477444], [1402, 5.566438287554614, 861.2438352864752, 4.0, 40.0, 18.0, 0.5338140512797686], [1403, 4.090170831269408, 
1607.9498981143893, 4.0, 40.0, 18.0, 0.5054999709022908], [1404, 5.519173513705386, 356.23734320744296, 4.0, 40.0, 18.0, 0.6788731723037061], [1405, 2.7230058978339438, 1976.9569970489736, 4.0, 40.0, 18.0, 0.5258695281514262], [1406, 4.539128976956881, 1586.7407604409423, 4.0, 40.0, 18.0, 0.49329369871567663], [1407, 8.763996818005866, 1822.4813705337447, 4.0, 40.0, 18.0, 0.37333197806542145], [1408, 9.172136762076102, 1254.3759358401282, 4.0, 40.0, 18.0, 0.39577254459307415], [1409, 22.55971215400875, 317.4500568026399, 4.0, 40.0, 18.0, 0.24816005365660937], [1410, 0.9795654593731009, 1976.1116351746173, 4.0, 40.0, 18.0, 0.5847560014854973], [1411, 8.508525530073715, 122.17045542643952, 4.0, 40.0, 18.0, 0.8146383914943712], [1412, 7.7627152000885555, 571.6975814005536, 4.0, 40.0, 18.0, 0.5199969511981349], [1413, 4.885724552599617, 747.6209323274771, 4.0, 40.0, 18.0, 0.5763116576610152], [1414, 7.482250265907595, 659.9314748865169, 4.0, 40.0, 18.0, 0.5089215291255028], [1415, 16.63449458091894, 149.8296927390698, 4.0, 40.0, 18.0, 0.4762899092703998], [1416, 8.199745593319292, 255.7337656073644, 4.0, 40.0, 18.0, 0.6435593469955595], [1417, 10.396870821616815, 414.165999681726, 4.0, 40.0, 18.0, 0.4866278132533303], [1418, 3.8197082321208518, 326.2534529390826, 4.0, 40.0, 18.0, 0.7650955605387477], [1419, 5.107839148855279, 206.46602475137428, 4.0, 40.0, 18.0, 0.8168528929261701], [1420, 7.136623751745773, 838.3287723587431, 4.0, 40.0, 18.0, 0.4887489743942604], [1421, 11.018485421148723, 1071.1619237716827, 4.0, 40.0, 18.0, 0.3658433545926178], [1422, 7.411689938374195, 174.4799242029652, 4.0, 40.0, 18.0, 0.7625153719057864], [1423, 5.93757187065406, 179.9555009896015, 4.0, 40.0, 18.0, 0.8166183943957896], [1424, 7.147622801389404, 1309.328395659426, 4.0, 40.0, 18.0, 0.44059117539631326], [1425, 7.748731237222881, 630.7807328388087, 4.0, 40.0, 18.0, 0.5068765977587307], [1426, 4.054742971073745, 1242.8736359334082, 4.0, 40.0, 18.0, 0.5362693768841398], [1427, 
16.48206583064995, 756.240680967567, 4.0, 40.0, 18.0, 0.2779619242027664], [1428, 9.319666056394937, 118.7103763672889, 4.0, 40.0, 18.0, 0.7873644770570118], [1429, 9.40316753148614, 1522.841419946728, 4.0, 40.0, 18.0, 0.3729787097983885], [1430, 21.0307354674067, 220.1224659093743, 4.0, 40.0, 18.0, 0.3182457651493458], [1431, 3.5075843326134737, 136.01829096185543, 4.0, 40.0, 18.0, 1.0], [1432, 0.9210005823611191, 184.1876577957321, 4.0, 40.0, 18.0, 1.0], [1433, 9.903996728830823, 1459.2835815847238, 4.0, 40.0, 18.0, 0.36498620823531897], [1434, 19.347748426320095, 57.2984845756346, 4.0, 40.0, 18.0, 0.6172382959089595], [1435, 8.35257194781639, 191.7104433653863, 4.0, 40.0, 18.0, 0.7013766311999595], [1436, 3.6159963555167547, 912.5518876786311, 4.0, 40.0, 18.0, 0.5921725245655639], [1437, 10.585722986769962, 534.282518908241, 4.0, 40.0, 18.0, 0.447736256462454], [1438, 2.360392659617932, 1389.0127337179586, 4.0, 40.0, 18.0, 0.5798697076504878], [1439, 6.423309054741375, 1845.9969610469705, 4.0, 40.0, 18.0, 0.42773102917721917], [1440, 4.9310184094062235, 189.8114709679628, 4.0, 40.0, 18.0, 0.8459776501651886], [1441, 6.756472862851727, 609.2040494682731, 4.0, 40.0, 18.0, 0.5430375240419054], [1442, 22.14181910290872, 1369.791741661247, 4.0, 40.0, 18.0, 0.14771846747461068], [1443, 7.485527111046827, 1682.4025833949743, 4.0, 40.0, 18.0, 0.40995008233885827], [1444, 7.380650671748898, 79.20259967729757, 4.0, 40.0, 18.0, 1.0], [1445, 20.54353796109101, 166.58739838706174, 4.0, 40.0, 18.0, 0.36704344083301926], [1446, 5.673755828504657, 1085.7861657909748, 4.0, 40.0, 18.0, 0.5015361614639686], [1447, 9.984443326715368, 1808.5984835855545, 4.0, 40.0, 18.0, 0.34632257908593983], [1448, 13.219226349241204, 161.59723985510686, 4.0, 40.0, 18.0, 0.5634615415650952], [1449, 7.021111811196257, 111.67981667752437, 4.0, 40.0, 18.0, 1.0], [1450, 8.540184283794405, 1018.9235970172816, 4.0, 40.0, 18.0, 0.42950546783587545], [1451, 5.459702810904283, 1340.8923404361194, 4.0, 40.0, 
18.0, 0.484204256924139], [1452, 14.918864534241061, 168.81417198867373, 4.0, 40.0, 18.0, 0.5024032368379349], [1453, 11.725688011767506, 1591.5073324979096, 4.0, 40.0, 18.0, 0.3178516084122688], [1454, 7.13849244803573, 137.65056449485706, 4.0, 40.0, 18.0, 0.8402491581825245], [1455, 5.007171095240031, 80.09991727899396, 4.0, 40.0, 18.0, 1.0], [1456, 5.264111943684236, 448.8296683598004, 4.0, 40.0, 18.0, 0.6452261103901248], [1457, 6.531901181647383, 115.59103480830353, 4.0, 40.0, 18.0, 1.0], [1458, 4.915021301326687, 1903.0619053882313, 4.0, 40.0, 18.0, 0.4642063693207182], [1459, 9.063858586782045, 158.64134672705455, 4.0, 40.0, 18.0, 0.7192582870148209], [1460, 9.000356201275288, 171.4955053878099, 4.0, 40.0, 18.0, 0.7025235473310786], [1461, 3.6207900837374725, 1227.212346627141, 4.0, 40.0, 18.0, 0.5523525338667007], [1462, 4.559176583172796, 962.7969539049454, 4.0, 40.0, 18.0, 0.552272997307918], [1463, 7.097479716721113, 831.189625528717, 4.0, 40.0, 18.0, 0.4909462324354141], [1464, 7.853843590651697, 1345.2278877031283, 4.0, 40.0, 18.0, 0.42007204358827827], [1465, 15.576567455340557, 442.91370847226943, 4.0, 40.0, 18.0, 0.3499360268169618], [1466, 3.8286033331003066, 1104.8815871998224, 4.0, 40.0, 18.0, 0.558697590944733], [1467, 3.1728991282796795, 1190.268697728775, 4.0, 40.0, 18.0, 0.5714213279096731], [1468, 4.7080831155969864, 1961.9041232881436, 4.0, 40.0, 18.0, 0.4670007609372295], [1469, 9.383962574208145, 47.86997075185706, 4.0, 40.0, 18.0, 1.0], [1470, 5.716170715660127, 1605.4449869355064, 4.0, 40.0, 18.0, 0.4583364631760671], [1471, 6.421737775921175, 766.4220305576592, 4.0, 40.0, 18.0, 0.5219588632484173], [1472, 8.320922043463431, 733.2403755610082, 4.0, 40.0, 18.0, 0.471051349047567], [1473, 5.5026756100814165, 1336.4147369295201, 4.0, 40.0, 18.0, 0.4833100515238367], [1474, 2.873128626161143, 879.5932883855462, 4.0, 40.0, 18.0, 0.6244993168477565], [1475, 10.094220072413082, 1078.3724303164206, 4.0, 40.0, 18.0, 0.38700420057796237], [1476, 
22.45088621608383, 1659.564225464918, 4.0, 40.0, 18.0, 0.13405650221037882], [1477, 6.4952578245604755, 1965.8026158327255, 4.0, 40.0, 18.0, 0.42051057755491655], [1478, 8.254938813595167, 198.26463031907002, 4.0, 40.0, 18.0, 0.6973661418436117], [1479, 6.6693098475435395, 95.97260340666128, 4.0, 40.0, 18.0, 1.0], [1480, 8.960095033469251, 718.1037809228001, 4.0, 40.0, 18.0, 0.4555794401231723], [1481, 4.185226598798953, 551.3340797213306, 4.0, 40.0, 18.0, 0.649891517448095], [1482, 7.669206913490655, 142.53548443934395, 4.0, 40.0, 18.0, 0.8055923428312685], [1483, 4.380209233970239, 55.60462969995695, 4.0, 40.0, 18.0, 1.0], [1484, 7.980059686942393, 1784.7853298189943, 4.0, 40.0, 18.0, 0.3941287284858618], [1485, 0.8487115857203438, 710.7979778009479, 4.0, 40.0, 18.0, 0.7366317340803488], [1486, 23.830501879935564, 367.90587245598323, 4.0, 40.0, 18.0, 0.2144417984749653], [1487, 8.147244185878588, 1649.1988452971443, 4.0, 40.0, 18.0, 0.3966602250652906], [1488, 9.877801977267824, 1236.932159960166, 4.0, 40.0, 18.0, 0.37992647973807897], [1489, 4.90405386155661, 1364.0687250186543, 4.0, 40.0, 18.0, 0.4988081121931583], [1490, 4.219960423087258, 1433.9182882997545, 4.0, 40.0, 18.0, 0.5140779610688632], [1491, 6.960572216316067, 45.60916411877682, 4.0, 40.0, 18.0, 1.0], [1492, 7.607286231016997, 1756.212973066769, 4.0, 40.0, 18.0, 0.40410711860611964], [1493, 7.972396638388323, 103.45973223503046, 4.0, 40.0, 18.0, 1.0], [1494, 8.588974145422014, 115.21985504896168, 4.0, 40.0, 18.0, 0.8310331820405117], [1495, 6.987713123796789, 1816.97476756514, 4.0, 40.0, 18.0, 0.4153834115122825], [1496, 18.630269174539826, 1842.9053613159588, 4.0, 40.0, 18.0, 0.18060055847971512], [1497, 6.200520295863679, 1917.3950639713453, 4.0, 40.0, 18.0, 0.43008645845583837], [1498, 7.615918266417502, 467.0725890266668, 4.0, 40.0, 18.0, 0.5549543656189154], [1499, 13.307852026928668, 1276.8692703322868, 4.0, 40.0, 18.0, 0.3003517095877196], [1500, 10.064607743630653, 305.38473599614144, 4.0, 
40.0, 18.0, 0.5448285916695026], [1501, 4.709094586280131, 1449.619378054903, 4.0, 40.0, 18.0, 0.49799362030237193], [1502, 4.830233562377837, 191.0888654280777, 4.0, 40.0, 18.0, 0.8486572829321771], [1503, 10.397938341200392, 1192.4401813826155, 4.0, 40.0, 18.0, 0.3707346897700712], [1504, 6.744045772849609, 1623.913009423283, 4.0, 40.0, 18.0, 0.43091237613891237], [1505, 8.620423257196446, 1605.5347996379483, 4.0, 40.0, 18.0, 0.38718495978718465], [1506, 2.7539527756474196, 1666.7973145429144, 4.0, 40.0, 18.0, 0.5440997199674483], [1507, 6.986253108067194, 136.48008176877158, 4.0, 40.0, 18.0, 0.8506737431950953], [1508, 13.425107250505567, 53.51126764306746, 4.0, 40.0, 18.0, 1.0], [1509, 5.726309774187078, 1037.17538354635, 4.0, 40.0, 18.0, 0.5054826574138978], [1510, 5.2543291010687, 640.5337955897517, 4.0, 40.0, 18.0, 0.5865578833066498], [1511, 6.964501971213778, 75.4965424659672, 4.0, 40.0, 18.0, 1.0], [1512, 1.2573508892105674, 1567.06544289013, 4.0, 40.0, 18.0, 0.6035673937535503], [1513, 7.615602855085757, 1465.4885661681344, 4.0, 40.0, 18.0, 0.4183867721165828], [1514, 15.734496303273243, 1729.4187601811373, 4.0, 40.0, 18.0, 0.23265730411934044], [1515, 22.111617752957965, 178.00951168217637, 4.0, 40.0, 18.0, 0.3247223965745285], [1516, 5.272018057434318, 902.5477051202643, 4.0, 40.0, 18.0, 0.5371807742235534], [1517, 13.485349491868027, 1653.6600863242463, 4.0, 40.0, 18.0, 0.27859676627349844], [1518, 8.452271206511048, 187.14470296177936, 4.0, 40.0, 18.0, 0.70318017012652], [1519, 9.824187525348615, 1250.867803198726, 4.0, 40.0, 18.0, 0.3802090700265861], [1520, 7.535922362931124, 826.1707695009538, 4.0, 40.0, 18.0, 0.4788668629670954], [1521, 6.557982765004695, 661.1834130318531, 4.0, 40.0, 18.0, 0.537754311538409], [1522, 2.4700428738059377, 117.55284934995986, 4.0, 40.0, 18.0, 1.0], [1523, 8.287639368354647, 63.02474683194713, 4.0, 40.0, 18.0, 1.0], [1524, 25.176251828780927, 1849.6062195943505, 4.0, 40.0, 18.0, 0.10436698962403848], [1525, 
6.4387423928073115, 183.86033083199905, 4.0, 40.0, 18.0, 0.7898335893714638], [1526, 7.244811254458893, 164.021295935586, 4.0, 40.0, 18.0, 0.7853869983860317], [1527, 5.97809335147173, 1438.2181314880895, 4.0, 40.0, 18.0, 0.46221550714787474], [1528, 6.883738392003519, 771.3117976600419, 4.0, 40.0, 18.0, 0.5068373102386473], [1529, 22.582957946437688, 992.1618455315597, 4.0, 40.0, 18.0, 0.1599705319409927], [1530, 5.024551319984834, 1829.8700060930769, 4.0, 40.0, 18.0, 0.4649121746169556], [1531, 7.006263563357525, 1019.2135454245756, 4.0, 40.0, 18.0, 0.470015976938189], [1532, 6.852418448016522, 310.20841793395005, 4.0, 40.0, 18.0, 0.6550366633609217], [1533, 16.785762084605487, 1237.0837480758935, 4.0, 40.0, 18.0, 0.23516399147759037], [1534, 7.579072285555343, 835.9238940359734, 4.0, 40.0, 18.0, 0.47635669865830443], [1535, 8.220778097404907, 1814.7100596899054, 4.0, 40.0, 18.0, 0.38654315636533265], [1536, 3.3149561544562367, 294.92083355666637, 4.0, 40.0, 18.0, 0.8081213075033146], [1537, 4.988566134673856, 1729.7295913850178, 4.0, 40.0, 18.0, 0.47149223187710665], [1538, 6.291919924755264, 1013.2081301490877, 4.0, 40.0, 18.0, 0.49111854488694073], [1539, 8.210123389294777, 132.83571857393963, 4.0, 40.0, 18.0, 0.8023259709401551], [1540, 6.0956817478497705, 407.4492472401333, 4.0, 40.0, 18.0, 0.6315458059797406], [1541, 7.517749593949734, 30.114625427491088, 4.0, 40.0, 18.0, 1.0], [1542, 5.709178216642644, 222.05231710538783, 4.0, 40.0, 18.0, 0.7737139533701478], [1543, 5.3265457462019, 619.5962546307791, 4.0, 40.0, 18.0, 0.5891373514914607], [1544, 8.943530596053572, 148.59445754831265, 4.0, 40.0, 18.0, 0.7408676665509704], [1545, 7.96712057254748, 1193.457412722449, 4.0, 40.0, 18.0, 0.42851853175621196], [1546, 15.140816417249377, 121.88343581080954, 4.0, 40.0, 18.0, 0.5623407367180397], [1547, 7.711760890047227, 1914.0161666749566, 4.0, 40.0, 18.0, 0.3948694863190791], [1548, 10.393619046889896, 579.3342631959591, 4.0, 40.0, 18.0, 0.4430654860752856], 
[1549, 3.2531421951043216, 1613.5838883431766, 4.0, 40.0, 18.0, 0.5315494793734886], [1550, 5.6624236525372496, 1721.256554564797, 4.0, 40.0, 18.0, 0.4532343279865594], [1551, 9.059465828231772, 1845.7991067620585, 4.0, 40.0, 18.0, 0.3657284658490221], [1552, 4.13821052961041, 1044.155612357326, 4.0, 40.0, 18.0, 0.5557068687960173], [1553, 7.557002635510009, 1159.7141364254624, 4.0, 40.0, 18.0, 0.4417935177237833], [1554, 5.422952173682925, 788.888096099482, 4.0, 40.0, 18.0, 0.55040046200748], [1555, 6.341920872683209, 122.49598616193731, 4.0, 40.0, 18.0, 1.0], [1556, 5.942486163575136, 975.413009090001, 4.0, 40.0, 18.0, 0.5062841755766976], [1557, 9.256527097173086, 31.491395161893806, 4.0, 40.0, 18.0, 1.0], [1558, 5.111516108017895, 111.55227211045029, 4.0, 40.0, 18.0, 1.0], [1559, 23.319342545464835, 351.0544985065413, 4.0, 40.0, 18.0, 0.22640154761432907], [1560, 7.67602454730259, 117.96612791305735, 4.0, 40.0, 18.0, 0.8850806152493458], [1561, 2.5803063109338495, 127.32548532685276, 4.0, 40.0, 18.0, 1.0], [1562, 8.483667436909156, 187.94993935838662, 4.0, 40.0, 18.0, 0.7008759650854324], [1563, 8.505485960172487, 560.6523647245923, 4.0, 40.0, 18.0, 0.499923949723935], [1564, 4.927322378695388, 1011.9312471510475, 4.0, 40.0, 18.0, 0.53361455171533], [1565, 5.97610692768192, 90.37361757128325, 4.0, 40.0, 18.0, 1.0], [1566, 17.196907544178057, 173.72181873992542, 4.0, 40.0, 18.0, 0.43659016124799227], [1567, 16.373602472648145, 1484.558772660294, 4.0, 40.0, 18.0, 0.23051325615548954], [1568, 11.25784038332165, 24.365463959628595, 4.0, 40.0, 18.0, 1.0], [1569, 4.539114559930617, 117.82279114081426, 4.0, 40.0, 18.0, 1.0], [1570, 8.137055769416175, 129.55918644115124, 4.0, 40.0, 18.0, 0.8131025884034608], [1571, 7.17321187318156, 1543.3733106520424, 4.0, 40.0, 18.0, 0.4245150767818461], [1572, 4.971997291966351, 1875.4278383080573, 4.0, 40.0, 18.0, 0.4640214638118571], [1573, 5.389884768575948, 678.0938355673694, 4.0, 40.0, 18.0, 0.5731785904098703], [1574, 
6.5352777858328155, 46.48729679491143, 4.0, 40.0, 18.0, 1.0], [1575, 7.083226106729316, 208.11762961437992, 4.0, 40.0, 18.0, 0.7328213729322276], [1576, 7.615889523391522, 98.09827236727384, 4.0, 40.0, 18.0, 1.0], [1577, 4.487274195875061, 1933.6112197503096, 4.0, 40.0, 18.0, 0.4747212362454081], [1578, 13.815818479278862, 1644.0797488667242, 4.0, 40.0, 18.0, 0.2724238693840501], [1579, 6.98248575952951, 93.64992281827371, 4.0, 40.0, 18.0, 1.0], [1580, 6.7717044778518085, 1001.9967188413315, 4.0, 40.0, 18.0, 0.4783501206022522], [1581, 1.530721477890057, 955.9681238030797, 4.0, 40.0, 18.0, 0.662356300931181], [1582, 9.21349684244142, 1534.8545760129753, 4.0, 40.0, 18.0, 0.37700484781190413], [1583, 2.4891278043235117, 760.8017795237251, 4.0, 40.0, 18.0, 0.6612081917676421], [1584, 4.851597685329531, 843.4675315618787, 4.0, 40.0, 18.0, 0.5603282283857064], [1585, 6.284590998314919, 1393.1852870210366, 4.0, 40.0, 18.0, 0.45672079426792156], [1586, 8.573339101412763, 1647.1529565229707, 4.0, 40.0, 18.0, 0.3861399976115557], [1587, 12.485121521140268, 1198.0509007228854, 4.0, 40.0, 18.0, 0.3225931511156782], [1588, 7.188460683795303, 893.7962546201172, 4.0, 40.0, 18.0, 0.47960952583786803], [1589, 13.77065377173318, 1093.5291508078062, 4.0, 40.0, 18.0, 0.3026012238493006], [1590, 11.038071220867437, 1200.4785855359808, 4.0, 40.0, 18.0, 0.35523031589033155], [1591, 11.077642545356793, 112.84857477468286, 4.0, 40.0, 18.0, 0.7286889826134711], [1592, 8.047021404532785, 1430.7417168172196, 4.0, 40.0, 18.0, 0.41023890436230936], [1593, 21.198841542350447, 1594.2306949930814, 4.0, 40.0, 18.0, 0.1517343546149145], [1594, 20.737818059619038, 60.35801817259937, 4.0, 40.0, 18.0, 0.5584882001733179], [1595, 17.65938215847581, 1675.7858999043176, 4.0, 40.0, 18.0, 0.2009636266621825], [1596, 5.523877592864851, 52.319801558893396, 4.0, 40.0, 18.0, 1.0], [1597, 16.83547101508357, 366.7845126799053, 4.0, 40.0, 18.0, 0.34365724979908163], [1598, 5.933608808236918, 636.0701978046965, 
4.0, 40.0, 18.0, 0.5641536101602254], [1599, 8.847791782780538, 769.141049772764, 4.0, 40.0, 18.0, 0.4510547688311206], [1600, 7.47757269856208, 1141.4605021462014, 4.0, 40.0, 18.0, 0.4452828344393443], [1601, 6.266790430490555, 1636.7865830191788, 4.0, 40.0, 18.0, 0.4423811551257093], [1602, 5.658318074957867, 657.8552474471037, 4.0, 40.0, 18.0, 0.5684322662153971], [1603, 12.366747639486434, 30.243731314940444, 4.0, 40.0, 18.0, 1.0], [1604, 5.6856981753668565, 646.890151560126, 4.0, 40.0, 18.0, 0.5699237423176763], [1605, 4.102901479730693, 1539.3520779954683, 4.0, 40.0, 18.0, 0.5098209125389793], [1606, 9.486710768467919, 115.7111630325097, 4.0, 40.0, 18.0, 0.7878439089593062], [1607, 1.4851286867285634, 1927.9575067779454, 4.0, 40.0, 18.0, 0.5699106159404842], [1608, 7.911733324718017, 1211.5728815837865, 4.0, 40.0, 18.0, 0.42847589659378715], [1609, 7.465279783636339, 158.83762958229187, 4.0, 40.0, 18.0, 0.7845883288860804], [1610, 10.061320917690406, 712.7425145613392, 4.0, 40.0, 18.0, 0.4282152326245007], [1611, 7.911532290734311, 1659.4390673425883, 4.0, 40.0, 18.0, 0.4016308131481007], [1612, 0.7852919721511977, 713.8500278258549, 4.0, 40.0, 18.0, 0.7384753120770747], [1613, 6.798342348286864, 1226.7443513302426, 4.0, 40.0, 18.0, 0.45563227999479977], [1614, 9.490832905765648, 186.48638196422908, 4.0, 40.0, 18.0, 0.6636319667081898], [1615, 0.1686326172120972, 112.96335802917251, 4.0, 40.0, 18.0, 1.0], [1616, 4.3852270924502434, 1413.1300533350884, 4.0, 40.0, 18.0, 0.5106265546088355], [1617, 7.569800399757582, 305.361018535936, 4.0, 40.0, 18.0, 0.6312764488097602], [1618, 8.995053900413623, 753.869112218757, 4.0, 40.0, 18.0, 0.4494382332958127], [1619, 5.6886898713491245, 116.17153114751129, 4.0, 40.0, 18.0, 1.0], [1620, 6.465421504831174, 328.68811271429234, 4.0, 40.0, 18.0, 0.6582951005915435], [1621, 3.033598833121677, 60.74334204492873, 4.0, 40.0, 18.0, 1.0], [1622, 1.2218207419255158, 993.9028509830154, 4.0, 40.0, 18.0, 0.6683785854930143], [1623, 
5.647488732281726, 888.4292704592026, 4.0, 40.0, 18.0, 0.5272190185840033], [1624, 4.975118572150626, 1604.9612978741588, 4.0, 40.0, 18.0, 0.47919340951601636], [1625, 8.595509165067217, 616.4978532127075, 4.0, 40.0, 18.0, 0.48472541684070736], [1626, 3.8936450683992607, 952.2082528753598, 4.0, 40.0, 18.0, 0.5764878040995821], [1627, 14.95945571875557, 1261.3537089534711, 4.0, 40.0, 18.0, 0.2679700136085218], [1628, 21.489258931217186, 1390.766249458563, 4.0, 40.0, 18.0, 0.15512650429283778], [1629, 9.058376362837839, 1027.7094762257932, 4.0, 40.0, 18.0, 0.4158554383571835], [1630, 6.745155297252995, 104.48587457215604, 4.0, 40.0, 18.0, 1.0], [1631, 6.297808238511056, 148.72929406847962, 4.0, 40.0, 18.0, 0.8559034195789268], [1632, 7.112354676232983, 101.03525569501069, 4.0, 40.0, 18.0, 1.0], [1633, 6.621417997172424, 934.6571671431897, 4.0, 40.0, 18.0, 0.49087067985461497], [1634, 9.2601626407869, 93.45293263013231, 4.0, 40.0, 18.0, 1.0], [1635, 4.94839046578994, 1433.201264508306, 4.0, 40.0, 18.0, 0.49198221309731804], [1636, 7.3770774159517485, 26.21984462745555, 4.0, 40.0, 18.0, 1.0], [1637, 15.259939818081723, 1838.749858724377, 4.0, 40.0, 18.0, 0.23747799788069238], [1638, 1.6316163727905535, 1997.8689443894837, 4.0, 40.0, 18.0, 0.5609012401618487], [1639, 21.61159647687823, 1270.8469648630462, 4.0, 40.0, 18.0, 0.15848005922279051], [1640, 6.953656638490959, 128.4157837046645, 4.0, 40.0, 18.0, 0.8853194288383118], [1641, 4.770794886720471, 1644.9915509672323, 4.0, 40.0, 18.0, 0.48271060946564276], [1642, 7.040030938905816, 1505.6237457215211, 4.0, 40.0, 18.0, 0.4302584230611116], [1643, 4.198527956123838, 1327.5249866495, 4.0, 40.0, 18.0, 0.5237328477055591], [1644, 3.80275626241089, 1191.2424626352936, 4.0, 40.0, 18.0, 0.549944100648603], [1645, 7.30616210679718, 467.2230107454874, 4.0, 40.0, 18.0, 0.5652872563931881], [1646, 18.62998807007149, 1454.2411967535686, 4.0, 40.0, 18.0, 0.193431661063486], [1647, 3.9524177590718677, 645.3976457633727, 4.0, 40.0, 
18.0, 0.6324148618618348], [1648, 9.259919710583842, 1213.9180149508372, 4.0, 40.0, 18.0, 0.39660378977259375], [1649, 8.98356941878831, 286.804045740392, 4.0, 40.0, 18.0, 0.5925364993975425], [1650, 4.0754944198128005, 1376.6624989827112, 4.0, 40.0, 18.0, 0.5234017131724842], [1651, 20.614821226854374, 1345.0195517438349, 4.0, 40.0, 18.0, 0.16836566916039566], [1652, 11.235265293180639, 1569.0148716555143, 4.0, 40.0, 18.0, 0.32935230267809584], [1653, 0.640553290943116, 1040.434160903121, 4.0, 40.0, 18.0, 0.6838550977546844], [1654, 7.116371545043857, 1767.5494074908672, 4.0, 40.0, 18.0, 0.4145784334456293], [1655, 2.3344891794774325, 658.2178271523104, 4.0, 40.0, 18.0, 0.6909339400600828], [1656, 8.61949641144609, 161.59626815604886, 4.0, 40.0, 18.0, 0.7325055206590708], [1657, 9.499586520503968, 188.40984985917711, 4.0, 40.0, 18.0, 0.6610333494786171], [1658, 10.891146391818191, 712.0602442357903, 4.0, 40.0, 18.0, 0.4082875018740277], [1659, 12.473805884382497, 1066.4056951642303, 4.0, 40.0, 18.0, 0.3328106575333431], [1660, 8.993031963239025, 1206.3474725113097, 4.0, 40.0, 18.0, 0.4033603946066045], [1661, 8.923773680054712, 1395.1969587319325, 4.0, 40.0, 18.0, 0.3922806963897056], [1662, 10.882054019046326, 659.2612501586011, 4.0, 40.0, 18.0, 0.4162864557272494], [1663, 7.798092311334155, 69.88553316718503, 4.0, 40.0, 18.0, 1.0], [1664, 5.905958258587269, 427.398237054045, 4.0, 40.0, 18.0, 0.6299106627822287], [1665, 12.91251293504547, 1034.7296958756046, 4.0, 40.0, 18.0, 0.3255449221157862], [1666, 7.675088266463692, 135.75529350839068, 4.0, 40.0, 18.0, 0.8195281933273749], [1667, 10.91162911921838, 1203.496110055343, 4.0, 40.0, 18.0, 0.3578754283845784], [1668, 3.99511014433471, 1075.01932289308, 4.0, 40.0, 18.0, 0.5567028309953953], [1669, 15.170245914615203, 74.52237344890682, 4.0, 40.0, 18.0, 0.688811699115677], [1670, 15.806257485795008, 672.344855083556, 4.0, 40.0, 18.0, 0.3021638140504407], [1671, 9.63814333889086, 888.0692858547285, 4.0, 40.0, 18.0, 
0.4159430760380338], [1672, 4.062994863761005, 1400.1018974708004, 4.0, 40.0, 18.0, 0.5218857895230017], [1673, 14.275762764849793, 814.0398107743806, 4.0, 40.0, 18.0, 0.31716073638197323], [1674, 18.0647737180295, 684.0561790767533, 4.0, 40.0, 18.0, 0.2563523141479301], [1675, 6.918418318805938, 1312.8630956655209, 4.0, 40.0, 18.0, 0.44610895921061156], [1676, 17.609560699900108, 1449.6242792217233, 4.0, 40.0, 18.0, 0.21029334878178543], [1677, 6.858103420229911, 1596.7709393642663, 4.0, 40.0, 18.0, 0.4295534628649423], [1678, 6.646193081492147, 1020.1458001567873, 4.0, 40.0, 18.0, 0.47999532222399177], [1679, 14.333299259831545, 402.4857354677446, 4.0, 40.0, 18.0, 0.39018956793815157], [1680, 3.9357227257517198, 1009.6160389492817, 4.0, 40.0, 18.0, 0.5670290580913139], [1681, 6.235188591895676, 622.2576374884144, 4.0, 40.0, 18.0, 0.5571805936549958], [1682, 2.0887038020060364, 40.56246359817597, 4.0, 40.0, 18.0, 1.0], [1683, 2.4664850934228433, 867.407067948808, 4.0, 40.0, 18.0, 0.6416755508211597], [1684, 4.852197523168505, 933.149900718775, 4.0, 40.0, 18.0, 0.5464985834764098], [1685, 0.6069216968144497, 1168.0099409816653, 4.0, 40.0, 18.0, 0.6680179537471249], [1686, 3.6613774145206364, 1249.6165792574493, 4.0, 40.0, 18.0, 0.5485991145035783], [1687, 5.398352808918952, 669.7048519657511, 4.0, 40.0, 18.0, 0.5747736781707152], [1688, 12.961855027046893, 1162.0163371867548, 4.0, 40.0, 18.0, 0.31488500613896575], [1689, 0.3031297699446073, 129.185641663362, 4.0, 40.0, 18.0, 1.0], [1690, 15.762586835791058, 82.92259714203308, 4.0, 40.0, 18.0, 0.6365326100045656], [1691, 15.479861791265462, 78.79329070177506, 4.0, 40.0, 18.0, 0.6609007784565603], [1692, 22.56249862676315, 1738.761884201823, 4.0, 40.0, 18.0, 0.13078090436600118], [1693, 19.112957069206786, 600.0047537288826, 4.0, 40.0, 18.0, 0.2484033785515952], [1694, 15.888384178132887, 1872.2849137221901, 4.0, 40.0, 18.0, 0.2251701387032182], [1695, 21.73302740338143, 43.675754892077514, 4.0, 40.0, 18.0, 
0.6210665384782286], [1696, 22.35524370032606, 52.08387746133504, 4.0, 40.0, 18.0, 0.5493658238475402], [1697, 7.60610363625139, 118.98771347772472, 4.0, 40.0, 18.0, 0.883471450279507], [1698, 16.497331904597434, 337.0259151015339, 4.0, 40.0, 18.0, 0.361238271097317], [1699, 16.72254242516536, 1333.3757859904756, 4.0, 40.0, 18.0, 0.23128208052229698], [1700, 21.2556025054371, 1694.8674591788474, 4.0, 40.0, 18.0, 0.14781331380522963], [1701, 13.101953900006746, 396.0693228205217, 4.0, 40.0, 18.0, 0.42054517982609846], [1702, 19.55083194228293, 1034.545484455351, 4.0, 40.0, 18.0, 0.20039833453379519], [1703, 20.78584571964179, 1867.5443465788612, 4.0, 40.0, 18.0, 0.1502409427264411], [1704, 9.621310409558534, 567.6272557324186, 4.0, 40.0, 18.0, 0.46617566920175546], [1705, 24.714157198792456, 1915.3556212048488, 4.0, 40.0, 18.0, 0.10472151022251487], [1706, 19.775474825923244, 1817.283487334307, 4.0, 40.0, 18.0, 0.16425677711083322], [1707, 18.077549508166634, 1722.4817249234773, 4.0, 40.0, 18.0, 0.1923978628357989], [1708, 23.17916550338367, 1002.4909877164397, 4.0, 40.0, 18.0, 0.15203873706577736], [1709, 1.3308891148640618, 1825.640079787098, 4.0, 40.0, 18.0, 0.5818667882580332], [1710, 23.18181601780554, 980.3473162986612, 4.0, 40.0, 18.0, 0.15323319278524383], [1711, 4.234333825354664, 823.401527753161, 4.0, 40.0, 18.0, 0.5850696173441955], [1712, 22.941907300107466, 326.0656104286293, 4.0, 40.0, 18.0, 0.23917209296486333], [1713, 7.974280813246165, 1475.619985061003, 4.0, 40.0, 18.0, 0.40943513809536514], [1714, 19.118336420445516, 1375.3049222502161, 4.0, 40.0, 18.0, 0.18932035460981275], [1715, 0.6758817654935079, 167.96636726114173, 4.0, 40.0, 18.0, 1.0], [1716, 13.340897887753394, 683.7157036004362, 4.0, 40.0, 18.0, 0.35476705474396264], [1717, 22.880690187965598, 1790.5640023756032, 4.0, 40.0, 18.0, 0.12590012170617182], [1718, 0.880537397394135, 142.92209867527026, 4.0, 40.0, 18.0, 1.0], [1719, 24.09024460322958, 188.47513080588132, 4.0, 40.0, 18.0, 
0.28029616423419723], [1720, 18.648659406238238, 462.34861353463054, 4.0, 40.0, 18.0, 0.28114815564051354], [1721, 9.342950193665873, 1957.851072467311, 4.0, 40.0, 18.0, 0.35508695158398346], [1722, 2.33113846495932, 629.6882187125599, 4.0, 40.0, 18.0, 0.6986872263946223], [1723, 13.744366509705605, 1758.367200113514, 4.0, 40.0, 18.0, 0.26915331924848346], [1724, 9.068127384927246, 1360.0922019048633, 4.0, 40.0, 18.0, 0.39098618425733545], [1725, 10.965096996073745, 772.9182498864528, 4.0, 40.0, 18.0, 0.3987558809815726], [1726, 23.295943137459993, 1312.6174588234753, 4.0, 40.0, 18.0, 0.13657673179334268], [1727, 9.032234003208295, 732.9254307648537, 4.0, 40.0, 18.0, 0.4515769737871771], [1728, 18.267895693882554, 182.55128559575311, 4.0, 40.0, 18.0, 0.4042246847408414], [1729, 2.1202048731287753, 1753.952164170847, 4.0, 40.0, 18.0, 0.5594489317446404], [1730, 5.980206779099389, 327.0958138073652, 4.0, 40.0, 18.0, 0.6779155225371589], [1731, 8.422051753762771, 1032.4055742871856, 4.0, 40.0, 18.0, 0.4311760849371425], [1732, 15.212073329895636, 1962.9687756971878, 4.0, 40.0, 18.0, 0.23464613548352306], [1733, 13.55927264769112, 1034.825914833511, 4.0, 40.0, 18.0, 0.3114553442035314], [1734, 1.813961930744712, 849.0248971772902, 4.0, 40.0, 18.0, 0.6698452595489227], [1735, 5.814377314443184, 180.4212502991614, 4.0, 40.0, 18.0, 0.8211067953789679], [1736, 18.097117132613477, 127.89044561583256, 4.0, 40.0, 18.0, 0.4664430326821901], [1737, 2.8433978869424643, 49.71097111868656, 4.0, 40.0, 18.0, 1.0], [1738, 13.870135390374008, 82.88675289973095, 4.0, 40.0, 18.0, 0.7071546521401205], [1739, 4.2975571797138565, 1072.6389692047155, 4.0, 40.0, 18.0, 0.54670214551155], [1740, 23.4465644967952, 1207.2268328110101, 4.0, 40.0, 18.0, 0.1389989814445305], [1741, 21.558336504250782, 944.3977781049064, 4.0, 40.0, 18.0, 0.1764806189591074], [1742, 22.642546810517896, 79.32561996954387, 4.0, 40.0, 18.0, 0.44486310261551226], [1743, 23.257205536426017, 782.4847906808816, 4.0, 40.0, 
18.0, 0.16586493682430567], [1744, 20.3297106094133, 1532.6350395948778, 4.0, 40.0, 18.0, 0.16535566984090594], [1745, 7.601612577132405, 438.85613842569336, 4.0, 40.0, 18.0, 0.565432669707558], [1746, 20.533567876256793, 123.74129377390427, 4.0, 40.0, 18.0, 0.41302150698065226], [1747, 20.11184514098866, 64.43666065779996, 4.0, 40.0, 18.0, 0.5605086964690513], [1748, 20.96467599726041, 1310.5094970747757, 4.0, 40.0, 18.0, 0.1652938351338709], [1749, 10.694394663692886, 1071.3183206045346, 4.0, 40.0, 18.0, 0.3732300901264402], [1750, 11.009038331513354, 645.1806737580594, 4.0, 40.0, 18.0, 0.4155147861842374], [1751, 23.965946633057523, 186.31153145871704, 4.0, 40.0, 18.0, 0.28401520670589264], [1752, 16.305472713001144, 230.5820099724613, 4.0, 40.0, 18.0, 0.4152747794097998], [1753, 2.0164802133756767, 414.9259012008752, 4.0, 40.0, 18.0, 0.7893316759748789], [1754, 12.577597764246393, 522.904072949603, 4.0, 40.0, 18.0, 0.4016105096535484], [1755, 14.537924432368559, 1621.429842934899, 4.0, 40.0, 18.0, 0.259014058998408], [1756, 5.610243603993884, 1653.5787863792725, 4.0, 40.0, 18.0, 0.45836421643322045], [1757, 24.29808632457799, 164.02014029181785, 4.0, 40.0, 18.0, 0.2943475712690083], [1758, 2.9619784325358918, 180.52944331525876, 4.0, 40.0, 18.0, 1.0], [1759, 16.177258552308913, 730.2512584094902, 4.0, 40.0, 18.0, 0.28710448451694126], [1760, 5.092308259179642, 120.43679712598822, 4.0, 40.0, 18.0, 1.0], [1761, 9.341953599135755, 1427.1125828128668, 4.0, 40.0, 18.0, 0.38019307505029964], [1762, 0.0808929910235262, 130.78839415986135, 4.0, 40.0, 18.0, 1.0], [1763, 10.506599678634865, 55.77020106854454, 4.0, 40.0, 18.0, 1.0], [1764, 6.155716401936137, 1044.1699211597609, 4.0, 40.0, 18.0, 0.49167379269704986], [1765, 0.6659840402889611, 96.33683447878275, 4.0, 40.0, 18.0, 1.0], [1766, 5.530353872410493, 141.9441808981498, 4.0, 40.0, 18.0, 1.0], [1767, 11.726903321333973, 556.3654462994716, 4.0, 40.0, 18.0, 0.41426328705631144], [1768, 18.87605500340939, 
76.07111892484716, 4.0, 40.0, 18.0, 0.5563037985166693], [1769, 17.433652422583805, 1555.6290369092196, 4.0, 40.0, 18.0, 0.2090955263639545], [1770, 15.890498729636269, 1820.8909098689562, 4.0, 40.0, 18.0, 0.22674377947090424], [1771, 14.5502972715096, 1894.4060003147958, 4.0, 40.0, 18.0, 0.24905068439655245], [1772, 8.464446854207875, 112.5154525721117, 4.0, 40.0, 18.0, 0.8467430070767269], [1773, 16.461447548851982, 155.62536688148208, 4.0, 40.0, 18.0, 0.47415829069438836], [1774, 20.49542049972595, 736.054022226142, 4.0, 40.0, 18.0, 0.2092340505329641], [1775, 0.6117723911408746, 1205.9421482297519, 4.0, 40.0, 18.0, 0.663110631965399], [1776, 16.16633438585804, 1334.3455108439366, 4.0, 40.0, 18.0, 0.24127153789138406], [1777, 13.445236728644232, 519.5707212360242, 4.0, 40.0, 18.0, 0.38156595474842525], [1778, 1.941726246394229, 1190.2222584800893, 4.0, 40.0, 18.0, 0.6154408656267621], [1779, 22.933443950961856, 754.4847313798897, 4.0, 40.0, 18.0, 0.17242953840641795], [1780, 15.239094871816889, 599.5393457326618, 4.0, 40.0, 18.0, 0.32526907722035353], [1781, 0.5841490230752644, 1258.4122661637946, 4.0, 40.0, 18.0, 0.6582267651766723], [1782, 3.954996027187602, 92.75001261564577, 4.0, 40.0, 18.0, 1.0], [1783, 7.125744104905896, 989.2878830273229, 4.0, 40.0, 18.0, 0.46997227474710984], [1784, 20.682134108879737, 231.73321945422063, 4.0, 40.0, 18.0, 0.31866073220815094], [1785, 3.4918799090534054, 1123.3344603573385, 4.0, 40.0, 18.0, 0.5680535181828943], [1786, 15.869755144826566, 821.1813359286382, 4.0, 40.0, 18.0, 0.2831674933512515], [1787, 4.704694471801313, 1528.4334606449145, 4.0, 40.0, 18.0, 0.4923162793936037], [1788, 9.445479325181994, 1135.7402822920678, 4.0, 40.0, 18.0, 0.3981706515841447], [1789, 3.553233063054337, 1708.827774787038, 4.0, 40.0, 18.0, 0.515409911878944], [1790, 0.3513204425915878, 1942.2968057621283, 4.0, 40.0, 18.0, 0.609114972772225], [1791, 11.73244871110241, 322.5582352989726, 4.0, 40.0, 18.0, 0.48514922376989766], [1792, 
24.50098799522684, 172.89678438839343, 4.0, 40.0, 18.0, 0.28394652900731726], [1793, 4.8142139311164955, 1153.022501044064, 4.0, 40.0, 18.0, 0.5210863185944499], [1794, 1.6436134050577005, 448.1557362447912, 4.0, 40.0, 18.0, 0.789250068824483], [1795, 8.451059530543542, 443.9358627326224, 4.0, 40.0, 18.0, 0.5354869178761084], [1796, 1.691811602776669, 253.0018590837451, 4.0, 40.0, 18.0, 0.9167207966432277], [1797, 1.0788963314240896, 562.7895763915516, 4.0, 40.0, 18.0, 0.7687701288807397], [1798, 8.81602127053965, 598.5774856927376, 4.0, 40.0, 18.0, 0.48213955272424164], [1799, 4.48923184195614, 159.2007069926749, 4.0, 40.0, 18.0, 1.0], [1800, 19.321391696712492, 86.07542288926702, 4.0, 40.0, 18.0, 0.5140082317870771], [1801, 9.467945450721748, 674.5387201255522, 4.0, 40.0, 18.0, 0.4495764267565451], [1802, 15.421146756805031, 103.3961991546702, 4.0, 40.0, 18.0, 0.5915310240162551], [1803, 11.135700460825726, 370.2390737078568, 4.0, 40.0, 18.0, 0.48160922701396947], [1804, 19.376594498731855, 465.4104399860047, 4.0, 40.0, 18.0, 0.266685792278971], [1805, 9.67774749346847, 787.4172587660398, 4.0, 40.0, 18.0, 0.42718938158047914], [1806, 10.91208451456561, 42.03402080517279, 4.0, 40.0, 18.0, 1.0], [1807, 5.4003471037021935, 1366.675062110117, 4.0, 40.0, 18.0, 0.48388276494372834], [1808, 6.213870855002332, 42.78518692155916, 4.0, 40.0, 18.0, 1.0], [1809, 20.3179803190086, 1302.3099192078114, 4.0, 40.0, 18.0, 0.17466555307053072], [1810, 5.554226502467852, 955.381132235269, 4.0, 40.0, 18.0, 0.5208940224119262], [1811, 2.787010134940557, 1889.0149437827565, 4.0, 40.0, 18.0, 0.5288857385645944], [1812, 15.305732153387265, 1926.1308663233258, 4.0, 40.0, 18.0, 0.2340797999911963], [1813, 18.749652685543772, 715.4767998108595, 4.0, 40.0, 18.0, 0.24037514635878554], [1814, 24.44469858354737, 732.3942567694203, 4.0, 40.0, 18.0, 0.15554532129771784], [1815, 23.057270816101667, 1990.2071628824074, 4.0, 40.0, 18.0, 0.11875224669485106], [1816, 14.173645278442104, 
1437.5860485871199, 4.0, 40.0, 18.0, 0.2745172773760426], [1817, 16.958370359297046, 435.7781883322644, 4.0, 40.0, 18.0, 0.3215241482650204], [1818, 19.12700103175865, 155.57315077916272, 4.0, 40.0, 18.0, 0.40846182959817123], [1819, 20.51011975215225, 805.0695042494839, 4.0, 40.0, 18.0, 0.20257964742258003], [1820, 3.821391875021227, 39.82321381519032, 4.0, 40.0, 18.0, 1.0], [1821, 22.90372670852188, 1670.8399186068268, 4.0, 40.0, 18.0, 0.12887875657343575], [1822, 5.2420129339053405, 1734.1965475057896, 4.0, 40.0, 18.0, 0.46407407173502185], [1823, 2.487500732159681, 1429.8682310102722, 4.0, 40.0, 18.0, 0.5716837924729437], [1824, 7.146914934777959, 1654.6513294782615, 4.0, 40.0, 18.0, 0.4192187645389359], [1825, 23.60847559222103, 198.25888283221005, 4.0, 40.0, 18.0, 0.2826691729487767], [1826, 21.415267431694453, 475.72255128924763, 4.0, 40.0, 18.0, 0.22931063429205265], [1827, 1.5756592101652922, 1125.066747634953, 4.0, 40.0, 18.0, 0.6368538307548633], [1828, 14.265779580392838, 106.13294402735201, 4.0, 40.0, 18.0, 0.6247018718959303], [1829, 8.343411048642697, 1369.0123365395, 4.0, 40.0, 18.0, 0.407364239673956], [1830, 8.61847000835097, 747.4712161293687, 4.0, 40.0, 18.0, 0.4604427833256935], [1831, 4.360327303971154, 1332.9609613992307, 4.0, 40.0, 18.0, 0.5182203536813111], [1832, 19.471597953964366, 1611.9433143862395, 4.0, 40.0, 18.0, 0.17486755814266905], [1833, 24.049190756471305, 199.54989986729868, 4.0, 40.0, 18.0, 0.27412684446211966], [1834, 14.844594846182751, 120.4554127128788, 4.0, 40.0, 18.0, 0.5745915374877814], [1835, 3.240098342749462, 607.1051788760919, 4.0, 40.0, 18.0, 0.6695693587722176], [1836, 24.848974602173, 849.4800746006101, 4.0, 40.0, 18.0, 0.1423024009563565], [1837, 2.6108575504030758, 1246.6470118947454, 4.0, 40.0, 18.0, 0.5851434722852972], [1838, 15.279431052619103, 434.84143142312234, 4.0, 40.0, 18.0, 0.3587941955636343], [1839, 17.964293473993415, 1615.9521211412791, 4.0, 40.0, 18.0, 0.19815389251528545], [1840, 
22.036332477973527, 1540.6770814963702, 4.0, 40.0, 18.0, 0.1430679769747265], [1841, 6.692100382736768, 79.69006044724026, 4.0, 40.0, 18.0, 1.0], [1842, 9.236438216590937, 137.0323407347843, 4.0, 40.0, 18.0, 0.7502317203795221], [1843, 10.685776458798504, 470.42821411120855, 4.0, 40.0, 18.0, 0.46108188187504284], [1844, 23.886887549646556, 199.06399480760902, 4.0, 40.0, 18.0, 0.2772127409628226], [1845, 15.752248617162076, 158.1555078956105, 4.0, 40.0, 18.0, 0.49067944404709196], [1846, 8.950223761879764, 1804.2162503192576, 4.0, 40.0, 18.0, 0.36998251668443105], [1847, 20.9727163318946, 73.40025991675279, 4.0, 40.0, 18.0, 0.5040523763077089], [1848, 7.2159634563970565, 325.3089461477477, 4.0, 40.0, 18.0, 0.632115846306468], [1849, 9.837173313314707, 269.7833263029355, 4.0, 40.0, 18.0, 0.5746833059804951], [1850, 1.348969915375506, 227.71613557546453, 4.0, 40.0, 18.0, 1.0], [1851, 16.106321586642483, 737.0816851018122, 4.0, 40.0, 18.0, 0.28768067507899475], [1852, 23.116823050089188, 131.68859504798297, 4.0, 40.0, 18.0, 0.34806598046392856], [1853, 14.291481655113202, 1320.4209708939525, 4.0, 40.0, 18.0, 0.2780450613001988], [1854, 19.803132320615696, 75.44618064041273, 4.0, 40.0, 18.0, 0.530670984961586], [1855, 14.953532499763863, 1371.0019090408966, 4.0, 40.0, 18.0, 0.2624413902773426], [1856, 13.171140102801813, 1044.0345409406605, 4.0, 40.0, 18.0, 0.31923905102704786], [1857, 4.702948593376563, 1026.2884959281755, 4.0, 40.0, 18.0, 0.5391480549535197], [1858, 24.795853025502186, 1814.3063387781701, 4.0, 40.0, 18.0, 0.10678822475736813], [1859, 3.7740038176058004, 1772.1207732918092, 4.0, 40.0, 18.0, 0.5047090954414234], [1860, 7.665644889476542, 86.3814278772671, 4.0, 40.0, 18.0, 1.0], [1861, 5.876774528355167, 75.42542786378309, 4.0, 40.0, 18.0, 1.0], [1862, 11.59704929753818, 1443.476763648336, 4.0, 40.0, 18.0, 0.32757177990691577], [1863, 6.824750920813669, 169.43856327136092, 4.0, 40.0, 18.0, 0.7945477148637582], [1864, 22.98942777549491, 
146.32860719813732, 4.0, 40.0, 18.0, 0.33500234883787583], [1865, 18.799749837928488, 1396.4339226346692, 4.0, 40.0, 18.0, 0.19313076821188008], [1866, 1.3340533787297741, 427.16963222830276, 4.0, 40.0, 18.0, 0.8115241239984092], [1867, 21.86297444638135, 1254.9945402104786, 4.0, 40.0, 18.0, 0.1559499233768141], [1868, 20.84402473909523, 1403.018120165402, 4.0, 40.0, 18.0, 0.16312262728139915], [1869, 12.513597214105971, 137.88647803831844, 4.0, 40.0, 18.0, 0.6228555383760062], [1870, 0.9031364345789133, 483.5555258129089, 4.0, 40.0, 18.0, 0.8046850957648027], [1871, 4.7590135852196624, 1001.0592407061898, 4.0, 40.0, 18.0, 0.5405283158339406], [1872, 9.504012552161543, 188.14961464598392, 4.0, 40.0, 18.0, 0.6611778867494635], [1873, 14.233688272121212, 1915.1809969370652, 4.0, 40.0, 18.0, 0.2545633286808171], [1874, 16.706157274793, 325.9914859041708, 4.0, 40.0, 18.0, 0.3605807908473322], [1875, 13.44404800377561, 79.8723662904433, 4.0, 40.0, 18.0, 0.7359213323161425], [1876, 18.69884048713576, 597.671896605518, 4.0, 40.0, 18.0, 0.2563194986328056], [1877, 17.870275067633155, 1871.1968637924667, 4.0, 40.0, 18.0, 0.19122757305321156], [1878, 8.086777898823541, 132.47706535786756, 4.0, 40.0, 18.0, 0.8086016219361791], [1879, 16.766471592041803, 137.44248121861466, 4.0, 40.0, 18.0, 0.48874272915329736], [1880, 20.36474130407488, 34.16103614361353, 4.0, 40.0, 18.0, 1.0], [1881, 18.2837341813897, 693.2792465783828, 4.0, 40.0, 18.0, 0.2513159069175625], [1882, 5.751679043409133, 1652.6858601316421, 4.0, 40.0, 18.0, 0.45454438541707853], [1883, 13.581769930210912, 127.29572927096659, 4.0, 40.0, 18.0, 0.604146358096679], [1884, 20.217634402539154, 447.9956758607286, 4.0, 40.0, 18.0, 0.2551645921835903], [1885, 18.82343096465152, 1549.2431926707948, 4.0, 40.0, 18.0, 0.18682981601075516], [1886, 23.61101116343376, 56.31308310153682, 4.0, 40.0, 18.0, 0.49506669102635725], [1887, 23.95830884279107, 1045.6197660382663, 4.0, 40.0, 18.0, 0.14087027259529966], [1888, 
23.345318437250764, 319.4774483010274, 4.0, 40.0, 18.0, 0.23487003957382097], [1889, 12.174557295359497, 931.7795219644557, 4.0, 40.0, 18.0, 0.3516587971098201], [1890, 3.5628851270324864, 76.89602618966957, 4.0, 40.0, 18.0, 1.0], [1891, 11.649118247663527, 105.2911591759369, 4.0, 40.0, 18.0, 0.725188645412746], [1892, 3.9847097518608683, 1168.6613357106414, 4.0, 40.0, 18.0, 0.5462011252226638], [1893, 12.98039814328282, 338.80503922976135, 4.0, 40.0, 18.0, 0.4440051464878844], [1894, 21.76384222653067, 480.8128790233031, 4.0, 40.0, 18.0, 0.22292047912716645], [1895, 10.824715885623428, 1343.2037601109316, 4.0, 40.0, 18.0, 0.35043707750717284], [1896, 14.377652345347828, 82.74520524662913, 4.0, 40.0, 18.0, 0.687877081212253], [1897, 13.374952119494884, 1676.706780960808, 4.0, 40.0, 18.0, 0.27988476349521035], [1898, 9.497812875584847, 1414.8386104858605, 4.0, 40.0, 18.0, 0.37716987942482133], [1899, 20.07791851647754, 825.2072454953318, 4.0, 40.0, 18.0, 0.20763552701901508], [1900, 2.1221560994224897, 137.6137020403229, 4.0, 40.0, 18.0, 1.0], [1901, 3.882121169360972, 168.7645910277558, 4.0, 40.0, 18.0, 1.0], [1902, 14.009967296685197, 1378.3755483218486, 4.0, 40.0, 18.0, 0.28066980536030706], [1903, 18.174664733669726, 141.23146083728952, 4.0, 40.0, 18.0, 0.44670310168065763], [1904, 9.466456775367543, 125.27734367506513, 4.0, 40.0, 18.0, 0.7654894272136504], [1905, 0.22953808582744928, 87.91556165388147, 4.0, 40.0, 18.0, 1.0], [1906, 23.430097129384475, 1094.6519447815324, 4.0, 40.0, 18.0, 0.14424130751653252], [1907, 11.575700851879597, 170.73320566118431, 4.0, 40.0, 18.0, 0.6072773684687981], [1908, 3.3784300695386795, 1560.668023137015, 4.0, 40.0, 18.0, 0.5313332768675827], [1909, 3.8926884753925113, 95.320417891007, 4.0, 40.0, 18.0, 1.0], [1910, 12.547220265154015, 91.96867547022983, 4.0, 40.0, 18.0, 0.728614752969701], [1911, 4.923249396313468, 310.3760108864668, 4.0, 40.0, 18.0, 0.7306932411166738], [1912, 10.02610472558947, 990.2987729242266, 4.0, 40.0, 
18.0, 0.39704974290989803], [1913, 3.9078072998742024, 387.5135572264773, 4.0, 40.0, 18.0, 0.7258905416879472], [1914, 21.744716139095313, 1767.8478011771438, 4.0, 40.0, 18.0, 0.13908581986860621], [1915, 2.258711681922873, 1442.7725169981622, 4.0, 40.0, 18.0, 0.5786283798840285], [1916, 4.43420285521203, 156.40248211760132, 4.0, 40.0, 18.0, 1.0], [1917, 3.007989913402234, 1436.7553318710109, 4.0, 40.0, 18.0, 0.5534230178826371], [1918, 13.744860551147333, 1572.8546028465985, 4.0, 40.0, 18.0, 0.27685456524960983], [1919, 23.27995927255027, 156.23765138005308, 4.0, 40.0, 18.0, 0.32000516996572254], [1920, 8.349389741242916, 640.1012606436612, 4.0, 40.0, 18.0, 0.48700336274058853], [1921, 24.598200648696576, 137.44811527476065, 4.0, 40.0, 18.0, 0.3128780433590026], [1922, 12.93047186229432, 813.7423213302408, 4.0, 40.0, 18.0, 0.3469308457087111], [1923, 6.262485283124522, 1271.038040522172, 4.0, 40.0, 18.0, 0.4669558449987634], [1924, 6.50291860444277, 1966.668743292896, 4.0, 40.0, 18.0, 0.42030857145850026], [1925, 7.307331785538926, 1816.7122146640293, 4.0, 40.0, 18.0, 0.4081257902198177], [1926, 10.492943735372195, 46.371938240960525, 4.0, 40.0, 18.0, 1.0], [1927, 11.596304058530341, 1138.2027878444414, 4.0, 40.0, 18.0, 0.3468385222973252], [1928, 24.347352509983764, 58.69897882967909, 4.0, 40.0, 18.0, 0.46688910309491094], [1929, 3.6471385177059306, 1746.3387323521263, 4.0, 40.0, 18.0, 0.5101054449988423], [1930, 11.984096587369834, 624.0367089266477, 4.0, 40.0, 18.0, 0.396616617947586], [1931, 20.746839831400308, 187.8661004123425, 4.0, 40.0, 18.0, 0.3452682640833985], [1932, 10.23441975893285, 139.37534073080505, 4.0, 40.0, 18.0, 0.7057650418001672], [1933, 14.985214285443957, 600.4120009643879, 4.0, 40.0, 18.0, 0.33084454126775564], [1934, 22.611523606726216, 1237.5895238501282, 4.0, 40.0, 18.0, 0.1473302200483939], [1935, 15.907275581625383, 1903.0670364742768, 4.0, 40.0, 18.0, 0.2238641850440459], [1936, 11.023549498851404, 1110.8333859056133, 4.0, 40.0, 
18.0, 0.3623046943340012], [1937, 13.69754230078197, 1073.0787824364565, 4.0, 40.0, 18.0, 0.3057402903004179], [1938, 17.440538792545222, 1033.3911317848074, 4.0, 40.0, 18.0, 0.2359067931255248], [1939, 13.64670934972993, 423.3326930470313, 4.0, 40.0, 18.0, 0.40049820796241], [1940, 13.764247823540893, 1966.937016735742, 4.0, 40.0, 18.0, 0.261919817737229], [1941, 0.9288291950075078, 1792.9297687215046, 4.0, 40.0, 18.0, 0.5984580405373244], [1942, 13.212485977818167, 672.3530570879083, 4.0, 40.0, 18.0, 0.3592985569326885], [1943, 1.0258959085792245, 1092.2137459756923, 4.0, 40.0, 18.0, 0.6617951591553232], [1944, 17.8351020228825, 1726.787789756901, 4.0, 40.0, 18.0, 0.19635421570752118], [1945, 21.21943335691722, 419.59032057933365, 4.0, 40.0, 18.0, 0.24388329033583123], [1946, 24.22709591969878, 1396.7414599108656, 4.0, 40.0, 18.0, 0.12309308896902814], [1947, 20.924883922295642, 989.2047567721401, 4.0, 40.0, 18.0, 0.18260497786986535], [1948, 18.0435504079589, 89.18567663017862, 4.0, 40.0, 18.0, 0.5433952214790109], [1949, 24.27081830466515, 41.89502986189963, 4.0, 40.0, 18.0, 0.5537057327452106], [1950, 21.27899200474617, 835.5406116648644, 4.0, 40.0, 18.0, 0.1885390296117541], [1951, 20.364665078772774, 422.6417340808581, 4.0, 40.0, 18.0, 0.25794499753128536], [1952, 5.103262203791576, 767.5841880106092, 4.0, 40.0, 18.0, 0.5650524825877493], [1953, 1.681210788030052, 820.1569171059258, 4.0, 40.0, 18.0, 0.680437337467596], [1954, 21.288120100189246, 1634.73591190358, 4.0, 40.0, 18.0, 0.14941347100784588], [1955, 11.086447344058723, 68.02700472787981, 4.0, 40.0, 18.0, 1.0], [1956, 9.448475175225138, 129.90593672321307, 4.0, 40.0, 18.0, 0.7561368113011686], [1957, 2.1903150727018206, 194.4411860511767, 4.0, 40.0, 18.0, 1.0], [1958, 17.75500781863005, 66.80001080240127, 4.0, 40.0, 18.0, 0.6271295197613048], [1959, 19.762523777533932, 726.9728304536992, 4.0, 40.0, 18.0, 0.22202010047235224], [1960, 20.82066505363703, 1872.6929550943491, 4.0, 40.0, 18.0, 
0.14777804591993463], [1961, 19.218788295963247, 1791.6591725648584, 4.0, 40.0, 18.0, 0.17300785394950557], [1962, 7.867769431003787, 55.0881303536056, 4.0, 40.0, 18.0, 1.0], [1963, 19.899012972436378, 690.9612458112083, 4.0, 40.0, 18.0, 0.22372600571775045], [1964, 13.741683645695776, 1418.9175494785252, 4.0, 40.0, 18.0, 0.28411193805018514], [1965, 9.256316527727579, 614.500089599888, 4.0, 40.0, 18.0, 0.4663692566048468], [1966, 4.772804792592991, 1608.3447522206286, 4.0, 40.0, 18.0, 0.48498603836504206], [1967, 10.60974817077909, 155.3744712390088, 4.0, 40.0, 18.0, 0.6641080621927825], [1968, 11.77922600197693, 829.1528369129031, 4.0, 40.0, 18.0, 0.3719392095098326], [1969, 13.380973365094222, 1077.8286832314043, 4.0, 40.0, 18.0, 0.3118559156014599], [1970, 22.466102035208284, 66.22839536718286, 4.0, 40.0, 18.0, 0.4874530554474939], [1971, 16.16565090160762, 1796.780949291825, 4.0, 40.0, 18.0, 0.22256554305893014], [1972, 5.588177042494847, 1697.075855727831, 4.0, 40.0, 18.0, 0.4562990451243489], [1973, 7.107503324884531, 1949.8536251772614, 4.0, 40.0, 18.0, 0.40726697206450485], [1974, 2.7187146569001452, 904.0802167604023, 4.0, 40.0, 18.0, 0.6260851635866334], [1975, 14.935093211954534, 607.3240779492281, 4.0, 40.0, 18.0, 0.33079591695055427], [1976, 24.270104775462823, 392.3622987549394, 4.0, 40.0, 18.0, 0.20260298752096173], [1977, 17.993690554562384, 80.3881711350263, 4.0, 40.0, 18.0, 0.5699337243185855], [1978, 22.297523888819526, 1349.9246942381608, 4.0, 40.0, 18.0, 0.14648876337987388], [1979, 1.0352863870984663, 50.09247613746339, 4.0, 40.0, 18.0, 1.0], [1980, 6.468476365464065, 113.04680706875692, 4.0, 40.0, 18.0, 1.0], [1981, 1.1031548990482447, 1878.4048572142203, 4.0, 40.0, 18.0, 0.5865813653678678], [1982, 4.010539300634067, 207.12738672209704, 4.0, 40.0, 18.0, 0.8637305682980617], [1983, 23.308990416271254, 76.40168591179443, 4.0, 40.0, 18.0, 0.4368493801397688], [1984, 14.862987689927046, 1805.1760032035231, 4.0, 40.0, 18.0, 0.24608120532147834], 
[1985, 2.825312477235914, 352.84885975814996, 4.0, 40.0, 18.0, 0.7894656488806221], [1986, 24.022808787226566, 1050.5860653565364, 4.0, 40.0, 18.0, 0.139899557763747], [1987, 7.51679142904062, 90.9751315562337, 4.0, 40.0, 18.0, 1.0], [1988, 9.429987701915419, 1216.7729076176734, 4.0, 40.0, 18.0, 0.39225766456653555], [1989, 1.0944192219756483, 1233.398827848469, 4.0, 40.0, 18.0, 0.641796622013196], [1990, 10.388878917456587, 1418.7904022947578, 4.0, 40.0, 18.0, 0.35602975557568084], [1991, 14.842402896319818, 1977.013567387179, 4.0, 40.0, 18.0, 0.2410396884651546], [1992, 11.482628687738552, 1011.5742119700101, 4.0, 40.0, 18.0, 0.35999433660814334], [1993, 23.346790872516337, 1162.0536614731188, 4.0, 40.0, 18.0, 0.14216513276602566], [1994, 3.5491767050882834, 933.3096372785426, 4.0, 40.0, 18.0, 0.5913856475535725], [1995, 3.7449721566626626, 97.5434341757727, 4.0, 40.0, 18.0, 1.0]]
58,233.5
116,466
0.731546
dataset = [[575, 1.8478238142818033, 335.84132856507614, 4.0, 40.0, 18.0, 1.0], [576, 15.77686287348893, 42.441254859835766, 4.0, 40.0, 18.0, 1.0], [577, 2.5651959697179394, 460.4148384334217, 4.0, 40.0, 18.0, 0.796539971089987], [578, 17.085286455903624, 1722.0569227642409, 4.0, 40.0, 18.0, 0.20868652310905142], [579, 0.8922404456295938, 544.2232815722436, 4.0, 40.0, 18.0, 0.8391968483418055], [580, 1.855625614157908, 324.0611877697628, 4.0, 40.0, 18.0, 1.0], [581, 13.887583805638116, 370.66355345479735, 4.0, 40.0, 18.0, 0.4156268407842482], [582, 4.574319040915774, 1916.680593529377, 4.0, 40.0, 18.0, 0.4731431536067498], [583, 11.620584764397769, 1765.727157030207, 4.0, 40.0, 18.0, 0.3125490112539554], [584, 2.6235186150459104, 1833.1693569752063, 4.0, 40.0, 18.0, 0.5374621823117757], [585, 13.484073962894696, 660.3699928897363, 4.0, 40.0, 18.0, 0.3539297235585189], [586, 14.039143301499495, 1194.4759299020527, 4.0, 40.0, 18.0, 0.2909060179303773], [587, 13.912217642029711, 1554.008389226998, 4.0, 40.0, 18.0, 0.2743234627435817], [588, 13.99433118037543, 1828.6039911998955, 4.0, 40.0, 18.0, 0.26137292500800485], [589, 13.97985812120516, 1457.7115189147835, 4.0, 40.0, 18.0, 0.277698127963447], [590, 14.235082927476668, 1533.990342569619, 4.0, 40.0, 18.0, 0.2684974619970663], [591, 0.14438911944825494, 1552.4111707538157, 4.0, 40.0, 18.0, 0.6646778218759786], [592, 12.838639587489782, 897.1618056405601, 4.0, 40.0, 18.0, 0.3404965898150903], [593, 11.149353897963573, 598.125634720337, 4.0, 40.0, 18.0, 0.42099275208739695], [594, 2.6770942046494586, 1985.9408533826909, 4.0, 40.0, 18.0, 0.5310687434251105], [595, 2.9997563631283897, 1040.9042861817666, 4.0, 40.0, 18.0, 0.6086804760399638], [596, 2.3022674475226914, 307.9928831694318, 4.0, 40.0, 18.0, 1.0], [597, 16.291429852780407, 1256.6722690719946, 4.0, 40.0, 18.0, 0.24371063020372327], [598, 15.922124691331389, 507.48840748529494, 4.0, 40.0, 18.0, 0.32681641236328557], [599, 1.9421270372564932, 296.11422429306833, 
4.0, 40.0, 18.0, 1.0], [600, 17.158011255218895, 1370.1534109726924, 4.0, 40.0, 18.0, 0.22156305405117283], [601, 3.7932553942867377, 1121.772643508161, 4.0, 40.0, 18.0, 0.5662626026818638], [602, 14.10521268070536, 34.78839785558823, 4.0, 40.0, 18.0, 1.0], [603, 9.73869075013141, 1581.5507122013316, 4.0, 40.0, 18.0, 0.3625765570093685], [604, 4.274494253604736, 235.24380920742726, 4.0, 40.0, 18.0, 1.0], [605, 3.2295643670867213, 393.2709171003013, 4.0, 40.0, 18.0, 0.8062809231893029], [606, 12.475115053078682, 1915.9055265343711, 4.0, 40.0, 18.0, 0.2878838514964685], [607, 1.6886028928130217, 270.23903604128816, 4.0, 40.0, 18.0, 1.0], [608, 11.690517878266208, 1486.0998001794405, 4.0, 40.0, 18.0, 0.3237148945091826], [609, 15.926532949205589, 1131.5673840607374, 4.0, 40.0, 18.0, 0.25796741296596415], [610, 1.8446899105333436, 1224.3210865622684, 4.0, 40.0, 18.0, 0.6298416489939084], [611, 2.6566105295944635, 731.8422724963048, 4.0, 40.0, 18.0, 0.6871636918077176], [612, 15.125019204779058, 297.8907653170386, 4.0, 40.0, 18.0, 0.4185181467542303], [613, 3.8530850542593447, 1361.0001015077783, 4.0, 40.0, 18.0, 0.5373958378712685], [614, 12.598205126437756, 1717.5123950190948, 4.0, 40.0, 18.0, 0.2939356075449356], [615, 12.334492974283485, 623.3359259430543, 4.0, 40.0, 18.0, 0.3876830117045339], [616, 11.192915263658763, 1966.1376533177634, 4.0, 40.0, 18.0, 0.31352700344854745], [617, 13.997928305359197, 93.70706217274409, 4.0, 40.0, 18.0, 0.7141709148966205], [618, 16.366113492225136, 1236.5923416320818, 4.0, 40.0, 18.0, 0.2435035777641013], [619, 2.9775330120959294, 1632.2202941746937, 4.0, 40.0, 18.0, 0.5446685841376463], [620, 12.375848772835731, 1454.8269704665809, 4.0, 40.0, 18.0, 0.31088421330658206], [621, 10.573273354771072, 809.9502345850427, 4.0, 40.0, 18.0, 0.4036679768086213], [622, 0.658017470275974, 1174.2198110845593, 4.0, 40.0, 18.0, 0.6885758157602853], [623, 15.388174373590935, 1558.7329300836798, 4.0, 40.0, 18.0, 0.24506860672627045], [624, 
11.47959751919403, 495.72811032418923, 4.0, 40.0, 18.0, 0.43667527053115335], [625, 1.0249692741560694, 1714.5459100597297, 4.0, 40.0, 18.0, 0.6123934325550479], [626, 3.3490318260816085, 1587.88311412887, 4.0, 40.0, 18.0, 0.5351478832625405], [627, 13.621056051126965, 375.77630844629044, 4.0, 40.0, 18.0, 0.42020410107137113], [628, 15.612933200029865, 1869.1874865785246, 4.0, 40.0, 18.0, 0.23012146482050813], [629, 14.50967460452426, 81.04786425254812, 4.0, 40.0, 18.0, 0.7461201670586656], [630, 2.5263164804706095, 105.6280113180625, 4.0, 40.0, 18.0, 1.0], [631, 1.8029490922238407, 1029.5067606548166, 4.0, 40.0, 18.0, 0.6610687775529165], [632, 0.7319623297551434, 701.8330982972551, 4.0, 40.0, 18.0, 0.7869646649087125], [633, 0.4386533254903302, 794.8526073599669, 4.0, 40.0, 18.0, 0.7738976400870887], [634, 14.652220310971902, 952.1451985012426, 4.0, 40.0, 18.0, 0.29583094230337575], [635, 0.20246156655845526, 1804.2365401166314, 4.0, 40.0, 18.0, 0.6391260749168856], [636, 2.8725533409172863, 1981.9648539155141, 4.0, 40.0, 18.0, 0.5245258107130394], [637, 12.689631024428062, 1311.634645014224, 4.0, 40.0, 18.0, 0.31189270816357884], [638, 0.9614768236763129, 558.3493409470752, 4.0, 40.0, 18.0, 0.8292573515295004], [639, 12.935603226152619, 1333.9059870755948, 4.0, 40.0, 18.0, 0.30569596499996965], [640, 13.7979345626314, 779.3753407587446, 4.0, 40.0, 18.0, 0.331003864216317], [641, 0.9849375805474148, 825.9890492627081, 4.0, 40.0, 18.0, 0.7400968512949635], [642, 1.4105991726699643, 1700.8898630020053, 4.0, 40.0, 18.0, 0.5979771432431052], [643, 2.8503538554202015, 1064.3038789087652, 4.0, 40.0, 18.0, 0.6111854691231061], [644, 16.04834549540297, 1707.5748299191628, 4.0, 40.0, 18.0, 0.22738107076450115], [645, 16.783303066854362, 471.84431061561446, 4.0, 40.0, 18.0, 0.31643778862826194], [646, 0.39848871808779607, 1240.3359247723351, 4.0, 40.0, 18.0, 0.6908161553575081], [647, 2.4778883843398987, 1787.162704442942, 4.0, 40.0, 18.0, 0.5509850654318513], [648, 
13.25885996232254, 1698.8026832869486, 4.0, 40.0, 18.0, 0.2808466520575083], [649, 15.986135974030557, 224.86348916148867, 4.0, 40.0, 18.0, 0.44441779528106345], [650, 13.675228038330282, 473.8153271538389, 4.0, 40.0, 18.0, 0.3878574026483963], [651, 2.5952162624842563, 981.5417910601017, 4.0, 40.0, 18.0, 0.6353139849691855], [652, 2.0463400011399635, 613.7276199815243, 4.0, 40.0, 18.0, 0.7530953898704728], [653, 9.649425541309405, 105.74257917879993, 4.0, 40.0, 18.0, 1.0], [654, 11.838679710747154, 1217.4544316048584, 4.0, 40.0, 18.0, 0.33626602314339615], [655, 2.098502180533995, 1251.1532820311488, 4.0, 40.0, 18.0, 0.6159690017531594], [656, 2.1025685084517787, 98.89633668206652, 4.0, 40.0, 18.0, 1.0], [657, 12.857819799391866, 1359.358143001064, 4.0, 40.0, 18.0, 0.3059279506262172], [658, 2.233221352723336, 906.0579887922141, 4.0, 40.0, 18.0, 0.6649669558186118], [659, 1.7811199402272135, 1448.3686916556367, 4.0, 40.0, 18.0, 0.6063111556787022], [660, 14.448901654879998, 167.34241426955228, 4.0, 40.0, 18.0, 0.5463919600053861], [661, 10.562823930958594, 1297.580763882867, 4.0, 40.0, 18.0, 0.35997957233633354], [662, 3.272269298565971, 1067.0431698105351, 4.0, 40.0, 18.0, 0.593765244196822], [663, 1.0343862600092883, 667.017705876655, 4.0, 40.0, 18.0, 0.7836515967272167], [664, 0.16856950051587072, 720.6567295694282, 4.0, 40.0, 18.0, 0.8084805065903237], [665, 3.9177560361585195, 66.91061929546387, 4.0, 40.0, 18.0, 1.0], [666, 2.3780276261967783, 1668.1749819231613, 4.0, 40.0, 18.0, 0.5635900242240913], [667, 1.8813117598740123, 150.91226392259642, 4.0, 40.0, 18.0, 1.0], [668, 0.8447262583017403, 386.5987088286651, 4.0, 40.0, 18.0, 1.0], [669, 13.736506161491794, 889.6250282419106, 4.0, 40.0, 18.0, 0.3211736700709351], [670, 2.0090109477892426, 561.9522843904172, 4.0, 40.0, 18.0, 0.7749304621492978], [671, 3.038100630719427, 679.1589007105449, 4.0, 40.0, 18.0, 0.6851159203011229], [672, 3.9884559940760846, 833.3855871576827, 4.0, 40.0, 18.0, 0.6057434054729105], 
[673, 4.814399039171513, 30.558155481419423, 4.0, 40.0, 18.0, 1.0], [674, 0.7751917829450046, 1577.5845369091087, 4.0, 40.0, 18.0, 0.6351594260173516], [675, 14.960053050776954, 832.9983160485076, 4.0, 40.0, 18.0, 0.30016266965397775], [676, 17.771656535063176, 848.7911707008597, 4.0, 40.0, 18.0, 0.24429667759683385], [677, 1.401096753301573, 1702.1164509374134, 4.0, 40.0, 18.0, 0.5982530514668655], [678, 0.4236798333082352, 317.63818523938863, 4.0, 40.0, 18.0, 1.0], [679, 15.223642065748285, 933.4098161116106, 4.0, 40.0, 18.0, 0.2857079409908941], [680, 9.012045439273502, 1116.6022833618213, 4.0, 40.0, 18.0, 0.41056611918224784], [681, 1.2355168150259568, 1420.4346939390018, 4.0, 40.0, 18.0, 0.6319092021335166], [682, 0.3865424177831063, 473.2484023940222, 4.0, 40.0, 18.0, 0.904518741016756], [683, 1.2963805536393012, 1694.6108822988597, 4.0, 40.0, 18.0, 0.6029557100837556], [684, 1.9197251745615858, 1694.91862221521, 4.0, 40.0, 18.0, 0.5786066164285447], [685, 10.089674593087276, 544.0253517370464, 4.0, 40.0, 18.0, 0.46211496683321573], [686, 0.6334401188915719, 1844.853761865411, 4.0, 40.0, 18.0, 0.6178520401415883], [687, 13.326970265837408, 1834.0964338819465, 4.0, 40.0, 18.0, 0.2738377055667499], [688, 14.666304604439595, 467.8448461972652, 4.0, 40.0, 18.0, 0.3655591708614097], [689, 0.844304414255183, 1076.3594020117616, 4.0, 40.0, 18.0, 0.6959151451407178], [690, 3.6789640910162786, 626.4312648360602, 4.0, 40.0, 18.0, 0.6727236618293195], [691, 2.0051344211178366, 1007.3458777711303, 4.0, 40.0, 18.0, 0.6561383111692533], [692, 2.3481552984134613, 1471.3713178729508, 4.0, 40.0, 18.0, 0.5817223165277476], [693, 4.427135327036443, 1243.094343499476, 4.0, 40.0, 18.0, 0.5295634840686351], [694, 2.190869281670545, 554.5439904875483, 4.0, 40.0, 18.0, 0.7691339438293971], [695, 10.501562239820775, 250.29321404168425, 4.0, 40.0, 18.0, 0.5972936037623792], [696, 1.0579076190132626, 264.2901377145358, 4.0, 40.0, 18.0, 1.0], [697, 14.595094635291549, 
1352.0935667481695, 4.0, 40.0, 18.0, 0.27090889639433213], [698, 16.80898248691349, 217.61917192979868, 4.0, 40.0, 18.0, 0.4291036505392953], [699, 0.4008588925910539, 496.7708158830859, 4.0, 40.0, 18.0, 0.8892724306754946], [700, 16.673778109326502, 763.6732093014514, 4.0, 40.0, 18.0, 0.27343576698147964], [701, 4.049172829085666, 878.2315631702593, 4.0, 40.0, 18.0, 0.5945659270380028], [702, 3.0942338736710977, 1363.9170030493724, 4.0, 40.0, 18.0, 0.5643689491583638], [703, 2.126979268729393, 1774.8561400512385, 4.0, 40.0, 18.0, 0.5647131880217934], [704, 3.237293211206092, 191.81932534132739, 4.0, 40.0, 18.0, 1.0], [705, 3.4937049106334515, 484.70155391287017, 4.0, 40.0, 18.0, 0.7375686883019803], [706, 11.959072857883736, 42.238399809480285, 4.0, 40.0, 18.0, 1.0], [707, 9.832122271157074, 1013.5444884346197, 4.0, 40.0, 18.0, 0.400658804102444], [708, 14.257261734318867, 1290.167101491819, 4.0, 40.0, 18.0, 0.2811251610127445], [709, 13.584067151959738, 1617.345767469841, 4.0, 40.0, 18.0, 0.27790841405159983], [710, 2.86939610720086, 1528.0120869387833, 4.0, 40.0, 18.0, 0.5571594643640032], [711, 15.247977292503393, 78.47662178969892, 4.0, 40.0, 18.0, 0.7221566962049696], [712, 15.117226234056941, 1500.6389271531489, 4.0, 40.0, 18.0, 0.2529794077617841], [713, 0.8943924071003173, 930.034122187956, 4.0, 40.0, 18.0, 0.720893590508253], [714, 13.984266724862009, 248.39513882779175, 4.0, 40.0, 18.0, 0.482014990063243], [715, 2.223386857337172, 146.40521301654854, 4.0, 40.0, 18.0, 1.0], [716, 3.6592465325879995, 575.4090431647485, 4.0, 40.0, 18.0, 0.6909654634158583], [717, 11.527160608092421, 470.6255362849969, 4.0, 40.0, 18.0, 0.44251677112794247], [718, 1.5345921160899505, 933.6775403022946, 4.0, 40.0, 18.0, 0.6907209763934317], [719, 13.495474661190109, 448.9712669713969, 4.0, 40.0, 18.0, 0.3995449302285382], [720, 2.8143152440490398, 754.914262233562, 4.0, 40.0, 18.0, 0.6740759572520959], [721, 12.386140618404264, 617.4674074358072, 4.0, 40.0, 18.0, 
0.3874563897442213], [722, 2.489191894483384, 1858.0378326075042, 4.0, 40.0, 18.0, 0.5455838604250699], [723, 14.945166788553095, 977.6845992440178, 4.0, 40.0, 18.0, 0.287696335767216], [724, 15.062033439978197, 586.7926249125157, 4.0, 40.0, 18.0, 0.3301358785168809], [725, 13.315486637462875, 1276.1039521680034, 4.0, 40.0, 18.0, 0.30085902284906213], [726, 5.5250731877212775, 1510.3496624936267, 4.0, 40.0, 18.0, 0.47230268898558975], [727, 2.4576833472407893, 1678.2310828766163, 4.0, 40.0, 18.0, 0.5598075789527722], [728, 15.369419811333646, 1910.4883745286518, 4.0, 40.0, 18.0, 0.23338937847894647], [729, 5.214285816910424, 956.0268177113105, 4.0, 40.0, 18.0, 0.5381743176242986], [730, 1.3077047948296436, 1976.678926789535, 4.0, 40.0, 18.0, 0.581328676181729], [731, 1.3390940699166374, 1338.4119742971081, 4.0, 40.0, 18.0, 0.6369868698126483], [732, 0.9447404261407903, 68.99171372172654, 4.0, 40.0, 18.0, 1.0], [733, 4.371945005856279, 1890.1522564525435, 4.0, 40.0, 18.0, 0.4821625964777501], [734, 13.618513853095315, 1646.6200277329588, 4.0, 40.0, 18.0, 0.27590590005053567], [735, 10.775468892185003, 1976.3410362610828, 4.0, 40.0, 18.0, 0.32235853074822723], [736, 1.0811406407288038, 1443.2954720813782, 4.0, 40.0, 18.0, 0.6359822206273342], [737, 1.0995547545955375, 376.7519579456747, 4.0, 40.0, 18.0, 1.0], [738, 4.301003721146503, 77.81431231638504, 4.0, 40.0, 18.0, 1.0], [739, 15.080118223773836, 76.48070671447462, 4.0, 40.0, 18.0, 0.7425229728210355], [740, 3.574588871061194, 1433.6517065601122, 4.0, 40.0, 18.0, 0.5404133911667138], [741, 3.266803748137959, 768.0183481079831, 4.0, 40.0, 18.0, 0.6508517372826103], [742, 11.43373389005566, 1204.420414885311, 4.0, 40.0, 18.0, 0.3463125292925278], [743, 2.35173742155411, 489.22149464059817, 4.0, 40.0, 18.0, 0.7917123610736267], [744, 12.905201804335201, 1152.2035380398245, 4.0, 40.0, 18.0, 0.3175083258225495], [745, 0.4524338972995281, 1136.9338973099766, 4.0, 40.0, 18.0, 0.7038516116029139], [746, 
4.3175697121860726, 650.7705528177835, 4.0, 40.0, 18.0, 0.6374002296428714], [747, 0.39510622901542636, 426.23806126288514, 4.0, 40.0, 18.0, 0.9455163973623548], [748, 14.67896127887815, 1456.1132115988173, 4.0, 40.0, 18.0, 0.26381913942493096], [749, 0.3555654643300439, 1560.595803351052, 4.0, 40.0, 18.0, 0.6548400085242511], [750, 10.055333964164149, 1696.7235455170785, 4.0, 40.0, 18.0, 0.34975385610025994], [751, 3.585948608898914, 1420.1272427420804, 4.0, 40.0, 18.0, 0.5412431767198689], [752, 13.239128463560627, 1768.3159811873886, 4.0, 40.0, 18.0, 0.27813109147915616], [753, 16.581173790050137, 1774.982634205434, 4.0, 40.0, 18.0, 0.21572823709191316], [754, 12.405257599909454, 1035.7129388087144, 4.0, 40.0, 18.0, 0.3375681394414063], [755, 14.025105691317485, 525.9186634463808, 4.0, 40.0, 18.0, 0.3664283779805332], [756, 2.2627180677654173, 1610.1215579758955, 4.0, 40.0, 18.0, 0.5725099766756114], [757, 3.8456742854216834, 974.6240117342162, 4.0, 40.0, 18.0, 0.5857546073747673], [758, 16.197110055633342, 811.1043442328842, 4.0, 40.0, 18.0, 0.27696357422853746], [759, 2.665567780129208, 1336.0363928181405, 4.0, 40.0, 18.0, 0.5834163594521643], [760, 15.878764458243273, 462.27250530519643, 4.0, 40.0, 18.0, 0.33885961769632117], [761, 16.657701804149767, 624.3021245972809, 4.0, 40.0, 18.0, 0.28958737547857455], [762, 12.4319797348128, 27.981780002591723, 4.0, 40.0, 18.0, 1.0], [763, 2.012088284960268, 203.25557099559074, 4.0, 40.0, 18.0, 1.0], [764, 12.07149921493987, 676.2404177502207, 4.0, 40.0, 18.0, 0.3852280102935016], [765, 3.2847123226633084, 480.74450340340275, 4.0, 40.0, 18.0, 0.7496224546070713], [766, 1.015608512384193, 989.0806757950718, 4.0, 40.0, 18.0, 0.7037137312266106], [767, 4.360325936889063, 1164.2479267295978, 4.0, 40.0, 18.0, 0.5405340479152868], [768, 18.3553886093106, 812.878232633861, 4.0, 40.0, 18.0, 0.23685367650290476], [769, 12.819641028845409, 99.85617473371678, 4.0, 40.0, 18.0, 0.7470961521980516], [770, 14.887398175547531, 
955.1368509262782, 4.0, 40.0, 18.0, 0.29053086854876525], [771, 0.3049978880800497, 240.7835315879952, 4.0, 40.0, 18.0, 1.0], [772, 17.127897067130593, 128.52248922310935, 4.0, 40.0, 18.0, 0.5179783036052341], [773, 14.531605080634762, 1792.6363312400038, 4.0, 40.0, 18.0, 0.25257580205051355], [774, 18.277613851490095, 323.99960113230503, 4.0, 40.0, 18.0, 0.33223349252333434], [775, 14.093652353671214, 834.2167547538646, 4.0, 40.0, 18.0, 0.3187851943897908], [776, 13.976500995865905, 1971.4792814333991, 4.0, 40.0, 18.0, 0.25760437353957355], [777, 1.952474272573163, 295.44785041285417, 4.0, 40.0, 18.0, 1.0], [778, 0.08806775032605585, 113.73233728362332, 4.0, 40.0, 18.0, 1.0], [779, 2.453415081795997, 910.7242479224352, 4.0, 40.0, 18.0, 0.6545424458355116], [780, 3.6143013216859554, 1413.6379225118915, 4.0, 40.0, 18.0, 0.5408465932926981], [781, 13.999693357590361, 1563.0900153598675, 4.0, 40.0, 18.0, 0.2720721321312241], [782, 1.3264017323236081, 593.4245618091892, 4.0, 40.0, 18.0, 0.7960560666824285], [783, 10.191735427999252, 1600.8013150448471, 4.0, 40.0, 18.0, 0.3513108574021523], [784, 4.051250394046126, 985.8585314218301, 4.0, 40.0, 18.0, 0.5760435104642214], [785, 14.807307671854748, 320.66778786870094, 4.0, 40.0, 18.0, 0.4147264064944534], [786, 4.268574548409049, 743.3937271179888, 4.0, 40.0, 18.0, 0.6147561722735536], [787, 0.8542702954421335, 1638.3696405059818, 4.0, 40.0, 18.0, 0.6260693567962147], [788, 12.390226678120298, 35.71317038290776, 4.0, 40.0, 18.0, 1.0], [789, 1.3639243293316943, 1868.2981482102077, 4.0, 40.0, 18.0, 0.5868582703677531], [790, 2.188831794594288, 1711.7797448804686, 4.0, 40.0, 18.0, 0.5671193765550071], [791, 15.758645055648472, 530.8267105297234, 4.0, 40.0, 18.0, 0.32530622487733385], [792, 13.059184046144315, 546.4123943138928, 4.0, 40.0, 18.0, 0.38516009047684285], [793, 0.020813953740415947, 1879.7985181999034, 4.0, 40.0, 18.0, 0.6407069486576307], [794, 13.976962090956986, 618.0348813826838, 4.0, 40.0, 18.0, 
0.34917404485508285], [795, 0.14640206387335253, 1894.464504292166, 4.0, 40.0, 18.0, 0.6343405003196935], [796, 2.5511425850656826, 601.9232883752466, 4.0, 40.0, 18.0, 0.7331806458577551], [797, 12.847672292440175, 1261.598687370912, 4.0, 40.0, 18.0, 0.3115043396222614], [798, 12.344080026363585, 622.9195855768322, 4.0, 40.0, 18.0, 0.38750786086516037], [799, 0.9140585736086912, 1918.1368313365947, 4.0, 40.0, 18.0, 0.6009262777088463], [800, 0.35235189836048986, 1200.7010354775734, 4.0, 40.0, 18.0, 0.6986593717380764], [801, 0.42408892485570027, 775.8273006039032, 4.0, 40.0, 18.0, 0.7799235039007982], [802, 12.149370594834728, 49.7844001872184, 4.0, 40.0, 18.0, 1.0], [803, 2.9452057386165458, 675.5148540411165, 4.0, 40.0, 18.0, 0.6903658838768196], [804, 0.8650275146096837, 490.3551333869768, 4.0, 40.0, 18.0, 0.8678914535667861], [805, 11.264888723035527, 279.3339740753467, 4.0, 40.0, 18.0, 0.5452519060100363], [806, 0.9395446287184084, 268.10969545806984, 4.0, 40.0, 18.0, 1.0], [807, 12.320807236805747, 1300.6221934228442, 4.0, 40.0, 18.0, 0.3204935123878617], [808, 11.70632945413205, 1441.745445288523, 4.0, 40.0, 18.0, 0.32579640997107984], [809, 0.35145916095542207, 585.0453061453562, 4.0, 40.0, 18.0, 0.8485895124139439], [810, 4.445773728023266, 1258.4893180743315, 4.0, 40.0, 18.0, 0.5273167945622266], [811, 13.157817112034227, 1302.4115111415156, 4.0, 40.0, 18.0, 0.3027236223450973], [812, 2.6038670669485415, 575.2770357029218, 4.0, 40.0, 18.0, 0.7406538623606801], [813, 14.494374239199553, 950.3095667625149, 4.0, 40.0, 18.0, 0.29917920893048755], [814, 2.678555621471801, 875.2773613567432, 4.0, 40.0, 18.0, 0.6519207852377271], [815, 5.462519171772081, 1565.7578258848418, 4.0, 40.0, 18.0, 0.470256327574106], [816, 3.6925405136339666, 83.83399528926779, 4.0, 40.0, 18.0, 1.0], [817, 0.2115348826045702, 111.75885341485372, 4.0, 40.0, 18.0, 1.0], [818, 14.402890184387994, 692.8296053618269, 4.0, 40.0, 18.0, 0.32817588075576654], [819, 14.053683308306466, 
554.168037466839, 4.0, 40.0, 18.0, 0.3594465509401986], [820, 9.822034405531422, 398.14312654946264, 4.0, 40.0, 18.0, 0.5215505855665795], [821, 10.80218553706231, 566.8990284356657, 4.0, 40.0, 18.0, 0.43702722233564545], [822, 4.226574534404808, 1895.5571571711712, 4.0, 40.0, 18.0, 0.4862284241682766], [823, 15.949911231789805, 328.6047615035785, 4.0, 40.0, 18.0, 0.38379127533171353], [824, 1.7421386127462293, 575.514140642602, 4.0, 40.0, 18.0, 0.7826957935291143], [825, 13.812861891077372, 530.6744036266848, 4.0, 40.0, 18.0, 0.37037799072499694], [826, 15.396540461138336, 1005.8720769068121, 4.0, 40.0, 18.0, 0.27668394182518674], [827, 3.7434074240616857, 1624.9930754111701, 4.0, 40.0, 18.0, 0.519149602832212], [828, 0.07647515233241453, 1077.467134469636, 4.0, 40.0, 18.0, 0.7309306554616624], [829, 16.768563736003642, 1401.9601662982361, 4.0, 40.0, 18.0, 0.2269830652049164], [830, 15.636220365831498, 1472.3493807122168, 4.0, 40.0, 18.0, 0.24437477012394507], [831, 1.4410911226772205, 1016.3433066720726, 4.0, 40.0, 18.0, 0.6794864004396505], [832, 13.429268731173906, 1636.0324925040434, 4.0, 40.0, 18.0, 0.2802160985454364], [833, 1.7001405956665383, 497.6687682538203, 4.0, 40.0, 18.0, 0.8204325318854587], [834, 2.2801805994410946, 1982.1214434171384, 4.0, 40.0, 18.0, 0.5450168923984778], [835, 3.6254909244031404, 851.6421890102177, 4.0, 40.0, 18.0, 0.6168636797014945], [836, 2.4674618747481962, 25.072612129755036, 4.0, 40.0, 18.0, 1.0], [837, 11.309361392384687, 74.83038114989517, 4.0, 40.0, 18.0, 1.0], [838, 2.755439076643698, 517.1786134321785, 4.0, 40.0, 18.0, 0.757905306884278], [839, 2.877880558773736, 513.3027576117254, 4.0, 40.0, 18.0, 0.7536759982981875], [840, 0.12078775050444213, 507.1880300488068, 4.0, 40.0, 18.0, 0.8972672699498528], [841, 1.2444480307310486, 850.347297144776, 4.0, 40.0, 18.0, 0.7220547696064394], [842, 12.346750089330126, 432.5882287162059, 4.0, 40.0, 18.0, 0.4326930156506844], [843, 1.5889424632997833, 1340.7825445181215, 4.0, 40.0, 
18.0, 0.6261528541397215], [844, 3.222459948811766, 127.94951038145253, 4.0, 40.0, 18.0, 1.0], [845, 13.84976520946138, 1860.6028498991764, 4.0, 40.0, 18.0, 0.2630875407495844], [846, 10.809450602607129, 697.7146030768455, 4.0, 40.0, 18.0, 0.4122864658711165], [847, 2.6517370985662945, 524.882015156666, 4.0, 40.0, 18.0, 0.7594546559239661], [848, 0.33151114416620175, 148.02595643232797, 4.0, 40.0, 18.0, 1.0], [849, 12.561384381668459, 1707.4417184851957, 4.0, 40.0, 18.0, 0.2952058934400186], [850, 0.2911780698329245, 1984.1116309569888, 4.0, 40.0, 18.0, 0.6216635522127714], [851, 15.034769662755576, 506.00476943493436, 4.0, 40.0, 18.0, 0.3471045303079199], [852, 13.613211001049788, 1621.8526579690747, 4.0, 40.0, 18.0, 0.27712957684978695], [853, 14.40687818096237, 123.64644020573355, 4.0, 40.0, 18.0, 0.6169858411816644], [854, 15.294320483647374, 1024.5299577076414, 4.0, 40.0, 18.0, 0.27731194025122224], [855, 11.771252185889919, 792.1702398240161, 4.0, 40.0, 18.0, 0.3765789395910771], [856, 12.712316898436608, 875.0992355058326, 4.0, 40.0, 18.0, 0.3454188902225476], [857, 1.6596262198930225, 1514.836482990111, 4.0, 40.0, 18.0, 0.6045344845451512], [858, 9.922203260880732, 1763.5137529481794, 4.0, 40.0, 18.0, 0.34982745088830114], [859, 13.988993685467582, 44.92657644876597, 4.0, 40.0, 18.0, 1.0], [860, 11.361187941096702, 949.3578953048755, 4.0, 40.0, 18.0, 0.3697459466883972], [861, 4.7679721273197755, 270.8999790374226, 4.0, 40.0, 18.0, 0.8368747879513082], [862, 13.261838815599832, 511.2917526138542, 4.0, 40.0, 18.0, 0.3883826771684269], [863, 0.3471091839154674, 493.5988467910215, 4.0, 40.0, 18.0, 0.8933765780267001], [864, 0.8367956247061774, 721.331726759914, 4.0, 40.0, 18.0, 0.7757173979973006], [865, 15.587056868126549, 577.3829475502033, 4.0, 40.0, 18.0, 0.3215678688285604], [866, 5.558999515029088, 1219.3662366634514, 4.0, 40.0, 18.0, 0.4952216968075769], [867, 3.501123798345038, 36.39878879439331, 4.0, 40.0, 18.0, 1.0], [868, 12.478975729494595, 
552.9496226672502, 4.0, 40.0, 18.0, 0.3983166754960929], [869, 2.191668262788809, 212.63346482514075, 4.0, 40.0, 18.0, 1.0], [870, 0.6651462967474171, 332.0085086621616, 4.0, 40.0, 18.0, 1.0], [871, 0.6520694894744892, 1525.5004930335242, 4.0, 40.0, 18.0, 0.6456586439354453], [872, 3.2854463912199674, 1529.7019305477327, 4.0, 40.0, 18.0, 0.5421830391732159], [873, 4.26479061271794, 1650.8828736240553, 4.0, 40.0, 18.0, 0.5004064865608115], [874, 1.397417393603562, 520.5374198976365, 4.0, 40.0, 18.0, 0.8244495034316459], [875, 17.4020075595963, 1638.5497268464483, 4.0, 40.0, 18.0, 0.2063952702935425], [876, 17.266867430259012, 517.7366361387418, 4.0, 40.0, 18.0, 0.29733518877058607], [877, 4.651958853541073, 1991.7687195777119, 4.0, 40.0, 18.0, 0.4671262272005749], [878, 4.6773427238229655, 1887.7061658877649, 4.0, 40.0, 18.0, 0.47336803999065985], [879, 2.3367373636181195, 26.395310323466973, 4.0, 40.0, 18.0, 1.0], [880, 1.0425516720644656, 1583.9473784885345, 4.0, 40.0, 18.0, 0.6233641218149952], [881, 10.801450726991098, 515.3670963596301, 4.0, 40.0, 18.0, 0.449610086321201], [882, 3.6448084496851765, 1330.078454856822, 4.0, 40.0, 18.0, 0.5476344589935176], [883, 0.8186557151880045, 1751.4627850640395, 4.0, 40.0, 18.0, 0.6177246429711354], [884, 3.8456552890447084, 484.5436617996804, 4.0, 40.0, 18.0, 0.7208386596494314], [885, 14.179261861894211, 45.97764684225157, 4.0, 40.0, 18.0, 1.0], [886, 12.469770353975969, 1396.0546701729043, 4.0, 40.0, 18.0, 0.31188432431254715], [887, 0.9979036808746822, 482.5891193944024, 4.0, 40.0, 18.0, 0.8653929817154723], [888, 3.6543410591156675, 1511.589633483546, 4.0, 40.0, 18.0, 0.5309946902593086], [889, 2.3109790245019717, 1486.5918912322754, 4.0, 40.0, 18.0, 0.5816964312441228], [890, 1.5279860050850842, 1179.8792252646228, 4.0, 40.0, 18.0, 0.6495763551188402], [891, 16.009572858609232, 1527.8415977201078, 4.0, 40.0, 18.0, 0.2349395566333903], [892, 0.625553047297132, 1375.382578491901, 4.0, 40.0, 18.0, 0.6632309139206402], 
[893, 1.8700549420915475, 1459.3607464749566, 4.0, 40.0, 18.0, 0.6017235731715382], [894, 4.7203670143848075, 1233.955835525839, 4.0, 40.0, 18.0, 0.5206216359174135], [895, 2.3469275787028274, 143.68895877079933, 4.0, 40.0, 18.0, 1.0], [896, 13.293778841935117, 932.4615150655806, 4.0, 40.0, 18.0, 0.3269208744678481], [897, 2.2745822436563925, 1770.8350103208168, 4.0, 40.0, 18.0, 0.5595061389646725], [898, 13.666554850823916, 1580.8125987337846, 4.0, 40.0, 18.0, 0.2779646088948095], [899, 12.817842657558726, 1103.2719645390746, 4.0, 40.0, 18.0, 0.32279005191624505], [900, 12.625966008465072, 1081.9735094386326, 4.0, 40.0, 18.0, 0.32889863628161914], [901, 13.723473591368872, 934.2190141641606, 4.0, 40.0, 18.0, 0.3174030111597273], [902, 2.1392136415686838, 1777.8941529681579, 4.0, 40.0, 18.0, 0.5640434960950943], [903, 13.969768661699815, 456.11253120090186, 4.0, 40.0, 18.0, 0.3855585390092459], [904, 13.492296797506846, 823.6488026751222, 4.0, 40.0, 18.0, 0.33304210963356434], [905, 2.4006379347625755, 1208.4337139265797, 4.0, 40.0, 18.0, 0.6090798255363604], [906, 3.2996878926064643, 1337.1106049529992, 4.0, 40.0, 18.0, 0.5594818899137194], [907, 0.6492383211452042, 729.2503406045738, 4.0, 40.0, 18.0, 0.7824979378024204], [908, 17.346096572563948, 1116.1596925144515, 4.0, 40.0, 18.0, 0.23304827148437396], [909, 11.76715542138179, 231.2640104783427, 4.0, 40.0, 18.0, 0.5685466033812662], [910, 3.460305294910314, 463.3425516518295, 4.0, 40.0, 18.0, 0.7503076720840286], [911, 9.905165123946508, 1623.4591525304995, 4.0, 40.0, 18.0, 0.35674684537427404], [912, 12.519429777919818, 1087.8976955000057, 4.0, 40.0, 18.0, 0.3308106440616603], [913, 12.319860344214563, 1170.7922649261436, 4.0, 40.0, 18.0, 0.32899488387402576], [914, 2.2228893559090825, 392.33817468647754, 4.0, 40.0, 18.0, 0.8602564526557024], [915, 3.8067427291656584, 1404.3857152789735, 4.0, 40.0, 18.0, 0.5349146406180674], [916, 2.845307779535298, 1224.5948480262102, 4.0, 40.0, 18.0, 0.5894087300981499], 
[917, 1.5978325186990006, 733.7915602697259, 4.0, 40.0, 18.0, 0.7356911118556473], [918, 12.375250922216058, 1094.1405838746987, 4.0, 40.0, 18.0, 0.333446679789058], [919, 12.664249513564506, 1338.942009483894, 4.0, 40.0, 18.0, 0.31096660152908345], [920, 4.730057276230532, 707.6882882087757, 4.0, 40.0, 18.0, 0.6047021685017591], [921, 1.34200905881488, 445.7578236962116, 4.0, 40.0, 18.0, 0.869228677565014], [922, 19.189200552837196, 1271.8341781178929, 4.0, 40.0, 18.0, 0.19257084213389974], [923, 0.20754297998301063, 1975.762382694505, 4.0, 40.0, 18.0, 0.6256652310084061], [924, 1.5847315139362266, 1653.4738687150048, 4.0, 40.0, 18.0, 0.5950294363507569], [925, 0.9029107915704686, 1677.1085267452029, 4.0, 40.0, 18.0, 0.6206617133297019], [926, 1.9910239556107405, 1127.1444321415468, 4.0, 40.0, 18.0, 0.6373936481003256], [927, 1.6339303208191573, 868.9373037304671, 4.0, 40.0, 18.0, 0.6998676675829426], [928, 11.786132320694488, 357.5789828613401, 4.0, 40.0, 18.0, 0.47899034894400444], [929, 2.572490815590662, 572.5060453942956, 4.0, 40.0, 18.0, 0.7432340613970874], [930, 11.324076732318105, 1717.409772456833, 4.0, 40.0, 18.0, 0.32109029940016276], [931, 3.2033824826704946, 1381.766083288084, 4.0, 40.0, 18.0, 0.5584738695440018], [932, 0.29935076673756944, 346.4940759742787, 4.0, 40.0, 18.0, 1.0], [933, 3.3149473633066835, 369.4040760496109, 4.0, 40.0, 18.0, 0.8199639826820321], [934, 14.957461489742554, 392.1766853209864, 4.0, 40.0, 18.0, 0.38210275992583703], [935, 10.252068671560986, 878.8502929934763, 4.0, 40.0, 18.0, 0.4036874990181745], [936, 2.3298973235475184, 192.02823441675403, 4.0, 40.0, 18.0, 1.0], [937, 16.133257692608076, 68.96615475196143, 4.0, 40.0, 18.0, 0.7361223643796446], [938, 15.384049651537264, 674.7480437355557, 4.0, 40.0, 18.0, 0.3092093102129602], [939, 11.811459137224123, 1172.9652532967907, 4.0, 40.0, 18.0, 0.34018306472707666], [940, 0.25360697410958943, 394.65344289624267, 4.0, 40.0, 18.0, 1.0], [941, 1.0153955963041492, 
1405.8976942428362, 4.0, 40.0, 18.0, 0.6429231137926095], [942, 15.023097872524138, 1612.1185311349077, 4.0, 40.0, 18.0, 0.24963010031947305], [943, 12.674138874068834, 1529.2419566313745, 4.0, 40.0, 18.0, 0.30084850061216206], [944, 14.802983843618815, 164.936432500173, 4.0, 40.0, 18.0, 0.5380645237956828], [945, 1.7974469089941434, 780.51386038894, 4.0, 40.0, 18.0, 0.7134924720574082], [946, 4.917161606969317, 1062.7610082561107, 4.0, 40.0, 18.0, 0.5336437867991229], [947, 12.4743031269495, 929.6669079224592, 4.0, 40.0, 18.0, 0.34558765008358927], [948, 11.621068659871398, 1118.90290438318, 4.0, 40.0, 18.0, 0.34840901764913146], [949, 16.280946670743294, 410.62405010321527, 4.0, 40.0, 18.0, 0.34469277018725086], [950, 3.0558105663569095, 175.23515739061907, 4.0, 40.0, 18.0, 1.0], [951, 11.340051238252313, 180.51325888810882, 4.0, 40.0, 18.0, 0.6411003699273948], [952, 14.277484175725977, 826.0645908403268, 4.0, 40.0, 18.0, 0.3155862057747023], [953, 1.3169760846316896, 1007.6922592236383, 4.0, 40.0, 18.0, 0.6864551690008719], [954, 15.98081088902735, 1312.1921207392581, 4.0, 40.0, 18.0, 0.24634828994169444], [955, 2.7038090341911643, 1070.423555481202, 4.0, 40.0, 18.0, 0.6162027689566242], [956, 0.13162317514869004, 968.7399315897476, 4.0, 40.0, 18.0, 0.7484223591940241], [957, 0.8895438034073955, 394.7437938171988, 4.0, 40.0, 18.0, 0.9472596588449149], [958, 17.6054051883661, 1164.4526642461376, 4.0, 40.0, 18.0, 0.22543712181505488], [959, 0.9803329696935392, 966.5219069922134, 4.0, 40.0, 18.0, 0.7095950885633576], [960, 12.40450221251569, 40.73367000616375, 4.0, 40.0, 18.0, 1.0], [961, 14.38877907140237, 1945.7004454607559, 4.0, 40.0, 18.0, 0.2506475381119266], [962, 13.839385771164153, 1721.227435947506, 4.0, 40.0, 18.0, 0.2678203713392895], [963, 11.300003819707515, 899.41463455827, 4.0, 40.0, 18.0, 0.3760826184598671], [964, 12.621759878623221, 1509.09256699155, 4.0, 40.0, 18.0, 0.3030571471492336], [965, 4.446005752769037, 1215.6389911853855, 4.0, 40.0, 
18.0, 0.531810221189528], [966, 14.12027533741313, 470.96365260444, 4.0, 40.0, 18.0, 0.37783846710475805], [967, 1.2335924052287428, 668.8036679063714, 4.0, 40.0, 18.0, 0.7731438909319109], [968, 13.891702310379722, 1433.5430891509743, 4.0, 40.0, 18.0, 0.28073620473004], [969, 13.07691706050377, 1990.0562638873407, 4.0, 40.0, 18.0, 0.27428850854309267], [970, 0.3741296303842674, 1992.3569398652019, 4.0, 40.0, 18.0, 0.6175748266261021], [971, 1.7737879178494667, 259.0842415595515, 4.0, 40.0, 18.0, 1.0], [972, 11.844469223850624, 685.3142445680993, 4.0, 40.0, 18.0, 0.3895263456632979], [973, 15.994151464447118, 195.3837928601646, 4.0, 40.0, 18.0, 0.46969256643285157], [974, 15.07415742392848, 1188.7953327292678, 4.0, 40.0, 18.0, 0.27083084409540814], [975, 11.51631080098148, 990.392849343815, 4.0, 40.0, 18.0, 0.3621093095794676], [976, 3.7324910274297363, 548.3082993159786, 4.0, 40.0, 18.0, 0.6979905116968695], [977, 10.126931622487524, 1311.0628779016936, 4.0, 40.0, 18.0, 0.3695539493716479], [978, 14.384868256664273, 1801.2232282031146, 4.0, 40.0, 18.0, 0.2550010957197734], [979, 1.5941497499646031, 21.42087368419864, 4.0, 40.0, 18.0, 1.0], [980, 14.311725315503883, 1641.4679331877837, 4.0, 40.0, 18.0, 0.26209851444974125], [981, 11.702490712594281, 1608.85589034457, 4.0, 40.0, 18.0, 0.3178295943147355], [982, 1.6831440624308462, 1937.773598507266, 4.0, 40.0, 18.0, 0.569613400952179], [983, 10.04449510773711, 1337.8640599219123, 4.0, 40.0, 18.0, 0.3697144533402944], [984, 12.265338523875279, 685.4790451609148, 4.0, 40.0, 18.0, 0.3791954762658907], [985, 14.047504992714842, 907.1842125697408, 4.0, 40.0, 18.0, 0.3126033510357102], [986, 0.0812992206315577, 237.74215754091662, 4.0, 40.0, 18.0, 1.0], [987, 16.240518744406902, 383.0087836084393, 4.0, 40.0, 18.0, 0.3549175791697821], [988, 0.06883842704250531, 1404.314806666811, 4.0, 40.0, 18.0, 0.6845397642348533], [989, 13.787439703636503, 497.45265536719654, 4.0, 40.0, 18.0, 0.3790056810984859], [990, 
0.27157705024292866, 548.9587537120956, 4.0, 40.0, 18.0, 0.8688478453937651], [991, 15.892046387870389, 284.8597712136106, 4.0, 40.0, 18.0, 0.40718189317192766], [992, 9.848824144688235, 1846.5677455100385, 4.0, 40.0, 18.0, 0.34791635672450527], [993, 2.9839837324850187, 628.7377084119697, 4.0, 40.0, 18.0, 0.7035413612230239], [994, 9.743893770133019, 872.7922343672578, 4.0, 40.0, 18.0, 0.4160575897456665], [995, 3.1725132243825644, 1719.0748664697376, 4.0, 40.0, 18.0, 0.5315448268622537], [996, 14.16220465364361, 1380.1146019027651, 4.0, 40.0, 18.0, 0.27806357499245316], [997, 12.047099646751901, 808.0087418067287, 4.0, 40.0, 18.0, 0.3681542492764767], [998, 0.5304190367814037, 1655.4241578426804, 4.0, 40.0, 18.0, 0.6381701152771689], [999, 14.549367192037938, 1884.0702403816829, 4.0, 40.0, 18.0, 0.24933330781290222], [1000, 10.969079225753841, 51.07680155924689, 4.0, 40.0, 18.0, 1.0], [1001, 14.379505126968896, 1550.2450716680714, 4.0, 40.0, 18.0, 0.2650038234454316], [1002, 12.565066512565895, 208.72732881755675, 4.0, 40.0, 18.0, 0.5628847986029153], [1003, 16.709875990586724, 1443.2650546498917, 4.0, 40.0, 18.0, 0.2261108268887247], [1004, 13.627628051755496, 1244.7532834905965, 4.0, 40.0, 18.0, 0.2964599992301991], [1005, 2.5697766087191978, 280.23877982048634, 4.0, 40.0, 18.0, 1.0], [1006, 2.8154781428106443, 1941.9545128347193, 4.0, 40.0, 18.0, 0.5290188622609497], [1007, 2.6333417921411346, 1076.5628624835515, 4.0, 40.0, 18.0, 0.6181720182582942], [1008, 17.28877762619281, 1874.0882382622772, 4.0, 40.0, 18.0, 0.20020956318710967], [1009, 11.25986242885082, 1152.8672697173233, 4.0, 40.0, 18.0, 0.35436490976918505], [1010, 2.0208419061258196, 1515.6593149763987, 4.0, 40.0, 18.0, 0.5903298092886061], [1011, 13.814286645733162, 983.7863318379204, 4.0, 40.0, 18.0, 0.31074136158799803], [1012, 12.444316410401754, 982.2880542798202, 4.0, 40.0, 18.0, 0.3415192818676255], [1013, 0.24952696616418857, 1036.7053004115012, 4.0, 40.0, 18.0, 0.7301535922653354], [1014, 
13.639792484664886, 295.6283233387311, 4.0, 40.0, 18.0, 0.4594129236510011], [1015, 13.996314281774813, 549.8992191379771, 4.0, 40.0, 18.0, 0.3616955234209215], [1016, 15.921205656186562, 1729.2266149159716, 4.0, 40.0, 18.0, 0.22883476115101697], [1017, 14.134947206063751, 393.084954980827, 4.0, 40.0, 18.0, 0.4019043302681516], [1018, 8.66595361238791, 282.97653182782375, 4.0, 40.0, 18.0, 0.6408100008094858], [1019, 14.71138262666598, 60.97269071044567, 4.0, 40.0, 18.0, 1.0], [1020, 2.382268438061506, 917.076536395473, 4.0, 40.0, 18.0, 0.6564118004625531], [1021, 1.3609303603948204, 1001.8006758262095, 4.0, 40.0, 18.0, 0.6855880264411957], [1022, 3.0509720820353223, 1130.0439406595988, 4.0, 40.0, 18.0, 0.593504458976072], [1023, 2.5284957625784505, 1702.6925102059365, 4.0, 40.0, 18.0, 0.5553988908104479], [1024, 15.292318778395282, 164.5565070065693, 4.0, 40.0, 18.0, 0.5231762056992927], [1025, 12.709092524239063, 1337.838808917731, 4.0, 40.0, 18.0, 0.31013542004721883], [1026, 0.8664897269266063, 942.9426248090579, 4.0, 40.0, 18.0, 0.7195565723760619], [1027, 1.5536892902461885, 389.97253600178186, 4.0, 40.0, 18.0, 0.8975768383165542], [1028, 11.839748995092563, 35.17181476816696, 4.0, 40.0, 18.0, 1.0], [1029, 1.4292313804494934, 1740.6119702955277, 4.0, 40.0, 18.0, 0.5939137749467036], [1030, 1.3363135462155014, 1954.668669092828, 4.0, 40.0, 18.0, 0.5816961163661599], [1031, 2.8060572164743784, 1511.4491807486315, 4.0, 40.0, 18.0, 0.5609479158655598], [1032, 1.6806437079890457, 1155.2749326757967, 4.0, 40.0, 18.0, 0.6465713835704293], [1033, 13.284582033277403, 882.454433596463, 4.0, 40.0, 18.0, 0.33188616968173273], [1034, 12.994307863493844, 697.5910800347838, 4.0, 40.0, 18.0, 0.3597134973980741], [1035, 2.885707761520376, 1620.154783493823, 4.0, 40.0, 18.0, 0.5488468743484286], [1036, 1.7882389506939496, 1287.3707719696315, 4.0, 40.0, 18.0, 0.6243143714742815], [1037, 3.080079185483843, 493.3275516580027, 4.0, 40.0, 18.0, 0.7533814280844582], [1038, 
2.028637955841206, 945.993717340121, 4.0, 40.0, 18.0, 0.6662129702129556], [1039, 1.905818935924439, 1285.8710428640877, 4.0, 40.0, 18.0, 0.6196402258174992], [1040, 11.599243904922254, 502.6315839756573, 4.0, 40.0, 18.0, 0.43158195615645856], [1041, 13.790439405291108, 103.64426432372672, 4.0, 40.0, 18.0, 0.6904458550013394], [1042, 4.004757399049852, 67.01069464719315, 4.0, 40.0, 18.0, 1.0], [1043, 14.052258983357287, 222.10776228882835, 4.0, 40.0, 18.0, 0.501652903766701], [1044, 3.227388132069464, 1720.8643397860187, 4.0, 40.0, 18.0, 0.529568658548787], [1045, 10.959534497399321, 996.1713102210934, 4.0, 40.0, 18.0, 0.3747722821016364], [1046, 2.60867208302761, 1007.3720736381559, 4.0, 40.0, 18.0, 0.6301983748350348], [1047, 10.34723334800158, 1855.8283279760872, 4.0, 40.0, 18.0, 0.33661924254169556], [1048, 13.663771853028694, 416.4327323283204, 4.0, 40.0, 18.0, 0.40522574751884216], [1049, 2.926733647466812, 789.2318756014898, 4.0, 40.0, 18.0, 0.6604279212323652], [1050, 3.916952249795078, 1446.2509437736092, 4.0, 40.0, 18.0, 0.527588212328101], [1051, 1.5906441069726882, 227.17592299329092, 4.0, 40.0, 18.0, 1.0], [1052, 14.751478209741343, 1848.0800359979974, 4.0, 40.0, 18.0, 0.2466063624687294], [1053, 1.7180325519710022, 1784.2677435651497, 4.0, 40.0, 18.0, 0.5793416188712317], [1054, 1.770786282639183, 621.5628077582616, 4.0, 40.0, 18.0, 0.7636640570909402], [1055, 1.7634453515540762, 375.3213031714331, 4.0, 40.0, 18.0, 0.8985040082586395], [1056, 3.289665948419221, 197.39330994853793, 4.0, 40.0, 18.0, 1.0], [1057, 4.321995443049678, 739.2499295139535, 4.0, 40.0, 18.0, 0.613594208215663], [1058, 15.02796313304617, 684.1786932402887, 4.0, 40.0, 18.0, 0.31558062235784945], [1059, 1.7022514916524445, 132.8195324775657, 4.0, 40.0, 18.0, 1.0], [1060, 11.861196214382316, 1726.5552035226012, 4.0, 40.0, 18.0, 0.3092657884234668], [1061, 18.53219405604401, 1345.7697364706196, 4.0, 40.0, 18.0, 0.19962274578731354], [1062, 2.2018003860809126, 500.1605357192023, 4.0, 
40.0, 18.0, 0.7936425960912976], [1063, 3.4308501723109535, 1040.2294758666994, 4.0, 40.0, 18.0, 0.5916129904815498], [1064, 3.131734440587331, 355.50835691379723, 4.0, 40.0, 18.0, 0.8411527242435897], [1065, 2.9134224241220155, 229.23854805670973, 4.0, 40.0, 18.0, 1.0], [1066, 14.182680593516672, 1376.92496836616, 4.0, 40.0, 18.0, 0.27783514085000444], [1067, 1.0786995201198697, 822.2766104186211, 4.0, 40.0, 18.0, 0.7365328861340361], [1068, 2.013553879196522, 1400.2014508042894, 4.0, 40.0, 18.0, 0.602115791885369], [1069, 2.7438534237015824, 129.28535173944977, 4.0, 40.0, 18.0, 1.0], [1070, 1.042107562388861, 524.6764616151083, 4.0, 40.0, 18.0, 0.8407905441604646], [1071, 1.0363398012755658, 1090.763973585378, 4.0, 40.0, 18.0, 0.6848039829600742], [1072, 3.237176891741856, 1216.7050270741079, 4.0, 40.0, 18.0, 0.5752419675428667], [1073, 3.6846385922453555, 1022.0347809340302, 4.0, 40.0, 18.0, 0.5844967780427568], [1074, 16.48237661016666, 230.3628634944973, 4.0, 40.0, 18.0, 0.42753973279031254], [1075, 11.887183915844156, 1532.8909206881513, 4.0, 40.0, 18.0, 0.31745651977986195], [1076, 9.795987619152806, 368.4240752495669, 4.0, 40.0, 18.0, 0.5377963414991482], [1077, 1.5725362031766448, 613.1519482784821, 4.0, 40.0, 18.0, 0.7763502871890664], [1078, 16.14442837863233, 1789.6274344911128, 4.0, 40.0, 18.0, 0.22301517771401855], [1079, 3.010681177382583, 525.0947325429249, 4.0, 40.0, 18.0, 0.7418565423427339], [1080, 2.626050049856609, 87.26077739123517, 4.0, 40.0, 18.0, 1.0], [1081, 3.5461655770160334, 721.0241535448812, 4.0, 40.0, 18.0, 0.6508579971217162], [1082, 0.43942912734418327, 1903.2370431310198, 4.0, 40.0, 18.0, 0.6214608856436676], [1083, 14.201089097245724, 1418.6458888791833, 4.0, 40.0, 18.0, 0.27532909801989186], [1084, 15.697789759021756, 71.49820317665981, 4.0, 40.0, 18.0, 0.7420874614185295], [1085, 3.894179835326802, 788.2217364023721, 4.0, 40.0, 18.0, 0.619585289675786], [1086, 1.9379248240751918, 622.0804084562546, 4.0, 40.0, 18.0, 
0.7553662922444764], [1087, 2.9280871415937564, 368.8052447090669, 4.0, 40.0, 18.0, 0.8408900945428005], [1088, 14.192762025559434, 394.2730494780759, 4.0, 40.0, 18.0, 0.4001357721309345], [1089, 12.615397903867029, 1710.6393537745012, 4.0, 40.0, 18.0, 0.29387052050302004], [1090, 13.388229013777496, 1019.0095809250246, 4.0, 40.0, 18.0, 0.31715968686528295], [1091, 13.936394137395718, 494.0804879432962, 4.0, 40.0, 18.0, 0.3761526138501655], [1092, 2.6626983703910425, 1331.096613064419, 4.0, 40.0, 18.0, 0.5840920102088519], [1093, 1.6606063674304958, 1330.8185094517041, 4.0, 40.0, 18.0, 0.6243939242790345], [1094, 11.782712083760497, 1672.6850788957563, 4.0, 40.0, 18.0, 0.3131053486626985], [1095, 15.184319320772518, 1755.46332324165, 4.0, 40.0, 18.0, 0.2414660477031268], [1096, 12.919814450210065, 1136.5904190118838, 4.0, 40.0, 18.0, 0.3183027222015717], [1097, 13.397585541158913, 1420.02894350462, 4.0, 40.0, 18.0, 0.29137754426213663], [1098, 16.039296411196265, 49.81395132343184, 4.0, 40.0, 18.0, 1.0], [1099, 0.03223062208415195, 564.5007444947587, 4.0, 40.0, 18.0, 0.8738943923720012], [1100, 0.3965324674328574, 208.67144682210144, 4.0, 40.0, 18.0, 1.0], [1101, 0.2587454158897944, 492.9092213386541, 4.0, 40.0, 18.0, 0.8978433478561145], [1102, 2.4512282270355894, 969.5733077750301, 4.0, 40.0, 18.0, 0.6435722255483033], [1103, 13.037110622507559, 1978.7926611925182, 4.0, 40.0, 18.0, 0.2752066659479918], [1104, 0.09702801470292499, 1034.568683688643, 4.0, 40.0, 18.0, 0.737549579804514], [1105, 3.7243141512925186, 1252.7413042473172, 4.0, 40.0, 18.0, 0.5531106760686401], [1106, 3.7300823221064467, 1840.6662082327387, 4.0, 40.0, 18.0, 0.5050181829485517], [1107, 0.7390472827273027, 951.1868047048595, 4.0, 40.0, 18.0, 0.7237118480285332], [1108, 14.699203480418248, 790.876583285864, 4.0, 40.0, 18.0, 0.3100833375715713], [1109, 13.744642637595122, 295.211022577771, 4.0, 40.0, 18.0, 0.4565803091908225], [1110, 1.3234921726933306, 1411.1810785263465, 4.0, 40.0, 18.0, 
0.6291183294059258], [1111, 10.733819827658278, 1594.8047339872448, 4.0, 40.0, 18.0, 0.3394986221586111], [1112, 15.08396030415058, 1199.8789231506944, 4.0, 40.0, 18.0, 0.2699211489375776], [1113, 14.151848809304024, 1869.221704700846, 4.0, 40.0, 18.0, 0.2572297358093538], [1114, 15.425082152374571, 1928.9469914362653, 4.0, 40.0, 18.0, 0.2320335107672802], [1115, 3.1846824091441563, 56.284039786743236, 4.0, 40.0, 18.0, 1.0], [1116, 15.023770557348968, 89.36796050368335, 4.0, 40.0, 18.0, 0.6852249853431339], [1117, 1.9043719116248894, 810.3152033779872, 4.0, 40.0, 18.0, 0.701132851442026], [1118, 2.476744419555659, 512.9939632842418, 4.0, 40.0, 18.0, 0.7734321965867296], [1119, 5.638224412403431, 947.4189031293905, 4.0, 40.0, 18.0, 0.5248719760731467], [1120, 12.582768429433104, 889.0572248684971, 4.0, 40.0, 18.0, 0.34691433189565624], [1121, 13.209996807812674, 1703.2990868663117, 4.0, 40.0, 18.0, 0.2816933934759444], [1122, 2.571196462356841, 965.5725176028748, 4.0, 40.0, 18.0, 0.6391147897940703], [1123, 15.549839824671203, 860.7425022913329, 4.0, 40.0, 18.0, 0.28533522764891855], [1124, 2.290197107034295, 37.718444738812885, 4.0, 40.0, 18.0, 1.0], [1125, 15.296633491654081, 1631.6983714902794, 4.0, 40.0, 18.0, 0.24385832761706278], [1126, 1.9773314267862319, 824.5442653048218, 4.0, 40.0, 18.0, 0.6943966812207562], [1127, 13.585137721344376, 1533.1725756544176, 4.0, 40.0, 18.0, 0.282034008235188], [1128, 15.724474189120706, 1222.4003815287786, 4.0, 40.0, 18.0, 0.25636618186995946], [1129, 15.054773013057204, 28.788618774487205, 4.0, 40.0, 18.0, 1.0], [1130, 3.935755879936649, 451.2790557553616, 4.0, 40.0, 18.0, 0.7340605450519974], [1131, 12.060374322802431, 1695.8775129845455, 4.0, 40.0, 18.0, 0.3064044663551058], [1132, 0.5699100026349455, 257.01071505759273, 4.0, 40.0, 18.0, 1.0], [1133, 2.337056732561778, 1332.1933599946979, 4.0, 40.0, 18.0, 0.5967274659049205], [1134, 14.929161291553678, 1858.4160649572784, 4.0, 40.0, 18.0, 0.24298573172591478], [1135, 
16.37830189256581, 735.8850746398455, 4.0, 40.0, 18.0, 0.28103652066356616], [1136, 3.008408854260301, 732.031875140908, 4.0, 40.0, 18.0, 0.6715223796687633], [1137, 1.9402546562278868, 260.90790268695594, 4.0, 40.0, 18.0, 1.0], [1138, 1.7561927137871818, 1729.9272434246395, 4.0, 40.0, 18.0, 0.5819820807182222], [1139, 3.3182210403211245, 350.9610525818323, 4.0, 40.0, 18.0, 0.8352300172435078], [1140, 12.010770887643613, 1248.6746024116271, 4.0, 40.0, 18.0, 0.3305460939325732], [1141, 10.85941168088137, 71.47059309401556, 4.0, 40.0, 18.0, 1.0], [1142, 10.980415534436846, 166.37596074320402, 4.0, 40.0, 18.0, 0.6761774118115749], [1143, 1.5617875088281348, 675.6474977311742, 4.0, 40.0, 18.0, 0.7552429145001589], [1144, 13.165972382410674, 1899.8163826372215, 4.0, 40.0, 18.0, 0.2748329138010692], [1145, 12.190708636744622, 751.015848145742, 4.0, 40.0, 18.0, 0.3715866481019394], [1146, 1.3287748158002197, 1381.2224180495753, 4.0, 40.0, 18.0, 0.6323922963778769], [1147, 14.349986748903923, 662.8479716854886, 4.0, 40.0, 18.0, 0.33361783156616204], [1148, 0.8117701028446691, 1330.9808890399793, 4.0, 40.0, 18.0, 0.6605914629692309], [1149, 10.694801028864255, 1180.9791185972229, 4.0, 40.0, 18.0, 0.3654725380201102], [1150, 12.522745003700294, 1288.6646026503458, 4.0, 40.0, 18.0, 0.3169286746814777], [1151, 14.125243173427773, 1704.1461229143479, 4.0, 40.0, 18.0, 0.2631058145280849], [1152, 12.640158288260164, 1325.0179824092363, 4.0, 40.0, 18.0, 0.31216247638923234], [1153, 2.996767138831382, 1340.973855832597, 4.0, 40.0, 18.0, 0.5701560739249362], [1154, 1.0103158459050823, 149.82453054600853, 4.0, 40.0, 18.0, 1.0], [1155, 15.45792707994535, 490.6957143049323, 4.0, 40.0, 18.0, 0.34124500808218117], [1156, 12.632808993338807, 60.51819786436858, 4.0, 40.0, 18.0, 1.0], [1157, 9.995708285889641, 1053.1451951133604, 4.0, 40.0, 18.0, 0.3931658299507808], [1158, 2.3665670302411397, 619.0489234318621, 4.0, 40.0, 18.0, 0.7357972231848412], [1159, 1.992289451710359, 
542.7269335929699, 4.0, 40.0, 18.0, 0.7841200540845771], [1160, 1.0581205167158352, 1797.386027870636, 4.0, 40.0, 18.0, 0.60410395312093], [1161, 4.506478622211751, 1922.4010309402897, 4.0, 40.0, 18.0, 0.4763789694653317], [1162, 1.230245115157081, 862.4269173835855, 4.0, 40.0, 18.0, 0.7199665502138691], [1163, 12.27136767117742, 1886.5720539480553, 4.0, 40.0, 18.0, 0.2933905468259819], [1164, 0.25313841335976406, 741.2614845795835, 4.0, 40.0, 18.0, 0.7979299627443198], [1165, 1.478970185735261, 1564.1201914549476, 4.0, 40.0, 18.0, 0.6071810359443801], [1166, 16.09605943976552, 919.6453315499255, 4.0, 40.0, 18.0, 0.2693965632365857], [1167, 4.037566215031811, 66.79749823781658, 4.0, 40.0, 18.0, 1.0], [1168, 19.78364305035553, 1938.2673473014622, 4.0, 40.0, 18.0, 0.16089148757518487], [1169, 2.4027005670192043, 41.94031467342457, 4.0, 40.0, 18.0, 1.0], [1170, 0.6820141827833761, 1399.187879991399, 4.0, 40.0, 18.0, 0.639718489507238], [1171, 19.036076524320162, 123.42767966478047, 4.0, 40.0, 18.0, 0.449049620257609], [1172, 3.1399276566344376, 369.9001366174945, 4.0, 40.0, 18.0, 0.7666244215791429], [1173, 3.236058640430873, 283.40322513670935, 4.0, 40.0, 18.0, 0.8203188141646067], [1174, 1.9691454086109306, 961.7632204257682, 4.0, 40.0, 18.0, 0.6449613351258549], [1175, 2.715182514964546, 1150.6740337312187, 4.0, 40.0, 18.0, 0.5920930222828198], [1176, 27.21311553963702, 1756.2584035565546, 4.0, 40.0, 18.0, 0.0903216643796868], [1177, 16.332278679348473, 1929.1325653020458, 4.0, 40.0, 18.0, 0.21548233630350994], [1178, 18.851358241448246, 164.25249141088568, 4.0, 40.0, 18.0, 0.40659466958377105], [1179, 1.2645877257120057, 128.72759562109485, 4.0, 40.0, 18.0, 1.0], [1180, 2.174052221206166, 1871.2330478166857, 4.0, 40.0, 18.0, 0.5500519158934457], [1181, 0.369264383329146, 79.06962927973483, 4.0, 40.0, 18.0, 1.0], [1182, 5.015590388923191, 1829.5542832770025, 4.0, 40.0, 18.0, 0.46517364188444893], [1183, 7.9641432612560354, 1659.8180695861129, 4.0, 40.0, 18.0, 
0.4003670474723286], [1184, 6.3462868073769085, 1397.883291575914, 4.0, 40.0, 18.0, 0.45477617599031894], [1185, 7.355755954655896, 1807.5327951701765, 4.0, 40.0, 18.0, 0.4074628472567552], [1186, 8.64442960873242, 1303.9973933918432, 4.0, 40.0, 18.0, 0.40467635360271276], [1187, 8.269376490066012, 1475.3787626067367, 4.0, 40.0, 18.0, 0.4029830158676198], [1188, 5.295471215691508, 961.476795656042, 4.0, 40.0, 18.0, 0.5283487182528002], [1189, 7.186759498414793, 798.0421632859379, 4.0, 40.0, 18.0, 0.49340469296409467], [1190, 1.8033674141831544, 1555.069034300783, 4.0, 40.0, 18.0, 0.5852046358304841], [1191, 5.178531140767847, 1602.1361815824275, 4.0, 40.0, 18.0, 0.4737608456775778], [1192, 17.499316946265807, 620.0053180513956, 4.0, 40.0, 18.0, 0.2756750300803742], [1193, 1.9355781932515268, 381.00787564845757, 4.0, 40.0, 18.0, 0.8101138400526966], [1194, 7.336185562775143, 423.59185827148286, 4.0, 40.0, 18.0, 0.5803122245928203], [1195, 18.33887179153779, 1359.1806943668778, 4.0, 40.0, 18.0, 0.2023491071916834], [1196, 7.130302768984083, 1340.6693328992794, 4.0, 40.0, 18.0, 0.43880829257870607], [1197, 13.414444529516082, 128.79726388648123, 4.0, 40.0, 18.0, 0.6071991537058363], [1198, 8.267772881612357, 294.9600472733411, 4.0, 40.0, 18.0, 0.6126383197909909], [1199, 7.337264528588566, 1634.5111683840066, 4.0, 40.0, 18.0, 0.4158109062971177], [1200, 6.9139864083816285, 141.39951824680566, 4.0, 40.0, 18.0, 0.8421334519140697], [1201, 8.175265131631265, 103.74759390533359, 4.0, 40.0, 18.0, 1.0], [1202, 6.315423840438488, 105.9531581358207, 4.0, 40.0, 18.0, 1.0], [1203, 7.977137727503396, 398.44340273995226, 4.0, 40.0, 18.0, 0.5686245839943815], [1204, 8.332407119903376, 1121.741332798518, 4.0, 40.0, 18.0, 0.42516968726602145], [1205, 6.569668765469224, 1981.7682164139594, 4.0, 40.0, 18.0, 0.4182336957011782], [1206, 6.92982025249804, 836.6303225546189, 4.0, 40.0, 18.0, 0.4952506114546419], [1207, 4.724765881516024, 66.19126710533727, 4.0, 40.0, 18.0, 1.0], [1208, 
5.496455450181003, 1020.5655269761479, 4.0, 40.0, 18.0, 0.5144030695496474], [1209, 12.23978705101208, 124.71125716357164, 4.0, 40.0, 18.0, 0.6573088266938368], [1210, 16.712612730021203, 926.4867825338386, 4.0, 40.0, 18.0, 0.2572257334400859], [1211, 20.454886096365193, 186.34483810853808, 4.0, 40.0, 18.0, 0.35263237159788147], [1212, 9.077300632255346, 1543.3696335480215, 4.0, 40.0, 18.0, 0.37983852792403305], [1213, 10.002309398630995, 167.45928656638495, 4.0, 40.0, 18.0, 0.6693986801311453], [1214, 5.041531638213447, 539.451421243852, 4.0, 40.0, 18.0, 0.6216735892997367], [1215, 17.295354635393192, 1258.190830900827, 4.0, 40.0, 18.0, 0.22503256583007158], [1216, 7.529610314398463, 81.18094844993291, 4.0, 40.0, 18.0, 1.0], [1217, 8.716881519353677, 522.0598522107705, 4.0, 40.0, 18.0, 0.5034524537641513], [1218, 23.207488562506025, 1183.9987562749156, 4.0, 40.0, 18.0, 0.14273251949545332], [1219, 15.53041038553193, 1653.9244284660335, 4.0, 40.0, 18.0, 0.23876833803890174], [1220, 7.581987642793694, 126.5490051137025, 4.0, 40.0, 18.0, 0.8471405292924625], [1221, 18.100579927708925, 78.56611237387193, 4.0, 40.0, 18.0, 0.5723543398356996], [1222, 10.032954048928659, 1583.0188619122825, 4.0, 40.0, 18.0, 0.3552885933537411], [1223, 3.928618864946684, 1172.098169381809, 4.0, 40.0, 18.0, 0.5476817583031351], [1224, 8.878072541422867, 1052.1277030255112, 4.0, 40.0, 18.0, 0.41794991221434724], [1225, 7.538354231026863, 1150.5325398662783, 4.0, 40.0, 18.0, 0.4430023174977232], [1226, 3.86430927339438, 159.5061944069398, 4.0, 40.0, 18.0, 1.0], [1227, 6.271635001441852, 714.0497554992917, 4.0, 40.0, 18.0, 0.5362393576330298], [1228, 5.284787910798242, 934.5347648863686, 4.0, 40.0, 18.0, 0.5323052224051645], [1229, 9.357768871113777, 983.1155550614441, 4.0, 40.0, 18.0, 0.4128187349829811], [1230, 1.9260645051823582, 1352.833337687172, 4.0, 40.0, 18.0, 0.5987624743799103], [1231, 2.964067273683936, 559.9733735227916, 4.0, 40.0, 18.0, 0.6943021026302088], [1232, 
8.987943269243626, 87.84460272548935, 4.0, 40.0, 18.0, 1.0], [1233, 8.892778007256128, 1879.2127971726704, 4.0, 40.0, 18.0, 0.3681950952851235], [1234, 7.776003883481518, 1269.5906241773212, 4.0, 40.0, 18.0, 0.42744039552339286], [1235, 2.801001159610994, 327.06479995276214, 4.0, 40.0, 18.0, 0.806736806048399], [1236, 5.7188470581733775, 711.1044171570837, 4.0, 40.0, 18.0, 0.5551491587240777], [1237, 7.898725291077167, 917.4608916170924, 4.0, 40.0, 18.0, 0.45666826071995703], [1238, 10.606889008599666, 160.0841741182571, 4.0, 40.0, 18.0, 0.6572256724301155], [1239, 7.432590392373277, 187.00049815458232, 4.0, 40.0, 18.0, 0.7443421818814376], [1240, 13.774078721690827, 1266.0431727037835, 4.0, 40.0, 18.0, 0.2913974431720982], [1241, 6.282444921870322, 38.019598628753926, 4.0, 40.0, 18.0, 1.0], [1242, 21.07774803192213, 554.8461810902066, 4.0, 40.0, 18.0, 0.2219080730434804], [1243, 6.886359730237494, 127.40059719552774, 4.0, 40.0, 18.0, 0.8964373692149963], [1244, 13.42167230468698, 801.5509709291521, 4.0, 40.0, 18.0, 0.3373220817679418], [1245, 23.092273369478555, 50.99786504059964, 4.0, 40.0, 18.0, 0.5337052832409289], [1246, 4.548593126766056, 1453.1644361071328, 4.0, 40.0, 18.0, 0.5025293218844562], [1247, 2.3629948907309637, 922.9084452025695, 4.0, 40.0, 18.0, 0.6362891669369165], [1248, 6.0197603572237455, 156.37500238824265, 4.0, 40.0, 18.0, 0.852839393860757], [1249, 12.755982625323353, 56.05131895169758, 4.0, 40.0, 18.0, 1.0], [1250, 7.6034813542348285, 62.955048796187185, 4.0, 40.0, 18.0, 1.0], [1251, 17.919673159104054, 699.2242419395886, 4.0, 40.0, 18.0, 0.25715009688587154], [1252, 6.555400094542579, 415.27269071849696, 4.0, 40.0, 18.0, 0.6113975663260102], [1253, 0.4683803992454405, 766.3019646419847, 4.0, 40.0, 18.0, 0.7391491333636743], [1254, 24.065839967024313, 56.13669337316097, 4.0, 40.0, 18.0, 0.48409256999326084], [1255, 6.188304545031862, 87.83946702881296, 4.0, 40.0, 18.0, 1.0], [1256, 5.6545761135112595, 409.4800516606527, 4.0, 40.0, 18.0, 
0.64720109692411], [1257, 8.875924397475725, 1885.198280422778, 4.0, 40.0, 18.0, 0.36834073945473816], [1258, 5.9812605938000765, 1627.8956165996356, 4.0, 40.0, 18.0, 0.45019048409168305], [1259, 7.563385596100861, 1668.4160934378785, 4.0, 40.0, 18.0, 0.40893350760687874], [1260, 6.767522146498627, 1892.654600288721, 4.0, 40.0, 18.0, 0.41727179473952486], [1261, 9.003768343888353, 924.3897737315632, 4.0, 40.0, 18.0, 0.4275660496560671], [1262, 10.11415998228599, 92.72362753509303, 4.0, 40.0, 18.0, 0.8420548802855079], [1263, 3.1676425521769103, 24.25115770596276, 4.0, 40.0, 18.0, 1.0], [1264, 8.60709620270132, 176.3005333520283, 4.0, 40.0, 18.0, 0.7113186351743177], [1265, 8.574862833918065, 1867.2280891759276, 4.0, 40.0, 18.0, 0.37604187015770435], [1266, 22.568124982996913, 447.49349422921375, 4.0, 40.0, 18.0, 0.21648344682626164], [1267, 7.442154491451388, 1048.0093148569185, 4.0, 40.0, 18.0, 0.45473073539560094], [1268, 6.380765099104327, 809.1409989911083, 4.0, 40.0, 18.0, 0.5161337468286761], [1269, 6.2703930117405635, 100.32388964712213, 4.0, 40.0, 18.0, 1.0], [1270, 10.891774882555897, 1079.3078057471048, 4.0, 40.0, 18.0, 0.36813708960246255], [1271, 8.95396269186028, 1627.7744256533495, 4.0, 40.0, 18.0, 0.3783056440799314], [1272, 18.44610600865544, 1757.4927772048977, 4.0, 40.0, 18.0, 0.18551712146322075], [1273, 4.6786716661006835, 547.769122013201, 4.0, 40.0, 18.0, 0.632447593403183], [1274, 2.1251800097959395, 892.7901026143173, 4.0, 40.0, 18.0, 0.6502236632149607], [1275, 8.741687911881119, 145.72296449208903, 4.0, 40.0, 18.0, 0.7542550861397427], [1276, 6.0317976118249925, 1525.2067264516947, 4.0, 40.0, 18.0, 0.4547038137911362], [1277, 4.8260706205210875, 996.9702392265958, 4.0, 40.0, 18.0, 0.5388373173621561], [1278, 8.978237387083567, 1320.1616961970205, 4.0, 40.0, 18.0, 0.3958794698591554], [1279, 6.6378293436968, 1161.2106690573914, 4.0, 40.0, 18.0, 0.4660687658328459], [1280, 8.458132265868903, 158.34178015920972, 4.0, 40.0, 18.0, 
0.744202484161588], [1281, 6.512084835891488, 1365.227444970359, 4.0, 40.0, 18.0, 0.45284650512938746], [1282, 18.946150935904775, 836.8908630333353, 4.0, 40.0, 18.0, 0.22517478630818474], [1283, 4.197391059439028, 157.4806314253832, 4.0, 40.0, 18.0, 1.0], [1284, 15.704640944044323, 922.7576841594541, 4.0, 40.0, 18.0, 0.27692127673445993], [1285, 8.993572378756085, 1523.1643034307276, 4.0, 40.0, 18.0, 0.38288870048956913], [1286, 9.551762373963772, 137.37838587082774, 4.0, 40.0, 18.0, 0.7367472147108782], [1287, 6.836120334993654, 108.57200969134549, 4.0, 40.0, 18.0, 1.0], [1288, 18.623187829706257, 129.6522516712035, 4.0, 40.0, 18.0, 0.4505472862340692], [1289, 7.711400972402812, 177.20270732729372, 4.0, 40.0, 18.0, 0.7462122889683686], [1290, 26.761811418085376, 118.05657923867592, 4.0, 40.0, 18.0, 0.29554905956925015], [1291, 7.662669097791084, 56.90296588565029, 4.0, 40.0, 18.0, 1.0], [1292, 0.10870203577803483, 1741.7558595401888, 4.0, 40.0, 18.0, 0.6317376280654958], [1293, 1.9473160729516499, 145.14083380839472, 4.0, 40.0, 18.0, 1.0], [1294, 6.737575277744637, 608.2291946195155, 4.0, 40.0, 18.0, 0.5438704744699925], [1295, 19.682639589441887, 198.74232015310795, 4.0, 40.0, 18.0, 0.36004526049807617], [1296, 2.473123880916147, 1716.4999207626365, 4.0, 40.0, 18.0, 0.5500478185539265], [1297, 5.193408794676076, 1418.6348634687708, 4.0, 40.0, 18.0, 0.4859055759995014], [1298, 7.183575667393996, 417.07295184014845, 4.0, 40.0, 18.0, 0.5883269378654972], [1299, 21.150832560894557, 558.1605411763966, 4.0, 40.0, 18.0, 0.22015338400895257], [1300, 6.807668500434356, 22.774253873927528, 4.0, 40.0, 18.0, 1.0], [1301, 14.825811837772338, 1815.779515961615, 4.0, 40.0, 18.0, 0.24641851346536917], [1302, 19.61257903220836, 174.76549651402712, 4.0, 40.0, 18.0, 0.38045717360338593], [1303, 9.44597038411877, 1460.8613774570842, 4.0, 40.0, 18.0, 0.375590569182475], [1304, 8.363350677255537, 108.24812024854873, 4.0, 40.0, 18.0, 1.0], [1305, 11.627619932330308, 462.2438699082355, 
4.0, 40.0, 18.0, 0.4386236246487028], [1306, 4.25375068601658, 44.737240506673565, 4.0, 40.0, 18.0, 1.0], [1307, 15.323723976306244, 193.13690043569127, 4.0, 40.0, 18.0, 0.4672625452083837], [1308, 7.67840565342198, 1221.622652501602, 4.0, 40.0, 18.0, 0.4335147827423309], [1309, 10.161472753743686, 637.9307505211873, 4.0, 40.0, 18.0, 0.4380512233028316], [1310, 5.352103221972685, 769.5004046201453, 4.0, 40.0, 18.0, 0.5562587887761353], [1311, 5.94396778460326, 734.3183615782542, 4.0, 40.0, 18.0, 0.5431304519220624], [1312, 4.329894397767422, 198.44607248145576, 4.0, 40.0, 18.0, 0.8611783341871606], [1313, 7.793884550435181, 195.9457843165826, 4.0, 40.0, 18.0, 0.7184431228756457], [1314, 1.6458396071403951, 1543.8211216833088, 4.0, 40.0, 18.0, 0.5916827782308387], [1315, 5.2397810323720115, 773.2872602366556, 4.0, 40.0, 18.0, 0.5592971078305247], [1316, 5.071334677576081, 1264.1841390372872, 4.0, 40.0, 18.0, 0.5022489125175499], [1317, 4.915897698343463, 190.8311847495432, 4.0, 40.0, 18.0, 0.8452452350984642], [1318, 18.131443878584584, 1355.144172984478, 4.0, 40.0, 18.0, 0.20603050675229675], [1319, 5.045425961883796, 1263.3857271786644, 4.0, 40.0, 18.0, 0.5031427613586562], [1320, 25.69668842680312, 25.471367110249652, 4.0, 40.0, 18.0, 1.0], [1321, 13.64121552667359, 282.1824957504032, 4.0, 40.0, 18.0, 0.4526475419564154], [1322, 7.894733259432112, 1686.619810566747, 4.0, 40.0, 18.0, 0.4006909265458889], [1323, 3.0672278084490934, 65.44236325424095, 4.0, 40.0, 18.0, 1.0], [1324, 8.26341516439206, 1780.6937209454986, 4.0, 40.0, 18.0, 0.3871309731675182], [1325, 12.025004855539096, 1169.499783664861, 4.0, 40.0, 18.0, 0.3348465579061486], [1326, 4.47725367246014, 61.237256127318005, 4.0, 40.0, 18.0, 1.0], [1327, 7.246862918086097, 573.9746077988173, 4.0, 40.0, 18.0, 0.5356513270883977], [1328, 18.144359786541134, 1899.8874592795703, 4.0, 40.0, 18.0, 0.18598501949557647], [1329, 6.013099458010077, 1658.5314414190225, 4.0, 40.0, 18.0, 0.44767210184258555], [1330, 
0.8940771986066971, 1582.2689871819275, 4.0, 40.0, 18.0, 0.6156049967055883], [1331, 4.907385422705094, 729.179555570033, 4.0, 40.0, 18.0, 0.5791881555367441], [1332, 16.99250656770565, 174.90756645855836, 4.0, 40.0, 18.0, 0.44054102068717704], [1333, 5.068817012991641, 127.23313653932355, 4.0, 40.0, 18.0, 1.0], [1334, 25.195788921777797, 199.22631283422874, 4.0, 40.0, 18.0, 0.25507330373329373], [1335, 21.544753706441554, 555.1440084697524, 4.0, 40.0, 18.0, 0.21454677378346265], [1336, 9.916165122980521, 1541.0613415556109, 4.0, 40.0, 18.0, 0.35997462896430976], [1337, 6.526742349405956, 1807.7032492032918, 4.0, 40.0, 18.0, 0.42692317849711736], [1338, 8.563157796132174, 1546.4066817804764, 4.0, 40.0, 18.0, 0.392069891200638], [1339, 0.13033089636797346, 192.62951101195787, 4.0, 40.0, 18.0, 1.0], [1340, 6.99974565403118, 700.5548064422012, 4.0, 40.0, 18.0, 0.5157984088004443], [1341, 7.930998747820943, 1924.7987871999992, 4.0, 40.0, 18.0, 0.388928666683442], [1342, 6.50218765952285, 103.3118977811145, 4.0, 40.0, 18.0, 1.0], [1343, 6.719471840101348, 524.3954111611961, 4.0, 40.0, 18.0, 0.566853611351499], [1344, 9.751484178237913, 1675.8590959706237, 4.0, 40.0, 18.0, 0.3572178883988987], [1345, 0.10556604256910118, 1988.3050719028138, 4.0, 40.0, 18.0, 0.6150523684283697], [1346, 7.465932691532797, 183.1350873155065, 4.0, 40.0, 18.0, 0.7480246811260532], [1347, 7.265061941242257, 1125.8668851620005, 4.0, 40.0, 18.0, 0.45221003013275557], [1348, 6.122643015680119, 505.9987529538203, 4.0, 40.0, 18.0, 0.5931976591557482], [1349, 14.052223142753279, 1646.8249220778218, 4.0, 40.0, 18.0, 0.26745803017459796], [1350, 2.285564024387261, 1736.8013731136873, 4.0, 40.0, 18.0, 0.555062623731482], [1351, 6.739586680709102, 1344.7777862749588, 4.0, 40.0, 18.0, 0.4484823896795065], [1352, 7.164191395642688, 1117.1305572109643, 4.0, 40.0, 18.0, 0.4554428400556505], [1353, 14.849823545415909, 60.51695642447782, 4.0, 40.0, 18.0, 0.7833688915537796], [1354, 1.0194288663691398, 
643.6275149273292, 4.0, 40.0, 18.0, 0.7469355692128384], [1355, 4.224066021987014, 1850.6837171253924, 4.0, 40.0, 18.0, 0.4866177807894939], [1356, 5.28716431248343, 1198.5718831747179, 4.0, 40.0, 18.0, 0.501774866899628], [1357, 8.340162678424349, 21.32494338213747, 4.0, 40.0, 18.0, 1.0], [1358, 9.719696774086742, 1841.7448205686303, 4.0, 40.0, 18.0, 0.35093579334605834], [1359, 16.814467905302354, 436.53341151347615, 4.0, 40.0, 18.0, 0.3242747967897373], [1360, 7.136016058138387, 1805.174997613296, 4.0, 40.0, 18.0, 0.41233348649200013], [1361, 25.680360170614822, 1916.2654263793163, 4.0, 40.0, 18.0, 0.09868157245353541], [1362, 23.099160932893476, 53.988658358854174, 4.0, 40.0, 18.0, 0.5190200093701248], [1363, 8.287422700055004, 984.1446901483798, 4.0, 40.0, 18.0, 0.43950859004598525], [1364, 12.058783954039447, 133.8844636343489, 4.0, 40.0, 18.0, 0.6463895822376701], [1365, 9.570013135856426, 661.2203894418175, 4.0, 40.0, 18.0, 0.44918698955353176], [1366, 17.539737847664163, 1569.7905336636911, 4.0, 40.0, 18.0, 0.20686584129155425], [1367, 16.375565215042805, 57.32755223864203, 4.0, 40.0, 18.0, 0.7305089329251796], [1368, 20.460560887932456, 855.8125840564995, 4.0, 40.0, 18.0, 0.1991373075193943], [1369, 4.55623263295522, 1282.14661520889, 4.0, 40.0, 18.0, 0.5165644340953469], [1370, 7.150778970106229, 881.760361028266, 4.0, 40.0, 18.0, 0.482389076448841], [1371, 6.420536299167115, 128.6662795596822, 4.0, 40.0, 18.0, 1.0], [1372, 9.09740701735269, 1965.287890119186, 4.0, 40.0, 18.0, 0.36004562132144996], [1373, 10.218863171498334, 505.9085800974442, 4.0, 40.0, 18.0, 0.464425772046037], [1374, 14.65441945974307, 252.30961098180072, 4.0, 40.0, 18.0, 0.44288392155964335], [1375, 16.795703707285128, 226.3918446941288, 4.0, 40.0, 18.0, 0.4068243099239856], [1376, 5.823036947728013, 849.3043870869254, 4.0, 40.0, 18.0, 0.5274785131092828], [1377, 24.154257832958088, 387.69631372576947, 4.0, 40.0, 18.0, 0.2053197251258709], [1378, 9.31706074414023, 123.89462226580088, 
4.0, 40.0, 18.0, 0.7748116244169757], [1379, 8.065688030040926, 653.1942686991022, 4.0, 40.0, 18.0, 0.4927263221995867], [1380, 6.668595913838589, 186.8616931993778, 4.0, 40.0, 18.0, 0.7760405056050108], [1381, 3.533939419568749, 1290.0786579273663, 4.0, 40.0, 18.0, 0.5489046860897507], [1382, 1.7876579280540317, 646.4495812386665, 4.0, 40.0, 18.0, 0.7155867567216679], [1383, 14.54602461532443, 104.6725867910299, 4.0, 40.0, 18.0, 0.6183518069423478], [1384, 9.720111922566346, 1204.4440584728006, 4.0, 40.0, 18.0, 0.3858636006926133], [1385, 4.2663777942188705, 1708.2784897037623, 4.0, 40.0, 18.0, 0.493654176496372], [1386, 5.912671009190981, 125.81832702215486, 4.0, 40.0, 18.0, 1.0], [1387, 2.864431626963559, 88.6105652125078, 4.0, 40.0, 18.0, 1.0], [1388, 8.828281335803887, 1660.0172449378802, 4.0, 40.0, 18.0, 0.3796882638638138], [1389, 16.976929032103577, 38.75175542146202, 4.0, 40.0, 18.0, 1.0], [1390, 6.173275727101157, 21.81769529618452, 4.0, 40.0, 18.0, 1.0], [1391, 6.394661678869747, 1060.9792765034206, 4.0, 40.0, 18.0, 0.4829041861212237], [1392, 8.091634846899952, 452.38127858847275, 4.0, 40.0, 18.0, 0.5442641075440612], [1393, 16.905222691580484, 1451.3963081595855, 4.0, 40.0, 18.0, 0.22252343435978125], [1394, 5.147401899172413, 481.46956022354743, 4.0, 40.0, 18.0, 0.6371341839088344], [1395, 6.327297979245694, 127.24141945635041, 4.0, 40.0, 18.0, 1.0], [1396, 7.632596554337456, 921.2564358208746, 4.0, 40.0, 18.0, 0.4637740331925716], [1397, 25.825657942170633, 205.80766295754094, 4.0, 40.0, 18.0, 0.24137489469123433], [1398, 6.0584440628799365, 500.5116098297718, 4.0, 40.0, 18.0, 0.5973426126154691], [1399, 7.9131059479424115, 1597.757707464418, 4.0, 40.0, 18.0, 0.4046379655980164], [1400, 7.286783127722787, 127.77346965847144, 4.0, 40.0, 18.0, 0.8609159256657956], [1401, 3.113734526985728, 642.4370082728271, 4.0, 40.0, 18.0, 0.6647394288477444], [1402, 5.566438287554614, 861.2438352864752, 4.0, 40.0, 18.0, 0.5338140512797686], [1403, 4.090170831269408, 
1607.9498981143893, 4.0, 40.0, 18.0, 0.5054999709022908], [1404, 5.519173513705386, 356.23734320744296, 4.0, 40.0, 18.0, 0.6788731723037061], [1405, 2.7230058978339438, 1976.9569970489736, 4.0, 40.0, 18.0, 0.5258695281514262], [1406, 4.539128976956881, 1586.7407604409423, 4.0, 40.0, 18.0, 0.49329369871567663], [1407, 8.763996818005866, 1822.4813705337447, 4.0, 40.0, 18.0, 0.37333197806542145], [1408, 9.172136762076102, 1254.3759358401282, 4.0, 40.0, 18.0, 0.39577254459307415], [1409, 22.55971215400875, 317.4500568026399, 4.0, 40.0, 18.0, 0.24816005365660937], [1410, 0.9795654593731009, 1976.1116351746173, 4.0, 40.0, 18.0, 0.5847560014854973], [1411, 8.508525530073715, 122.17045542643952, 4.0, 40.0, 18.0, 0.8146383914943712], [1412, 7.7627152000885555, 571.6975814005536, 4.0, 40.0, 18.0, 0.5199969511981349], [1413, 4.885724552599617, 747.6209323274771, 4.0, 40.0, 18.0, 0.5763116576610152], [1414, 7.482250265907595, 659.9314748865169, 4.0, 40.0, 18.0, 0.5089215291255028], [1415, 16.63449458091894, 149.8296927390698, 4.0, 40.0, 18.0, 0.4762899092703998], [1416, 8.199745593319292, 255.7337656073644, 4.0, 40.0, 18.0, 0.6435593469955595], [1417, 10.396870821616815, 414.165999681726, 4.0, 40.0, 18.0, 0.4866278132533303], [1418, 3.8197082321208518, 326.2534529390826, 4.0, 40.0, 18.0, 0.7650955605387477], [1419, 5.107839148855279, 206.46602475137428, 4.0, 40.0, 18.0, 0.8168528929261701], [1420, 7.136623751745773, 838.3287723587431, 4.0, 40.0, 18.0, 0.4887489743942604], [1421, 11.018485421148723, 1071.1619237716827, 4.0, 40.0, 18.0, 0.3658433545926178], [1422, 7.411689938374195, 174.4799242029652, 4.0, 40.0, 18.0, 0.7625153719057864], [1423, 5.93757187065406, 179.9555009896015, 4.0, 40.0, 18.0, 0.8166183943957896], [1424, 7.147622801389404, 1309.328395659426, 4.0, 40.0, 18.0, 0.44059117539631326], [1425, 7.748731237222881, 630.7807328388087, 4.0, 40.0, 18.0, 0.5068765977587307], [1426, 4.054742971073745, 1242.8736359334082, 4.0, 40.0, 18.0, 0.5362693768841398], [1427, 
16.48206583064995, 756.240680967567, 4.0, 40.0, 18.0, 0.2779619242027664], [1428, 9.319666056394937, 118.7103763672889, 4.0, 40.0, 18.0, 0.7873644770570118], [1429, 9.40316753148614, 1522.841419946728, 4.0, 40.0, 18.0, 0.3729787097983885], [1430, 21.0307354674067, 220.1224659093743, 4.0, 40.0, 18.0, 0.3182457651493458], [1431, 3.5075843326134737, 136.01829096185543, 4.0, 40.0, 18.0, 1.0], [1432, 0.9210005823611191, 184.1876577957321, 4.0, 40.0, 18.0, 1.0], [1433, 9.903996728830823, 1459.2835815847238, 4.0, 40.0, 18.0, 0.36498620823531897], [1434, 19.347748426320095, 57.2984845756346, 4.0, 40.0, 18.0, 0.6172382959089595], [1435, 8.35257194781639, 191.7104433653863, 4.0, 40.0, 18.0, 0.7013766311999595], [1436, 3.6159963555167547, 912.5518876786311, 4.0, 40.0, 18.0, 0.5921725245655639], [1437, 10.585722986769962, 534.282518908241, 4.0, 40.0, 18.0, 0.447736256462454], [1438, 2.360392659617932, 1389.0127337179586, 4.0, 40.0, 18.0, 0.5798697076504878], [1439, 6.423309054741375, 1845.9969610469705, 4.0, 40.0, 18.0, 0.42773102917721917], [1440, 4.9310184094062235, 189.8114709679628, 4.0, 40.0, 18.0, 0.8459776501651886], [1441, 6.756472862851727, 609.2040494682731, 4.0, 40.0, 18.0, 0.5430375240419054], [1442, 22.14181910290872, 1369.791741661247, 4.0, 40.0, 18.0, 0.14771846747461068], [1443, 7.485527111046827, 1682.4025833949743, 4.0, 40.0, 18.0, 0.40995008233885827], [1444, 7.380650671748898, 79.20259967729757, 4.0, 40.0, 18.0, 1.0], [1445, 20.54353796109101, 166.58739838706174, 4.0, 40.0, 18.0, 0.36704344083301926], [1446, 5.673755828504657, 1085.7861657909748, 4.0, 40.0, 18.0, 0.5015361614639686], [1447, 9.984443326715368, 1808.5984835855545, 4.0, 40.0, 18.0, 0.34632257908593983], [1448, 13.219226349241204, 161.59723985510686, 4.0, 40.0, 18.0, 0.5634615415650952], [1449, 7.021111811196257, 111.67981667752437, 4.0, 40.0, 18.0, 1.0], [1450, 8.540184283794405, 1018.9235970172816, 4.0, 40.0, 18.0, 0.42950546783587545], [1451, 5.459702810904283, 1340.8923404361194, 4.0, 40.0, 
18.0, 0.484204256924139], [1452, 14.918864534241061, 168.81417198867373, 4.0, 40.0, 18.0, 0.5024032368379349], [1453, 11.725688011767506, 1591.5073324979096, 4.0, 40.0, 18.0, 0.3178516084122688], [1454, 7.13849244803573, 137.65056449485706, 4.0, 40.0, 18.0, 0.8402491581825245], [1455, 5.007171095240031, 80.09991727899396, 4.0, 40.0, 18.0, 1.0], [1456, 5.264111943684236, 448.8296683598004, 4.0, 40.0, 18.0, 0.6452261103901248], [1457, 6.531901181647383, 115.59103480830353, 4.0, 40.0, 18.0, 1.0], [1458, 4.915021301326687, 1903.0619053882313, 4.0, 40.0, 18.0, 0.4642063693207182], [1459, 9.063858586782045, 158.64134672705455, 4.0, 40.0, 18.0, 0.7192582870148209], [1460, 9.000356201275288, 171.4955053878099, 4.0, 40.0, 18.0, 0.7025235473310786], [1461, 3.6207900837374725, 1227.212346627141, 4.0, 40.0, 18.0, 0.5523525338667007], [1462, 4.559176583172796, 962.7969539049454, 4.0, 40.0, 18.0, 0.552272997307918], [1463, 7.097479716721113, 831.189625528717, 4.0, 40.0, 18.0, 0.4909462324354141], [1464, 7.853843590651697, 1345.2278877031283, 4.0, 40.0, 18.0, 0.42007204358827827], [1465, 15.576567455340557, 442.91370847226943, 4.0, 40.0, 18.0, 0.3499360268169618], [1466, 3.8286033331003066, 1104.8815871998224, 4.0, 40.0, 18.0, 0.558697590944733], [1467, 3.1728991282796795, 1190.268697728775, 4.0, 40.0, 18.0, 0.5714213279096731], [1468, 4.7080831155969864, 1961.9041232881436, 4.0, 40.0, 18.0, 0.4670007609372295], [1469, 9.383962574208145, 47.86997075185706, 4.0, 40.0, 18.0, 1.0], [1470, 5.716170715660127, 1605.4449869355064, 4.0, 40.0, 18.0, 0.4583364631760671], [1471, 6.421737775921175, 766.4220305576592, 4.0, 40.0, 18.0, 0.5219588632484173], [1472, 8.320922043463431, 733.2403755610082, 4.0, 40.0, 18.0, 0.471051349047567], [1473, 5.5026756100814165, 1336.4147369295201, 4.0, 40.0, 18.0, 0.4833100515238367], [1474, 2.873128626161143, 879.5932883855462, 4.0, 40.0, 18.0, 0.6244993168477565], [1475, 10.094220072413082, 1078.3724303164206, 4.0, 40.0, 18.0, 0.38700420057796237], [1476, 
22.45088621608383, 1659.564225464918, 4.0, 40.0, 18.0, 0.13405650221037882], [1477, 6.4952578245604755, 1965.8026158327255, 4.0, 40.0, 18.0, 0.42051057755491655], [1478, 8.254938813595167, 198.26463031907002, 4.0, 40.0, 18.0, 0.6973661418436117], [1479, 6.6693098475435395, 95.97260340666128, 4.0, 40.0, 18.0, 1.0], [1480, 8.960095033469251, 718.1037809228001, 4.0, 40.0, 18.0, 0.4555794401231723], [1481, 4.185226598798953, 551.3340797213306, 4.0, 40.0, 18.0, 0.649891517448095], [1482, 7.669206913490655, 142.53548443934395, 4.0, 40.0, 18.0, 0.8055923428312685], [1483, 4.380209233970239, 55.60462969995695, 4.0, 40.0, 18.0, 1.0], [1484, 7.980059686942393, 1784.7853298189943, 4.0, 40.0, 18.0, 0.3941287284858618], [1485, 0.8487115857203438, 710.7979778009479, 4.0, 40.0, 18.0, 0.7366317340803488], [1486, 23.830501879935564, 367.90587245598323, 4.0, 40.0, 18.0, 0.2144417984749653], [1487, 8.147244185878588, 1649.1988452971443, 4.0, 40.0, 18.0, 0.3966602250652906], [1488, 9.877801977267824, 1236.932159960166, 4.0, 40.0, 18.0, 0.37992647973807897], [1489, 4.90405386155661, 1364.0687250186543, 4.0, 40.0, 18.0, 0.4988081121931583], [1490, 4.219960423087258, 1433.9182882997545, 4.0, 40.0, 18.0, 0.5140779610688632], [1491, 6.960572216316067, 45.60916411877682, 4.0, 40.0, 18.0, 1.0], [1492, 7.607286231016997, 1756.212973066769, 4.0, 40.0, 18.0, 0.40410711860611964], [1493, 7.972396638388323, 103.45973223503046, 4.0, 40.0, 18.0, 1.0], [1494, 8.588974145422014, 115.21985504896168, 4.0, 40.0, 18.0, 0.8310331820405117], [1495, 6.987713123796789, 1816.97476756514, 4.0, 40.0, 18.0, 0.4153834115122825], [1496, 18.630269174539826, 1842.9053613159588, 4.0, 40.0, 18.0, 0.18060055847971512], [1497, 6.200520295863679, 1917.3950639713453, 4.0, 40.0, 18.0, 0.43008645845583837], [1498, 7.615918266417502, 467.0725890266668, 4.0, 40.0, 18.0, 0.5549543656189154], [1499, 13.307852026928668, 1276.8692703322868, 4.0, 40.0, 18.0, 0.3003517095877196], [1500, 10.064607743630653, 305.38473599614144, 4.0, 
40.0, 18.0, 0.5448285916695026], [1501, 4.709094586280131, 1449.619378054903, 4.0, 40.0, 18.0, 0.49799362030237193], [1502, 4.830233562377837, 191.0888654280777, 4.0, 40.0, 18.0, 0.8486572829321771], [1503, 10.397938341200392, 1192.4401813826155, 4.0, 40.0, 18.0, 0.3707346897700712], [1504, 6.744045772849609, 1623.913009423283, 4.0, 40.0, 18.0, 0.43091237613891237], [1505, 8.620423257196446, 1605.5347996379483, 4.0, 40.0, 18.0, 0.38718495978718465], [1506, 2.7539527756474196, 1666.7973145429144, 4.0, 40.0, 18.0, 0.5440997199674483], [1507, 6.986253108067194, 136.48008176877158, 4.0, 40.0, 18.0, 0.8506737431950953], [1508, 13.425107250505567, 53.51126764306746, 4.0, 40.0, 18.0, 1.0], [1509, 5.726309774187078, 1037.17538354635, 4.0, 40.0, 18.0, 0.5054826574138978], [1510, 5.2543291010687, 640.5337955897517, 4.0, 40.0, 18.0, 0.5865578833066498], [1511, 6.964501971213778, 75.4965424659672, 4.0, 40.0, 18.0, 1.0], [1512, 1.2573508892105674, 1567.06544289013, 4.0, 40.0, 18.0, 0.6035673937535503], [1513, 7.615602855085757, 1465.4885661681344, 4.0, 40.0, 18.0, 0.4183867721165828], [1514, 15.734496303273243, 1729.4187601811373, 4.0, 40.0, 18.0, 0.23265730411934044], [1515, 22.111617752957965, 178.00951168217637, 4.0, 40.0, 18.0, 0.3247223965745285], [1516, 5.272018057434318, 902.5477051202643, 4.0, 40.0, 18.0, 0.5371807742235534], [1517, 13.485349491868027, 1653.6600863242463, 4.0, 40.0, 18.0, 0.27859676627349844], [1518, 8.452271206511048, 187.14470296177936, 4.0, 40.0, 18.0, 0.70318017012652], [1519, 9.824187525348615, 1250.867803198726, 4.0, 40.0, 18.0, 0.3802090700265861], [1520, 7.535922362931124, 826.1707695009538, 4.0, 40.0, 18.0, 0.4788668629670954], [1521, 6.557982765004695, 661.1834130318531, 4.0, 40.0, 18.0, 0.537754311538409], [1522, 2.4700428738059377, 117.55284934995986, 4.0, 40.0, 18.0, 1.0], [1523, 8.287639368354647, 63.02474683194713, 4.0, 40.0, 18.0, 1.0], [1524, 25.176251828780927, 1849.6062195943505, 4.0, 40.0, 18.0, 0.10436698962403848], [1525, 
6.4387423928073115, 183.86033083199905, 4.0, 40.0, 18.0, 0.7898335893714638], [1526, 7.244811254458893, 164.021295935586, 4.0, 40.0, 18.0, 0.7853869983860317], [1527, 5.97809335147173, 1438.2181314880895, 4.0, 40.0, 18.0, 0.46221550714787474], [1528, 6.883738392003519, 771.3117976600419, 4.0, 40.0, 18.0, 0.5068373102386473], [1529, 22.582957946437688, 992.1618455315597, 4.0, 40.0, 18.0, 0.1599705319409927], [1530, 5.024551319984834, 1829.8700060930769, 4.0, 40.0, 18.0, 0.4649121746169556], [1531, 7.006263563357525, 1019.2135454245756, 4.0, 40.0, 18.0, 0.470015976938189], [1532, 6.852418448016522, 310.20841793395005, 4.0, 40.0, 18.0, 0.6550366633609217], [1533, 16.785762084605487, 1237.0837480758935, 4.0, 40.0, 18.0, 0.23516399147759037], [1534, 7.579072285555343, 835.9238940359734, 4.0, 40.0, 18.0, 0.47635669865830443], [1535, 8.220778097404907, 1814.7100596899054, 4.0, 40.0, 18.0, 0.38654315636533265], [1536, 3.3149561544562367, 294.92083355666637, 4.0, 40.0, 18.0, 0.8081213075033146], [1537, 4.988566134673856, 1729.7295913850178, 4.0, 40.0, 18.0, 0.47149223187710665], [1538, 6.291919924755264, 1013.2081301490877, 4.0, 40.0, 18.0, 0.49111854488694073], [1539, 8.210123389294777, 132.83571857393963, 4.0, 40.0, 18.0, 0.8023259709401551], [1540, 6.0956817478497705, 407.4492472401333, 4.0, 40.0, 18.0, 0.6315458059797406], [1541, 7.517749593949734, 30.114625427491088, 4.0, 40.0, 18.0, 1.0], [1542, 5.709178216642644, 222.05231710538783, 4.0, 40.0, 18.0, 0.7737139533701478], [1543, 5.3265457462019, 619.5962546307791, 4.0, 40.0, 18.0, 0.5891373514914607], [1544, 8.943530596053572, 148.59445754831265, 4.0, 40.0, 18.0, 0.7408676665509704], [1545, 7.96712057254748, 1193.457412722449, 4.0, 40.0, 18.0, 0.42851853175621196], [1546, 15.140816417249377, 121.88343581080954, 4.0, 40.0, 18.0, 0.5623407367180397], [1547, 7.711760890047227, 1914.0161666749566, 4.0, 40.0, 18.0, 0.3948694863190791], [1548, 10.393619046889896, 579.3342631959591, 4.0, 40.0, 18.0, 0.4430654860752856], 
[1549, 3.2531421951043216, 1613.5838883431766, 4.0, 40.0, 18.0, 0.5315494793734886], [1550, 5.6624236525372496, 1721.256554564797, 4.0, 40.0, 18.0, 0.4532343279865594], [1551, 9.059465828231772, 1845.7991067620585, 4.0, 40.0, 18.0, 0.3657284658490221], [1552, 4.13821052961041, 1044.155612357326, 4.0, 40.0, 18.0, 0.5557068687960173], [1553, 7.557002635510009, 1159.7141364254624, 4.0, 40.0, 18.0, 0.4417935177237833], [1554, 5.422952173682925, 788.888096099482, 4.0, 40.0, 18.0, 0.55040046200748], [1555, 6.341920872683209, 122.49598616193731, 4.0, 40.0, 18.0, 1.0], [1556, 5.942486163575136, 975.413009090001, 4.0, 40.0, 18.0, 0.5062841755766976], [1557, 9.256527097173086, 31.491395161893806, 4.0, 40.0, 18.0, 1.0], [1558, 5.111516108017895, 111.55227211045029, 4.0, 40.0, 18.0, 1.0], [1559, 23.319342545464835, 351.0544985065413, 4.0, 40.0, 18.0, 0.22640154761432907], [1560, 7.67602454730259, 117.96612791305735, 4.0, 40.0, 18.0, 0.8850806152493458], [1561, 2.5803063109338495, 127.32548532685276, 4.0, 40.0, 18.0, 1.0], [1562, 8.483667436909156, 187.94993935838662, 4.0, 40.0, 18.0, 0.7008759650854324], [1563, 8.505485960172487, 560.6523647245923, 4.0, 40.0, 18.0, 0.499923949723935], [1564, 4.927322378695388, 1011.9312471510475, 4.0, 40.0, 18.0, 0.53361455171533], [1565, 5.97610692768192, 90.37361757128325, 4.0, 40.0, 18.0, 1.0], [1566, 17.196907544178057, 173.72181873992542, 4.0, 40.0, 18.0, 0.43659016124799227], [1567, 16.373602472648145, 1484.558772660294, 4.0, 40.0, 18.0, 0.23051325615548954], [1568, 11.25784038332165, 24.365463959628595, 4.0, 40.0, 18.0, 1.0], [1569, 4.539114559930617, 117.82279114081426, 4.0, 40.0, 18.0, 1.0], [1570, 8.137055769416175, 129.55918644115124, 4.0, 40.0, 18.0, 0.8131025884034608], [1571, 7.17321187318156, 1543.3733106520424, 4.0, 40.0, 18.0, 0.4245150767818461], [1572, 4.971997291966351, 1875.4278383080573, 4.0, 40.0, 18.0, 0.4640214638118571], [1573, 5.389884768575948, 678.0938355673694, 4.0, 40.0, 18.0, 0.5731785904098703], [1574, 
6.5352777858328155, 46.48729679491143, 4.0, 40.0, 18.0, 1.0], [1575, 7.083226106729316, 208.11762961437992, 4.0, 40.0, 18.0, 0.7328213729322276], [1576, 7.615889523391522, 98.09827236727384, 4.0, 40.0, 18.0, 1.0], [1577, 4.487274195875061, 1933.6112197503096, 4.0, 40.0, 18.0, 0.4747212362454081], [1578, 13.815818479278862, 1644.0797488667242, 4.0, 40.0, 18.0, 0.2724238693840501], [1579, 6.98248575952951, 93.64992281827371, 4.0, 40.0, 18.0, 1.0], [1580, 6.7717044778518085, 1001.9967188413315, 4.0, 40.0, 18.0, 0.4783501206022522], [1581, 1.530721477890057, 955.9681238030797, 4.0, 40.0, 18.0, 0.662356300931181], [1582, 9.21349684244142, 1534.8545760129753, 4.0, 40.0, 18.0, 0.37700484781190413], [1583, 2.4891278043235117, 760.8017795237251, 4.0, 40.0, 18.0, 0.6612081917676421], [1584, 4.851597685329531, 843.4675315618787, 4.0, 40.0, 18.0, 0.5603282283857064], [1585, 6.284590998314919, 1393.1852870210366, 4.0, 40.0, 18.0, 0.45672079426792156], [1586, 8.573339101412763, 1647.1529565229707, 4.0, 40.0, 18.0, 0.3861399976115557], [1587, 12.485121521140268, 1198.0509007228854, 4.0, 40.0, 18.0, 0.3225931511156782], [1588, 7.188460683795303, 893.7962546201172, 4.0, 40.0, 18.0, 0.47960952583786803], [1589, 13.77065377173318, 1093.5291508078062, 4.0, 40.0, 18.0, 0.3026012238493006], [1590, 11.038071220867437, 1200.4785855359808, 4.0, 40.0, 18.0, 0.35523031589033155], [1591, 11.077642545356793, 112.84857477468286, 4.0, 40.0, 18.0, 0.7286889826134711], [1592, 8.047021404532785, 1430.7417168172196, 4.0, 40.0, 18.0, 0.41023890436230936], [1593, 21.198841542350447, 1594.2306949930814, 4.0, 40.0, 18.0, 0.1517343546149145], [1594, 20.737818059619038, 60.35801817259937, 4.0, 40.0, 18.0, 0.5584882001733179], [1595, 17.65938215847581, 1675.7858999043176, 4.0, 40.0, 18.0, 0.2009636266621825], [1596, 5.523877592864851, 52.319801558893396, 4.0, 40.0, 18.0, 1.0], [1597, 16.83547101508357, 366.7845126799053, 4.0, 40.0, 18.0, 0.34365724979908163], [1598, 5.933608808236918, 636.0701978046965, 
4.0, 40.0, 18.0, 0.5641536101602254], [1599, 8.847791782780538, 769.141049772764, 4.0, 40.0, 18.0, 0.4510547688311206], [1600, 7.47757269856208, 1141.4605021462014, 4.0, 40.0, 18.0, 0.4452828344393443], [1601, 6.266790430490555, 1636.7865830191788, 4.0, 40.0, 18.0, 0.4423811551257093], [1602, 5.658318074957867, 657.8552474471037, 4.0, 40.0, 18.0, 0.5684322662153971], [1603, 12.366747639486434, 30.243731314940444, 4.0, 40.0, 18.0, 1.0], [1604, 5.6856981753668565, 646.890151560126, 4.0, 40.0, 18.0, 0.5699237423176763], [1605, 4.102901479730693, 1539.3520779954683, 4.0, 40.0, 18.0, 0.5098209125389793], [1606, 9.486710768467919, 115.7111630325097, 4.0, 40.0, 18.0, 0.7878439089593062], [1607, 1.4851286867285634, 1927.9575067779454, 4.0, 40.0, 18.0, 0.5699106159404842], [1608, 7.911733324718017, 1211.5728815837865, 4.0, 40.0, 18.0, 0.42847589659378715], [1609, 7.465279783636339, 158.83762958229187, 4.0, 40.0, 18.0, 0.7845883288860804], [1610, 10.061320917690406, 712.7425145613392, 4.0, 40.0, 18.0, 0.4282152326245007], [1611, 7.911532290734311, 1659.4390673425883, 4.0, 40.0, 18.0, 0.4016308131481007], [1612, 0.7852919721511977, 713.8500278258549, 4.0, 40.0, 18.0, 0.7384753120770747], [1613, 6.798342348286864, 1226.7443513302426, 4.0, 40.0, 18.0, 0.45563227999479977], [1614, 9.490832905765648, 186.48638196422908, 4.0, 40.0, 18.0, 0.6636319667081898], [1615, 0.1686326172120972, 112.96335802917251, 4.0, 40.0, 18.0, 1.0], [1616, 4.3852270924502434, 1413.1300533350884, 4.0, 40.0, 18.0, 0.5106265546088355], [1617, 7.569800399757582, 305.361018535936, 4.0, 40.0, 18.0, 0.6312764488097602], [1618, 8.995053900413623, 753.869112218757, 4.0, 40.0, 18.0, 0.4494382332958127], [1619, 5.6886898713491245, 116.17153114751129, 4.0, 40.0, 18.0, 1.0], [1620, 6.465421504831174, 328.68811271429234, 4.0, 40.0, 18.0, 0.6582951005915435], [1621, 3.033598833121677, 60.74334204492873, 4.0, 40.0, 18.0, 1.0], [1622, 1.2218207419255158, 993.9028509830154, 4.0, 40.0, 18.0, 0.6683785854930143], [1623, 
5.647488732281726, 888.4292704592026, 4.0, 40.0, 18.0, 0.5272190185840033], [1624, 4.975118572150626, 1604.9612978741588, 4.0, 40.0, 18.0, 0.47919340951601636], [1625, 8.595509165067217, 616.4978532127075, 4.0, 40.0, 18.0, 0.48472541684070736], [1626, 3.8936450683992607, 952.2082528753598, 4.0, 40.0, 18.0, 0.5764878040995821], [1627, 14.95945571875557, 1261.3537089534711, 4.0, 40.0, 18.0, 0.2679700136085218], [1628, 21.489258931217186, 1390.766249458563, 4.0, 40.0, 18.0, 0.15512650429283778], [1629, 9.058376362837839, 1027.7094762257932, 4.0, 40.0, 18.0, 0.4158554383571835], [1630, 6.745155297252995, 104.48587457215604, 4.0, 40.0, 18.0, 1.0], [1631, 6.297808238511056, 148.72929406847962, 4.0, 40.0, 18.0, 0.8559034195789268], [1632, 7.112354676232983, 101.03525569501069, 4.0, 40.0, 18.0, 1.0], [1633, 6.621417997172424, 934.6571671431897, 4.0, 40.0, 18.0, 0.49087067985461497], [1634, 9.2601626407869, 93.45293263013231, 4.0, 40.0, 18.0, 1.0], [1635, 4.94839046578994, 1433.201264508306, 4.0, 40.0, 18.0, 0.49198221309731804], [1636, 7.3770774159517485, 26.21984462745555, 4.0, 40.0, 18.0, 1.0], [1637, 15.259939818081723, 1838.749858724377, 4.0, 40.0, 18.0, 0.23747799788069238], [1638, 1.6316163727905535, 1997.8689443894837, 4.0, 40.0, 18.0, 0.5609012401618487], [1639, 21.61159647687823, 1270.8469648630462, 4.0, 40.0, 18.0, 0.15848005922279051], [1640, 6.953656638490959, 128.4157837046645, 4.0, 40.0, 18.0, 0.8853194288383118], [1641, 4.770794886720471, 1644.9915509672323, 4.0, 40.0, 18.0, 0.48271060946564276], [1642, 7.040030938905816, 1505.6237457215211, 4.0, 40.0, 18.0, 0.4302584230611116], [1643, 4.198527956123838, 1327.5249866495, 4.0, 40.0, 18.0, 0.5237328477055591], [1644, 3.80275626241089, 1191.2424626352936, 4.0, 40.0, 18.0, 0.549944100648603], [1645, 7.30616210679718, 467.2230107454874, 4.0, 40.0, 18.0, 0.5652872563931881], [1646, 18.62998807007149, 1454.2411967535686, 4.0, 40.0, 18.0, 0.193431661063486], [1647, 3.9524177590718677, 645.3976457633727, 4.0, 40.0, 
18.0, 0.6324148618618348], [1648, 9.259919710583842, 1213.9180149508372, 4.0, 40.0, 18.0, 0.39660378977259375], [1649, 8.98356941878831, 286.804045740392, 4.0, 40.0, 18.0, 0.5925364993975425], [1650, 4.0754944198128005, 1376.6624989827112, 4.0, 40.0, 18.0, 0.5234017131724842], [1651, 20.614821226854374, 1345.0195517438349, 4.0, 40.0, 18.0, 0.16836566916039566], [1652, 11.235265293180639, 1569.0148716555143, 4.0, 40.0, 18.0, 0.32935230267809584], [1653, 0.640553290943116, 1040.434160903121, 4.0, 40.0, 18.0, 0.6838550977546844], [1654, 7.116371545043857, 1767.5494074908672, 4.0, 40.0, 18.0, 0.4145784334456293], [1655, 2.3344891794774325, 658.2178271523104, 4.0, 40.0, 18.0, 0.6909339400600828], [1656, 8.61949641144609, 161.59626815604886, 4.0, 40.0, 18.0, 0.7325055206590708], [1657, 9.499586520503968, 188.40984985917711, 4.0, 40.0, 18.0, 0.6610333494786171], [1658, 10.891146391818191, 712.0602442357903, 4.0, 40.0, 18.0, 0.4082875018740277], [1659, 12.473805884382497, 1066.4056951642303, 4.0, 40.0, 18.0, 0.3328106575333431], [1660, 8.993031963239025, 1206.3474725113097, 4.0, 40.0, 18.0, 0.4033603946066045], [1661, 8.923773680054712, 1395.1969587319325, 4.0, 40.0, 18.0, 0.3922806963897056], [1662, 10.882054019046326, 659.2612501586011, 4.0, 40.0, 18.0, 0.4162864557272494], [1663, 7.798092311334155, 69.88553316718503, 4.0, 40.0, 18.0, 1.0], [1664, 5.905958258587269, 427.398237054045, 4.0, 40.0, 18.0, 0.6299106627822287], [1665, 12.91251293504547, 1034.7296958756046, 4.0, 40.0, 18.0, 0.3255449221157862], [1666, 7.675088266463692, 135.75529350839068, 4.0, 40.0, 18.0, 0.8195281933273749], [1667, 10.91162911921838, 1203.496110055343, 4.0, 40.0, 18.0, 0.3578754283845784], [1668, 3.99511014433471, 1075.01932289308, 4.0, 40.0, 18.0, 0.5567028309953953], [1669, 15.170245914615203, 74.52237344890682, 4.0, 40.0, 18.0, 0.688811699115677], [1670, 15.806257485795008, 672.344855083556, 4.0, 40.0, 18.0, 0.3021638140504407], [1671, 9.63814333889086, 888.0692858547285, 4.0, 40.0, 18.0, 
0.4159430760380338], [1672, 4.062994863761005, 1400.1018974708004, 4.0, 40.0, 18.0, 0.5218857895230017], [1673, 14.275762764849793, 814.0398107743806, 4.0, 40.0, 18.0, 0.31716073638197323], [1674, 18.0647737180295, 684.0561790767533, 4.0, 40.0, 18.0, 0.2563523141479301], [1675, 6.918418318805938, 1312.8630956655209, 4.0, 40.0, 18.0, 0.44610895921061156], [1676, 17.609560699900108, 1449.6242792217233, 4.0, 40.0, 18.0, 0.21029334878178543], [1677, 6.858103420229911, 1596.7709393642663, 4.0, 40.0, 18.0, 0.4295534628649423], [1678, 6.646193081492147, 1020.1458001567873, 4.0, 40.0, 18.0, 0.47999532222399177], [1679, 14.333299259831545, 402.4857354677446, 4.0, 40.0, 18.0, 0.39018956793815157], [1680, 3.9357227257517198, 1009.6160389492817, 4.0, 40.0, 18.0, 0.5670290580913139], [1681, 6.235188591895676, 622.2576374884144, 4.0, 40.0, 18.0, 0.5571805936549958], [1682, 2.0887038020060364, 40.56246359817597, 4.0, 40.0, 18.0, 1.0], [1683, 2.4664850934228433, 867.407067948808, 4.0, 40.0, 18.0, 0.6416755508211597], [1684, 4.852197523168505, 933.149900718775, 4.0, 40.0, 18.0, 0.5464985834764098], [1685, 0.6069216968144497, 1168.0099409816653, 4.0, 40.0, 18.0, 0.6680179537471249], [1686, 3.6613774145206364, 1249.6165792574493, 4.0, 40.0, 18.0, 0.5485991145035783], [1687, 5.398352808918952, 669.7048519657511, 4.0, 40.0, 18.0, 0.5747736781707152], [1688, 12.961855027046893, 1162.0163371867548, 4.0, 40.0, 18.0, 0.31488500613896575], [1689, 0.3031297699446073, 129.185641663362, 4.0, 40.0, 18.0, 1.0], [1690, 15.762586835791058, 82.92259714203308, 4.0, 40.0, 18.0, 0.6365326100045656], [1691, 15.479861791265462, 78.79329070177506, 4.0, 40.0, 18.0, 0.6609007784565603], [1692, 22.56249862676315, 1738.761884201823, 4.0, 40.0, 18.0, 0.13078090436600118], [1693, 19.112957069206786, 600.0047537288826, 4.0, 40.0, 18.0, 0.2484033785515952], [1694, 15.888384178132887, 1872.2849137221901, 4.0, 40.0, 18.0, 0.2251701387032182], [1695, 21.73302740338143, 43.675754892077514, 4.0, 40.0, 18.0, 
0.6210665384782286], [1696, 22.35524370032606, 52.08387746133504, 4.0, 40.0, 18.0, 0.5493658238475402], [1697, 7.60610363625139, 118.98771347772472, 4.0, 40.0, 18.0, 0.883471450279507], [1698, 16.497331904597434, 337.0259151015339, 4.0, 40.0, 18.0, 0.361238271097317], [1699, 16.72254242516536, 1333.3757859904756, 4.0, 40.0, 18.0, 0.23128208052229698], [1700, 21.2556025054371, 1694.8674591788474, 4.0, 40.0, 18.0, 0.14781331380522963], [1701, 13.101953900006746, 396.0693228205217, 4.0, 40.0, 18.0, 0.42054517982609846], [1702, 19.55083194228293, 1034.545484455351, 4.0, 40.0, 18.0, 0.20039833453379519], [1703, 20.78584571964179, 1867.5443465788612, 4.0, 40.0, 18.0, 0.1502409427264411], [1704, 9.621310409558534, 567.6272557324186, 4.0, 40.0, 18.0, 0.46617566920175546], [1705, 24.714157198792456, 1915.3556212048488, 4.0, 40.0, 18.0, 0.10472151022251487], [1706, 19.775474825923244, 1817.283487334307, 4.0, 40.0, 18.0, 0.16425677711083322], [1707, 18.077549508166634, 1722.4817249234773, 4.0, 40.0, 18.0, 0.1923978628357989], [1708, 23.17916550338367, 1002.4909877164397, 4.0, 40.0, 18.0, 0.15203873706577736], [1709, 1.3308891148640618, 1825.640079787098, 4.0, 40.0, 18.0, 0.5818667882580332], [1710, 23.18181601780554, 980.3473162986612, 4.0, 40.0, 18.0, 0.15323319278524383], [1711, 4.234333825354664, 823.401527753161, 4.0, 40.0, 18.0, 0.5850696173441955], [1712, 22.941907300107466, 326.0656104286293, 4.0, 40.0, 18.0, 0.23917209296486333], [1713, 7.974280813246165, 1475.619985061003, 4.0, 40.0, 18.0, 0.40943513809536514], [1714, 19.118336420445516, 1375.3049222502161, 4.0, 40.0, 18.0, 0.18932035460981275], [1715, 0.6758817654935079, 167.96636726114173, 4.0, 40.0, 18.0, 1.0], [1716, 13.340897887753394, 683.7157036004362, 4.0, 40.0, 18.0, 0.35476705474396264], [1717, 22.880690187965598, 1790.5640023756032, 4.0, 40.0, 18.0, 0.12590012170617182], [1718, 0.880537397394135, 142.92209867527026, 4.0, 40.0, 18.0, 1.0], [1719, 24.09024460322958, 188.47513080588132, 4.0, 40.0, 18.0, 
0.28029616423419723], [1720, 18.648659406238238, 462.34861353463054, 4.0, 40.0, 18.0, 0.28114815564051354], [1721, 9.342950193665873, 1957.851072467311, 4.0, 40.0, 18.0, 0.35508695158398346], [1722, 2.33113846495932, 629.6882187125599, 4.0, 40.0, 18.0, 0.6986872263946223], [1723, 13.744366509705605, 1758.367200113514, 4.0, 40.0, 18.0, 0.26915331924848346], [1724, 9.068127384927246, 1360.0922019048633, 4.0, 40.0, 18.0, 0.39098618425733545], [1725, 10.965096996073745, 772.9182498864528, 4.0, 40.0, 18.0, 0.3987558809815726], [1726, 23.295943137459993, 1312.6174588234753, 4.0, 40.0, 18.0, 0.13657673179334268], [1727, 9.032234003208295, 732.9254307648537, 4.0, 40.0, 18.0, 0.4515769737871771], [1728, 18.267895693882554, 182.55128559575311, 4.0, 40.0, 18.0, 0.4042246847408414], [1729, 2.1202048731287753, 1753.952164170847, 4.0, 40.0, 18.0, 0.5594489317446404], [1730, 5.980206779099389, 327.0958138073652, 4.0, 40.0, 18.0, 0.6779155225371589], [1731, 8.422051753762771, 1032.4055742871856, 4.0, 40.0, 18.0, 0.4311760849371425], [1732, 15.212073329895636, 1962.9687756971878, 4.0, 40.0, 18.0, 0.23464613548352306], [1733, 13.55927264769112, 1034.825914833511, 4.0, 40.0, 18.0, 0.3114553442035314], [1734, 1.813961930744712, 849.0248971772902, 4.0, 40.0, 18.0, 0.6698452595489227], [1735, 5.814377314443184, 180.4212502991614, 4.0, 40.0, 18.0, 0.8211067953789679], [1736, 18.097117132613477, 127.89044561583256, 4.0, 40.0, 18.0, 0.4664430326821901], [1737, 2.8433978869424643, 49.71097111868656, 4.0, 40.0, 18.0, 1.0], [1738, 13.870135390374008, 82.88675289973095, 4.0, 40.0, 18.0, 0.7071546521401205], [1739, 4.2975571797138565, 1072.6389692047155, 4.0, 40.0, 18.0, 0.54670214551155], [1740, 23.4465644967952, 1207.2268328110101, 4.0, 40.0, 18.0, 0.1389989814445305], [1741, 21.558336504250782, 944.3977781049064, 4.0, 40.0, 18.0, 0.1764806189591074], [1742, 22.642546810517896, 79.32561996954387, 4.0, 40.0, 18.0, 0.44486310261551226], [1743, 23.257205536426017, 782.4847906808816, 4.0, 40.0, 
18.0, 0.16586493682430567], [1744, 20.3297106094133, 1532.6350395948778, 4.0, 40.0, 18.0, 0.16535566984090594], [1745, 7.601612577132405, 438.85613842569336, 4.0, 40.0, 18.0, 0.565432669707558], [1746, 20.533567876256793, 123.74129377390427, 4.0, 40.0, 18.0, 0.41302150698065226], [1747, 20.11184514098866, 64.43666065779996, 4.0, 40.0, 18.0, 0.5605086964690513], [1748, 20.96467599726041, 1310.5094970747757, 4.0, 40.0, 18.0, 0.1652938351338709], [1749, 10.694394663692886, 1071.3183206045346, 4.0, 40.0, 18.0, 0.3732300901264402], [1750, 11.009038331513354, 645.1806737580594, 4.0, 40.0, 18.0, 0.4155147861842374], [1751, 23.965946633057523, 186.31153145871704, 4.0, 40.0, 18.0, 0.28401520670589264], [1752, 16.305472713001144, 230.5820099724613, 4.0, 40.0, 18.0, 0.4152747794097998], [1753, 2.0164802133756767, 414.9259012008752, 4.0, 40.0, 18.0, 0.7893316759748789], [1754, 12.577597764246393, 522.904072949603, 4.0, 40.0, 18.0, 0.4016105096535484], [1755, 14.537924432368559, 1621.429842934899, 4.0, 40.0, 18.0, 0.259014058998408], [1756, 5.610243603993884, 1653.5787863792725, 4.0, 40.0, 18.0, 0.45836421643322045], [1757, 24.29808632457799, 164.02014029181785, 4.0, 40.0, 18.0, 0.2943475712690083], [1758, 2.9619784325358918, 180.52944331525876, 4.0, 40.0, 18.0, 1.0], [1759, 16.177258552308913, 730.2512584094902, 4.0, 40.0, 18.0, 0.28710448451694126], [1760, 5.092308259179642, 120.43679712598822, 4.0, 40.0, 18.0, 1.0], [1761, 9.341953599135755, 1427.1125828128668, 4.0, 40.0, 18.0, 0.38019307505029964], [1762, 0.0808929910235262, 130.78839415986135, 4.0, 40.0, 18.0, 1.0], [1763, 10.506599678634865, 55.77020106854454, 4.0, 40.0, 18.0, 1.0], [1764, 6.155716401936137, 1044.1699211597609, 4.0, 40.0, 18.0, 0.49167379269704986], [1765, 0.6659840402889611, 96.33683447878275, 4.0, 40.0, 18.0, 1.0], [1766, 5.530353872410493, 141.9441808981498, 4.0, 40.0, 18.0, 1.0], [1767, 11.726903321333973, 556.3654462994716, 4.0, 40.0, 18.0, 0.41426328705631144], [1768, 18.87605500340939, 
76.07111892484716, 4.0, 40.0, 18.0, 0.5563037985166693], [1769, 17.433652422583805, 1555.6290369092196, 4.0, 40.0, 18.0, 0.2090955263639545], [1770, 15.890498729636269, 1820.8909098689562, 4.0, 40.0, 18.0, 0.22674377947090424], [1771, 14.5502972715096, 1894.4060003147958, 4.0, 40.0, 18.0, 0.24905068439655245], [1772, 8.464446854207875, 112.5154525721117, 4.0, 40.0, 18.0, 0.8467430070767269], [1773, 16.461447548851982, 155.62536688148208, 4.0, 40.0, 18.0, 0.47415829069438836], [1774, 20.49542049972595, 736.054022226142, 4.0, 40.0, 18.0, 0.2092340505329641], [1775, 0.6117723911408746, 1205.9421482297519, 4.0, 40.0, 18.0, 0.663110631965399], [1776, 16.16633438585804, 1334.3455108439366, 4.0, 40.0, 18.0, 0.24127153789138406], [1777, 13.445236728644232, 519.5707212360242, 4.0, 40.0, 18.0, 0.38156595474842525], [1778, 1.941726246394229, 1190.2222584800893, 4.0, 40.0, 18.0, 0.6154408656267621], [1779, 22.933443950961856, 754.4847313798897, 4.0, 40.0, 18.0, 0.17242953840641795], [1780, 15.239094871816889, 599.5393457326618, 4.0, 40.0, 18.0, 0.32526907722035353], [1781, 0.5841490230752644, 1258.4122661637946, 4.0, 40.0, 18.0, 0.6582267651766723], [1782, 3.954996027187602, 92.75001261564577, 4.0, 40.0, 18.0, 1.0], [1783, 7.125744104905896, 989.2878830273229, 4.0, 40.0, 18.0, 0.46997227474710984], [1784, 20.682134108879737, 231.73321945422063, 4.0, 40.0, 18.0, 0.31866073220815094], [1785, 3.4918799090534054, 1123.3344603573385, 4.0, 40.0, 18.0, 0.5680535181828943], [1786, 15.869755144826566, 821.1813359286382, 4.0, 40.0, 18.0, 0.2831674933512515], [1787, 4.704694471801313, 1528.4334606449145, 4.0, 40.0, 18.0, 0.4923162793936037], [1788, 9.445479325181994, 1135.7402822920678, 4.0, 40.0, 18.0, 0.3981706515841447], [1789, 3.553233063054337, 1708.827774787038, 4.0, 40.0, 18.0, 0.515409911878944], [1790, 0.3513204425915878, 1942.2968057621283, 4.0, 40.0, 18.0, 0.609114972772225], [1791, 11.73244871110241, 322.5582352989726, 4.0, 40.0, 18.0, 0.48514922376989766], [1792, 
24.50098799522684, 172.89678438839343, 4.0, 40.0, 18.0, 0.28394652900731726], [1793, 4.8142139311164955, 1153.022501044064, 4.0, 40.0, 18.0, 0.5210863185944499], [1794, 1.6436134050577005, 448.1557362447912, 4.0, 40.0, 18.0, 0.789250068824483], [1795, 8.451059530543542, 443.9358627326224, 4.0, 40.0, 18.0, 0.5354869178761084], [1796, 1.691811602776669, 253.0018590837451, 4.0, 40.0, 18.0, 0.9167207966432277], [1797, 1.0788963314240896, 562.7895763915516, 4.0, 40.0, 18.0, 0.7687701288807397], [1798, 8.81602127053965, 598.5774856927376, 4.0, 40.0, 18.0, 0.48213955272424164], [1799, 4.48923184195614, 159.2007069926749, 4.0, 40.0, 18.0, 1.0], [1800, 19.321391696712492, 86.07542288926702, 4.0, 40.0, 18.0, 0.5140082317870771], [1801, 9.467945450721748, 674.5387201255522, 4.0, 40.0, 18.0, 0.4495764267565451], [1802, 15.421146756805031, 103.3961991546702, 4.0, 40.0, 18.0, 0.5915310240162551], [1803, 11.135700460825726, 370.2390737078568, 4.0, 40.0, 18.0, 0.48160922701396947], [1804, 19.376594498731855, 465.4104399860047, 4.0, 40.0, 18.0, 0.266685792278971], [1805, 9.67774749346847, 787.4172587660398, 4.0, 40.0, 18.0, 0.42718938158047914], [1806, 10.91208451456561, 42.03402080517279, 4.0, 40.0, 18.0, 1.0], [1807, 5.4003471037021935, 1366.675062110117, 4.0, 40.0, 18.0, 0.48388276494372834], [1808, 6.213870855002332, 42.78518692155916, 4.0, 40.0, 18.0, 1.0], [1809, 20.3179803190086, 1302.3099192078114, 4.0, 40.0, 18.0, 0.17466555307053072], [1810, 5.554226502467852, 955.381132235269, 4.0, 40.0, 18.0, 0.5208940224119262], [1811, 2.787010134940557, 1889.0149437827565, 4.0, 40.0, 18.0, 0.5288857385645944], [1812, 15.305732153387265, 1926.1308663233258, 4.0, 40.0, 18.0, 0.2340797999911963], [1813, 18.749652685543772, 715.4767998108595, 4.0, 40.0, 18.0, 0.24037514635878554], [1814, 24.44469858354737, 732.3942567694203, 4.0, 40.0, 18.0, 0.15554532129771784], [1815, 23.057270816101667, 1990.2071628824074, 4.0, 40.0, 18.0, 0.11875224669485106], [1816, 14.173645278442104, 
1437.5860485871199, 4.0, 40.0, 18.0, 0.2745172773760426], [1817, 16.958370359297046, 435.7781883322644, 4.0, 40.0, 18.0, 0.3215241482650204], [1818, 19.12700103175865, 155.57315077916272, 4.0, 40.0, 18.0, 0.40846182959817123], [1819, 20.51011975215225, 805.0695042494839, 4.0, 40.0, 18.0, 0.20257964742258003], [1820, 3.821391875021227, 39.82321381519032, 4.0, 40.0, 18.0, 1.0], [1821, 22.90372670852188, 1670.8399186068268, 4.0, 40.0, 18.0, 0.12887875657343575], [1822, 5.2420129339053405, 1734.1965475057896, 4.0, 40.0, 18.0, 0.46407407173502185], [1823, 2.487500732159681, 1429.8682310102722, 4.0, 40.0, 18.0, 0.5716837924729437], [1824, 7.146914934777959, 1654.6513294782615, 4.0, 40.0, 18.0, 0.4192187645389359], [1825, 23.60847559222103, 198.25888283221005, 4.0, 40.0, 18.0, 0.2826691729487767], [1826, 21.415267431694453, 475.72255128924763, 4.0, 40.0, 18.0, 0.22931063429205265], [1827, 1.5756592101652922, 1125.066747634953, 4.0, 40.0, 18.0, 0.6368538307548633], [1828, 14.265779580392838, 106.13294402735201, 4.0, 40.0, 18.0, 0.6247018718959303], [1829, 8.343411048642697, 1369.0123365395, 4.0, 40.0, 18.0, 0.407364239673956], [1830, 8.61847000835097, 747.4712161293687, 4.0, 40.0, 18.0, 0.4604427833256935], [1831, 4.360327303971154, 1332.9609613992307, 4.0, 40.0, 18.0, 0.5182203536813111], [1832, 19.471597953964366, 1611.9433143862395, 4.0, 40.0, 18.0, 0.17486755814266905], [1833, 24.049190756471305, 199.54989986729868, 4.0, 40.0, 18.0, 0.27412684446211966], [1834, 14.844594846182751, 120.4554127128788, 4.0, 40.0, 18.0, 0.5745915374877814], [1835, 3.240098342749462, 607.1051788760919, 4.0, 40.0, 18.0, 0.6695693587722176], [1836, 24.848974602173, 849.4800746006101, 4.0, 40.0, 18.0, 0.1423024009563565], [1837, 2.6108575504030758, 1246.6470118947454, 4.0, 40.0, 18.0, 0.5851434722852972], [1838, 15.279431052619103, 434.84143142312234, 4.0, 40.0, 18.0, 0.3587941955636343], [1839, 17.964293473993415, 1615.9521211412791, 4.0, 40.0, 18.0, 0.19815389251528545], [1840, 
22.036332477973527, 1540.6770814963702, 4.0, 40.0, 18.0, 0.1430679769747265], [1841, 6.692100382736768, 79.69006044724026, 4.0, 40.0, 18.0, 1.0], [1842, 9.236438216590937, 137.0323407347843, 4.0, 40.0, 18.0, 0.7502317203795221], [1843, 10.685776458798504, 470.42821411120855, 4.0, 40.0, 18.0, 0.46108188187504284], [1844, 23.886887549646556, 199.06399480760902, 4.0, 40.0, 18.0, 0.2772127409628226], [1845, 15.752248617162076, 158.1555078956105, 4.0, 40.0, 18.0, 0.49067944404709196], [1846, 8.950223761879764, 1804.2162503192576, 4.0, 40.0, 18.0, 0.36998251668443105], [1847, 20.9727163318946, 73.40025991675279, 4.0, 40.0, 18.0, 0.5040523763077089], [1848, 7.2159634563970565, 325.3089461477477, 4.0, 40.0, 18.0, 0.632115846306468], [1849, 9.837173313314707, 269.7833263029355, 4.0, 40.0, 18.0, 0.5746833059804951], [1850, 1.348969915375506, 227.71613557546453, 4.0, 40.0, 18.0, 1.0], [1851, 16.106321586642483, 737.0816851018122, 4.0, 40.0, 18.0, 0.28768067507899475], [1852, 23.116823050089188, 131.68859504798297, 4.0, 40.0, 18.0, 0.34806598046392856], [1853, 14.291481655113202, 1320.4209708939525, 4.0, 40.0, 18.0, 0.2780450613001988], [1854, 19.803132320615696, 75.44618064041273, 4.0, 40.0, 18.0, 0.530670984961586], [1855, 14.953532499763863, 1371.0019090408966, 4.0, 40.0, 18.0, 0.2624413902773426], [1856, 13.171140102801813, 1044.0345409406605, 4.0, 40.0, 18.0, 0.31923905102704786], [1857, 4.702948593376563, 1026.2884959281755, 4.0, 40.0, 18.0, 0.5391480549535197], [1858, 24.795853025502186, 1814.3063387781701, 4.0, 40.0, 18.0, 0.10678822475736813], [1859, 3.7740038176058004, 1772.1207732918092, 4.0, 40.0, 18.0, 0.5047090954414234], [1860, 7.665644889476542, 86.3814278772671, 4.0, 40.0, 18.0, 1.0], [1861, 5.876774528355167, 75.42542786378309, 4.0, 40.0, 18.0, 1.0], [1862, 11.59704929753818, 1443.476763648336, 4.0, 40.0, 18.0, 0.32757177990691577], [1863, 6.824750920813669, 169.43856327136092, 4.0, 40.0, 18.0, 0.7945477148637582], [1864, 22.98942777549491, 
146.32860719813732, 4.0, 40.0, 18.0, 0.33500234883787583], [1865, 18.799749837928488, 1396.4339226346692, 4.0, 40.0, 18.0, 0.19313076821188008], [1866, 1.3340533787297741, 427.16963222830276, 4.0, 40.0, 18.0, 0.8115241239984092], [1867, 21.86297444638135, 1254.9945402104786, 4.0, 40.0, 18.0, 0.1559499233768141], [1868, 20.84402473909523, 1403.018120165402, 4.0, 40.0, 18.0, 0.16312262728139915], [1869, 12.513597214105971, 137.88647803831844, 4.0, 40.0, 18.0, 0.6228555383760062], [1870, 0.9031364345789133, 483.5555258129089, 4.0, 40.0, 18.0, 0.8046850957648027], [1871, 4.7590135852196624, 1001.0592407061898, 4.0, 40.0, 18.0, 0.5405283158339406], [1872, 9.504012552161543, 188.14961464598392, 4.0, 40.0, 18.0, 0.6611778867494635], [1873, 14.233688272121212, 1915.1809969370652, 4.0, 40.0, 18.0, 0.2545633286808171], [1874, 16.706157274793, 325.9914859041708, 4.0, 40.0, 18.0, 0.3605807908473322], [1875, 13.44404800377561, 79.8723662904433, 4.0, 40.0, 18.0, 0.7359213323161425], [1876, 18.69884048713576, 597.671896605518, 4.0, 40.0, 18.0, 0.2563194986328056], [1877, 17.870275067633155, 1871.1968637924667, 4.0, 40.0, 18.0, 0.19122757305321156], [1878, 8.086777898823541, 132.47706535786756, 4.0, 40.0, 18.0, 0.8086016219361791], [1879, 16.766471592041803, 137.44248121861466, 4.0, 40.0, 18.0, 0.48874272915329736], [1880, 20.36474130407488, 34.16103614361353, 4.0, 40.0, 18.0, 1.0], [1881, 18.2837341813897, 693.2792465783828, 4.0, 40.0, 18.0, 0.2513159069175625], [1882, 5.751679043409133, 1652.6858601316421, 4.0, 40.0, 18.0, 0.45454438541707853], [1883, 13.581769930210912, 127.29572927096659, 4.0, 40.0, 18.0, 0.604146358096679], [1884, 20.217634402539154, 447.9956758607286, 4.0, 40.0, 18.0, 0.2551645921835903], [1885, 18.82343096465152, 1549.2431926707948, 4.0, 40.0, 18.0, 0.18682981601075516], [1886, 23.61101116343376, 56.31308310153682, 4.0, 40.0, 18.0, 0.49506669102635725], [1887, 23.95830884279107, 1045.6197660382663, 4.0, 40.0, 18.0, 0.14087027259529966], [1888, 
23.345318437250764, 319.4774483010274, 4.0, 40.0, 18.0, 0.23487003957382097], [1889, 12.174557295359497, 931.7795219644557, 4.0, 40.0, 18.0, 0.3516587971098201], [1890, 3.5628851270324864, 76.89602618966957, 4.0, 40.0, 18.0, 1.0], [1891, 11.649118247663527, 105.2911591759369, 4.0, 40.0, 18.0, 0.725188645412746], [1892, 3.9847097518608683, 1168.6613357106414, 4.0, 40.0, 18.0, 0.5462011252226638], [1893, 12.98039814328282, 338.80503922976135, 4.0, 40.0, 18.0, 0.4440051464878844], [1894, 21.76384222653067, 480.8128790233031, 4.0, 40.0, 18.0, 0.22292047912716645], [1895, 10.824715885623428, 1343.2037601109316, 4.0, 40.0, 18.0, 0.35043707750717284], [1896, 14.377652345347828, 82.74520524662913, 4.0, 40.0, 18.0, 0.687877081212253], [1897, 13.374952119494884, 1676.706780960808, 4.0, 40.0, 18.0, 0.27988476349521035], [1898, 9.497812875584847, 1414.8386104858605, 4.0, 40.0, 18.0, 0.37716987942482133], [1899, 20.07791851647754, 825.2072454953318, 4.0, 40.0, 18.0, 0.20763552701901508], [1900, 2.1221560994224897, 137.6137020403229, 4.0, 40.0, 18.0, 1.0], [1901, 3.882121169360972, 168.7645910277558, 4.0, 40.0, 18.0, 1.0], [1902, 14.009967296685197, 1378.3755483218486, 4.0, 40.0, 18.0, 0.28066980536030706], [1903, 18.174664733669726, 141.23146083728952, 4.0, 40.0, 18.0, 0.44670310168065763], [1904, 9.466456775367543, 125.27734367506513, 4.0, 40.0, 18.0, 0.7654894272136504], [1905, 0.22953808582744928, 87.91556165388147, 4.0, 40.0, 18.0, 1.0], [1906, 23.430097129384475, 1094.6519447815324, 4.0, 40.0, 18.0, 0.14424130751653252], [1907, 11.575700851879597, 170.73320566118431, 4.0, 40.0, 18.0, 0.6072773684687981], [1908, 3.3784300695386795, 1560.668023137015, 4.0, 40.0, 18.0, 0.5313332768675827], [1909, 3.8926884753925113, 95.320417891007, 4.0, 40.0, 18.0, 1.0], [1910, 12.547220265154015, 91.96867547022983, 4.0, 40.0, 18.0, 0.728614752969701], [1911, 4.923249396313468, 310.3760108864668, 4.0, 40.0, 18.0, 0.7306932411166738], [1912, 10.02610472558947, 990.2987729242266, 4.0, 40.0, 
18.0, 0.39704974290989803], [1913, 3.9078072998742024, 387.5135572264773, 4.0, 40.0, 18.0, 0.7258905416879472], [1914, 21.744716139095313, 1767.8478011771438, 4.0, 40.0, 18.0, 0.13908581986860621], [1915, 2.258711681922873, 1442.7725169981622, 4.0, 40.0, 18.0, 0.5786283798840285], [1916, 4.43420285521203, 156.40248211760132, 4.0, 40.0, 18.0, 1.0], [1917, 3.007989913402234, 1436.7553318710109, 4.0, 40.0, 18.0, 0.5534230178826371], [1918, 13.744860551147333, 1572.8546028465985, 4.0, 40.0, 18.0, 0.27685456524960983], [1919, 23.27995927255027, 156.23765138005308, 4.0, 40.0, 18.0, 0.32000516996572254], [1920, 8.349389741242916, 640.1012606436612, 4.0, 40.0, 18.0, 0.48700336274058853], [1921, 24.598200648696576, 137.44811527476065, 4.0, 40.0, 18.0, 0.3128780433590026], [1922, 12.93047186229432, 813.7423213302408, 4.0, 40.0, 18.0, 0.3469308457087111], [1923, 6.262485283124522, 1271.038040522172, 4.0, 40.0, 18.0, 0.4669558449987634], [1924, 6.50291860444277, 1966.668743292896, 4.0, 40.0, 18.0, 0.42030857145850026], [1925, 7.307331785538926, 1816.7122146640293, 4.0, 40.0, 18.0, 0.4081257902198177], [1926, 10.492943735372195, 46.371938240960525, 4.0, 40.0, 18.0, 1.0], [1927, 11.596304058530341, 1138.2027878444414, 4.0, 40.0, 18.0, 0.3468385222973252], [1928, 24.347352509983764, 58.69897882967909, 4.0, 40.0, 18.0, 0.46688910309491094], [1929, 3.6471385177059306, 1746.3387323521263, 4.0, 40.0, 18.0, 0.5101054449988423], [1930, 11.984096587369834, 624.0367089266477, 4.0, 40.0, 18.0, 0.396616617947586], [1931, 20.746839831400308, 187.8661004123425, 4.0, 40.0, 18.0, 0.3452682640833985], [1932, 10.23441975893285, 139.37534073080505, 4.0, 40.0, 18.0, 0.7057650418001672], [1933, 14.985214285443957, 600.4120009643879, 4.0, 40.0, 18.0, 0.33084454126775564], [1934, 22.611523606726216, 1237.5895238501282, 4.0, 40.0, 18.0, 0.1473302200483939], [1935, 15.907275581625383, 1903.0670364742768, 4.0, 40.0, 18.0, 0.2238641850440459], [1936, 11.023549498851404, 1110.8333859056133, 4.0, 40.0, 
18.0, 0.3623046943340012], [1937, 13.69754230078197, 1073.0787824364565, 4.0, 40.0, 18.0, 0.3057402903004179], [1938, 17.440538792545222, 1033.3911317848074, 4.0, 40.0, 18.0, 0.2359067931255248], [1939, 13.64670934972993, 423.3326930470313, 4.0, 40.0, 18.0, 0.40049820796241], [1940, 13.764247823540893, 1966.937016735742, 4.0, 40.0, 18.0, 0.261919817737229], [1941, 0.9288291950075078, 1792.9297687215046, 4.0, 40.0, 18.0, 0.5984580405373244], [1942, 13.212485977818167, 672.3530570879083, 4.0, 40.0, 18.0, 0.3592985569326885], [1943, 1.0258959085792245, 1092.2137459756923, 4.0, 40.0, 18.0, 0.6617951591553232], [1944, 17.8351020228825, 1726.787789756901, 4.0, 40.0, 18.0, 0.19635421570752118], [1945, 21.21943335691722, 419.59032057933365, 4.0, 40.0, 18.0, 0.24388329033583123], [1946, 24.22709591969878, 1396.7414599108656, 4.0, 40.0, 18.0, 0.12309308896902814], [1947, 20.924883922295642, 989.2047567721401, 4.0, 40.0, 18.0, 0.18260497786986535], [1948, 18.0435504079589, 89.18567663017862, 4.0, 40.0, 18.0, 0.5433952214790109], [1949, 24.27081830466515, 41.89502986189963, 4.0, 40.0, 18.0, 0.5537057327452106], [1950, 21.27899200474617, 835.5406116648644, 4.0, 40.0, 18.0, 0.1885390296117541], [1951, 20.364665078772774, 422.6417340808581, 4.0, 40.0, 18.0, 0.25794499753128536], [1952, 5.103262203791576, 767.5841880106092, 4.0, 40.0, 18.0, 0.5650524825877493], [1953, 1.681210788030052, 820.1569171059258, 4.0, 40.0, 18.0, 0.680437337467596], [1954, 21.288120100189246, 1634.73591190358, 4.0, 40.0, 18.0, 0.14941347100784588], [1955, 11.086447344058723, 68.02700472787981, 4.0, 40.0, 18.0, 1.0], [1956, 9.448475175225138, 129.90593672321307, 4.0, 40.0, 18.0, 0.7561368113011686], [1957, 2.1903150727018206, 194.4411860511767, 4.0, 40.0, 18.0, 1.0], [1958, 17.75500781863005, 66.80001080240127, 4.0, 40.0, 18.0, 0.6271295197613048], [1959, 19.762523777533932, 726.9728304536992, 4.0, 40.0, 18.0, 0.22202010047235224], [1960, 20.82066505363703, 1872.6929550943491, 4.0, 40.0, 18.0, 
0.14777804591993463], [1961, 19.218788295963247, 1791.6591725648584, 4.0, 40.0, 18.0, 0.17300785394950557], [1962, 7.867769431003787, 55.0881303536056, 4.0, 40.0, 18.0, 1.0], [1963, 19.899012972436378, 690.9612458112083, 4.0, 40.0, 18.0, 0.22372600571775045], [1964, 13.741683645695776, 1418.9175494785252, 4.0, 40.0, 18.0, 0.28411193805018514], [1965, 9.256316527727579, 614.500089599888, 4.0, 40.0, 18.0, 0.4663692566048468], [1966, 4.772804792592991, 1608.3447522206286, 4.0, 40.0, 18.0, 0.48498603836504206], [1967, 10.60974817077909, 155.3744712390088, 4.0, 40.0, 18.0, 0.6641080621927825], [1968, 11.77922600197693, 829.1528369129031, 4.0, 40.0, 18.0, 0.3719392095098326], [1969, 13.380973365094222, 1077.8286832314043, 4.0, 40.0, 18.0, 0.3118559156014599], [1970, 22.466102035208284, 66.22839536718286, 4.0, 40.0, 18.0, 0.4874530554474939], [1971, 16.16565090160762, 1796.780949291825, 4.0, 40.0, 18.0, 0.22256554305893014], [1972, 5.588177042494847, 1697.075855727831, 4.0, 40.0, 18.0, 0.4562990451243489], [1973, 7.107503324884531, 1949.8536251772614, 4.0, 40.0, 18.0, 0.40726697206450485], [1974, 2.7187146569001452, 904.0802167604023, 4.0, 40.0, 18.0, 0.6260851635866334], [1975, 14.935093211954534, 607.3240779492281, 4.0, 40.0, 18.0, 0.33079591695055427], [1976, 24.270104775462823, 392.3622987549394, 4.0, 40.0, 18.0, 0.20260298752096173], [1977, 17.993690554562384, 80.3881711350263, 4.0, 40.0, 18.0, 0.5699337243185855], [1978, 22.297523888819526, 1349.9246942381608, 4.0, 40.0, 18.0, 0.14648876337987388], [1979, 1.0352863870984663, 50.09247613746339, 4.0, 40.0, 18.0, 1.0], [1980, 6.468476365464065, 113.04680706875692, 4.0, 40.0, 18.0, 1.0], [1981, 1.1031548990482447, 1878.4048572142203, 4.0, 40.0, 18.0, 0.5865813653678678], [1982, 4.010539300634067, 207.12738672209704, 4.0, 40.0, 18.0, 0.8637305682980617], [1983, 23.308990416271254, 76.40168591179443, 4.0, 40.0, 18.0, 0.4368493801397688], [1984, 14.862987689927046, 1805.1760032035231, 4.0, 40.0, 18.0, 0.24608120532147834], 
[1985, 2.825312477235914, 352.84885975814996, 4.0, 40.0, 18.0, 0.7894656488806221], [1986, 24.022808787226566, 1050.5860653565364, 4.0, 40.0, 18.0, 0.139899557763747], [1987, 7.51679142904062, 90.9751315562337, 4.0, 40.0, 18.0, 1.0], [1988, 9.429987701915419, 1216.7729076176734, 4.0, 40.0, 18.0, 0.39225766456653555], [1989, 1.0944192219756483, 1233.398827848469, 4.0, 40.0, 18.0, 0.641796622013196], [1990, 10.388878917456587, 1418.7904022947578, 4.0, 40.0, 18.0, 0.35602975557568084], [1991, 14.842402896319818, 1977.013567387179, 4.0, 40.0, 18.0, 0.2410396884651546], [1992, 11.482628687738552, 1011.5742119700101, 4.0, 40.0, 18.0, 0.35999433660814334], [1993, 23.346790872516337, 1162.0536614731188, 4.0, 40.0, 18.0, 0.14216513276602566], [1994, 3.5491767050882834, 933.3096372785426, 4.0, 40.0, 18.0, 0.5913856475535725], [1995, 3.7449721566626626, 97.5434341757727, 4.0, 40.0, 18.0, 1.0]]
true
true
f73be5427fbe24030a0f1a642b8b36c5bc757baa
7,447
py
Python
git/objects/fun.py
yarikoptic/GitPython
7576b282013249a2b20ccda4acacefd5e625ea39
[ "BSD-3-Clause" ]
1
2020-10-15T06:16:48.000Z
2020-10-15T06:16:48.000Z
git/objects/fun.py
yarikoptic/GitPython
7576b282013249a2b20ccda4acacefd5e625ea39
[ "BSD-3-Clause" ]
null
null
null
git/objects/fun.py
yarikoptic/GitPython
7576b282013249a2b20ccda4acacefd5e625ea39
[ "BSD-3-Clause" ]
null
null
null
"""Module with functions which are supposed to be as fast as possible""" from stat import S_ISDIR __all__ = ('tree_to_stream', 'tree_entries_from_data', 'traverse_trees_recursive', 'traverse_tree_recursive') def tree_to_stream(entries, write): """Write the give list of entries into a stream using its write method :param entries: **sorted** list of tuples with (binsha, mode, name) :param write: write method which takes a data string""" ord_zero = ord('0') bit_mask = 7 # 3 bits set for binsha, mode, name in entries: mode_str = '' for i in xrange(6): mode_str = chr(((mode >> (i*3)) & bit_mask) + ord_zero) + mode_str # END for each 8 octal value # git slices away the first octal if its zero if mode_str[0] == '0': mode_str = mode_str[1:] # END save a byte # here it comes: if the name is actually unicode, the replacement below # will not work as the binsha is not part of the ascii unicode encoding - # hence we must convert to an utf8 string for it to work properly. # According to my tests, this is exactly what git does, that is it just # takes the input literally, which appears to be utf8 on linux. 
if isinstance(name, unicode): name = name.encode("utf8") write("%s %s\0%s" % (mode_str, name, binsha)) # END for each item def tree_entries_from_data(data): """Reads the binary representation of a tree and returns tuples of Tree items :param data: data block with tree data :return: list(tuple(binsha, mode, tree_relative_path), ...)""" ord_zero = ord('0') len_data = len(data) i = 0 out = list() while i < len_data: mode = 0 # read mode # Some git versions truncate the leading 0, some don't # The type will be extracted from the mode later while data[i] != ' ': # move existing mode integer up one level being 3 bits # and add the actual ordinal value of the character mode = (mode << 3) + (ord(data[i]) - ord_zero) i += 1 # END while reading mode # byte is space now, skip it i += 1 # parse name, it is NULL separated ns = i while data[i] != '\0': i += 1 # END while not reached NULL # default encoding for strings in git is utf8 # Only use the respective unicode object if the byte stream was encoded name = data[ns:i] name_enc = name.decode("utf-8") if len(name) > len(name_enc): name = name_enc # END handle encoding # byte is NULL, get next 20 i += 1 sha = data[i:i+20] i = i + 20 out.append((sha, mode, name)) # END for each byte in data stream return out def _find_by_name(tree_data, name, is_dir, start_at): """return data entry matching the given name and tree mode or None. 
Before the item is returned, the respective data item is set None in the tree_data list to mark it done""" try: item = tree_data[start_at] if item and item[2] == name and S_ISDIR(item[1]) == is_dir: tree_data[start_at] = None return item except IndexError: pass # END exception handling for index, item in enumerate(tree_data): if item and item[2] == name and S_ISDIR(item[1]) == is_dir: tree_data[index] = None return item # END if item matches # END for each item return None def _to_full_path(item, path_prefix): """Rebuild entry with given path prefix""" if not item: return item return (item[0], item[1], path_prefix+item[2]) def traverse_trees_recursive(odb, tree_shas, path_prefix): """ :return: list with entries according to the given binary tree-shas. The result is encoded in a list of n tuple|None per blob/commit, (n == len(tree_shas)), where * [0] == 20 byte sha * [1] == mode as int * [2] == path relative to working tree root The entry tuple is None if the respective blob/commit did not exist in the given tree. :param tree_shas: iterable of shas pointing to trees. All trees must be on the same level. A tree-sha may be None in which case None :param path_prefix: a prefix to be added to the returned paths on this level, set it '' for the first iteration :note: The ordering of the returned items will be partially lost""" trees_data = list() nt = len(tree_shas) for tree_sha in tree_shas: if tree_sha is None: data = list() else: data = tree_entries_from_data(odb.stream(tree_sha).read()) # END handle muted trees trees_data.append(data) # END for each sha to get data for out = list() out_append = out.append # find all matching entries and recursively process them together if the match # is a tree. If the match is a non-tree item, put it into the result. 
# Processed items will be set None for ti, tree_data in enumerate(trees_data): for ii, item in enumerate(tree_data): if not item: continue # END skip already done items entries = [ None for n in range(nt) ] entries[ti] = item sha, mode, name = item # its faster to unpack is_dir = S_ISDIR(mode) # type mode bits # find this item in all other tree data items # wrap around, but stop one before our current index, hence # ti+nt, not ti+1+nt for tio in range(ti+1, ti+nt): tio = tio % nt entries[tio] = _find_by_name(trees_data[tio], name, is_dir, ii) # END for each other item data # if we are a directory, enter recursion if is_dir: out.extend(traverse_trees_recursive(odb, [((ei and ei[0]) or None) for ei in entries], path_prefix+name+'/')) else: out_append(tuple(_to_full_path(e, path_prefix) for e in entries)) # END handle recursion # finally mark it done tree_data[ii] = None # END for each item # we are done with one tree, set all its data empty del(tree_data[:]) # END for each tree_data chunk return out def traverse_tree_recursive(odb, tree_sha, path_prefix): """ :return: list of entries of the tree pointed to by the binary tree_sha. An entry has the following format: * [0] 20 byte sha * [1] mode as int * [2] path relative to the repository :param path_prefix: prefix to prepend to the front of all returned paths""" entries = list() data = tree_entries_from_data(odb.stream(tree_sha).read()) # unpacking/packing is faster than accessing individual items for sha, mode, name in data: if S_ISDIR(mode): entries.extend(traverse_tree_recursive(odb, sha, path_prefix+name+'/')) else: entries.append((sha, mode, path_prefix+name)) # END for each item return entries
37.235
125
0.587485
from stat import S_ISDIR __all__ = ('tree_to_stream', 'tree_entries_from_data', 'traverse_trees_recursive', 'traverse_tree_recursive') def tree_to_stream(entries, write): ord_zero = ord('0') bit_mask = 7 for binsha, mode, name in entries: mode_str = '' for i in xrange(6): mode_str = chr(((mode >> (i*3)) & bit_mask) + ord_zero) + mode_str if mode_str[0] == '0': mode_str = mode_str[1:] if isinstance(name, unicode): name = name.encode("utf8") write("%s %s\0%s" % (mode_str, name, binsha)) def tree_entries_from_data(data): ord_zero = ord('0') len_data = len(data) i = 0 out = list() while i < len_data: mode = 0 # The type will be extracted from the mode later while data[i] != ' ': # move existing mode integer up one level being 3 bits # and add the actual ordinal value of the character mode = (mode << 3) + (ord(data[i]) - ord_zero) i += 1 # END while reading mode # byte is space now, skip it i += 1 # parse name, it is NULL separated ns = i while data[i] != '\0': i += 1 # END while not reached NULL # default encoding for strings in git is utf8 # Only use the respective unicode object if the byte stream was encoded name = data[ns:i] name_enc = name.decode("utf-8") if len(name) > len(name_enc): name = name_enc # END handle encoding # byte is NULL, get next 20 i += 1 sha = data[i:i+20] i = i + 20 out.append((sha, mode, name)) # END for each byte in data stream return out def _find_by_name(tree_data, name, is_dir, start_at): try: item = tree_data[start_at] if item and item[2] == name and S_ISDIR(item[1]) == is_dir: tree_data[start_at] = None return item except IndexError: pass # END exception handling for index, item in enumerate(tree_data): if item and item[2] == name and S_ISDIR(item[1]) == is_dir: tree_data[index] = None return item # END if item matches # END for each item return None def _to_full_path(item, path_prefix): if not item: return item return (item[0], item[1], path_prefix+item[2]) def traverse_trees_recursive(odb, tree_shas, path_prefix): trees_data = list() 
nt = len(tree_shas) for tree_sha in tree_shas: if tree_sha is None: data = list() else: data = tree_entries_from_data(odb.stream(tree_sha).read()) # END handle muted trees trees_data.append(data) # END for each sha to get data for out = list() out_append = out.append # find all matching entries and recursively process them together if the match # is a tree. If the match is a non-tree item, put it into the result. # Processed items will be set None for ti, tree_data in enumerate(trees_data): for ii, item in enumerate(tree_data): if not item: continue # END skip already done items entries = [ None for n in range(nt) ] entries[ti] = item sha, mode, name = item # its faster to unpack is_dir = S_ISDIR(mode) # type mode bits # find this item in all other tree data items # wrap around, but stop one before our current index, hence # ti+nt, not ti+1+nt for tio in range(ti+1, ti+nt): tio = tio % nt entries[tio] = _find_by_name(trees_data[tio], name, is_dir, ii) # END for each other item data # if we are a directory, enter recursion if is_dir: out.extend(traverse_trees_recursive(odb, [((ei and ei[0]) or None) for ei in entries], path_prefix+name+'/')) else: out_append(tuple(_to_full_path(e, path_prefix) for e in entries)) # END handle recursion # finally mark it done tree_data[ii] = None # END for each item # we are done with one tree, set all its data empty del(tree_data[:]) # END for each tree_data chunk return out def traverse_tree_recursive(odb, tree_sha, path_prefix): entries = list() data = tree_entries_from_data(odb.stream(tree_sha).read()) # unpacking/packing is faster than accessing individual items for sha, mode, name in data: if S_ISDIR(mode): entries.extend(traverse_tree_recursive(odb, sha, path_prefix+name+'/')) else: entries.append((sha, mode, path_prefix+name)) # END for each item return entries
true
true
f73be5dcad677cd24db42711894839a5ab1fc3b9
1,614
py
Python
installers/aws-eks-cdk/lib/handlers/eksAlbLoadBalancer/index.py
hagerupe/legend
0131369145adb997832c99f0ef886ff9f7cc67f4
[ "Apache-2.0" ]
1
2021-02-02T00:12:25.000Z
2021-02-02T00:12:25.000Z
installers/aws-eks-cdk/lib/handlers/eksAlbLoadBalancer/index.py
epsstan/legend
a0b1ee08bc92fd95514fe2e985fa8b809c3870d7
[ "Apache-2.0" ]
1
2021-04-07T15:34:22.000Z
2021-04-07T15:34:22.000Z
installers/aws-eks-cdk/lib/handlers/eksAlbLoadBalancer/index.py
hagerupe/legend
0131369145adb997832c99f0ef886ff9f7cc67f4
[ "Apache-2.0" ]
2
2021-01-27T15:41:10.000Z
2021-02-13T23:40:19.000Z
import logging as log import cfnresponse import boto3 import hashlib import time log.getLogger().setLevel(log.INFO) client = boto3.client('elbv2') def main(event, context): fqn = event['StackId'] + event['LogicalResourceId'] physical_id = hashlib.md5(fqn.encode('utf-8')).hexdigest() log.info(physical_id) try: log.info('Input event: %s', event) eksCluster = event['ResourceProperties']['Cluster'] stack = event['ResourceProperties']['ClusterStack'] for x in range(20): loadBalancers = client.describe_load_balancers() for lb in loadBalancers['LoadBalancers']: tagDescriptions = client.describe_tags(ResourceArns = [ lb['LoadBalancerArn'] ]) tags = tagDescriptions['TagDescriptions'][0]['Tags'] stackTag = list(filter(lambda tag: tag['Key'] == 'ingress.k8s.aws/stack', tags)) clusterTag = list(filter(lambda tag: tag['Key'] == 'elbv2.k8s.aws/cluster', tags)) if len(stackTag) > 0 and stackTag[0]['Value'] == stack and len(clusterTag) > 0 and clusterTag[0]['Value'] == eksCluster: attributes = { 'loadBalancerDnsName': lb['DNSName'], 'loadBalancerCanonicalHostedZoneId': lb['CanonicalHostedZoneId'], } cfnresponse.send(event, context, cfnresponse.SUCCESS, attributes, physical_id) return None time.sleep(30) cfnresponse.send(event, context, cfnresponse.FAILED, {}, physical_id) except Exception as e: log.exception(e) # cfnresponse's error message is always "see CloudWatch" cfnresponse.send(event, context, cfnresponse.FAILED, {}, physical_id)
38.428571
128
0.67658
import logging as log import cfnresponse import boto3 import hashlib import time log.getLogger().setLevel(log.INFO) client = boto3.client('elbv2') def main(event, context): fqn = event['StackId'] + event['LogicalResourceId'] physical_id = hashlib.md5(fqn.encode('utf-8')).hexdigest() log.info(physical_id) try: log.info('Input event: %s', event) eksCluster = event['ResourceProperties']['Cluster'] stack = event['ResourceProperties']['ClusterStack'] for x in range(20): loadBalancers = client.describe_load_balancers() for lb in loadBalancers['LoadBalancers']: tagDescriptions = client.describe_tags(ResourceArns = [ lb['LoadBalancerArn'] ]) tags = tagDescriptions['TagDescriptions'][0]['Tags'] stackTag = list(filter(lambda tag: tag['Key'] == 'ingress.k8s.aws/stack', tags)) clusterTag = list(filter(lambda tag: tag['Key'] == 'elbv2.k8s.aws/cluster', tags)) if len(stackTag) > 0 and stackTag[0]['Value'] == stack and len(clusterTag) > 0 and clusterTag[0]['Value'] == eksCluster: attributes = { 'loadBalancerDnsName': lb['DNSName'], 'loadBalancerCanonicalHostedZoneId': lb['CanonicalHostedZoneId'], } cfnresponse.send(event, context, cfnresponse.SUCCESS, attributes, physical_id) return None time.sleep(30) cfnresponse.send(event, context, cfnresponse.FAILED, {}, physical_id) except Exception as e: log.exception(e) cfnresponse.send(event, context, cfnresponse.FAILED, {}, physical_id)
true
true
f73be7b40925c0d767653e9194b3df819eeb147d
975
py
Python
quantize_models.py
LordRaivo/ToxicBot
ebc09064afef0f4af5def1022eada9ef109d27ac
[ "MIT" ]
null
null
null
quantize_models.py
LordRaivo/ToxicBot
ebc09064afef0f4af5def1022eada9ef109d27ac
[ "MIT" ]
null
null
null
quantize_models.py
LordRaivo/ToxicBot
ebc09064afef0f4af5def1022eada9ef109d27ac
[ "MIT" ]
1
2022-02-03T23:49:29.000Z
2022-02-03T23:49:29.000Z
import torch import torch.nn as nn import models import os import pickle import glob import json import numpy as np backend = 'fbgemm' def split_path(path): _, path = os.path.splitdrive(path) folders = [] while 1: path, folder = os.path.split(path) if folder != "": folders.append(folder) elif path == "\\" or path == "": break folders.reverse() return folders if __name__ == "__main__": folder = 'models' for path in glob.glob(f'{folder}/*/'): model_name = split_path(path)[-1].split('_')[0] model_class = getattr(models, model_name) modelpath = os.path.join(path, 'model.pt') smallmodelpath = os.path.join(path, 'model_sml.pt') argspath = os.path.join(path, 'args.txt') with open(argspath, 'r') as f: args = json.loads(f.readline()) model = model_class(**args) model.load_state_dict(torch.load(modelpath, map_location=torch.device('cpu'))) model = model.half() model.eval() torch.save(model.state_dict(), smallmodelpath)
25
80
0.683077
import torch import torch.nn as nn import models import os import pickle import glob import json import numpy as np backend = 'fbgemm' def split_path(path): _, path = os.path.splitdrive(path) folders = [] while 1: path, folder = os.path.split(path) if folder != "": folders.append(folder) elif path == "\\" or path == "": break folders.reverse() return folders if __name__ == "__main__": folder = 'models' for path in glob.glob(f'{folder}/*/'): model_name = split_path(path)[-1].split('_')[0] model_class = getattr(models, model_name) modelpath = os.path.join(path, 'model.pt') smallmodelpath = os.path.join(path, 'model_sml.pt') argspath = os.path.join(path, 'args.txt') with open(argspath, 'r') as f: args = json.loads(f.readline()) model = model_class(**args) model.load_state_dict(torch.load(modelpath, map_location=torch.device('cpu'))) model = model.half() model.eval() torch.save(model.state_dict(), smallmodelpath)
true
true
f73be7c1487f54c21bfe94df00b0db808b57f4ff
1,278
py
Python
nuitka/codegen/OperatorCodes.py
sthagen/Nuitka-Nuitka
023dc76eeafd9c53ee2a51931474ddd98a3ba083
[ "Apache-2.0" ]
null
null
null
nuitka/codegen/OperatorCodes.py
sthagen/Nuitka-Nuitka
023dc76eeafd9c53ee2a51931474ddd98a3ba083
[ "Apache-2.0" ]
null
null
null
nuitka/codegen/OperatorCodes.py
sthagen/Nuitka-Nuitka
023dc76eeafd9c53ee2a51931474ddd98a3ba083
[ "Apache-2.0" ]
null
null
null
# Copyright 2022, Kay Hayen, mailto:kay.hayen@gmail.com # # Part of "Nuitka", an optimizing Python compiler that is compatible and # integrates with CPython, but also works on its own. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Operator code tables These are mostly used to look up the Python C/API from operations or a wrapper used. """ unary_operator_codes = { "UAdd": ("PyNumber_Positive", 1), "USub": ("PyNumber_Negative", 1), "Invert": ("PyNumber_Invert", 1), "Repr": ("PyObject_Repr", 1), "Not": ("UNARY_NOT", 0), } rich_comparison_codes = { "Lt": "LT", "LtE": "LE", "Eq": "EQ", "NotEq": "NE", "Gt": "GT", "GtE": "GE", } containing_comparison_codes = ("In", "NotIn")
30.428571
84
0.659624
unary_operator_codes = { "UAdd": ("PyNumber_Positive", 1), "USub": ("PyNumber_Negative", 1), "Invert": ("PyNumber_Invert", 1), "Repr": ("PyObject_Repr", 1), "Not": ("UNARY_NOT", 0), } rich_comparison_codes = { "Lt": "LT", "LtE": "LE", "Eq": "EQ", "NotEq": "NE", "Gt": "GT", "GtE": "GE", } containing_comparison_codes = ("In", "NotIn")
true
true
f73be9e2c4accbcd5522d0c33b4871bb452b4176
158
py
Python
contrib/wallettools/walletunlock.py
GoldRushGOLDR/Gold-Rush_GOLDR
97c25d6e1fcdd2e8a243d267b2c3c280e6fa8ad4
[ "MIT" ]
null
null
null
contrib/wallettools/walletunlock.py
GoldRushGOLDR/Gold-Rush_GOLDR
97c25d6e1fcdd2e8a243d267b2c3c280e6fa8ad4
[ "MIT" ]
null
null
null
contrib/wallettools/walletunlock.py
GoldRushGOLDR/Gold-Rush_GOLDR
97c25d6e1fcdd2e8a243d267b2c3c280e6fa8ad4
[ "MIT" ]
null
null
null
from jsonrpc import ServiceProxy access = ServiceProxy("http://127.0.0.1:4354") pwd = raw_input("Enter wallet passphrase: ") access.walletpassphrase(pwd, 60)
31.6
46
0.765823
from jsonrpc import ServiceProxy access = ServiceProxy("http://127.0.0.1:4354") pwd = raw_input("Enter wallet passphrase: ") access.walletpassphrase(pwd, 60)
true
true
f73bea86057b0a672eb66c19ecef01f2e50c44e4
8,094
py
Python
msgraph-cli-extensions/v1_0/files_v1_0/azext_files_v1_0/vendored_sdks/files/aio/_files.py
thewahome/msgraph-cli
33127d9efa23a0e5f5303c93242fbdbb73348671
[ "MIT" ]
null
null
null
msgraph-cli-extensions/v1_0/files_v1_0/azext_files_v1_0/vendored_sdks/files/aio/_files.py
thewahome/msgraph-cli
33127d9efa23a0e5f5303c93242fbdbb73348671
[ "MIT" ]
22
2022-03-29T22:54:37.000Z
2022-03-29T22:55:27.000Z
msgraph-cli-extensions/v1_0/files_v1_0/azext_files_v1_0/vendored_sdks/files/aio/_files.py
thewahome/msgraph-cli
33127d9efa23a0e5f5303c93242fbdbb73348671
[ "MIT" ]
null
null
null
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Optional, TYPE_CHECKING from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential from ._configuration import FilesConfiguration from .operations import DrivesDriveOperations from .operations import DrivesOperations from .operations import DrivesListOperations from .operations import DrivesListContentTypesOperations from .operations import DrivesListItemsOperations from .operations import DrivesListItemsVersionsOperations from .operations import GroupsOperations from .operations import SharesSharedDriveItemOperations from .operations import SharesOperations from .operations import SharesListOperations from .operations import SharesListContentTypesOperations from .operations import SharesListItemsOperations from .operations import SharesListItemsVersionsOperations from .operations import SharesListItemOperations from .operations import SharesListItemVersionsOperations from .operations import SharesPermissionOperations from .operations import UsersOperations from .. import models class Files(object): """Files. 
:ivar drives_drive: DrivesDriveOperations operations :vartype drives_drive: files.aio.operations.DrivesDriveOperations :ivar drives: DrivesOperations operations :vartype drives: files.aio.operations.DrivesOperations :ivar drives_list: DrivesListOperations operations :vartype drives_list: files.aio.operations.DrivesListOperations :ivar drives_list_content_types: DrivesListContentTypesOperations operations :vartype drives_list_content_types: files.aio.operations.DrivesListContentTypesOperations :ivar drives_list_items: DrivesListItemsOperations operations :vartype drives_list_items: files.aio.operations.DrivesListItemsOperations :ivar drives_list_items_versions: DrivesListItemsVersionsOperations operations :vartype drives_list_items_versions: files.aio.operations.DrivesListItemsVersionsOperations :ivar groups: GroupsOperations operations :vartype groups: files.aio.operations.GroupsOperations :ivar shares_shared_drive_item: SharesSharedDriveItemOperations operations :vartype shares_shared_drive_item: files.aio.operations.SharesSharedDriveItemOperations :ivar shares: SharesOperations operations :vartype shares: files.aio.operations.SharesOperations :ivar shares_list: SharesListOperations operations :vartype shares_list: files.aio.operations.SharesListOperations :ivar shares_list_content_types: SharesListContentTypesOperations operations :vartype shares_list_content_types: files.aio.operations.SharesListContentTypesOperations :ivar shares_list_items: SharesListItemsOperations operations :vartype shares_list_items: files.aio.operations.SharesListItemsOperations :ivar shares_list_items_versions: SharesListItemsVersionsOperations operations :vartype shares_list_items_versions: files.aio.operations.SharesListItemsVersionsOperations :ivar shares_list_item: SharesListItemOperations operations :vartype shares_list_item: files.aio.operations.SharesListItemOperations :ivar shares_list_item_versions: SharesListItemVersionsOperations operations :vartype shares_list_item_versions: 
files.aio.operations.SharesListItemVersionsOperations :ivar shares_permission: SharesPermissionOperations operations :vartype shares_permission: files.aio.operations.SharesPermissionOperations :ivar users: UsersOperations operations :vartype users: files.aio.operations.UsersOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param top: Show only the first n items. :type top: int :param skip: Skip the first n items. :type skip: int :param search: Search items by search phrases. :type search: str :param filter: Filter items by property values. :type filter: str :param count: Include count of items. :type count: bool :param str base_url: Service URL """ def __init__( self, credential: "AsyncTokenCredential", top: Optional[int] = None, skip: Optional[int] = None, search: Optional[str] = None, filter: Optional[str] = None, count: Optional[bool] = None, base_url: Optional[str] = None, **kwargs: Any ) -> None: if not base_url: base_url = 'https://graph.microsoft.com/v1.0' self._config = FilesConfiguration(credential, top, skip, search, filter, count, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.drives_drive = DrivesDriveOperations( self._client, self._config, self._serialize, self._deserialize) self.drives = DrivesOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list = DrivesListOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list_content_types = DrivesListContentTypesOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list_items = DrivesListItemsOperations( self._client, self._config, 
self._serialize, self._deserialize) self.drives_list_items_versions = DrivesListItemsVersionsOperations( self._client, self._config, self._serialize, self._deserialize) self.groups = GroupsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_shared_drive_item = SharesSharedDriveItemOperations( self._client, self._config, self._serialize, self._deserialize) self.shares = SharesOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list = SharesListOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_content_types = SharesListContentTypesOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_items = SharesListItemsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_items_versions = SharesListItemsVersionsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_item = SharesListItemOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_item_versions = SharesListItemVersionsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_permission = SharesPermissionOperations( self._client, self._config, self._serialize, self._deserialize) self.users = UsersOperations( self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: await self._client.close() async def __aenter__(self) -> "Files": await self._client.__aenter__() return self async def __aexit__(self, *exc_details) -> None: await self._client.__aexit__(*exc_details)
51.884615
97
0.75315
from typing import Any, Optional, TYPE_CHECKING from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential from ._configuration import FilesConfiguration from .operations import DrivesDriveOperations from .operations import DrivesOperations from .operations import DrivesListOperations from .operations import DrivesListContentTypesOperations from .operations import DrivesListItemsOperations from .operations import DrivesListItemsVersionsOperations from .operations import GroupsOperations from .operations import SharesSharedDriveItemOperations from .operations import SharesOperations from .operations import SharesListOperations from .operations import SharesListContentTypesOperations from .operations import SharesListItemsOperations from .operations import SharesListItemsVersionsOperations from .operations import SharesListItemOperations from .operations import SharesListItemVersionsOperations from .operations import SharesPermissionOperations from .operations import UsersOperations from .. 
import models class Files(object): def __init__( self, credential: "AsyncTokenCredential", top: Optional[int] = None, skip: Optional[int] = None, search: Optional[str] = None, filter: Optional[str] = None, count: Optional[bool] = None, base_url: Optional[str] = None, **kwargs: Any ) -> None: if not base_url: base_url = 'https://graph.microsoft.com/v1.0' self._config = FilesConfiguration(credential, top, skip, search, filter, count, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.drives_drive = DrivesDriveOperations( self._client, self._config, self._serialize, self._deserialize) self.drives = DrivesOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list = DrivesListOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list_content_types = DrivesListContentTypesOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list_items = DrivesListItemsOperations( self._client, self._config, self._serialize, self._deserialize) self.drives_list_items_versions = DrivesListItemsVersionsOperations( self._client, self._config, self._serialize, self._deserialize) self.groups = GroupsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_shared_drive_item = SharesSharedDriveItemOperations( self._client, self._config, self._serialize, self._deserialize) self.shares = SharesOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list = SharesListOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_content_types = SharesListContentTypesOperations( self._client, self._config, self._serialize, self._deserialize) 
self.shares_list_items = SharesListItemsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_items_versions = SharesListItemsVersionsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_item = SharesListItemOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_list_item_versions = SharesListItemVersionsOperations( self._client, self._config, self._serialize, self._deserialize) self.shares_permission = SharesPermissionOperations( self._client, self._config, self._serialize, self._deserialize) self.users = UsersOperations( self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: await self._client.close() async def __aenter__(self) -> "Files": await self._client.__aenter__() return self async def __aexit__(self, *exc_details) -> None: await self._client.__aexit__(*exc_details)
true
true
f73beb3153c6c928fad7b0cca32a698113dbfc52
3,605
py
Python
PreProcessing/compare_3km_12km_snapshot.py
fuxingwang2018/HCLIMAI
8673d1b2cb9c5c17e70ba523fa64a8579c48798a
[ "MIT" ]
null
null
null
PreProcessing/compare_3km_12km_snapshot.py
fuxingwang2018/HCLIMAI
8673d1b2cb9c5c17e70ba523fa64a8579c48798a
[ "MIT" ]
null
null
null
PreProcessing/compare_3km_12km_snapshot.py
fuxingwang2018/HCLIMAI
8673d1b2cb9c5c17e70ba523fa64a8579c48798a
[ "MIT" ]
null
null
null
#%matplotlib inline import matplotlib.pyplot as plt import netCDF4 plt.switch_backend('agg') from map_plot import MapPlot import numpy as np data_folder = '/nobackup/rossby26/users/sm_fuxwa/AI/standard_data/' fig_out_path = '/home/sm_fuxwa/Figures/AI/' # 3km: tas, pr # 12km: ta500, ta700, ta850, ta950, # hus500, hus700, hus850, hus950, # ua500, ua700, ua850, ua950, # va500, va700, va850, va950, # phi500, phi700, phi850, phi950, # tas, pr var = 'tas' exp_name = '3km' # 'EOBS20', '3km', '12km' plot_period = '1h' if plot_period == '1h': rec_start = 6590 rec_end = rec_start + 1 ymdhm = 200010011330 scale_factor = 3600 elif plot_period == '1d': rec_start = 6577 rec_end = 6601 ymdhm = 20001001 scale_factor = 86400 month = 10 period = '200001010000-200912312300' #'200001010000-200912312100' #'200001010030-200912312330' freq='1hr' month_dic = { '1':'January', '2':'February', '3':'March', '4':'April', '5':'May', '6':'June', '7':'July', '8':'August', '9':'September', '10':'October', '11':'November', '12':'December' } filein = data_folder + '/' + str(exp_name) + '/' + str(freq) + '/' + str(var) + '/' + str(var) + '_' + str(exp_name) + '_' + str(freq) + '_' + str(period) + '.nc' #width_def = 12E5 #height_def = 8E5 #lat_0_def = 46.0 #lon_0_def = 11.0 width_def = 4E5 height_def = 3E5 lat_0_def = 46.6 lon_0_def = 11.0 #if exp_name == '12km': # width_def = 12E5 # height_def = 8E5 # lat_0_def = 46.0 # lon_0_def = 11.0 #14.0 #elif exp_name == '3km': # width_def = 12E5 # height_def = 8E5 # lat_0_def = 46.0 #45.5 # lon_0_def = 11.0 #16.0 fig_title = " " lat_name = 'lat' #'latitude' lon_name = 'lon' #'longitude' proj_def = 'lcc' # lcc, cyl, ortho res_def = 'i' fig_type = '.png' label_def = '' extend_def = 'max' #'min', 'max', 'neither', 'both' cmap_def = 'rainbow' #cmap_def = 'RdBu_r' if 'tas' in var: variable_list = ['tas'] unit = 'K' extend_def = 'both' if month >= 5 and month <= 8: scale_min_def = 275 scale_max_def = 305 else: scale_min_def = 255 scale_max_def = 285 elif var == 
'pr': variable_list = ['pr'] scale_min_def = 0.0 if plot_period == '1h': scale_max_def = 12.0 unit = 'mm/hour' elif plot_period == '1d': scale_max_def = 150.0 unit = 'mm/day' nc = netCDF4.Dataset(filein) lat_sim = nc.variables[lat_name][:] lon_sim = nc.variables[lon_name][:] if np.array(lat_sim).ndim == 1 and np.array(lon_sim).ndim == 1: lon_sim_2d, lat_sim_2d = np.meshgrid(lon_sim, lat_sim) elif np.array(lat_sim).ndim == 2 and np.array(lon_sim).ndim == 2: lon_sim_2d = lon_sim lat_sim_2d = lat_sim #print nc.variables.keys() # examine the variables for var_to_plot in variable_list: # average over all steps var_sim_3d = nc.variables[var_to_plot][:,:,:] #var_sim_2d = var_sim_3d[rec_start - 1 : rec_end - 1, :, :] * scale_factor #np.nanmean(var_sim_3d, axis=0) var_sim_2d = np.nanmean(var_sim_3d[rec_start - 1 : rec_end - 1, :, :], axis=0) * scale_factor title_def = var_to_plot + '(' + unit + ')' fig_out = str(fig_out_path) + exp_name + '_' + var_to_plot + '_' + str(plot_period) + '_' + str(ymdhm) + fig_type map_plot = MapPlot(fig_out, proj_def, res_def, width_def, height_def, lat_0_def, lon_0_def) map_plot.Plot_2DField(lat_sim_2d, lon_sim_2d, var_sim_2d[:,:], scale_min_def, scale_max_def, title_def, label_def, cmap_def, extend_def) #map_plot.Plot_ortho(lat_sim_2d, lon_sim_2d, title_def)
28.164063
162
0.628294
import matplotlib.pyplot as plt import netCDF4 plt.switch_backend('agg') from map_plot import MapPlot import numpy as np data_folder = '/nobackup/rossby26/users/sm_fuxwa/AI/standard_data/' fig_out_path = '/home/sm_fuxwa/Figures/AI/' var = 'tas' exp_name = '3km' plot_period = '1h' if plot_period == '1h': rec_start = 6590 rec_end = rec_start + 1 ymdhm = 200010011330 scale_factor = 3600 elif plot_period == '1d': rec_start = 6577 rec_end = 6601 ymdhm = 20001001 scale_factor = 86400 month = 10 period = '200001010000-200912312300' ':'January', '2':'February', '3':'March', '4':'April', '5':'May', '6':'June', '7':'July', '8':'August', '9':'September', '10':'October', '11':'November', '12':'December' } filein = data_folder + '/' + str(exp_name) + '/' + str(freq) + '/' + str(var) + '/' + str(var) + '_' + str(exp_name) + '_' + str(freq) + '_' + str(period) + '.nc' width_def = 4E5 height_def = 3E5 lat_0_def = 46.6 lon_0_def = 11.0 le = " " lat_name = 'lat' lon_name = 'lon' proj_def = 'lcc' res_def = 'i' fig_type = '.png' label_def = '' extend_def = 'max' cmap_def = 'rainbow' if 'tas' in var: variable_list = ['tas'] unit = 'K' extend_def = 'both' if month >= 5 and month <= 8: scale_min_def = 275 scale_max_def = 305 else: scale_min_def = 255 scale_max_def = 285 elif var == 'pr': variable_list = ['pr'] scale_min_def = 0.0 if plot_period == '1h': scale_max_def = 12.0 unit = 'mm/hour' elif plot_period == '1d': scale_max_def = 150.0 unit = 'mm/day' nc = netCDF4.Dataset(filein) lat_sim = nc.variables[lat_name][:] lon_sim = nc.variables[lon_name][:] if np.array(lat_sim).ndim == 1 and np.array(lon_sim).ndim == 1: lon_sim_2d, lat_sim_2d = np.meshgrid(lon_sim, lat_sim) elif np.array(lat_sim).ndim == 2 and np.array(lon_sim).ndim == 2: lon_sim_2d = lon_sim lat_sim_2d = lat_sim for var_to_plot in variable_list: var_sim_3d = nc.variables[var_to_plot][:,:,:] n(var_sim_3d[rec_start - 1 : rec_end - 1, :, :], axis=0) * scale_factor title_def = var_to_plot + '(' + unit + ')' fig_out = 
str(fig_out_path) + exp_name + '_' + var_to_plot + '_' + str(plot_period) + '_' + str(ymdhm) + fig_type map_plot = MapPlot(fig_out, proj_def, res_def, width_def, height_def, lat_0_def, lon_0_def) map_plot.Plot_2DField(lat_sim_2d, lon_sim_2d, var_sim_2d[:,:], scale_min_def, scale_max_def, title_def, label_def, cmap_def, extend_def)
true
true
f73beb4c1b32163d91ff788de411bab4fbb108e3
6,897
py
Python
pybaseball/league_batting_stats.py
mdrews93/pybaseball
0dab4a2a3e27dd9fa27285d63a1f6f829dfcf4c5
[ "MIT" ]
null
null
null
pybaseball/league_batting_stats.py
mdrews93/pybaseball
0dab4a2a3e27dd9fa27285d63a1f6f829dfcf4c5
[ "MIT" ]
null
null
null
pybaseball/league_batting_stats.py
mdrews93/pybaseball
0dab4a2a3e27dd9fa27285d63a1f6f829dfcf4c5
[ "MIT" ]
null
null
null
import requests import pandas as pd import datetime import io from bs4 import BeautifulSoup def validate_datestring(date_text): try: datetime.datetime.strptime(date_text, '%Y-%m-%d') except ValueError: raise ValueError("Incorrect data format, should be YYYY-MM-DD") def sanitize_input(start_dt, end_dt): # if no dates are supplied, assume they want yesterday's data # send a warning in case they wanted to specify if start_dt is None and end_dt is None: today = datetime.datetime.today() start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d") end_dt = today.strftime("%Y-%m-%d") print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try batting_stats_range(start_dt, end_dt) or batting_stats(season).") #if only one date is supplied, assume they only want that day's stats #query in this case is from date 1 to date 1 if start_dt is None: start_dt = end_dt if end_dt is None: end_dt = start_dt #if end date occurs before start date, swap them if end_dt < start_dt: temp = start_dt start_dt = end_dt end_dt = temp # now that both dates are not None, make sure they are valid date strings validate_datestring(start_dt) validate_datestring(end_dt) return start_dt, end_dt def get_soup(start_dt, end_dt): # get most recent standings if date not specified # if((start_dt is None) or (end_dt is None)): # print('Error: a date range needs to be specified') # return None url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=b&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt) s = requests.get(url).content return BeautifulSoup(s, "html.parser") def get_id_table(soup): table = soup.find_all('table')[0] data = [] headings = [th.get_text() for th in table.find("tr").find_all("th")][1:] data.append(headings) table_body = table.find('tbody') rows = table_body.find_all('tr') for row in rows: cols = row.find_all('td') try: pid = 
cols[0]["data-append-csv"].split("=")[-1] cols = [ele.text.strip() for ele in cols] cols[0] = pid data.append([ele for ele in cols]) except: pass data = pd.DataFrame(data) data = data.rename(columns=data.iloc[0]) data = data.reindex(data.index.drop(0)) return data def get_table(soup): table = soup.find_all('table')[0] data = [] headings = [th.get_text() for th in table.find("tr").find_all("th")][1:] data.append(headings) table_body = table.find('tbody') rows = table_body.find_all('tr') for row in rows: cols = row.find_all('td') cols = [ele.text.strip() for ele in cols] data.append([ele for ele in cols]) data = pd.DataFrame(data) data = data.rename(columns=data.iloc[0]) data = data.reindex(data.index.drop(0)) return data def batting_stats_range_by_id(start_dt=None, end_dt=None): """ Get all batting stats for a set time range. This can be the past week, the month of August, anything. Just supply the start and end date in YYYY-MM-DD format. """ # make sure date inputs are valid start_dt, end_dt = sanitize_input(start_dt, end_dt) if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") # retrieve html from baseball reference soup = get_soup(start_dt, end_dt) table = get_id_table(soup) table = table.dropna(how='all') # drop if all columns are NA # scraped data is initially in string format. # convert the necessary columns to numeric. for column in ['Age', '#days', 'G', 'PA', 'AB', 'R', 'H', '2B', '3B', 'HR', 'RBI', 'BB', 'IBB', 'SO', 'HBP', 'SH', 'SF', 'GDP', 'SB', 'CS', 'BA', 'OBP', 'SLG', 'OPS']: #table[column] = table[column].astype('float') table[column] = pd.to_numeric(table[column]) #table['column'] = table['column'].convert_objects(convert_numeric=True) table = table.drop('', 1) return table def batting_stats_range(start_dt=None, end_dt=None): """ Get all batting stats for a set time range. 
This can be the past week, the month of August, anything. Just supply the start and end date in YYYY-MM-DD format. """ # make sure date inputs are valid start_dt, end_dt = sanitize_input(start_dt, end_dt) if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") # retrieve html from baseball reference soup = get_soup(start_dt, end_dt) table = get_table(soup) table = table.dropna(how='all') # drop if all columns are NA # scraped data is initially in string format. # convert the necessary columns to numeric. for column in ['Age', '#days', 'G', 'PA', 'AB', 'R', 'H', '2B', '3B', 'HR', 'RBI', 'BB', 'IBB', 'SO', 'HBP', 'SH', 'SF', 'GDP', 'SB', 'CS', 'BA', 'OBP', 'SLG', 'OPS']: #table[column] = table[column].astype('float') table[column] = pd.to_numeric(table[column]) #table['column'] = table['column'].convert_objects(convert_numeric=True) table = table.drop('', 1) return table def batting_stats_bref(season=None): """ Get all batting stats for a set season. If no argument is supplied, gives stats for current season to date. """ if season is None: season = datetime.datetime.today().strftime("%Y") season = str(season) start_dt = season + '-03-01' #opening day is always late march or early april end_dt = season + '-11-01' #season is definitely over by November return(batting_stats_range(start_dt, end_dt)) def bwar_bat(return_all=False): """ Get data from war_daily_bat table. Returns WAR, its components, and a few other useful stats. To get all fields from this table, supply argument return_all=True. 
""" url = "http://www.baseball-reference.com/data/war_daily_bat.txt" s = requests.get(url).content c=pd.read_csv(io.StringIO(s.decode('utf-8'))) if return_all: return c else: cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID', 'pitcher','G', 'PA', 'salary', 'runs_above_avg', 'runs_above_avg_off','runs_above_avg_def', 'WAR_rep','WAA','WAR'] return c[cols_to_keep]
40.098837
199
0.634624
import requests import pandas as pd import datetime import io from bs4 import BeautifulSoup def validate_datestring(date_text): try: datetime.datetime.strptime(date_text, '%Y-%m-%d') except ValueError: raise ValueError("Incorrect data format, should be YYYY-MM-DD") def sanitize_input(start_dt, end_dt): # send a warning in case they wanted to specify if start_dt is None and end_dt is None: today = datetime.datetime.today() start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d") end_dt = today.strftime("%Y-%m-%d") print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try batting_stats_range(start_dt, end_dt) or batting_stats(season).") #query in this case is from date 1 to date 1 if start_dt is None: start_dt = end_dt if end_dt is None: end_dt = start_dt #if end date occurs before start date, swap them if end_dt < start_dt: temp = start_dt start_dt = end_dt end_dt = temp # now that both dates are not None, make sure they are valid date strings validate_datestring(start_dt) validate_datestring(end_dt) return start_dt, end_dt def get_soup(start_dt, end_dt): # get most recent standings if date not specified # if((start_dt is None) or (end_dt is None)): # print('Error: a date range needs to be specified') # return None url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=b&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt) s = requests.get(url).content return BeautifulSoup(s, "html.parser") def get_id_table(soup): table = soup.find_all('table')[0] data = [] headings = [th.get_text() for th in table.find("tr").find_all("th")][1:] data.append(headings) table_body = table.find('tbody') rows = table_body.find_all('tr') for row in rows: cols = row.find_all('td') try: pid = cols[0]["data-append-csv"].split("=")[-1] cols = [ele.text.strip() for ele in cols] cols[0] = pid data.append([ele for ele in cols]) except: pass data = 
pd.DataFrame(data) data = data.rename(columns=data.iloc[0]) data = data.reindex(data.index.drop(0)) return data def get_table(soup): table = soup.find_all('table')[0] data = [] headings = [th.get_text() for th in table.find("tr").find_all("th")][1:] data.append(headings) table_body = table.find('tbody') rows = table_body.find_all('tr') for row in rows: cols = row.find_all('td') cols = [ele.text.strip() for ele in cols] data.append([ele for ele in cols]) data = pd.DataFrame(data) data = data.rename(columns=data.iloc[0]) data = data.reindex(data.index.drop(0)) return data def batting_stats_range_by_id(start_dt=None, end_dt=None): # make sure date inputs are valid start_dt, end_dt = sanitize_input(start_dt, end_dt) if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") # retrieve html from baseball reference soup = get_soup(start_dt, end_dt) table = get_id_table(soup) table = table.dropna(how='all') # drop if all columns are NA # scraped data is initially in string format. # convert the necessary columns to numeric. 
for column in ['Age', ' 'HR', 'RBI', 'BB', 'IBB', 'SO', 'HBP', 'SH', 'SF', 'GDP', 'SB', 'CS', 'BA', 'OBP', 'SLG', 'OPS']: #table[column] = table[column].astype('float') table[column] = pd.to_numeric(table[column]) #table['column'] = table['column'].convert_objects(convert_numeric=True) table = table.drop('', 1) return table def batting_stats_range(start_dt=None, end_dt=None): # make sure date inputs are valid start_dt, end_dt = sanitize_input(start_dt, end_dt) if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008: raise ValueError("Year must be 2008 or later") # retrieve html from baseball reference soup = get_soup(start_dt, end_dt) table = get_table(soup) table = table.dropna(how='all') # drop if all columns are NA # scraped data is initially in string format. # convert the necessary columns to numeric. for column in ['Age', ' 'HR', 'RBI', 'BB', 'IBB', 'SO', 'HBP', 'SH', 'SF', 'GDP', 'SB', 'CS', 'BA', 'OBP', 'SLG', 'OPS']: #table[column] = table[column].astype('float') table[column] = pd.to_numeric(table[column]) #table['column'] = table['column'].convert_objects(convert_numeric=True) table = table.drop('', 1) return table def batting_stats_bref(season=None): if season is None: season = datetime.datetime.today().strftime("%Y") season = str(season) start_dt = season + '-03-01' #opening day is always late march or early april end_dt = season + '-11-01' #season is definitely over by November return(batting_stats_range(start_dt, end_dt)) def bwar_bat(return_all=False): url = "http://www.baseball-reference.com/data/war_daily_bat.txt" s = requests.get(url).content c=pd.read_csv(io.StringIO(s.decode('utf-8'))) if return_all: return c else: cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID', 'pitcher','G', 'PA', 'salary', 'runs_above_avg', 'runs_above_avg_off','runs_above_avg_def', 'WAR_rep','WAA','WAR'] 
return c[cols_to_keep]
true
true
f73bebdfdde04ee93dea82677a22edd4015cfe32
9,016
py
Python
hours/tests/test_resource_periods_as_text.py
City-of-Helsinki/hauki
f49628c0533baa68c7deb83224065c3b1158f807
[ "MIT" ]
3
2020-03-26T05:04:30.000Z
2022-03-22T15:57:18.000Z
hours/tests/test_resource_periods_as_text.py
City-of-Helsinki/hauki
f49628c0533baa68c7deb83224065c3b1158f807
[ "MIT" ]
81
2020-06-17T14:31:11.000Z
2022-02-20T19:01:54.000Z
hours/tests/test_resource_periods_as_text.py
City-of-Helsinki/hauki
f49628c0533baa68c7deb83224065c3b1158f807
[ "MIT" ]
9
2020-06-18T10:52:09.000Z
2022-02-11T13:05:59.000Z
import datetime import pytest from django.utils import translation from hours.enums import FrequencyModifier, RuleContext, RuleSubject, State, Weekday from hours.models import Rule from hours.tests.conftest import ( DatePeriodFactory, RuleFactory, TimeSpanFactory, TimeSpanGroupFactory, ) @pytest.mark.django_db @pytest.mark.parametrize("lang", ["en", "fi"]) def test_resource_opening_hours_as_text_no_date_periods(resource, lang): with translation.override(lang): assert resource._get_date_periods_as_text() == "" @pytest.mark.django_db def test_resource_opening_hours_as_text(resource): DatePeriodFactory( name="Special hours", resource=resource, resource_state=State.CLOSED, start_date=datetime.date(year=2021, month=12, day=27), end_date=datetime.date(year=2022, month=1, day=2), override=True, ) date_period = DatePeriodFactory( name="Regular opening hours", resource=resource, resource_state=State.OPEN, start_date=datetime.date(year=2021, month=1, day=1), end_date=datetime.date(year=2022, month=12, day=31), ) time_span_group = TimeSpanGroupFactory(period=date_period) TimeSpanFactory( name="Test time span", group=time_span_group, start_time=datetime.time(hour=9, minute=0), end_time=datetime.time(hour=17, minute=0), weekdays=[Weekday.MONDAY, Weekday.TUESDAY, Weekday.THURSDAY], ) TimeSpanFactory( name="Test time span...", group=time_span_group, start_time=datetime.time(hour=9, minute=0), end_time=datetime.time(hour=19, minute=0), weekdays=[Weekday.FRIDAY, Weekday.SATURDAY], ) TimeSpanFactory( name="Test time span 2", group=time_span_group, start_time=datetime.time(hour=10, minute=0), end_time=datetime.time(hour=14, minute=0), weekdays=[Weekday.SUNDAY], ) RuleFactory( group=time_span_group, context=RuleContext.PERIOD, subject=RuleSubject.WEEK, frequency_modifier=FrequencyModifier.EVEN, ) time_span_group2 = TimeSpanGroupFactory(period=date_period) TimeSpanFactory( name="Test time span 3", group=time_span_group2, start_time=datetime.time(hour=8, minute=0), 
end_time=datetime.time(hour=16, minute=0), weekdays=[Weekday.MONDAY, Weekday.TUESDAY], ) TimeSpanFactory( name="Test time span 4", group=time_span_group2, start_time=datetime.time(hour=9, minute=0), end_time=datetime.time(hour=13, minute=0), weekdays=Weekday.weekend(), ) RuleFactory( group=time_span_group2, context=RuleContext.PERIOD, subject=RuleSubject.MONTH, frequency_ordinal=2, ) RuleFactory( group=time_span_group2, context=RuleContext.PERIOD, subject=RuleSubject.WEEK, frequency_modifier=FrequencyModifier.ODD, ) with translation.override("en"): assert resource._get_date_periods_as_text() == ( "\n" "========================================\n" "Regular opening hours\n" "Date period: Jan. 1, 2021 - Dec. 31, 2022\n" "Opening hours:\n" "\n" " Monday-Tuesday, Thursday 9 a.m.-5 p.m. Open\n" " Friday-Saturday 9 a.m.-7 p.m. Open\n" " Sunday 10 a.m.-2 p.m. Open\n" "\n" " In effect when every one of these match:\n" " - On even weeks in the period\n" "\n" " ---------------------------------------\n" "\n" " Monday-Tuesday 8 a.m.-4 p.m. Open\n" " Saturday-Sunday 9 a.m.-1 p.m. Open\n" "\n" " In effect when every one of these match:\n" " - Every 2nd month in the period\n" " - On odd weeks in the period\n" "\n" "========================================\n" "Special hours\n" "Date period: Dec. 27, 2021 - Jan. 2, 2022\n" "Opening hours:\n" "\n" " Closed\n" "\n" "========================================\n" ) with translation.override("fi"): assert resource._get_date_periods_as_text() == ( "\n" "========================================\n" "Regular opening hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. 
joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Maanantai-Tiistai, Torstai 9.00-17.00 Auki\n" " Perjantai-Lauantai 9.00-19.00 Auki\n" " Sunnuntai 10.00-14.00 Auki\n" "\n" " Voimassa kun kaikki seuraavat pätevät:\n" " - Jakson jokainen parillinen viikko\n" "\n" " ---------------------------------------\n" "\n" " Maanantai-Tiistai 8.00-16.00 Auki\n" " Lauantai-Sunnuntai 9.00-13.00 Auki\n" "\n" " Voimassa kun kaikki seuraavat pätevät:\n" " - Jakson joka 2. kuukausi\n" " - Jakson jokainen pariton viikko\n" "\n" "========================================\n" "Special hours\n" "Aikajakso: 27. joulukuuta 2021 - 2. tammikuuta 2022\n" "Aukioloajat:\n" "\n" " Suljettu\n" "\n" "========================================\n" ) @pytest.mark.django_db @pytest.mark.parametrize( "modifier", [None, FrequencyModifier.EVEN, FrequencyModifier.ODD] ) @pytest.mark.parametrize("start", [None, 1, 2, -1, -2]) @pytest.mark.parametrize("ordinal", [None, 1, 4]) @pytest.mark.parametrize("subject", list(RuleSubject)) @pytest.mark.parametrize("context", list(RuleContext)) def test_rule_as_text_frequency_ordinal(context, subject, start, ordinal, modifier): if not any([start, ordinal, modifier]) or ( subject == RuleSubject.MONTH and context == RuleContext.MONTH ): pytest.skip("Won't test this combination as it's an invalid rule") rule = Rule( context=context, subject=subject, start=start, frequency_ordinal=ordinal, frequency_modifier=modifier, ) with translation.override("en"): rule_as_text_en = rule.as_text() with translation.override("fi"): rule_as_text_fi = rule.as_text() assert rule_as_text_en assert rule_as_text_fi assert rule_as_text_en != rule_as_text_fi @pytest.mark.django_db @pytest.mark.parametrize("lang", ["en", "fi"]) def test_resource_date_periods_as_text_is_kept_up_to_date(resource, lang): assert resource.date_periods_as_text == "" date_period = DatePeriodFactory( name="Test hours", resource=resource, resource_state=State.OPEN, start_date=datetime.date(year=2021, month=1, day=1), 
end_date=datetime.date(year=2022, month=12, day=31), ) assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Auki\n" "\n" "========================================\n" ) date_period.resource_state = State.CLOSED date_period.save() assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Suljettu\n" "\n" "========================================\n" ) time_span_group = TimeSpanGroupFactory(period=date_period) TimeSpanFactory( group=time_span_group, start_time=datetime.time(hour=10, minute=0), end_time=datetime.time(hour=12, minute=0), weekdays=[Weekday.MONDAY], resource_state=State.OPEN, ) assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Maanantai 10.00-12.00 Auki\n" "\n" "========================================\n" ) RuleFactory( group=time_span_group, context=RuleContext.PERIOD, subject=RuleSubject.WEEK, frequency_ordinal=2, ) assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Maanantai 10.00-12.00 Auki\n" "\n" " Voimassa kun kaikki seuraavat pätevät:\n" " - Jakson joka 2. viikko\n" "\n" "========================================\n" )
31.746479
84
0.542702
import datetime import pytest from django.utils import translation from hours.enums import FrequencyModifier, RuleContext, RuleSubject, State, Weekday from hours.models import Rule from hours.tests.conftest import ( DatePeriodFactory, RuleFactory, TimeSpanFactory, TimeSpanGroupFactory, ) @pytest.mark.django_db @pytest.mark.parametrize("lang", ["en", "fi"]) def test_resource_opening_hours_as_text_no_date_periods(resource, lang): with translation.override(lang): assert resource._get_date_periods_as_text() == "" @pytest.mark.django_db def test_resource_opening_hours_as_text(resource): DatePeriodFactory( name="Special hours", resource=resource, resource_state=State.CLOSED, start_date=datetime.date(year=2021, month=12, day=27), end_date=datetime.date(year=2022, month=1, day=2), override=True, ) date_period = DatePeriodFactory( name="Regular opening hours", resource=resource, resource_state=State.OPEN, start_date=datetime.date(year=2021, month=1, day=1), end_date=datetime.date(year=2022, month=12, day=31), ) time_span_group = TimeSpanGroupFactory(period=date_period) TimeSpanFactory( name="Test time span", group=time_span_group, start_time=datetime.time(hour=9, minute=0), end_time=datetime.time(hour=17, minute=0), weekdays=[Weekday.MONDAY, Weekday.TUESDAY, Weekday.THURSDAY], ) TimeSpanFactory( name="Test time span...", group=time_span_group, start_time=datetime.time(hour=9, minute=0), end_time=datetime.time(hour=19, minute=0), weekdays=[Weekday.FRIDAY, Weekday.SATURDAY], ) TimeSpanFactory( name="Test time span 2", group=time_span_group, start_time=datetime.time(hour=10, minute=0), end_time=datetime.time(hour=14, minute=0), weekdays=[Weekday.SUNDAY], ) RuleFactory( group=time_span_group, context=RuleContext.PERIOD, subject=RuleSubject.WEEK, frequency_modifier=FrequencyModifier.EVEN, ) time_span_group2 = TimeSpanGroupFactory(period=date_period) TimeSpanFactory( name="Test time span 3", group=time_span_group2, start_time=datetime.time(hour=8, minute=0), 
end_time=datetime.time(hour=16, minute=0), weekdays=[Weekday.MONDAY, Weekday.TUESDAY], ) TimeSpanFactory( name="Test time span 4", group=time_span_group2, start_time=datetime.time(hour=9, minute=0), end_time=datetime.time(hour=13, minute=0), weekdays=Weekday.weekend(), ) RuleFactory( group=time_span_group2, context=RuleContext.PERIOD, subject=RuleSubject.MONTH, frequency_ordinal=2, ) RuleFactory( group=time_span_group2, context=RuleContext.PERIOD, subject=RuleSubject.WEEK, frequency_modifier=FrequencyModifier.ODD, ) with translation.override("en"): assert resource._get_date_periods_as_text() == ( "\n" "========================================\n" "Regular opening hours\n" "Date period: Jan. 1, 2021 - Dec. 31, 2022\n" "Opening hours:\n" "\n" " Monday-Tuesday, Thursday 9 a.m.-5 p.m. Open\n" " Friday-Saturday 9 a.m.-7 p.m. Open\n" " Sunday 10 a.m.-2 p.m. Open\n" "\n" " In effect when every one of these match:\n" " - On even weeks in the period\n" "\n" " ---------------------------------------\n" "\n" " Monday-Tuesday 8 a.m.-4 p.m. Open\n" " Saturday-Sunday 9 a.m.-1 p.m. Open\n" "\n" " In effect when every one of these match:\n" " - Every 2nd month in the period\n" " - On odd weeks in the period\n" "\n" "========================================\n" "Special hours\n" "Date period: Dec. 27, 2021 - Jan. 2, 2022\n" "Opening hours:\n" "\n" " Closed\n" "\n" "========================================\n" ) with translation.override("fi"): assert resource._get_date_periods_as_text() == ( "\n" "========================================\n" "Regular opening hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. 
joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Maanantai-Tiistai, Torstai 9.00-17.00 Auki\n" " Perjantai-Lauantai 9.00-19.00 Auki\n" " Sunnuntai 10.00-14.00 Auki\n" "\n" " Voimassa kun kaikki seuraavat pätevät:\n" " - Jakson jokainen parillinen viikko\n" "\n" " ---------------------------------------\n" "\n" " Maanantai-Tiistai 8.00-16.00 Auki\n" " Lauantai-Sunnuntai 9.00-13.00 Auki\n" "\n" " Voimassa kun kaikki seuraavat pätevät:\n" " - Jakson joka 2. kuukausi\n" " - Jakson jokainen pariton viikko\n" "\n" "========================================\n" "Special hours\n" "Aikajakso: 27. joulukuuta 2021 - 2. tammikuuta 2022\n" "Aukioloajat:\n" "\n" " Suljettu\n" "\n" "========================================\n" ) @pytest.mark.django_db @pytest.mark.parametrize( "modifier", [None, FrequencyModifier.EVEN, FrequencyModifier.ODD] ) @pytest.mark.parametrize("start", [None, 1, 2, -1, -2]) @pytest.mark.parametrize("ordinal", [None, 1, 4]) @pytest.mark.parametrize("subject", list(RuleSubject)) @pytest.mark.parametrize("context", list(RuleContext)) def test_rule_as_text_frequency_ordinal(context, subject, start, ordinal, modifier): if not any([start, ordinal, modifier]) or ( subject == RuleSubject.MONTH and context == RuleContext.MONTH ): pytest.skip("Won't test this combination as it's an invalid rule") rule = Rule( context=context, subject=subject, start=start, frequency_ordinal=ordinal, frequency_modifier=modifier, ) with translation.override("en"): rule_as_text_en = rule.as_text() with translation.override("fi"): rule_as_text_fi = rule.as_text() assert rule_as_text_en assert rule_as_text_fi assert rule_as_text_en != rule_as_text_fi @pytest.mark.django_db @pytest.mark.parametrize("lang", ["en", "fi"]) def test_resource_date_periods_as_text_is_kept_up_to_date(resource, lang): assert resource.date_periods_as_text == "" date_period = DatePeriodFactory( name="Test hours", resource=resource, resource_state=State.OPEN, start_date=datetime.date(year=2021, month=1, day=1), 
end_date=datetime.date(year=2022, month=12, day=31), ) assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Auki\n" "\n" "========================================\n" ) date_period.resource_state = State.CLOSED date_period.save() assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Suljettu\n" "\n" "========================================\n" ) time_span_group = TimeSpanGroupFactory(period=date_period) TimeSpanFactory( group=time_span_group, start_time=datetime.time(hour=10, minute=0), end_time=datetime.time(hour=12, minute=0), weekdays=[Weekday.MONDAY], resource_state=State.OPEN, ) assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Maanantai 10.00-12.00 Auki\n" "\n" "========================================\n" ) RuleFactory( group=time_span_group, context=RuleContext.PERIOD, subject=RuleSubject.WEEK, frequency_ordinal=2, ) assert resource.date_periods_as_text == ( "\n========================================\n" "Test hours\n" "Aikajakso: 1. tammikuuta 2021 - 31. joulukuuta 2022\n" "Aukioloajat:\n" "\n" " Maanantai 10.00-12.00 Auki\n" "\n" " Voimassa kun kaikki seuraavat pätevät:\n" " - Jakson joka 2. viikko\n" "\n" "========================================\n" )
true
true
f73bebf5f22f828fa0dddcbd7d2a4deae02f54a0
179
py
Python
plugins/clients/apps-client/test/mock_client_app/mock_client_event.py
pcanto-hopeit/hopeit.engine
c17b0438e56940a4d1b2f071cca90ae8b6f70629
[ "Apache-2.0" ]
15
2020-07-09T17:41:14.000Z
2021-10-04T20:13:08.000Z
plugins/clients/apps-client/test/mock_client_app/mock_client_event.py
pcanto-hopeit/hopeit.engine
c17b0438e56940a4d1b2f071cca90ae8b6f70629
[ "Apache-2.0" ]
48
2020-07-10T15:16:17.000Z
2022-03-03T19:46:46.000Z
plugins/clients/apps-client/test/mock_client_app/mock_client_event.py
pcanto-hopeit/hopeit.engine
c17b0438e56940a4d1b2f071cca90ae8b6f70629
[ "Apache-2.0" ]
3
2020-07-08T20:12:58.000Z
2021-01-10T15:57:21.000Z
from hopeit.app.context import EventContext __steps__ = ['test_app_call'] async def test_app_call(payload: None, context: EventContext) -> str: raise NotImplementedError()
22.375
69
0.77095
from hopeit.app.context import EventContext __steps__ = ['test_app_call'] async def test_app_call(payload: None, context: EventContext) -> str: raise NotImplementedError()
true
true
f73bed0be17ea24ab6ebb469bbeb064f4f2841f5
898
py
Python
Build.py
wasimakh2/FacebookBot
fe3adc1dfbc0babb0b54b8265b0c27eadbe22422
[ "Apache-2.0" ]
312
2016-02-16T22:52:32.000Z
2022-03-11T05:34:59.000Z
Build.py
wasimakh2/FacebookBot
fe3adc1dfbc0babb0b54b8265b0c27eadbe22422
[ "Apache-2.0" ]
17
2016-12-18T17:35:13.000Z
2021-04-30T08:58:03.000Z
Build.py
wasimakh2/FacebookBot
fe3adc1dfbc0babb0b54b8265b0c27eadbe22422
[ "Apache-2.0" ]
130
2016-02-03T12:34:18.000Z
2022-03-22T08:10:59.000Z
import os import sys script=""" from cx_Freeze import setup, Executable import sys base = None if sys.platform == "win32": base = "Win32GUI" setup( name = "$FILENOTPY$", version = "1.0", description = "$FILENOTPY$", executables = [Executable("$FILENAME$",appendScriptToExe = False,appendScriptToLibrary = True,icon="icon.ico")] )""" def main(filename): global script file=open("temp.py","wt") script=script.replace("$FILENAME$",filename) name=filename[:-3] script=script.replace("$FILENOTPY$",name) file.write(script) file.close() os.system("python temp.py build") os.remove("temp.py") input("\nDONE") if __name__=="__main__": if len(sys.argv)==2: print("Starting build ->") print(sys.argv[1]) main(sys.argv[1]) else: print("No argument\n Pass: Filename")
24.944444
116
0.601336
import os import sys script=""" from cx_Freeze import setup, Executable import sys base = None if sys.platform == "win32": base = "Win32GUI" setup( name = "$FILENOTPY$", version = "1.0", description = "$FILENOTPY$", executables = [Executable("$FILENAME$",appendScriptToExe = False,appendScriptToLibrary = True,icon="icon.ico")] )""" def main(filename): global script file=open("temp.py","wt") script=script.replace("$FILENAME$",filename) name=filename[:-3] script=script.replace("$FILENOTPY$",name) file.write(script) file.close() os.system("python temp.py build") os.remove("temp.py") input("\nDONE") if __name__=="__main__": if len(sys.argv)==2: print("Starting build ->") print(sys.argv[1]) main(sys.argv[1]) else: print("No argument\n Pass: Filename")
true
true
f73beebf053c139436dce4f12aff85bc275759a9
2,041
py
Python
src/api/bkuser_core/categories/handlers.py
trueware/bk-user
8c633e0a3821beb839ed120c4514c5733e675862
[ "MIT" ]
null
null
null
src/api/bkuser_core/categories/handlers.py
trueware/bk-user
8c633e0a3821beb839ed120c4514c5733e675862
[ "MIT" ]
null
null
null
src/api/bkuser_core/categories/handlers.py
trueware/bk-user
8c633e0a3821beb839ed120c4514c5733e675862
[ "MIT" ]
1
2021-12-31T06:48:41.000Z
2021-12-31T06:48:41.000Z
# -*- coding: utf-8 -*- """ TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-用户管理(Bk-User) available. Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging from bkuser_core.bkiam.constants import IAMAction, ResourceType from bkuser_core.bkiam.helper import IAMHelper from bkuser_core.categories.signals import post_category_create from django.dispatch import receiver from .plugins.ldap.handlers import create_sync_tasks, delete_sync_tasks, update_sync_tasks # noqa from .plugins.local.handlers import make_local_default_settings # noqa logger = logging.getLogger(__name__) @receiver(post_category_create) def create_creator_actions(sender, category, **kwargs): """请求权限中心,创建新建关联权限记录""" logger.info("going to create resource_creator_action for Category<%s>", category.id) helper = IAMHelper() try: helper.create_creator_actions(kwargs["creator"], category) except Exception: # pylint: disable=broad-except logger.exception("failed to create resource_creator_action (category related)") # 创建目录之后,默认拥有了目录 & 组织的管理能力 try: helper.create_auth_by_ancestor( username=kwargs["creator"], ancestor=category, target_type=ResourceType.DEPARTMENT.value, action_ids=[IAMAction.MANAGE_DEPARTMENT, IAMAction.VIEW_DEPARTMENT], ) except Exception: # pylint: disable=broad-except logger.exception("failed to create resource_creator_action (department related)")
46.386364
115
0.763351
import logging from bkuser_core.bkiam.constants import IAMAction, ResourceType from bkuser_core.bkiam.helper import IAMHelper from bkuser_core.categories.signals import post_category_create from django.dispatch import receiver from .plugins.ldap.handlers import create_sync_tasks, delete_sync_tasks, update_sync_tasks from .plugins.local.handlers import make_local_default_settings logger = logging.getLogger(__name__) @receiver(post_category_create) def create_creator_actions(sender, category, **kwargs): logger.info("going to create resource_creator_action for Category<%s>", category.id) helper = IAMHelper() try: helper.create_creator_actions(kwargs["creator"], category) except Exception: logger.exception("failed to create resource_creator_action (category related)") try: helper.create_auth_by_ancestor( username=kwargs["creator"], ancestor=category, target_type=ResourceType.DEPARTMENT.value, action_ids=[IAMAction.MANAGE_DEPARTMENT, IAMAction.VIEW_DEPARTMENT], ) except Exception: logger.exception("failed to create resource_creator_action (department related)")
true
true
f73bef434d57e5ad068ebf3139df0ab3c82e5c73
5,308
py
Python
src/resultGen/apisUsed.py
RRua/AnaDroid
7417b117a50149a6f210cd334de71b814db8d6c7
[ "MIT" ]
7
2019-01-17T18:37:59.000Z
2020-11-16T13:42:29.000Z
src/resultGen/apisUsed.py
RRua/AnaDroid
7417b117a50149a6f210cd334de71b814db8d6c7
[ "MIT" ]
null
null
null
src/resultGen/apisUsed.py
RRua/AnaDroid
7417b117a50149a6f210cd334de71b814db8d6c7
[ "MIT" ]
null
null
null
import json,sys,os,re # Linares Vasquez red_apis_file=os.getenv("ANADROID_PATH")+"/resources/redAPIS.json" sec_apis_file=os.getenv("ANADROID_PATH")+"/resources/secAPIS.json" # only needed fot tests executed before 27/08/2020 # created to undo error that caused malformed methoddefinitions in instrumentation phase # remove later # goal: to fix things like this : sample.andremion.musicplayer.view.ProgressView->onMeasure.SavedState|-1183758944 # expected result : sample.andremion.musicplayer.view.ProgressView->onMeasure|-1183758944 def fixMethodDefinition(met_id): simpl_m_name= met_id.split("->")[1].split("|")[0] if(len(simpl_m_name.split("."))>1): met_id=met_id.replace(simpl_m_name,simpl_m_name.split(".")[0] ) return met_id def areMethodEquals(met1,met2id,met2_val): met2id = fixMethodDefinition(met2id) return met1['method_name'] == met2id.split("|")[0] and len(met1['method_args']) == len(met2_val['method_args']) and ( met1['method_length'] == met2_val['method_length'] or met2_val['method_length']==-1 ) # and ( met1['method_locals'] == met2_val['method_locals'] or met2_val['method_locals']==-1 ) def methodWasInvoked(method_obj, traced_methods): #return method_obj['method_name'] in traced_methods return len (list(filter( lambda x : areMethodEquals(method_obj, x[0], x[1] ), traced_methods.items() ))) > 0 def methodRedAPIs(method_obj, red_apis_obj): #print(method_obj['method_apis']) #print("-----\n") #print(red_apis_obj) l=[] for method_call in method_obj['method_apis']: if 'name' in method_call and method_call['name'] in red_apis_obj: l.append(method_call['name']) #m_apis=method_obj return l def methodSecAPIs(method_obj, sec_apis_obj): #print(method_obj['method_apis']) #print("-----\n") #print(red_apis_obj) l=[] for method_call in method_obj['method_apis']: #print(method_call) if 'name' in method_call: sec_api_match = list(filter( lambda x : re.search( x , method_call['name'] ), sec_apis_obj.keys() )) if len (sec_api_match)>0: l.append(method_call['name']) # 
l.append(method_call['name']) #m_apis=method_obj return l def getRedApis(androguard_out_filename, all_traced_filename ): # load files red_apis_used={} with open(androguard_out_filename) as json_file: app_apis = json.load(json_file) with open(red_apis_file, encoding='utf-8') as json_file: red_apis = json.load(json_file) red_apis_dict={} for x in red_apis: red_apis_dict[x['fullMethodDefinition']]=x with open(all_traced_filename) as json_file: traced_methods = json.load(json_file) traced_methods_dict={} #print(traced_methods.keys()) for testid in traced_methods.keys(): traced_methods_dict[testid]={} for me , y in traced_methods[testid].items(): #print(me) traced_methods_dict[testid][me]= y ct=0 for classe in app_apis.values(): for method in classe['class_methods'].values(): for testid, test_methods in traced_methods_dict.items(): if methodWasInvoked(method, test_methods): #print("invocado %s" % method['method_name'] ) l= methodRedAPIs(method, red_apis_dict) if len(l)>0: if not testid in red_apis_used: red_apis_used[testid]={} red_apis_used[testid][method['method_name']]=l ct=ct+1 return red_apis_used , ct def getSecApis(androguard_out_filename, all_traced_filename ): # load files sec_apis_used={} with open(androguard_out_filename) as json_file: app_apis = json.load(json_file) with open(sec_apis_file, encoding='utf-8') as json_file: sec_apis = json.load(json_file) sec_apis_dict={} for x in sec_apis: sec_apis_dict[x['APIPrefix']]=x # load alltracedmethods.json with open(all_traced_filename) as json_file: traced_methods = json.load(json_file) traced_methods_dict={} #print(traced_methods.keys()) for testid in traced_methods.keys(): traced_methods_dict[testid]={} for me , y in traced_methods[testid].items(): #print(me) traced_methods_dict[testid][me]= y #print(traced_methods_dict) ct=0 for classe in app_apis.values(): for method in classe['class_methods'].values(): for testid, test_methods in traced_methods_dict.items(): if methodWasInvoked(method, test_methods): l= 
methodSecAPIs(method, sec_apis_dict) if len(l)>0: if not testid in sec_apis_used: sec_apis_used[testid]={} sec_apis_used[testid][method['method_name']]=l ct=ct+1 return sec_apis_used , ct if __name__ == '__main__': if len(sys.argv)>2: red_apis , ct_red = getRedApis(androguard_out_filename=sys.argv[1],all_traced_filename=sys.argv[2]) sec_apis , ct_sec = getSecApis(androguard_out_filename=sys.argv[1],all_traced_filename=sys.argv[2]) print("methods with red apis: " + str(ct_red) ) print("methods with sec apis: " + str(ct_sec) ) target_dir_path = os.path.dirname(os.path.realpath(sys.argv[2])) print("dumping red apis to "+ str(target_dir_path) + "/redAPIs.json") with open( str(target_dir_path) + "/redAPIs.json", 'w') as outfile: json.dump(red_apis, outfile) print("dumping sec apis to "+ str(target_dir_path) + "/secAPIs.json") with open( str(target_dir_path) + "/secAPIs.json", 'w') as outfile: json.dump(sec_apis, outfile) else: print("bad input len. please provide androguard output file and allTracedMethods.json filepaths as args")
36.356164
300
0.727581
import json,sys,os,re red_apis_file=os.getenv("ANADROID_PATH")+"/resources/redAPIS.json" sec_apis_file=os.getenv("ANADROID_PATH")+"/resources/secAPIS.json" def fixMethodDefinition(met_id): simpl_m_name= met_id.split("->")[1].split("|")[0] if(len(simpl_m_name.split("."))>1): met_id=met_id.replace(simpl_m_name,simpl_m_name.split(".")[0] ) return met_id def areMethodEquals(met1,met2id,met2_val): met2id = fixMethodDefinition(met2id) return met1['method_name'] == met2id.split("|")[0] and len(met1['method_args']) == len(met2_val['method_args']) and ( met1['method_length'] == met2_val['method_length'] or met2_val['method_length']==-1 ) def methodWasInvoked(method_obj, traced_methods): return len (list(filter( lambda x : areMethodEquals(method_obj, x[0], x[1] ), traced_methods.items() ))) > 0 def methodRedAPIs(method_obj, red_apis_obj): l=[] for method_call in method_obj['method_apis']: if 'name' in method_call and method_call['name'] in red_apis_obj: l.append(method_call['name']) return l def methodSecAPIs(method_obj, sec_apis_obj): l=[] for method_call in method_obj['method_apis']: if 'name' in method_call: sec_api_match = list(filter( lambda x : re.search( x , method_call['name'] ), sec_apis_obj.keys() )) if len (sec_api_match)>0: l.append(method_call['name']) return l def getRedApis(androguard_out_filename, all_traced_filename ): red_apis_used={} with open(androguard_out_filename) as json_file: app_apis = json.load(json_file) with open(red_apis_file, encoding='utf-8') as json_file: red_apis = json.load(json_file) red_apis_dict={} for x in red_apis: red_apis_dict[x['fullMethodDefinition']]=x with open(all_traced_filename) as json_file: traced_methods = json.load(json_file) traced_methods_dict={} for testid in traced_methods.keys(): traced_methods_dict[testid]={} for me , y in traced_methods[testid].items(): traced_methods_dict[testid][me]= y ct=0 for classe in app_apis.values(): for method in classe['class_methods'].values(): for testid, test_methods in 
traced_methods_dict.items(): if methodWasInvoked(method, test_methods): l= methodRedAPIs(method, red_apis_dict) if len(l)>0: if not testid in red_apis_used: red_apis_used[testid]={} red_apis_used[testid][method['method_name']]=l ct=ct+1 return red_apis_used , ct def getSecApis(androguard_out_filename, all_traced_filename ): sec_apis_used={} with open(androguard_out_filename) as json_file: app_apis = json.load(json_file) with open(sec_apis_file, encoding='utf-8') as json_file: sec_apis = json.load(json_file) sec_apis_dict={} for x in sec_apis: sec_apis_dict[x['APIPrefix']]=x with open(all_traced_filename) as json_file: traced_methods = json.load(json_file) traced_methods_dict={} for testid in traced_methods.keys(): traced_methods_dict[testid]={} for me , y in traced_methods[testid].items(): traced_methods_dict[testid][me]= y ct=0 for classe in app_apis.values(): for method in classe['class_methods'].values(): for testid, test_methods in traced_methods_dict.items(): if methodWasInvoked(method, test_methods): l= methodSecAPIs(method, sec_apis_dict) if len(l)>0: if not testid in sec_apis_used: sec_apis_used[testid]={} sec_apis_used[testid][method['method_name']]=l ct=ct+1 return sec_apis_used , ct if __name__ == '__main__': if len(sys.argv)>2: red_apis , ct_red = getRedApis(androguard_out_filename=sys.argv[1],all_traced_filename=sys.argv[2]) sec_apis , ct_sec = getSecApis(androguard_out_filename=sys.argv[1],all_traced_filename=sys.argv[2]) print("methods with red apis: " + str(ct_red) ) print("methods with sec apis: " + str(ct_sec) ) target_dir_path = os.path.dirname(os.path.realpath(sys.argv[2])) print("dumping red apis to "+ str(target_dir_path) + "/redAPIs.json") with open( str(target_dir_path) + "/redAPIs.json", 'w') as outfile: json.dump(red_apis, outfile) print("dumping sec apis to "+ str(target_dir_path) + "/secAPIs.json") with open( str(target_dir_path) + "/secAPIs.json", 'w') as outfile: json.dump(sec_apis, outfile) else: print("bad input len. 
please provide androguard output file and allTracedMethods.json filepaths as args")
true
true
f73befd25e262931487e0a2a67889b9f79f6c43d
2,187
py
Python
caffe2/quantization/server/tanh_dnnlowp_op_test.py
Hacky-DH/pytorch
80dc4be615854570aa39a7e36495897d8a040ecc
[ "Intel" ]
60,067
2017-01-18T17:21:31.000Z
2022-03-31T21:37:45.000Z
caffe2/quantization/server/tanh_dnnlowp_op_test.py
Hacky-DH/pytorch
80dc4be615854570aa39a7e36495897d8a040ecc
[ "Intel" ]
66,955
2017-01-18T17:21:38.000Z
2022-03-31T23:56:11.000Z
caffe2/quantization/server/tanh_dnnlowp_op_test.py
Hacky-DH/pytorch
80dc4be615854570aa39a7e36495897d8a040ecc
[ "Intel" ]
19,210
2017-01-18T17:45:04.000Z
2022-03-31T23:51:56.000Z
import collections import caffe2.python.hypothesis_test_util as hu import hypothesis.strategies as st import numpy as np from caffe2.python import core, dyndep, workspace from hypothesis import given, settings dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops") workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"]) class DNNLowPTanhOpTest(hu.HypothesisTestCase): @given(size=st.integers(1024, 2048), is_empty=st.booleans(), **hu.gcs_cpu_only) @settings(max_examples=10, deadline=None) def test_dnnlowp_tanh(self, size, is_empty, gc, dc): if is_empty: size = 0 X = (np.random.rand(size) * 10 - 5).astype(np.float32) Output = collections.namedtuple("Output", ["Y", "op_type", "engine"]) outputs = [] op_engine_list = [("Tanh", ""), ("Tanh", "DNNLOWP"), ("Int8Tanh", "DNNLOWP")] for op_type, engine in op_engine_list: net = core.Net("test_net") if engine == "DNNLOWP": quantize = core.CreateOperator( "Quantize", ["X"], ["X_q"], engine=engine, device_option=gc, followed_by="Tanh", ) net.Proto().op.extend([quantize]) tanh = core.CreateOperator( op_type, ["X_q" if engine == "DNNLOWP" else "X"], ["Y_q" if engine == "DNNLOWP" else "Y"], engine=engine, device_option=gc, ) net.Proto().op.extend([tanh]) if engine == "DNNLOWP": dequantize = core.CreateOperator( "Dequantize", ["Y_q"], ["Y"], engine=engine, device_option=gc ) net.Proto().op.extend([dequantize]) self.ws.create_blob("X").feed(X, device_option=gc) self.ws.run(net) outputs.append( Output(Y=self.ws.blobs["Y"].fetch(), op_type=op_type, engine=engine) ) for o in outputs: np.testing.assert_allclose(o.Y, outputs[0].Y, atol=0.02, rtol=0)
32.641791
85
0.541838
import collections import caffe2.python.hypothesis_test_util as hu import hypothesis.strategies as st import numpy as np from caffe2.python import core, dyndep, workspace from hypothesis import given, settings dyndep.InitOpsLibrary("//caffe2/caffe2/quantization/server:dnnlowp_ops") workspace.GlobalInit(["caffe2", "--caffe2_omp_num_threads=11"]) class DNNLowPTanhOpTest(hu.HypothesisTestCase): @given(size=st.integers(1024, 2048), is_empty=st.booleans(), **hu.gcs_cpu_only) @settings(max_examples=10, deadline=None) def test_dnnlowp_tanh(self, size, is_empty, gc, dc): if is_empty: size = 0 X = (np.random.rand(size) * 10 - 5).astype(np.float32) Output = collections.namedtuple("Output", ["Y", "op_type", "engine"]) outputs = [] op_engine_list = [("Tanh", ""), ("Tanh", "DNNLOWP"), ("Int8Tanh", "DNNLOWP")] for op_type, engine in op_engine_list: net = core.Net("test_net") if engine == "DNNLOWP": quantize = core.CreateOperator( "Quantize", ["X"], ["X_q"], engine=engine, device_option=gc, followed_by="Tanh", ) net.Proto().op.extend([quantize]) tanh = core.CreateOperator( op_type, ["X_q" if engine == "DNNLOWP" else "X"], ["Y_q" if engine == "DNNLOWP" else "Y"], engine=engine, device_option=gc, ) net.Proto().op.extend([tanh]) if engine == "DNNLOWP": dequantize = core.CreateOperator( "Dequantize", ["Y_q"], ["Y"], engine=engine, device_option=gc ) net.Proto().op.extend([dequantize]) self.ws.create_blob("X").feed(X, device_option=gc) self.ws.run(net) outputs.append( Output(Y=self.ws.blobs["Y"].fetch(), op_type=op_type, engine=engine) ) for o in outputs: np.testing.assert_allclose(o.Y, outputs[0].Y, atol=0.02, rtol=0)
true
true
f73bf01e0e8932369dd3f787db82c08609a7e6aa
2,988
py
Python
sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/__init__.py
rsdoherty/azure-sdk-for-python
6bba5326677468e6660845a703686327178bb7b1
[ "MIT" ]
3
2020-06-23T02:25:27.000Z
2021-09-07T18:48:11.000Z
sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/__init__.py
rsdoherty/azure-sdk-for-python
6bba5326677468e6660845a703686327178bb7b1
[ "MIT" ]
510
2019-07-17T16:11:19.000Z
2021-08-02T08:38:32.000Z
sdk/keyvault/azure-keyvault-administration/azure/keyvault/administration/_internal/__init__.py
rsdoherty/azure-sdk-for-python
6bba5326677468e6660845a703686327178bb7b1
[ "MIT" ]
5
2019-09-04T12:51:37.000Z
2020-09-16T07:28:40.000Z
# ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ from collections import namedtuple from six.moves.urllib_parse import urlparse from .challenge_auth_policy import ChallengeAuthPolicy, ChallengeAuthPolicyBase from .client_base import KeyVaultClientBase from .http_challenge import HttpChallenge from . import http_challenge_cache as HttpChallengeCache __all__ = [ "ChallengeAuthPolicy", "ChallengeAuthPolicyBase", "HttpChallenge", "HttpChallengeCache", "KeyVaultClientBase", ] _VaultId = namedtuple("_VaultId", ["vault_url", "collection", "name", "version"]) def parse_vault_id(url): try: parsed_uri = urlparse(url) except Exception: # pylint: disable=broad-except raise ValueError("'{}' is not not a valid url".format(url)) if not (parsed_uri.scheme and parsed_uri.hostname): raise ValueError("'{}' is not not a valid url".format(url)) path = list(filter(None, parsed_uri.path.split("/"))) if len(path) < 2 or len(path) > 3: raise ValueError("'{}' is not not a valid vault url".format(url)) return _VaultId( vault_url="{}://{}".format(parsed_uri.scheme, parsed_uri.hostname), collection=path[0], name=path[1], version=path[2] if len(path) == 3 else None, ) BackupLocation = namedtuple("BackupLocation", ["container_url", "folder_name"]) def parse_folder_url(folder_url): # type: (str) -> BackupLocation """Parse the blob container URL and folder name from a backup's blob storage URL. For example, https://<account>.blob.core.windows.net/backup/mhsm-account-2020090117323313 parses to (container_url="https://<account>.blob.core.windows.net/backup", folder_name="mhsm-account-2020090117323313"). 
""" try: parsed = urlparse(folder_url) # the first segment of the path is the container name stripped_path = parsed.path.strip("/") container = stripped_path.split("/")[0] # the rest of the path is the folder name folder_name = stripped_path[len(container) + 1 :] # this intentionally discards any SAS token in the URL--methods require the SAS token as a separate parameter container_url = "{}://{}/{}".format(parsed.scheme, parsed.netloc, container) return BackupLocation(container_url, folder_name) except: # pylint:disable=broad-except raise ValueError( '"folder_url" should be the URL of a blob holding a Key Vault backup, for example ' '"https://<account>.blob.core.windows.net/backup/mhsm-account-2020090117323313"' ) try: # pylint:disable=unused-import from .async_challenge_auth_policy import AsyncChallengeAuthPolicy from .async_client_base import AsyncKeyVaultClientBase __all__.extend(["AsyncChallengeAuthPolicy", "AsyncKeyVaultClientBase"]) except (SyntaxError, ImportError): pass
34.344828
117
0.678715
from collections import namedtuple from six.moves.urllib_parse import urlparse from .challenge_auth_policy import ChallengeAuthPolicy, ChallengeAuthPolicyBase from .client_base import KeyVaultClientBase from .http_challenge import HttpChallenge from . import http_challenge_cache as HttpChallengeCache __all__ = [ "ChallengeAuthPolicy", "ChallengeAuthPolicyBase", "HttpChallenge", "HttpChallengeCache", "KeyVaultClientBase", ] _VaultId = namedtuple("_VaultId", ["vault_url", "collection", "name", "version"]) def parse_vault_id(url): try: parsed_uri = urlparse(url) except Exception: raise ValueError("'{}' is not not a valid url".format(url)) if not (parsed_uri.scheme and parsed_uri.hostname): raise ValueError("'{}' is not not a valid url".format(url)) path = list(filter(None, parsed_uri.path.split("/"))) if len(path) < 2 or len(path) > 3: raise ValueError("'{}' is not not a valid vault url".format(url)) return _VaultId( vault_url="{}://{}".format(parsed_uri.scheme, parsed_uri.hostname), collection=path[0], name=path[1], version=path[2] if len(path) == 3 else None, ) BackupLocation = namedtuple("BackupLocation", ["container_url", "folder_name"]) def parse_folder_url(folder_url): try: parsed = urlparse(folder_url) stripped_path = parsed.path.strip("/") container = stripped_path.split("/")[0] folder_name = stripped_path[len(container) + 1 :] container_url = "{}://{}/{}".format(parsed.scheme, parsed.netloc, container) return BackupLocation(container_url, folder_name) except: raise ValueError( '"folder_url" should be the URL of a blob holding a Key Vault backup, for example ' '"https://<account>.blob.core.windows.net/backup/mhsm-account-2020090117323313"' ) try: from .async_challenge_auth_policy import AsyncChallengeAuthPolicy from .async_client_base import AsyncKeyVaultClientBase __all__.extend(["AsyncChallengeAuthPolicy", "AsyncKeyVaultClientBase"]) except (SyntaxError, ImportError): pass
true
true
f73bf1a1f7a6df4e14a3529dfff37120bc1158be
376
py
Python
TestDemo/changeApp.py
mocne/handerCode
bd6f9c1c5ec9a2e8be79146748f19a430ba30074
[ "MIT" ]
1
2018-04-21T13:54:56.000Z
2018-04-21T13:54:56.000Z
TestDemo/changeApp.py
mocne/handerCode
bd6f9c1c5ec9a2e8be79146748f19a430ba30074
[ "MIT" ]
null
null
null
TestDemo/changeApp.py
mocne/handerCode
bd6f9c1c5ec9a2e8be79146748f19a430ba30074
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import time from DriverInit import initAppiumDriver def change2app(driver, package, activity): driver.quit() driver = initAppiumDriver.initAppiumWithInfo(package=package, activity=activity) time.sleep(10) driver.get_screenshot_as_file('./img/%s.png' % time.strftime('%Y-%m-%d_%H_%M_%S', time.localtime(time.time()))) time.sleep(3)
34.181818
115
0.712766
import time from DriverInit import initAppiumDriver def change2app(driver, package, activity): driver.quit() driver = initAppiumDriver.initAppiumWithInfo(package=package, activity=activity) time.sleep(10) driver.get_screenshot_as_file('./img/%s.png' % time.strftime('%Y-%m-%d_%H_%M_%S', time.localtime(time.time()))) time.sleep(3)
true
true
f73bf271bcf6b41568d091472a46e83b99a55918
417
py
Python
torchsparse/nn/modules/crop.py
f-sky/torchsparse
65466a10c6fa54bff17c6429706b7019a2a59409
[ "MIT" ]
1
2021-03-16T02:47:56.000Z
2021-03-16T02:47:56.000Z
torchsparse/nn/modules/crop.py
f-sky/torchsparse
65466a10c6fa54bff17c6429706b7019a2a59409
[ "MIT" ]
null
null
null
torchsparse/nn/modules/crop.py
f-sky/torchsparse
65466a10c6fa54bff17c6429706b7019a2a59409
[ "MIT" ]
null
null
null
import torch from torch import nn from torchsparse.nn.functional import spcrop __all__ = ['SparseCrop'] class SparseCrop(nn.Module): def __init__(self, loc_min, loc_max): super().__init__() self.loc_min = torch.cuda.IntTensor([list(loc_min)]) self.loc_max = torch.cuda.IntTensor([list(loc_max)]) def forward(self, inputs): return spcrop(inputs, self.loc_min, self.loc_max)
26.0625
60
0.693046
import torch from torch import nn from torchsparse.nn.functional import spcrop __all__ = ['SparseCrop'] class SparseCrop(nn.Module): def __init__(self, loc_min, loc_max): super().__init__() self.loc_min = torch.cuda.IntTensor([list(loc_min)]) self.loc_max = torch.cuda.IntTensor([list(loc_max)]) def forward(self, inputs): return spcrop(inputs, self.loc_min, self.loc_max)
true
true
f73bf2e03a18009163612bcd49527a1e50bbe4eb
6,768
py
Python
dqn_zoo/networks_test.py
khushjammu/dqn_zoo
249af96717d605cc1a62fc2b69941881c9661249
[ "Apache-2.0" ]
3
2021-02-04T23:13:51.000Z
2021-11-06T10:21:50.000Z
dqn_zoo/networks_test.py
khushjammu/dqn_zoo
249af96717d605cc1a62fc2b69941881c9661249
[ "Apache-2.0" ]
null
null
null
dqn_zoo/networks_test.py
khushjammu/dqn_zoo
249af96717d605cc1a62fc2b69941881c9661249
[ "Apache-2.0" ]
1
2021-12-20T13:42:35.000Z
2021-12-20T13:42:35.000Z
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for networks.""" # pylint: disable=g-bad-import-order import haiku as hk import jax from jax.config import config import jax.numpy as jnp import numpy as np import tree from dqn_zoo import networks from absl.testing import absltest def _sample_input(input_shape): return jnp.zeros((1,) + input_shape, dtype=jnp.float32) class SimpleLayersTest(absltest.TestCase): def test_linear(self): layer = hk.transform(networks.linear(4)) params = layer.init(jax.random.PRNGKey(1), _sample_input((3,))) self.assertCountEqual(['linear'], params) lin_params = params['linear'] self.assertCountEqual(['w', 'b'], lin_params) self.assertEqual((3, 4), lin_params['w'].shape) self.assertEqual((4,), lin_params['b'].shape) def test_conv(self): layer = hk.transform(networks.conv(4, (3, 3), 2)) params = layer.init(jax.random.PRNGKey(1), _sample_input((7, 7, 3))) self.assertCountEqual(['conv2_d'], params) conv_params = params['conv2_d'] self.assertCountEqual(['w', 'b'], conv_params) self.assertEqual((3, 3, 3, 4), conv_params['w'].shape) self.assertEqual((4,), conv_params['b'].shape) class LinearWithSharedBiasTest(absltest.TestCase): def setUp(self): super().setUp() rng_key = jax.random.PRNGKey(1) self.init_rng_key, self.apply_rng_key = jax.random.split(rng_key) self.input_shape = (4,) self.output_shape = (3,) 
self.weights_shape = (self.input_shape[0], self.output_shape[0]) network_fn = networks.linear_with_shared_bias(self.output_shape[0]) self.network = hk.transform(network_fn) def test_bias_parameter_shape(self): params = self.network.init(self.init_rng_key, _sample_input(self.input_shape)) self.assertLen(tree.flatten(params), 2) def check_params(path, param): if path[-1] == 'b': self.assertNotEqual(self.output_shape, param.shape) self.assertEqual((1,), param.shape) elif path[-1] == 'w': self.assertEqual(self.weights_shape, param.shape) else: self.fail('Unexpected parameter %s.' % path) tree.map_structure_with_path(check_params, params) def test_output_shares_bias(self): bias = 1.23 params = self.network.init(self.init_rng_key, _sample_input(self.input_shape)) def replace_params(path, param): if path[-1] == 'b': return jnp.ones_like(param) * bias else: return jnp.zeros_like(param) params = tree.map_structure_with_path(replace_params, params) output = self.network.apply(params, self.apply_rng_key, jnp.zeros((1,) + self.input_shape)) self.assertEqual((1,) + self.output_shape, output.shape) np.testing.assert_allclose([bias] * self.output_shape[0], list(output[0])) class NoisyLinearTest(absltest.TestCase): def setUp(self): super().setUp() rng_key = jax.random.PRNGKey(1) self.init_rng_key, self.apply_rng_key = jax.random.split(rng_key) self.input_shape = (4,) self.output_shape = (3,) self.network_fn = networks.noisy_linear(self.output_shape[0], 0.1) self.network = hk.transform(self.network_fn) self.params = self.network.init(self.init_rng_key, _sample_input(self.input_shape)) self.inputs = jnp.zeros((2,) + self.input_shape) def test_basic(self): self.network.apply(self.params, self.apply_rng_key, self.inputs) def test_error_raised_if_rng_is_not_passed_in(self): with self.assertRaisesRegex(ValueError, 'must be called with an RNG'): self.network.apply(self.params, self.inputs) def test_error_raised_if_transformed_without_rng_1(self): network = 
hk.without_apply_rng(hk.transform(self.network_fn)) with self.assertRaisesRegex(ValueError, 'PRNGKey'): network.apply(self.params, self.inputs) def test_error_raised_if_transformed_without_rng_2(self): network = hk.without_apply_rng(hk.transform(self.network_fn)) with self.assertRaisesRegex(TypeError, 'positional argument'): network.apply(self.params, self.apply_rng_key, self.inputs) def test_same_rng_produces_same_outputs(self): outputs_1 = self.network.apply(self.params, self.apply_rng_key, self.inputs) outputs_2 = self.network.apply(self.params, self.apply_rng_key, self.inputs) np.testing.assert_allclose(outputs_1, outputs_2) def test_different_rngs_produce_different_outputs(self): rng_1, rng_2 = jax.random.split(jax.random.PRNGKey(1)) outputs_1 = self.network.apply(self.params, rng_1, self.inputs) outputs_2 = self.network.apply(self.params, rng_2, self.inputs) self.assertFalse(np.allclose(outputs_1, outputs_2)) def test_number_of_params_with_bias_correct(self): net_fn = networks.noisy_linear(self.output_shape[0], 0.1, with_bias=True) network = hk.transform(net_fn) params = network.init(self.init_rng_key, _sample_input(self.input_shape)) self.assertCountEqual(['mu', 'sigma'], params) self.assertCountEqual(['b', 'w'], params['mu']) self.assertCountEqual(['b', 'w'], params['sigma']) def test_number_of_params_without_bias_correct(self): net_fn = networks.noisy_linear(self.output_shape[0], 0.1, with_bias=False) network = hk.transform(net_fn) params = network.init(self.init_rng_key, _sample_input(self.input_shape)) self.assertCountEqual(['mu', 'sigma'], params) self.assertCountEqual(['w'], params['mu']) self.assertCountEqual(['b', 'w'], params['sigma']) def test_sigma_params_are_constant(self): self.assertCountEqual(['mu', 'sigma'], self.params) sigma_params = self.params['sigma'] sigma_w_values = np.unique(sigma_params['w']) sigma_b_values = np.unique(sigma_params['b']) self.assertLen(sigma_w_values, 1) self.assertLen(sigma_b_values, 1) value = 0.1 / 
np.sqrt(self.input_shape[0]) self.assertAlmostEqual(value, sigma_w_values) self.assertAlmostEqual(value, sigma_b_values) if __name__ == '__main__': config.update('jax_numpy_rank_promotion', 'raise') absltest.main()
38.896552
80
0.700502
import haiku as hk import jax from jax.config import config import jax.numpy as jnp import numpy as np import tree from dqn_zoo import networks from absl.testing import absltest def _sample_input(input_shape): return jnp.zeros((1,) + input_shape, dtype=jnp.float32) class SimpleLayersTest(absltest.TestCase): def test_linear(self): layer = hk.transform(networks.linear(4)) params = layer.init(jax.random.PRNGKey(1), _sample_input((3,))) self.assertCountEqual(['linear'], params) lin_params = params['linear'] self.assertCountEqual(['w', 'b'], lin_params) self.assertEqual((3, 4), lin_params['w'].shape) self.assertEqual((4,), lin_params['b'].shape) def test_conv(self): layer = hk.transform(networks.conv(4, (3, 3), 2)) params = layer.init(jax.random.PRNGKey(1), _sample_input((7, 7, 3))) self.assertCountEqual(['conv2_d'], params) conv_params = params['conv2_d'] self.assertCountEqual(['w', 'b'], conv_params) self.assertEqual((3, 3, 3, 4), conv_params['w'].shape) self.assertEqual((4,), conv_params['b'].shape) class LinearWithSharedBiasTest(absltest.TestCase): def setUp(self): super().setUp() rng_key = jax.random.PRNGKey(1) self.init_rng_key, self.apply_rng_key = jax.random.split(rng_key) self.input_shape = (4,) self.output_shape = (3,) self.weights_shape = (self.input_shape[0], self.output_shape[0]) network_fn = networks.linear_with_shared_bias(self.output_shape[0]) self.network = hk.transform(network_fn) def test_bias_parameter_shape(self): params = self.network.init(self.init_rng_key, _sample_input(self.input_shape)) self.assertLen(tree.flatten(params), 2) def check_params(path, param): if path[-1] == 'b': self.assertNotEqual(self.output_shape, param.shape) self.assertEqual((1,), param.shape) elif path[-1] == 'w': self.assertEqual(self.weights_shape, param.shape) else: self.fail('Unexpected parameter %s.' 
% path) tree.map_structure_with_path(check_params, params) def test_output_shares_bias(self): bias = 1.23 params = self.network.init(self.init_rng_key, _sample_input(self.input_shape)) def replace_params(path, param): if path[-1] == 'b': return jnp.ones_like(param) * bias else: return jnp.zeros_like(param) params = tree.map_structure_with_path(replace_params, params) output = self.network.apply(params, self.apply_rng_key, jnp.zeros((1,) + self.input_shape)) self.assertEqual((1,) + self.output_shape, output.shape) np.testing.assert_allclose([bias] * self.output_shape[0], list(output[0])) class NoisyLinearTest(absltest.TestCase): def setUp(self): super().setUp() rng_key = jax.random.PRNGKey(1) self.init_rng_key, self.apply_rng_key = jax.random.split(rng_key) self.input_shape = (4,) self.output_shape = (3,) self.network_fn = networks.noisy_linear(self.output_shape[0], 0.1) self.network = hk.transform(self.network_fn) self.params = self.network.init(self.init_rng_key, _sample_input(self.input_shape)) self.inputs = jnp.zeros((2,) + self.input_shape) def test_basic(self): self.network.apply(self.params, self.apply_rng_key, self.inputs) def test_error_raised_if_rng_is_not_passed_in(self): with self.assertRaisesRegex(ValueError, 'must be called with an RNG'): self.network.apply(self.params, self.inputs) def test_error_raised_if_transformed_without_rng_1(self): network = hk.without_apply_rng(hk.transform(self.network_fn)) with self.assertRaisesRegex(ValueError, 'PRNGKey'): network.apply(self.params, self.inputs) def test_error_raised_if_transformed_without_rng_2(self): network = hk.without_apply_rng(hk.transform(self.network_fn)) with self.assertRaisesRegex(TypeError, 'positional argument'): network.apply(self.params, self.apply_rng_key, self.inputs) def test_same_rng_produces_same_outputs(self): outputs_1 = self.network.apply(self.params, self.apply_rng_key, self.inputs) outputs_2 = self.network.apply(self.params, self.apply_rng_key, self.inputs) 
np.testing.assert_allclose(outputs_1, outputs_2) def test_different_rngs_produce_different_outputs(self): rng_1, rng_2 = jax.random.split(jax.random.PRNGKey(1)) outputs_1 = self.network.apply(self.params, rng_1, self.inputs) outputs_2 = self.network.apply(self.params, rng_2, self.inputs) self.assertFalse(np.allclose(outputs_1, outputs_2)) def test_number_of_params_with_bias_correct(self): net_fn = networks.noisy_linear(self.output_shape[0], 0.1, with_bias=True) network = hk.transform(net_fn) params = network.init(self.init_rng_key, _sample_input(self.input_shape)) self.assertCountEqual(['mu', 'sigma'], params) self.assertCountEqual(['b', 'w'], params['mu']) self.assertCountEqual(['b', 'w'], params['sigma']) def test_number_of_params_without_bias_correct(self): net_fn = networks.noisy_linear(self.output_shape[0], 0.1, with_bias=False) network = hk.transform(net_fn) params = network.init(self.init_rng_key, _sample_input(self.input_shape)) self.assertCountEqual(['mu', 'sigma'], params) self.assertCountEqual(['w'], params['mu']) self.assertCountEqual(['b', 'w'], params['sigma']) def test_sigma_params_are_constant(self): self.assertCountEqual(['mu', 'sigma'], self.params) sigma_params = self.params['sigma'] sigma_w_values = np.unique(sigma_params['w']) sigma_b_values = np.unique(sigma_params['b']) self.assertLen(sigma_w_values, 1) self.assertLen(sigma_b_values, 1) value = 0.1 / np.sqrt(self.input_shape[0]) self.assertAlmostEqual(value, sigma_w_values) self.assertAlmostEqual(value, sigma_b_values) if __name__ == '__main__': config.update('jax_numpy_rank_promotion', 'raise') absltest.main()
true
true
f73bf43a853a72ce52ed11b2221e3d69c86939c4
72
py
Python
pagebits/tests/__init__.py
nngroup/django-pagebits
5580219de66ce5da1b3dda0e82fc32713153f0fc
[ "BSD-3-Clause" ]
4
2016-04-10T13:17:26.000Z
2019-03-12T17:18:59.000Z
pagebits/tests/__init__.py
nngroup/django-pagebits
5580219de66ce5da1b3dda0e82fc32713153f0fc
[ "BSD-3-Clause" ]
null
null
null
pagebits/tests/__init__.py
nngroup/django-pagebits
5580219de66ce5da1b3dda0e82fc32713153f0fc
[ "BSD-3-Clause" ]
6
2015-03-27T15:50:46.000Z
2017-03-09T18:05:47.000Z
from .models import * from .views import * from .templatetags import *
14.4
27
0.736111
from .models import * from .views import * from .templatetags import *
true
true
f73bf45e888c263f9c6a0a064dddd9dc61be0e1d
4,994
py
Python
bench/sqlite3-search-bench.py
daq-tools/PyTables
0949d41e611e6882a49248c6d82b1da9a994e788
[ "BSD-3-Clause" ]
null
null
null
bench/sqlite3-search-bench.py
daq-tools/PyTables
0949d41e611e6882a49248c6d82b1da9a994e788
[ "BSD-3-Clause" ]
null
null
null
bench/sqlite3-search-bench.py
daq-tools/PyTables
0949d41e611e6882a49248c6d82b1da9a994e788
[ "BSD-3-Clause" ]
2
2021-02-09T22:58:15.000Z
2021-12-23T14:59:22.000Z
import os import os.path from time import perf_counter as clock import numpy import random # in order to always generate the same random sequence random.seed(19) def fill_arrays(start, stop): col_i = numpy.arange(start, stop, dtype=numpy.int32) if userandom: col_j = numpy.random.uniform(0, nrows, stop - start) else: col_j = numpy.array(col_i, dtype=numpy.float64) return col_i, col_j # Generator for ensure pytables benchmark compatibility def int_generator(nrows): step = 1000 * 100 j = 0 for i in range(nrows): if i >= step * j: stop = (j + 1) * step if stop > nrows: # Seems unnecessary stop = nrows col_i, col_j = fill_arrays(i, stop) j += 1 k = 0 yield (col_i[k], col_j[k]) k += 1 def int_generator_slow(nrows): for i in range(nrows): if userandom: yield (i, float(random.randint(0, nrows))) else: yield (i, float(i)) def open_db(filename, remove=0): if remove and os.path.exists(filename): os.remove(filename) con = sqlite.connect(filename) cur = con.cursor() return con, cur def create_db(filename, nrows): con, cur = open_db(filename, remove=1) cur.execute("create table ints(i integer, j real)") t1 = clock() # This is twice as fast as a plain loop cur.executemany("insert into ints(i,j) values (?,?)", int_generator(nrows)) con.commit() ctime = clock() - t1 if verbose: print(f"insert time: {ctime:.5f}") print(f"Krows/s: {nrows / 1000 / ctime:.5f}") close_db(con, cur) def index_db(filename): con, cur = open_db(filename) t1 = clock() cur.execute("create index ij on ints(j)") con.commit() itime = clock() - t1 if verbose: print(f"index time: {itime:.5f}") print(f"Krows/s: {nrows / itime:.5f}") # Close the DB close_db(con, cur) def query_db(filename, rng): con, cur = open_db(filename) t1 = clock() ntimes = 10 for i in range(ntimes): # between clause does not seem to take advantage of indexes # cur.execute("select j from ints where j between %s and %s" % \ cur.execute("select i from ints where j >= %s and j <= %s" % # cur.execute("select i from ints where i >= %s and i <= # %s" 
% (rng[0] + i, rng[1] + i)) results = cur.fetchall() con.commit() qtime = (clock() - t1) / ntimes if verbose: print(f"query time: {qtime:.5f}") print(f"Mrows/s: {nrows / 1000 / qtime:.5f}") print(results) close_db(con, cur) def close_db(con, cur): cur.close() con.close() if __name__ == "__main__": import sys import getopt try: import psyco psyco_imported = 1 except: psyco_imported = 0 usage = """usage: %s [-v] [-p] [-m] [-i] [-q] [-c] [-R range] [-n nrows] file -v verbose -p use "psyco" if available -m use random values to fill the table -q do query -c create the database -i index the table -2 use sqlite2 (default is use sqlite3) -R select a range in a field in the form "start,stop" (def "0,10") -n sets the number of rows (in krows) in each table \n""" % sys.argv[0] try: opts, pargs = getopt.getopt(sys.argv[1:], 'vpmiqc2R:n:') except: sys.stderr.write(usage) sys.exit(0) # default options verbose = 0 usepsyco = 0 userandom = 0 docreate = 0 createindex = 0 doquery = 0 sqlite_version = "3" rng = [0, 10] nrows = 1 # Get the options for option in opts: if option[0] == '-v': verbose = 1 elif option[0] == '-p': usepsyco = 1 elif option[0] == '-m': userandom = 1 elif option[0] == '-i': createindex = 1 elif option[0] == '-q': doquery = 1 elif option[0] == '-c': docreate = 1 elif option[0] == "-2": sqlite_version = "2" elif option[0] == '-R': rng = [int(i) for i in option[1].split(",")] elif option[0] == '-n': nrows = int(option[1]) # Catch the hdf5 file passed as the last argument filename = pargs[0] if sqlite_version == "2": import sqlite else: from pysqlite2 import dbapi2 as sqlite if verbose: print("pysqlite version:", sqlite.version) if userandom: print("using random values") if docreate: if verbose: print("writing %s krows" % nrows) if psyco_imported and usepsyco: psyco.bind(create_db) nrows *= 1000 create_db(filename, nrows) if createindex: index_db(filename) if doquery: query_db(filename, rng)
26.284211
81
0.543452
import os import os.path from time import perf_counter as clock import numpy import random random.seed(19) def fill_arrays(start, stop): col_i = numpy.arange(start, stop, dtype=numpy.int32) if userandom: col_j = numpy.random.uniform(0, nrows, stop - start) else: col_j = numpy.array(col_i, dtype=numpy.float64) return col_i, col_j def int_generator(nrows): step = 1000 * 100 j = 0 for i in range(nrows): if i >= step * j: stop = (j + 1) * step if stop > nrows: stop = nrows col_i, col_j = fill_arrays(i, stop) j += 1 k = 0 yield (col_i[k], col_j[k]) k += 1 def int_generator_slow(nrows): for i in range(nrows): if userandom: yield (i, float(random.randint(0, nrows))) else: yield (i, float(i)) def open_db(filename, remove=0): if remove and os.path.exists(filename): os.remove(filename) con = sqlite.connect(filename) cur = con.cursor() return con, cur def create_db(filename, nrows): con, cur = open_db(filename, remove=1) cur.execute("create table ints(i integer, j real)") t1 = clock() cur.executemany("insert into ints(i,j) values (?,?)", int_generator(nrows)) con.commit() ctime = clock() - t1 if verbose: print(f"insert time: {ctime:.5f}") print(f"Krows/s: {nrows / 1000 / ctime:.5f}") close_db(con, cur) def index_db(filename): con, cur = open_db(filename) t1 = clock() cur.execute("create index ij on ints(j)") con.commit() itime = clock() - t1 if verbose: print(f"index time: {itime:.5f}") print(f"Krows/s: {nrows / itime:.5f}") close_db(con, cur) def query_db(filename, rng): con, cur = open_db(filename) t1 = clock() ntimes = 10 for i in range(ntimes): cur.execute("select i from ints where j >= %s and j <= %s" % # %s" % (rng[0] + i, rng[1] + i)) results = cur.fetchall() con.commit() qtime = (clock() - t1) / ntimes if verbose: print(f"query time: {qtime:.5f}") print(f"Mrows/s: {nrows / 1000 / qtime:.5f}") print(results) close_db(con, cur) def close_db(con, cur): cur.close() con.close() if __name__ == "__main__": import sys import getopt try: import psyco psyco_imported = 1 except: 
psyco_imported = 0 usage = """usage: %s [-v] [-p] [-m] [-i] [-q] [-c] [-R range] [-n nrows] file -v verbose -p use "psyco" if available -m use random values to fill the table -q do query -c create the database -i index the table -2 use sqlite2 (default is use sqlite3) -R select a range in a field in the form "start,stop" (def "0,10") -n sets the number of rows (in krows) in each table \n""" % sys.argv[0] try: opts, pargs = getopt.getopt(sys.argv[1:], 'vpmiqc2R:n:') except: sys.stderr.write(usage) sys.exit(0) verbose = 0 usepsyco = 0 userandom = 0 docreate = 0 createindex = 0 doquery = 0 sqlite_version = "3" rng = [0, 10] nrows = 1 for option in opts: if option[0] == '-v': verbose = 1 elif option[0] == '-p': usepsyco = 1 elif option[0] == '-m': userandom = 1 elif option[0] == '-i': createindex = 1 elif option[0] == '-q': doquery = 1 elif option[0] == '-c': docreate = 1 elif option[0] == "-2": sqlite_version = "2" elif option[0] == '-R': rng = [int(i) for i in option[1].split(",")] elif option[0] == '-n': nrows = int(option[1]) filename = pargs[0] if sqlite_version == "2": import sqlite else: from pysqlite2 import dbapi2 as sqlite if verbose: print("pysqlite version:", sqlite.version) if userandom: print("using random values") if docreate: if verbose: print("writing %s krows" % nrows) if psyco_imported and usepsyco: psyco.bind(create_db) nrows *= 1000 create_db(filename, nrows) if createindex: index_db(filename) if doquery: query_db(filename, rng)
true
true
f73bf4c22765ae3b9e4134953e252e8c4eeb07b1
158
py
Python
kattis/heartrate.py
terror/Solutions
1ad33daec95b565a38ac4730261593bcf249ac86
[ "CC0-1.0" ]
2
2021-04-05T14:26:37.000Z
2021-06-10T04:22:01.000Z
kattis/heartrate.py
terror/Solutions
1ad33daec95b565a38ac4730261593bcf249ac86
[ "CC0-1.0" ]
null
null
null
kattis/heartrate.py
terror/Solutions
1ad33daec95b565a38ac4730261593bcf249ac86
[ "CC0-1.0" ]
null
null
null
for i in range(int(input())): b, p = list(map(float, input().split())) t = 60 / p tt = t * b print("{:.4f} {:.4f} {:.4f}".format(tt - t, tt, tt + t))
26.333333
58
0.481013
for i in range(int(input())): b, p = list(map(float, input().split())) t = 60 / p tt = t * b print("{:.4f} {:.4f} {:.4f}".format(tt - t, tt, tt + t))
true
true
f73bf4cc862a2490edd9d343c95505e9bf67c7bb
9,047
py
Python
tests/test_filters.py
Outflier/PyAV
f3aa2336a9fddfc2ae46e15a26956da08153af7e
[ "BSD-3-Clause" ]
965
2015-01-08T19:11:16.000Z
2020-04-30T16:27:07.000Z
tests/test_filters.py
Outflier/PyAV
f3aa2336a9fddfc2ae46e15a26956da08153af7e
[ "BSD-3-Clause" ]
542
2015-01-02T12:55:46.000Z
2020-04-30T16:13:56.000Z
tests/test_filters.py
Outflier/PyAV
f3aa2336a9fddfc2ae46e15a26956da08153af7e
[ "BSD-3-Clause" ]
211
2015-01-10T12:10:02.000Z
2020-04-29T14:02:51.000Z
from fractions import Fraction from unittest import SkipTest import errno import numpy as np from av import AudioFrame, VideoFrame from av.audio.frame import format_dtypes from av.filter import Filter, Graph import av from .common import Image, TestCase, fate_suite def generate_audio_frame( frame_num, input_format="s16", layout="stereo", sample_rate=44100, frame_size=1024 ): """ Generate audio frame representing part of the sinusoidal wave :param input_format: default: s16 :param layout: default: stereo :param sample_rate: default: 44100 :param frame_size: default: 1024 :param frame_num: frame number :return: audio frame for sinusoidal wave audio signal slice """ frame = AudioFrame(format=input_format, layout=layout, samples=frame_size) frame.sample_rate = sample_rate frame.pts = frame_num * frame_size for i in range(len(frame.layout.channels)): data = np.zeros(frame_size, dtype=format_dtypes[input_format]) for j in range(frame_size): data[j] = np.sin(2 * np.pi * (frame_num + j) * (i + 1) / float(frame_size)) frame.planes[i].update(data) return frame def pull_until_blocked(graph): frames = [] while True: try: frames.append(graph.pull()) except av.utils.AVError as e: if e.errno != errno.EAGAIN: raise return frames class TestFilters(TestCase): def test_filter_descriptor(self): f = Filter("testsrc") self.assertEqual(f.name, "testsrc") self.assertEqual(f.description, "Generate test pattern.") self.assertFalse(f.dynamic_inputs) self.assertEqual(len(f.inputs), 0) self.assertFalse(f.dynamic_outputs) self.assertEqual(len(f.outputs), 1) self.assertEqual(f.outputs[0].name, "default") self.assertEqual(f.outputs[0].type, "video") def test_dynamic_filter_descriptor(self): f = Filter("split") self.assertFalse(f.dynamic_inputs) self.assertEqual(len(f.inputs), 1) self.assertTrue(f.dynamic_outputs) self.assertEqual(len(f.outputs), 0) def test_generator_graph(self): graph = Graph() src = graph.add("testsrc") lutrgb = graph.add( "lutrgb", 
"r=maxval+minval-val:g=maxval+minval-val:b=maxval+minval-val", name="invert", ) sink = graph.add("buffersink") src.link_to(lutrgb) lutrgb.link_to(sink) # pads and links self.assertIs(src.outputs[0].link.output, lutrgb.inputs[0]) self.assertIs(lutrgb.inputs[0].link.input, src.outputs[0]) frame = sink.pull() self.assertIsInstance(frame, VideoFrame) if Image: frame.to_image().save(self.sandboxed("mandelbrot2.png")) def test_auto_find_sink(self): graph = Graph() src = graph.add("testsrc") src.link_to(graph.add("buffersink")) graph.configure() frame = graph.pull() if Image: frame.to_image().save(self.sandboxed("mandelbrot3.png")) def test_delegate_sink(self): graph = Graph() src = graph.add("testsrc") src.link_to(graph.add("buffersink")) graph.configure() frame = src.pull() if Image: frame.to_image().save(self.sandboxed("mandelbrot4.png")) def test_haldclut_graph(self): raise SkipTest() graph = Graph() img = Image.open(fate_suite("png1/lena-rgb24.png")) frame = VideoFrame.from_image(img) img_source = graph.add_buffer(frame) hald_img = Image.open("hald_7.png") hald_frame = VideoFrame.from_image(hald_img) hald_source = graph.add_buffer(hald_frame) hald_filter = graph.add("haldclut") sink = graph.add("buffersink") img_source.link(0, hald_filter, 0) hald_source.link(0, hald_filter, 1) hald_filter.link(0, sink, 0) graph.config() self.assertIs(img_source.outputs[0].linked_to, hald_filter.inputs[0]) self.assertIs(hald_source.outputs[0].linked_to, hald_filter.inputs[1]) self.assertIs(hald_filter.outputs[0].linked_to, sink.inputs[0]) hald_source.push(hald_frame) img_source.push(frame) frame = sink.pull() self.assertIsInstance(frame, VideoFrame) frame.to_image().save(self.sandboxed("filtered.png")) def test_audio_buffer_sink(self): graph = Graph() audio_buffer = graph.add_abuffer( format="fltp", sample_rate=48000, layout="stereo", time_base=Fraction(1, 48000), ) audio_buffer.link_to(graph.add("abuffersink")) graph.configure() try: graph.pull() except OSError as e: # we haven't 
pushed any input so expect no frames / EAGAIN if e.errno != errno.EAGAIN: raise @staticmethod def link_nodes(*nodes): for c, n in zip(nodes, nodes[1:]): c.link_to(n) def test_audio_buffer_resample(self): graph = Graph() self.link_nodes( graph.add_abuffer( format="fltp", sample_rate=48000, layout="stereo", time_base=Fraction(1, 48000), ), graph.add( "aformat", "sample_fmts=s16:sample_rates=44100:channel_layouts=stereo" ), graph.add("abuffersink"), ) graph.configure() graph.push( generate_audio_frame( 0, input_format="fltp", layout="stereo", sample_rate=48000 ) ) out_frame = graph.pull() self.assertEqual(out_frame.format.name, "s16") self.assertEqual(out_frame.layout.name, "stereo") self.assertEqual(out_frame.sample_rate, 44100) def test_audio_buffer_volume_filter(self): graph = Graph() self.link_nodes( graph.add_abuffer( format="fltp", sample_rate=48000, layout="stereo", time_base=Fraction(1, 48000), ), graph.add("volume", volume="0.5"), graph.add("abuffersink"), ) graph.configure() input_frame = generate_audio_frame( 0, input_format="fltp", layout="stereo", sample_rate=48000 ) graph.push(input_frame) out_frame = graph.pull() self.assertEqual(out_frame.format.name, "fltp") self.assertEqual(out_frame.layout.name, "stereo") self.assertEqual(out_frame.sample_rate, 48000) input_data = input_frame.to_ndarray() output_data = out_frame.to_ndarray() self.assertTrue( np.allclose(input_data * 0.5, output_data), "Check that volume is reduced" ) def test_video_buffer(self): input_container = av.open(format="lavfi", file="color=c=pink:duration=1:r=30") input_video_stream = input_container.streams.video[0] graph = av.filter.Graph() buffer = graph.add_buffer(template=input_video_stream) bwdif = graph.add("bwdif", "send_field:tff:all") buffersink = graph.add("buffersink") buffer.link_to(bwdif) bwdif.link_to(buffersink) graph.configure() for frame in input_container.decode(): self.assertEqual(frame.time_base, Fraction(1, 30)) graph.push(frame) filtered_frames = 
pull_until_blocked(graph) if frame.pts == 0: # no output for the first input frame self.assertEqual(len(filtered_frames), 0) else: # we expect two filtered frames per input frame self.assertEqual(len(filtered_frames), 2) self.assertEqual(filtered_frames[0].pts, (frame.pts - 1) * 2) self.assertEqual(filtered_frames[0].time_base, Fraction(1, 60)) self.assertEqual(filtered_frames[1].pts, (frame.pts - 1) * 2 + 1) self.assertEqual(filtered_frames[1].time_base, Fraction(1, 60)) def test_EOF(self): input_container = av.open(format="lavfi", file="color=c=pink:duration=1:r=30") video_stream = input_container.streams.video[0] graph = av.filter.Graph() video_in = graph.add_buffer(template=video_stream) palette_gen_filter = graph.add("palettegen") video_out = graph.add("buffersink") video_in.link_to(palette_gen_filter) palette_gen_filter.link_to(video_out) graph.configure() for frame in input_container.decode(video=0): graph.push(frame) graph.push(None) # if we do not push None, we get a BlockingIOError palette_frame = graph.pull() self.assertIsInstance(palette_frame, av.VideoFrame) self.assertEqual(palette_frame.width, 16) self.assertEqual(palette_frame.height, 16)
31.522648
87
0.608378
from fractions import Fraction from unittest import SkipTest import errno import numpy as np from av import AudioFrame, VideoFrame from av.audio.frame import format_dtypes from av.filter import Filter, Graph import av from .common import Image, TestCase, fate_suite def generate_audio_frame( frame_num, input_format="s16", layout="stereo", sample_rate=44100, frame_size=1024 ): frame = AudioFrame(format=input_format, layout=layout, samples=frame_size) frame.sample_rate = sample_rate frame.pts = frame_num * frame_size for i in range(len(frame.layout.channels)): data = np.zeros(frame_size, dtype=format_dtypes[input_format]) for j in range(frame_size): data[j] = np.sin(2 * np.pi * (frame_num + j) * (i + 1) / float(frame_size)) frame.planes[i].update(data) return frame def pull_until_blocked(graph): frames = [] while True: try: frames.append(graph.pull()) except av.utils.AVError as e: if e.errno != errno.EAGAIN: raise return frames class TestFilters(TestCase): def test_filter_descriptor(self): f = Filter("testsrc") self.assertEqual(f.name, "testsrc") self.assertEqual(f.description, "Generate test pattern.") self.assertFalse(f.dynamic_inputs) self.assertEqual(len(f.inputs), 0) self.assertFalse(f.dynamic_outputs) self.assertEqual(len(f.outputs), 1) self.assertEqual(f.outputs[0].name, "default") self.assertEqual(f.outputs[0].type, "video") def test_dynamic_filter_descriptor(self): f = Filter("split") self.assertFalse(f.dynamic_inputs) self.assertEqual(len(f.inputs), 1) self.assertTrue(f.dynamic_outputs) self.assertEqual(len(f.outputs), 0) def test_generator_graph(self): graph = Graph() src = graph.add("testsrc") lutrgb = graph.add( "lutrgb", "r=maxval+minval-val:g=maxval+minval-val:b=maxval+minval-val", name="invert", ) sink = graph.add("buffersink") src.link_to(lutrgb) lutrgb.link_to(sink) self.assertIs(src.outputs[0].link.output, lutrgb.inputs[0]) self.assertIs(lutrgb.inputs[0].link.input, src.outputs[0]) frame = sink.pull() self.assertIsInstance(frame, VideoFrame) if 
Image: frame.to_image().save(self.sandboxed("mandelbrot2.png")) def test_auto_find_sink(self): graph = Graph() src = graph.add("testsrc") src.link_to(graph.add("buffersink")) graph.configure() frame = graph.pull() if Image: frame.to_image().save(self.sandboxed("mandelbrot3.png")) def test_delegate_sink(self): graph = Graph() src = graph.add("testsrc") src.link_to(graph.add("buffersink")) graph.configure() frame = src.pull() if Image: frame.to_image().save(self.sandboxed("mandelbrot4.png")) def test_haldclut_graph(self): raise SkipTest() graph = Graph() img = Image.open(fate_suite("png1/lena-rgb24.png")) frame = VideoFrame.from_image(img) img_source = graph.add_buffer(frame) hald_img = Image.open("hald_7.png") hald_frame = VideoFrame.from_image(hald_img) hald_source = graph.add_buffer(hald_frame) hald_filter = graph.add("haldclut") sink = graph.add("buffersink") img_source.link(0, hald_filter, 0) hald_source.link(0, hald_filter, 1) hald_filter.link(0, sink, 0) graph.config() self.assertIs(img_source.outputs[0].linked_to, hald_filter.inputs[0]) self.assertIs(hald_source.outputs[0].linked_to, hald_filter.inputs[1]) self.assertIs(hald_filter.outputs[0].linked_to, sink.inputs[0]) hald_source.push(hald_frame) img_source.push(frame) frame = sink.pull() self.assertIsInstance(frame, VideoFrame) frame.to_image().save(self.sandboxed("filtered.png")) def test_audio_buffer_sink(self): graph = Graph() audio_buffer = graph.add_abuffer( format="fltp", sample_rate=48000, layout="stereo", time_base=Fraction(1, 48000), ) audio_buffer.link_to(graph.add("abuffersink")) graph.configure() try: graph.pull() except OSError as e: if e.errno != errno.EAGAIN: raise @staticmethod def link_nodes(*nodes): for c, n in zip(nodes, nodes[1:]): c.link_to(n) def test_audio_buffer_resample(self): graph = Graph() self.link_nodes( graph.add_abuffer( format="fltp", sample_rate=48000, layout="stereo", time_base=Fraction(1, 48000), ), graph.add( "aformat", 
"sample_fmts=s16:sample_rates=44100:channel_layouts=stereo" ), graph.add("abuffersink"), ) graph.configure() graph.push( generate_audio_frame( 0, input_format="fltp", layout="stereo", sample_rate=48000 ) ) out_frame = graph.pull() self.assertEqual(out_frame.format.name, "s16") self.assertEqual(out_frame.layout.name, "stereo") self.assertEqual(out_frame.sample_rate, 44100) def test_audio_buffer_volume_filter(self): graph = Graph() self.link_nodes( graph.add_abuffer( format="fltp", sample_rate=48000, layout="stereo", time_base=Fraction(1, 48000), ), graph.add("volume", volume="0.5"), graph.add("abuffersink"), ) graph.configure() input_frame = generate_audio_frame( 0, input_format="fltp", layout="stereo", sample_rate=48000 ) graph.push(input_frame) out_frame = graph.pull() self.assertEqual(out_frame.format.name, "fltp") self.assertEqual(out_frame.layout.name, "stereo") self.assertEqual(out_frame.sample_rate, 48000) input_data = input_frame.to_ndarray() output_data = out_frame.to_ndarray() self.assertTrue( np.allclose(input_data * 0.5, output_data), "Check that volume is reduced" ) def test_video_buffer(self): input_container = av.open(format="lavfi", file="color=c=pink:duration=1:r=30") input_video_stream = input_container.streams.video[0] graph = av.filter.Graph() buffer = graph.add_buffer(template=input_video_stream) bwdif = graph.add("bwdif", "send_field:tff:all") buffersink = graph.add("buffersink") buffer.link_to(bwdif) bwdif.link_to(buffersink) graph.configure() for frame in input_container.decode(): self.assertEqual(frame.time_base, Fraction(1, 30)) graph.push(frame) filtered_frames = pull_until_blocked(graph) if frame.pts == 0: # no output for the first input frame self.assertEqual(len(filtered_frames), 0) else: # we expect two filtered frames per input frame self.assertEqual(len(filtered_frames), 2) self.assertEqual(filtered_frames[0].pts, (frame.pts - 1) * 2) self.assertEqual(filtered_frames[0].time_base, Fraction(1, 60)) 
self.assertEqual(filtered_frames[1].pts, (frame.pts - 1) * 2 + 1) self.assertEqual(filtered_frames[1].time_base, Fraction(1, 60)) def test_EOF(self): input_container = av.open(format="lavfi", file="color=c=pink:duration=1:r=30") video_stream = input_container.streams.video[0] graph = av.filter.Graph() video_in = graph.add_buffer(template=video_stream) palette_gen_filter = graph.add("palettegen") video_out = graph.add("buffersink") video_in.link_to(palette_gen_filter) palette_gen_filter.link_to(video_out) graph.configure() for frame in input_container.decode(video=0): graph.push(frame) graph.push(None) # if we do not push None, we get a BlockingIOError palette_frame = graph.pull() self.assertIsInstance(palette_frame, av.VideoFrame) self.assertEqual(palette_frame.width, 16) self.assertEqual(palette_frame.height, 16)
true
true
f73bf4f126cc8565a9bbb1503b05b3157f9bdb04
1,491
py
Python
extra_tests/ctypes_tests/test_numbers.py
nanjekyejoannah/pypy
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
[ "Apache-2.0", "OpenSSL" ]
333
2015-08-08T18:03:38.000Z
2022-03-22T18:13:12.000Z
extra_tests/ctypes_tests/test_numbers.py
nanjekyejoannah/pypy
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
[ "Apache-2.0", "OpenSSL" ]
7
2020-02-16T16:49:05.000Z
2021-11-26T09:00:56.000Z
extra_tests/ctypes_tests/test_numbers.py
nanjekyejoannah/pypy
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
[ "Apache-2.0", "OpenSSL" ]
55
2015-08-16T02:41:30.000Z
2022-03-20T20:33:35.000Z
import pytest from ctypes import * unsigned_types = [c_ubyte, c_ushort, c_uint, c_ulong] signed_types = [c_byte, c_short, c_int, c_long, c_longlong] float_types = [c_double, c_float, c_longdouble] try: c_ulonglong c_longlong except NameError: pass else: unsigned_types.append(c_ulonglong) signed_types.append(c_longlong) ################################################################ @pytest.mark.parametrize('t', signed_types + unsigned_types + float_types) def test_init_again(t): parm = t() addr1 = addressof(parm) parm.__init__(0) addr2 = addressof(parm) assert addr1 == addr2 def test_subclass(): class enum(c_int): def __new__(cls, value): dont_call_me class S(Structure): _fields_ = [('t', enum)] assert isinstance(S().t, enum) #@pytest.mark.xfail("'__pypy__' not in sys.builtin_module_names") @pytest.mark.xfail def test_no_missing_shape_to_ffi_type(): # whitebox test "re-enable after adding 'g' to _shape_to_ffi_type.typemap, " "which I think needs fighting all the way up from " "rpython.rlib.libffi" from _ctypes.basics import _shape_to_ffi_type from _rawffi import Array for i in range(1, 256): try: Array(chr(i)) except ValueError: pass else: assert chr(i) in _shape_to_ffi_type.typemap @pytest.mark.xfail def test_pointer_to_long_double(): import ctypes ctypes.POINTER(ctypes.c_longdouble)
26.157895
74
0.653253
import pytest from ctypes import * unsigned_types = [c_ubyte, c_ushort, c_uint, c_ulong] signed_types = [c_byte, c_short, c_int, c_long, c_longlong] float_types = [c_double, c_float, c_longdouble] try: c_ulonglong c_longlong except NameError: pass else: unsigned_types.append(c_ulonglong) signed_types.append(c_longlong)
true
true
f73bf56a0b43782ac33279b2ff8eddf191cc7b6c
4,515
py
Python
pyspedas/mms/particles/mms_part_getspec.py
shihikoo/pyspedas
5251c52d074c4f2c3d91f2f8a457d36f3ad2ddb4
[ "MIT" ]
null
null
null
pyspedas/mms/particles/mms_part_getspec.py
shihikoo/pyspedas
5251c52d074c4f2c3d91f2f8a457d36f3ad2ddb4
[ "MIT" ]
null
null
null
pyspedas/mms/particles/mms_part_getspec.py
shihikoo/pyspedas
5251c52d074c4f2c3d91f2f8a457d36f3ad2ddb4
[ "MIT" ]
null
null
null
import logging from time import time import pyspedas from pyspedas import time_double from pyspedas.mms.particles.mms_part_products import mms_part_products logging.captureWarnings(True) logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO) def mms_part_getspec(instrument='fpi', probe='1', species='e', data_rate='fast', trange=None, output=['energy', 'theta', 'phi'], units='eflux', energy=None, phi=None, theta=None, pitch=None, gyro=None, mag_data_rate=None, fac_type='mphigeo', center_measurement=False, spdf=False, correct_photoelectrons=False, internal_photoelectron_corrections=False, disable_photoelectron_corrections=False): """ """ start_time = time() if trange is None: # test data for development trange = ['2015-10-16/13:06', '2015-10-16/13:07'] # data_rate = 'brst' if mag_data_rate is None: if data_rate == 'brst': mag_data_rate = 'brst' scpot_data_rate = 'brst' else: mag_data_rate = 'srvy' scpot_data_rate = 'fast' instrument = instrument.lower() # HPCA is required to be at the center of the accumulation interval # due to assumptions made in mms_get_hpca_dist if instrument == 'hpca' and center_measurement == False: center_measurement = True if instrument == 'fpi': data_vars = pyspedas.mms.fpi(datatype='d'+species+'s-dist', probe=probe, data_rate=data_rate, trange=trange, time_clip=True, center_measurement=center_measurement, spdf=spdf) elif instrument == 'hpca': # for HPCA, 'fast' should be 'srvy' if data_rate == 'fast': data_rate = 'srvy' # 'i' and 'e' are only valid for FPI if species in ['i', 'e']: species = 'hplus' data_vars = pyspedas.mms.hpca(datatype='ion', probe=probe, data_rate=data_rate, trange=trange, time_clip=True, center_measurement=center_measurement, get_support_data=True, spdf=spdf) else: logging.error('Error, unknown instrument: ' + instrument + '; valid options: fpi, hpca') return if data_vars is None or len(data_vars) == 0: logging.error('Error, no data loaded.') return None if not 
isinstance(probe, list): probe = [probe] if instrument == 'fpi' and species == 'e' and not disable_photoelectron_corrections: correct_photoelectrons = True support_trange = [time_double(trange[0])-60.0, time_double(trange[1])+60.0] # load state data (needed for coordinate transformations and field-aligned coordinates) pos_vars = pyspedas.mms.mec(probe=probe, trange=support_trange, time_clip=True, spdf=spdf) if len(pos_vars) == 0: logging.error('Error, no state data loaded.') return mag_vars = pyspedas.mms.fgm(probe=probe, trange=support_trange, data_rate=mag_data_rate, time_clip=True, spdf=spdf) if len(mag_vars) == 0: logging.error('Error, no magnetic field data loaded.') return scpot_vars = pyspedas.mms.edp(probe=probe, trange=support_trange, level='l2', spdf=spdf, data_rate=scpot_data_rate, datatype='scpot', varformat='*_edp_scpot_*') out_vars = [] for prb in probe: prb_str = str(prb) mag_name = 'mms'+prb_str+'_fgm_b_gse_'+mag_data_rate+'_l2_bvec' pos_name = 'mms'+prb_str+'_mec_r_gse' if instrument == 'fpi': tname = 'mms'+prb_str+'_d'+species+'s_dist_'+data_rate elif instrument == 'hpca': tname = 'mms'+prb_str+'_hpca_'+species+'_phase_space_density' scpot_variable = 'mms'+prb_str+'_edp_scpot_'+scpot_data_rate+'_l2' new_vars = mms_part_products(tname, species=species, instrument=instrument, probe=prb, data_rate=data_rate, output=output, units=units, energy=energy, phi=phi, theta=theta, pitch=pitch, gyro=gyro, mag_name=mag_name, pos_name=pos_name, fac_type=fac_type, sc_pot_name=scpot_variable, correct_photoelectrons=correct_photoelectrons, internal_photoelectron_corrections=internal_photoelectron_corrections, disable_photoelectron_corrections=disable_photoelectron_corrections) if new_vars is None: continue out_vars = out_vars + new_vars logging.info('Finished; time to run: ' + str(round(time()-start_time, 1)) + ' seconds.') return out_vars
39.955752
191
0.659579
import logging from time import time import pyspedas from pyspedas import time_double from pyspedas.mms.particles.mms_part_products import mms_part_products logging.captureWarnings(True) logging.basicConfig(format='%(asctime)s: %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO) def mms_part_getspec(instrument='fpi', probe='1', species='e', data_rate='fast', trange=None, output=['energy', 'theta', 'phi'], units='eflux', energy=None, phi=None, theta=None, pitch=None, gyro=None, mag_data_rate=None, fac_type='mphigeo', center_measurement=False, spdf=False, correct_photoelectrons=False, internal_photoelectron_corrections=False, disable_photoelectron_corrections=False): start_time = time() if trange is None: trange = ['2015-10-16/13:06', '2015-10-16/13:07'] if mag_data_rate is None: if data_rate == 'brst': mag_data_rate = 'brst' scpot_data_rate = 'brst' else: mag_data_rate = 'srvy' scpot_data_rate = 'fast' instrument = instrument.lower() if instrument == 'hpca' and center_measurement == False: center_measurement = True if instrument == 'fpi': data_vars = pyspedas.mms.fpi(datatype='d'+species+'s-dist', probe=probe, data_rate=data_rate, trange=trange, time_clip=True, center_measurement=center_measurement, spdf=spdf) elif instrument == 'hpca': if data_rate == 'fast': data_rate = 'srvy' if species in ['i', 'e']: species = 'hplus' data_vars = pyspedas.mms.hpca(datatype='ion', probe=probe, data_rate=data_rate, trange=trange, time_clip=True, center_measurement=center_measurement, get_support_data=True, spdf=spdf) else: logging.error('Error, unknown instrument: ' + instrument + '; valid options: fpi, hpca') return if data_vars is None or len(data_vars) == 0: logging.error('Error, no data loaded.') return None if not isinstance(probe, list): probe = [probe] if instrument == 'fpi' and species == 'e' and not disable_photoelectron_corrections: correct_photoelectrons = True support_trange = [time_double(trange[0])-60.0, time_double(trange[1])+60.0] pos_vars = 
pyspedas.mms.mec(probe=probe, trange=support_trange, time_clip=True, spdf=spdf) if len(pos_vars) == 0: logging.error('Error, no state data loaded.') return mag_vars = pyspedas.mms.fgm(probe=probe, trange=support_trange, data_rate=mag_data_rate, time_clip=True, spdf=spdf) if len(mag_vars) == 0: logging.error('Error, no magnetic field data loaded.') return scpot_vars = pyspedas.mms.edp(probe=probe, trange=support_trange, level='l2', spdf=spdf, data_rate=scpot_data_rate, datatype='scpot', varformat='*_edp_scpot_*') out_vars = [] for prb in probe: prb_str = str(prb) mag_name = 'mms'+prb_str+'_fgm_b_gse_'+mag_data_rate+'_l2_bvec' pos_name = 'mms'+prb_str+'_mec_r_gse' if instrument == 'fpi': tname = 'mms'+prb_str+'_d'+species+'s_dist_'+data_rate elif instrument == 'hpca': tname = 'mms'+prb_str+'_hpca_'+species+'_phase_space_density' scpot_variable = 'mms'+prb_str+'_edp_scpot_'+scpot_data_rate+'_l2' new_vars = mms_part_products(tname, species=species, instrument=instrument, probe=prb, data_rate=data_rate, output=output, units=units, energy=energy, phi=phi, theta=theta, pitch=pitch, gyro=gyro, mag_name=mag_name, pos_name=pos_name, fac_type=fac_type, sc_pot_name=scpot_variable, correct_photoelectrons=correct_photoelectrons, internal_photoelectron_corrections=internal_photoelectron_corrections, disable_photoelectron_corrections=disable_photoelectron_corrections) if new_vars is None: continue out_vars = out_vars + new_vars logging.info('Finished; time to run: ' + str(round(time()-start_time, 1)) + ' seconds.') return out_vars
true
true
f73bf5fb08198debdf711452852fef0c042753b9
36,670
py
Python
auth-api/tests/unit/services/test_user.py
thorwolpert/sbc-auth
5da50cde2e5625d1b0ceea090c3656ee374c9b71
[ "Apache-2.0" ]
11
2019-09-26T06:58:25.000Z
2022-01-26T06:19:39.000Z
auth-api/tests/unit/services/test_user.py
thorwolpert/sbc-auth
5da50cde2e5625d1b0ceea090c3656ee374c9b71
[ "Apache-2.0" ]
1,622
2019-05-07T21:08:38.000Z
2022-03-28T17:07:15.000Z
auth-api/tests/unit/services/test_user.py
thorwolpert/sbc-auth
5da50cde2e5625d1b0ceea090c3656ee374c9b71
[ "Apache-2.0" ]
98
2019-03-01T21:36:15.000Z
2021-12-01T22:11:25.000Z
# Copyright © 2019 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests to verify the User Service. Test-Suite to ensure that the User Service is working as expected. """ import json from unittest.mock import patch import pytest from werkzeug.exceptions import HTTPException from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import Affiliation as AffiliationModel from auth_api.models import ContactLink as ContactLinkModel from auth_api.models import Membership as MembershipModel from auth_api.models import User as UserModel from auth_api.services import Org as OrgService from auth_api.services import User as UserService from auth_api.services.keycloak import KeycloakService from auth_api.services.keycloak_user import KeycloakUser from auth_api.utils.enums import IdpHint, ProductCode, Status from auth_api.utils.roles import ADMIN, COORDINATOR, USER, Role from tests.utilities.factory_scenarios import ( KeycloakScenario, TestAnonymousMembership, TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo, TestUserInfo) from tests.utilities.factory_utils import ( factory_contact_model, factory_entity_model, factory_membership_model, factory_org_model, factory_product_model, factory_user_model, get_tos_latest_version, patch_token_info) def test_as_dict(session): # pylint: disable=unused-argument """Assert that a user is rendered correctly as a dictionary.""" user_model = 
factory_user_model() user = UserService(user_model) dictionary = user.as_dict() assert dictionary['username'] == TestUserInfo.user1['username'] def test_user_save_by_token(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user can be created by token.""" patch_token_info(TestJwtClaims.user_test, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestJwtClaims.user_test['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.user_test['sub'] def test_bcros_user_save_by_token(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user can be created by token.""" patch_token_info(TestJwtClaims.anonymous_bcros_role, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestJwtClaims.anonymous_bcros_role['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.anonymous_bcros_role['sub'] def test_bcros_user_update_by_token(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user can be created by token.""" user_model = factory_user_model(TestUserInfo.user_bcros) user = UserService(user_model) dictionary = user.as_dict() assert dictionary.get('keycloak_guid', None) is None patch_token_info(TestJwtClaims.anonymous_bcros_role, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestJwtClaims.anonymous_bcros_role['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.anonymous_bcros_role['sub'] def test_user_save_by_token_no_token(session): # pylint: disable=unused-argument """Assert that a user cannot be created from an empty token.""" user = UserService.save_from_jwt_token() assert user is None def test_create_user_and_add_membership_owner_skip_auth_mode(session, auth_mock, keycloak_mock): # 
pylint:disable=unused-argument """Assert that an owner can be added as anonymous.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) # only one member should be there since its a STAFF created org assert len(members) == 1 assert members[0].membership_type_code == ADMIN def test_reset_password(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that the password can be changed.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) patch_token_info(claims, monkeypatch) membership = [TestAnonymousMembership.generate_random_user(USER)] users = UserService.create_user_and_add_membership(membership, org.id) user_name = users['users'][0]['username'] user_info = {'username': user_name, 'password': 'password'} kc_user = UserService.reset_password_for_anon_user(user_info, user_name) # cant assert anything else since password wont be gotten back assert kc_user.user_name == user_name.replace(f'{IdpHint.BCROS.value}/', '').lower() def test_reset_password_by_member(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that the password cant be changed by member.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) admin_claims = 
TestJwtClaims.get_test_real_user(user.keycloak_guid) membership = [TestAnonymousMembership.generate_random_user(USER)] patch_token_info(admin_claims, monkeypatch) users = UserService.create_user_and_add_membership(membership, org.id) user_name = users['users'][0]['username'] user_info = {'username': user_name, 'password': 'password'} with pytest.raises(HTTPException) as excinfo: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.reset_password_for_anon_user(user_info, user_name) assert excinfo.exception.code == 403 def test_delete_otp_for_user(session, auth_mock, keycloak_mock, monkeypatch): """Assert that the otp cant be reset.""" kc_service = KeycloakService() org = factory_org_model(org_info=TestOrgInfo.org_anonymous) admin_user = factory_user_model() factory_membership_model(admin_user.id, org.id) admin_claims = TestJwtClaims.get_test_real_user(admin_user.keycloak_guid) membership = [TestAnonymousMembership.generate_random_user(USER)] keycloak_service = KeycloakService() request = KeycloakScenario.create_user_request() request.user_name = membership[0]['username'] keycloak_service.add_user(request) user = kc_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_bceid_user_with_kc_guid(user.id)) factory_membership_model(user.id, org.id) patch_token_info(admin_claims, monkeypatch) UserService.delete_otp_for_user(user.username) user1 = kc_service.get_user_by_username(request.user_name) assert 'CONFIGURE_TOTP' in json.loads(user1.value()).get('requiredActions') def test_create_user_and_add_same_user_name_error_in_kc(session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert that same user name cannot be added twice.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] keycloak_service = KeycloakService() request = KeycloakScenario.create_user_request() request.user_name = membership[0]['username'] 
keycloak_service.add_user(request) users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' def test_create_user_and_add_same_user_name_error_in_db(session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert that same user name cannot be added twice.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model(TestUserInfo.user_bcros) factory_membership_model(user.id, org.id) new_members = TestAnonymousMembership.generate_random_user(ADMIN) new_members['username'] = user.username.replace(f'{IdpHint.BCROS.value}/', '') membership = [new_members] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' def test_create_user_and_add_transaction_membership(session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert transactions works fine.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] with patch('auth_api.models.Membership.flush', side_effect=Exception('mocked error')): users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] assert len(users['users']) == 1 assert users['users'][0]['username'] == membership[0]['username'] assert users['users'][0]['http_status'] == 500 assert users['users'][0]['error'] == 'Adding User Failed' # make sure no records are created user = UserModel.find_by_username(user_name) assert user is None user = UserModel.find_by_username(membership[0]['username']) assert user is None members = MembershipModel.find_members_by_org_id(org.id) # only one member should be there since its a STAFF created org assert 
len(members) == 0 def test_create_user_and_add_transaction_membership_1(session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert transactions works fine.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] with patch('auth_api.models.User.flush', side_effect=Exception('mocked error')): users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] assert len(users['users']) == 1 assert users['users'][0]['username'] == membership[0]['username'] assert users['users'][0]['http_status'] == 500 assert users['users'][0]['error'] == 'Adding User Failed' # make sure no records are created user = UserModel.find_by_username(user_name) assert user is None user = UserModel.find_by_username(membership[0]['username']) assert user is None members = MembershipModel.find_members_by_org_id(org.id) # only one member should be there since its a STAFF created org assert len(members) == 0 def test_create_user_and_add_membership_admin_skip_auth_mode(session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert that an admin can be added as anonymous.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(COORDINATOR)] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) # only one member should be there since its a STAFF created org assert len(members) == 1 assert members[0].membership_type_code == COORDINATOR def test_create_user_and_add_membership_admin_bulk_mode(session, auth_mock, keycloak_mock, monkeypatch): # 
pylint:disable=unused-argument """Assert that an admin can add a member.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) patch_token_info(claims, monkeypatch) membership = [TestAnonymousMembership.generate_random_user(USER)] users = UserService.create_user_and_add_membership(membership, org.id) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) # staff didnt create members..so count is count of owner+other 1 member assert len(members) == 2 def test_create_user_add_membership_reenable(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that an admin can add a member.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) patch_token_info(claims, monkeypatch) anon_member = TestAnonymousMembership.generate_random_user(USER) membership = [anon_member] users = UserService.create_user_and_add_membership(membership, org.id) user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] assert len(users['users']) == 1 assert users['users'][0]['username'] == user_name assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) # staff didnt create members..so count is count of owner+other 1 member assert len(members) == 2 # assert cant be readded users = UserService.create_user_and_add_membership(membership, org.id) assert users['users'][0]['http_status'] == 
409 assert users['users'][0]['error'] == 'The username is already taken' # deactivate everything and try again anon_user = UserModel.find_by_username(user_name) anon_user.status = Status.INACTIVE.value anon_user.save() membership_model = MembershipModel.find_membership_by_userid(anon_user.id) membership_model.status = Status.INACTIVE.value update_user_request = KeycloakUser() update_user_request.user_name = membership[0]['username'] update_user_request.enabled = False KeycloakService.update_user(update_user_request) org2 = factory_org_model(org_info=TestOrgInfo.org_anonymous_2, org_type_info={'code': 'BASIC'}) factory_membership_model(user.id, org2.id) factory_product_model(org2.id, product_code=ProductCode.DIR_SEARCH.value) users = UserService.create_user_and_add_membership(membership, org2.id) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' # add to same org.Should work users = UserService.create_user_and_add_membership(membership, org.id) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name def test_create_user_and_add_membership_admin_bulk_mode_unauthorised(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that bulk operation cannot be performed by unauthorised users.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) membership = [TestAnonymousMembership.generate_random_user(USER)] with pytest.raises(HTTPException) as excinfo: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.create_user_and_add_membership(membership, org.id) assert excinfo.value.code == 403 def test_create_user_and_add_membership_admin_bulk_mode_multiple(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that 
an admin can add a group of members.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) membership = [TestAnonymousMembership.generate_random_user(USER), TestAnonymousMembership.generate_random_user(COORDINATOR)] patch_token_info(claims, monkeypatch) users = UserService.create_user_and_add_membership(membership, org.id) assert len(users['users']) == 2 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name assert users['users'][1]['username'] == IdpHint.BCROS.value + '/' + membership[1]['username'] assert users['users'][1]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) # staff didnt create members..so count is count of owner+other 2 members assert len(members) == 3 def test_create_user_and_add_membership_member_error_skip_auth_mode(session, auth_mock, keycloak_mock): # pylint:disable=unused-argument """Assert that an member cannot be added as anonymous in single_mode mode.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(USER)] with pytest.raises(BusinessException) as exception: UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name def test_create_user_and_add_membership_multiple_error_skip_auth_mode(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that multiple user cannot be created in single_mode mode.""" org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(USER), TestAnonymousMembership.generate_random_user(COORDINATOR)] with 
pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name def test_user_save_by_token_fail(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user cannot not be created.""" with patch.object(UserModel, 'create_from_jwt_token', return_value=None): patch_token_info(TestJwtClaims.user_test, monkeypatch) user = UserService.save_from_jwt_token() assert user is None def test_add_contact_to_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact can be added to a user.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() assert contact['email'] == TestContactInfo.contact1['email'] assert contact['phone'] == TestContactInfo.contact1['phone'] assert contact['phone_extension'] == TestContactInfo.contact1['phoneExtension'] def test_add_contact_user_no_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact cannot be added to a user that does not exist.""" with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.add_contact(TestContactInfo.contact1) assert exception.value.code == Error.DATA_NOT_FOUND.name def test_add_contact_to_user_already_exists(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact cannot be added to a user that already has a contact.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) 
UserService.add_contact(TestContactInfo.contact1) with pytest.raises(BusinessException) as exception: UserService.add_contact(TestContactInfo.contact2) assert exception.value.code == Error.DATA_ALREADY_EXISTS.name def test_update_contact_for_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact can be updated for a user.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() assert contact is not None updated_contact = UserService.update_contact(TestContactInfo.contact2).as_dict() assert updated_contact is not None assert updated_contact['email'] == TestContactInfo.contact2['email'] def test_update_terms_of_use_for_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a terms of use can be updated for a user.""" patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.save_from_jwt_token() updated_user = UserService.update_terms_of_use(True, 1) dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True def test_terms_of_service_prev_version(session, monkeypatch): # pylint: disable=unused-argument """Assert that a terms of use can be updated for a user.""" patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.save_from_jwt_token() # update TOS with old version updated_user = UserService.update_terms_of_use(True, 1) dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True # accepted version from previous step was old.so comparison should return false updated_user = UserService.save_from_jwt_token() dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is False # update TOS with latest version updated_user = UserService.update_terms_of_use(True, 
get_tos_latest_version()) dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True # accepted version from previous step is latest.so comparison should return true updated_user = UserService.save_from_jwt_token() dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True def test_update_contact_for_user_no_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact cannot be updated for a user that does not exist.""" with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.update_contact(TestContactInfo.contact2) assert exception.value.code == Error.DATA_NOT_FOUND.name def test_update_contact_for_user_no_contact(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact cannot be updated for a user with no contact.""" factory_user_model(user_info=TestUserInfo.user_test) with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.update_contact(TestContactInfo.contact2) assert exception.value.code == Error.DATA_NOT_FOUND.name def test_delete_contact_for_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that a contact can be deleted for a user.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() assert contact is not None deleted_contact = UserService.delete_contact().as_dict() assert deleted_contact is not None contacts = UserService.get_contacts() assert contacts.get('contacts') == [] def test_delete_contact_for_user_no_user(session, monkeypatch): # pylint: disable=unused-argument """Assert that deleting a contact for a non-existent user raises the right exception.""" 
with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.delete_contact() assert exception.value.code == Error.DATA_NOT_FOUND.name def test_delete_contact_for_user_no_contact(session, monkeypatch): # pylint: disable=unused-argument """Assert that deleting a contact for a user with no contact raises the right exception.""" factory_user_model(user_info=TestUserInfo.user_test) with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.delete_contact() assert exception.value.code == Error.DATA_NOT_FOUND.name def test_find_users(session): # pylint: disable=unused-argument """Assert that a list of users can be retrieved and searched on.""" factory_user_model() factory_user_model(user_info=TestUserInfo.user2) users = UserService.find_users(last_name='User') assert users is not None assert len(users) == 2 def test_user_find_by_token(session, monkeypatch): # pylint: disable=unused-argument """Assert that a user can be found by token.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) found_user = UserService.find_by_jwt_token() assert found_user is None # User accepted older version terms and conditions should return False patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.update_terms_of_use(True, 1) found_user = UserService.find_by_jwt_token() assert found_user is not None dictionary = found_user.as_dict() assert dictionary['username'] == TestJwtClaims.user_test['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.user_test['sub'] assert dictionary['user_terms']['isTermsOfUseAccepted'] is False # User accepted latest version terms and conditions should return True UserService.update_terms_of_use(True, get_tos_latest_version()) found_user = UserService.find_by_jwt_token() dictionary = found_user.as_dict() assert 
dictionary['user_terms']['isTermsOfUseAccepted'] is True def test_user_find_by_username(session): # pylint: disable=unused-argument """Assert that a user can be found by username.""" user_model = factory_user_model() user = UserService(user_model) user = UserService.find_by_username(None) assert user is None user = UserService.find_by_username(TestUserInfo.user1['username']) assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestUserInfo.user1['username'] def test_user_find_by_username_no_model_object(session): # pylint: disable=unused-argument """Assert that the business can't be found with no model.""" username = TestUserInfo.user_test['username'] user = UserService.find_by_username(username) assert user is None def test_user_find_by_username_missing_username(session): # pylint: disable=unused-argument """Assert that the business can't be found by incorrect username.""" user_model = factory_user_model(user_info=TestUserInfo.user_test) user = UserService(user_model) user = UserService.find_by_username('foo') assert user is None def test_delete_contact_user_link(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a contact can not be deleted if contact link exists.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] user_model = factory_user_model(user_info=user_with_token) user = UserService(user_model) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.identifier) org_dictionary = org.as_dict() org_id = org_dictionary['id'] contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.org = org._model # pylint:disable=protected-access contact_link = contact_link.flush() contact_link.commit() deleted_contact = UserService.delete_contact() assert deleted_contact is None 
delete_contact_link = ContactLinkModel.find_by_user_id(user.identifier) assert not delete_contact_link exist_contact_link = ContactLinkModel.find_by_org_id(org_id) assert exist_contact_link def test_delete_user(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a user can be deleted.""" user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] user_model = factory_user_model(user_info=user_with_token) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.commit() patch_token_info(TestJwtClaims.user_test, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() assert len(updated_user.contacts) == 0 user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'INACTIVE' def test_delete_user_where_org_has_affiliations(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a user can be deleted.""" user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link = contact_link.flush() contact_link.commit() patch_token_info(TestJwtClaims.user_test, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id).as_dict() org_id = org['id'] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id) affiliation.save() with pytest.raises(BusinessException) as exception: UserService.delete_user() assert exception.code == Error.DELETE_FAILED_ONLY_OWNER updated_user = UserModel.find_by_jwt_token() contacts = UserService.get_contacts() assert len(contacts) == 1 
user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'ACTIVE' def test_delete_user_where_user_is_member_on_org(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a user can be deleted.""" # Create a user and org user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.commit() patch_token_info(TestJwtClaims.get_test_user(user_model.keycloak_guid), monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() org_id = org_dictionary['id'] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id) affiliation.save() # Create another user and add membership to the above org user_model2 = factory_user_model(user_info=TestUserInfo.user2) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model2 contact_link.commit() membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='USER', membership_type_status=Status.ACTIVE.value) membership.save() patch_token_info(TestJwtClaims.get_test_user(user_model2.keycloak_guid), monkeypatch) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() assert len(updated_user.contacts) == 0 user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'INACTIVE' def test_delete_user_where_org_has_another_owner(session, auth_mock, keycloak_mock, monkeypatch): # pylint:disable=unused-argument """Assert that a user can be deleted.""" # Create a user and org user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() 
contact_link.contact = contact contact_link.user = user_model contact_link.commit() patch_token_info(TestJwtClaims.get_test_user(user_model.keycloak_guid), monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() org_id = org_dictionary['id'] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id) affiliation.save() # Create another user and add membership to the above org user_model2 = factory_user_model(user_info=TestUserInfo.user2) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model2 contact_link.commit() membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='ADMIN', membership_type_status=Status.ACTIVE.value) membership.save() membership.commit() # with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.get_test_user(user_model2.keycloak_guid), monkeypatch) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() assert len(updated_user.contacts) == 0 user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'INACTIVE'
45.552795
118
0.74066
import json from unittest.mock import patch import pytest from werkzeug.exceptions import HTTPException from auth_api.exceptions import BusinessException from auth_api.exceptions.errors import Error from auth_api.models import Affiliation as AffiliationModel from auth_api.models import ContactLink as ContactLinkModel from auth_api.models import Membership as MembershipModel from auth_api.models import User as UserModel from auth_api.services import Org as OrgService from auth_api.services import User as UserService from auth_api.services.keycloak import KeycloakService from auth_api.services.keycloak_user import KeycloakUser from auth_api.utils.enums import IdpHint, ProductCode, Status from auth_api.utils.roles import ADMIN, COORDINATOR, USER, Role from tests.utilities.factory_scenarios import ( KeycloakScenario, TestAnonymousMembership, TestContactInfo, TestEntityInfo, TestJwtClaims, TestOrgInfo, TestUserInfo) from tests.utilities.factory_utils import ( factory_contact_model, factory_entity_model, factory_membership_model, factory_org_model, factory_product_model, factory_user_model, get_tos_latest_version, patch_token_info) def test_as_dict(session): user_model = factory_user_model() user = UserService(user_model) dictionary = user.as_dict() assert dictionary['username'] == TestUserInfo.user1['username'] def test_user_save_by_token(session, monkeypatch): patch_token_info(TestJwtClaims.user_test, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestJwtClaims.user_test['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.user_test['sub'] def test_bcros_user_save_by_token(session, monkeypatch): patch_token_info(TestJwtClaims.anonymous_bcros_role, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestJwtClaims.anonymous_bcros_role['preferred_username'] assert 
dictionary['keycloak_guid'] == TestJwtClaims.anonymous_bcros_role['sub'] def test_bcros_user_update_by_token(session, monkeypatch): user_model = factory_user_model(TestUserInfo.user_bcros) user = UserService(user_model) dictionary = user.as_dict() assert dictionary.get('keycloak_guid', None) is None patch_token_info(TestJwtClaims.anonymous_bcros_role, monkeypatch) user = UserService.save_from_jwt_token() assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestJwtClaims.anonymous_bcros_role['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.anonymous_bcros_role['sub'] def test_user_save_by_token_no_token(session): user = UserService.save_from_jwt_token() assert user is None def test_create_user_and_add_membership_owner_skip_auth_mode(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 1 assert members[0].membership_type_code == ADMIN def test_reset_password(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) patch_token_info(claims, monkeypatch) membership = [TestAnonymousMembership.generate_random_user(USER)] users = UserService.create_user_and_add_membership(membership, org.id) user_name = users['users'][0]['username'] user_info = {'username': user_name, 'password': 'password'} kc_user = 
UserService.reset_password_for_anon_user(user_info, user_name) assert kc_user.user_name == user_name.replace(f'{IdpHint.BCROS.value}/', '').lower() def test_reset_password_by_member(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) admin_claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) membership = [TestAnonymousMembership.generate_random_user(USER)] patch_token_info(admin_claims, monkeypatch) users = UserService.create_user_and_add_membership(membership, org.id) user_name = users['users'][0]['username'] user_info = {'username': user_name, 'password': 'password'} with pytest.raises(HTTPException) as excinfo: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.reset_password_for_anon_user(user_info, user_name) assert excinfo.exception.code == 403 def test_delete_otp_for_user(session, auth_mock, keycloak_mock, monkeypatch): kc_service = KeycloakService() org = factory_org_model(org_info=TestOrgInfo.org_anonymous) admin_user = factory_user_model() factory_membership_model(admin_user.id, org.id) admin_claims = TestJwtClaims.get_test_real_user(admin_user.keycloak_guid) membership = [TestAnonymousMembership.generate_random_user(USER)] keycloak_service = KeycloakService() request = KeycloakScenario.create_user_request() request.user_name = membership[0]['username'] keycloak_service.add_user(request) user = kc_service.get_user_by_username(request.user_name) user = factory_user_model(TestUserInfo.get_bceid_user_with_kc_guid(user.id)) factory_membership_model(user.id, org.id) patch_token_info(admin_claims, monkeypatch) UserService.delete_otp_for_user(user.username) user1 = kc_service.get_user_by_username(request.user_name) assert 'CONFIGURE_TOTP' in json.loads(user1.value()).get('requiredActions') def 
test_create_user_and_add_same_user_name_error_in_kc(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] keycloak_service = KeycloakService() request = KeycloakScenario.create_user_request() request.user_name = membership[0]['username'] keycloak_service.add_user(request) users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' def test_create_user_and_add_same_user_name_error_in_db(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model(TestUserInfo.user_bcros) factory_membership_model(user.id, org.id) new_members = TestAnonymousMembership.generate_random_user(ADMIN) new_members['username'] = user.username.replace(f'{IdpHint.BCROS.value}/', '') membership = [new_members] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' def test_create_user_and_add_transaction_membership(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] with patch('auth_api.models.Membership.flush', side_effect=Exception('mocked error')): users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] assert len(users['users']) == 1 assert users['users'][0]['username'] == membership[0]['username'] assert users['users'][0]['http_status'] == 500 assert users['users'][0]['error'] == 'Adding User Failed' user = UserModel.find_by_username(user_name) assert user is None user = UserModel.find_by_username(membership[0]['username']) 
assert user is None members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 0 def test_create_user_and_add_transaction_membership_1(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(ADMIN)] with patch('auth_api.models.User.flush', side_effect=Exception('mocked error')): users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] assert len(users['users']) == 1 assert users['users'][0]['username'] == membership[0]['username'] assert users['users'][0]['http_status'] == 500 assert users['users'][0]['error'] == 'Adding User Failed' user = UserModel.find_by_username(user_name) assert user is None user = UserModel.find_by_username(membership[0]['username']) assert user is None members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 0 def test_create_user_and_add_membership_admin_skip_auth_mode(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(COORDINATOR)] users = UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 1 assert members[0].membership_type_code == COORDINATOR def test_create_user_and_add_membership_admin_bulk_mode(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) 
patch_token_info(claims, monkeypatch) membership = [TestAnonymousMembership.generate_random_user(USER)] users = UserService.create_user_and_add_membership(membership, org.id) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 2 def test_create_user_add_membership_reenable(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) patch_token_info(claims, monkeypatch) anon_member = TestAnonymousMembership.generate_random_user(USER) membership = [anon_member] users = UserService.create_user_and_add_membership(membership, org.id) user_name = IdpHint.BCROS.value + '/' + membership[0]['username'] assert len(users['users']) == 1 assert users['users'][0]['username'] == user_name assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 2 users = UserService.create_user_and_add_membership(membership, org.id) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' anon_user = UserModel.find_by_username(user_name) anon_user.status = Status.INACTIVE.value anon_user.save() membership_model = MembershipModel.find_membership_by_userid(anon_user.id) membership_model.status = Status.INACTIVE.value update_user_request = KeycloakUser() update_user_request.user_name = membership[0]['username'] update_user_request.enabled = False KeycloakService.update_user(update_user_request) org2 = factory_org_model(org_info=TestOrgInfo.org_anonymous_2, org_type_info={'code': 'BASIC'}) 
factory_membership_model(user.id, org2.id) factory_product_model(org2.id, product_code=ProductCode.DIR_SEARCH.value) users = UserService.create_user_and_add_membership(membership, org2.id) assert users['users'][0]['http_status'] == 409 assert users['users'][0]['error'] == 'The username is already taken' users = UserService.create_user_and_add_membership(membership, org.id) assert len(users['users']) == 1 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name def test_create_user_and_add_membership_admin_bulk_mode_unauthorised(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) membership = [TestAnonymousMembership.generate_random_user(USER)] with pytest.raises(HTTPException) as excinfo: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.create_user_and_add_membership(membership, org.id) assert excinfo.value.code == 403 def test_create_user_and_add_membership_admin_bulk_mode_multiple(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) user = factory_user_model() factory_membership_model(user.id, org.id) factory_product_model(org.id, product_code=ProductCode.DIR_SEARCH.value) claims = TestJwtClaims.get_test_real_user(user.keycloak_guid) membership = [TestAnonymousMembership.generate_random_user(USER), TestAnonymousMembership.generate_random_user(COORDINATOR)] patch_token_info(claims, monkeypatch) users = UserService.create_user_and_add_membership(membership, org.id) assert len(users['users']) == 2 assert users['users'][0]['username'] == IdpHint.BCROS.value + '/' + membership[0]['username'] assert users['users'][0]['type'] == Role.ANONYMOUS_USER.name assert users['users'][1]['username'] == IdpHint.BCROS.value + '/' + membership[1]['username'] assert 
users['users'][1]['type'] == Role.ANONYMOUS_USER.name members = MembershipModel.find_members_by_org_id(org.id) assert len(members) == 3 def test_create_user_and_add_membership_member_error_skip_auth_mode(session, auth_mock, keycloak_mock): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(USER)] with pytest.raises(BusinessException) as exception: UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name def test_create_user_and_add_membership_multiple_error_skip_auth_mode(session, auth_mock, keycloak_mock, monkeypatch): org = factory_org_model(org_info=TestOrgInfo.org_anonymous) membership = [TestAnonymousMembership.generate_random_user(USER), TestAnonymousMembership.generate_random_user(COORDINATOR)] with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.public_user_role, monkeypatch) UserService.create_user_and_add_membership(membership, org.id, single_mode=True) assert exception.value.code == Error.INVALID_USER_CREDENTIALS.name def test_user_save_by_token_fail(session, monkeypatch): with patch.object(UserModel, 'create_from_jwt_token', return_value=None): patch_token_info(TestJwtClaims.user_test, monkeypatch) user = UserService.save_from_jwt_token() assert user is None def test_add_contact_to_user(session, monkeypatch): user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() assert contact['email'] == TestContactInfo.contact1['email'] assert contact['phone'] == TestContactInfo.contact1['phone'] assert contact['phone_extension'] == TestContactInfo.contact1['phoneExtension'] def test_add_contact_user_no_user(session, monkeypatch): with 
pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.add_contact(TestContactInfo.contact1) assert exception.value.code == Error.DATA_NOT_FOUND.name def test_add_contact_to_user_already_exists(session, monkeypatch): user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.add_contact(TestContactInfo.contact1) with pytest.raises(BusinessException) as exception: UserService.add_contact(TestContactInfo.contact2) assert exception.value.code == Error.DATA_ALREADY_EXISTS.name def test_update_contact_for_user(session, monkeypatch): user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() assert contact is not None updated_contact = UserService.update_contact(TestContactInfo.contact2).as_dict() assert updated_contact is not None assert updated_contact['email'] == TestContactInfo.contact2['email'] def test_update_terms_of_use_for_user(session, monkeypatch): patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.save_from_jwt_token() updated_user = UserService.update_terms_of_use(True, 1) dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True def test_terms_of_service_prev_version(session, monkeypatch): patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.save_from_jwt_token() updated_user = UserService.update_terms_of_use(True, 1) dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True updated_user = UserService.save_from_jwt_token() dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] 
is False updated_user = UserService.update_terms_of_use(True, get_tos_latest_version()) dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True updated_user = UserService.save_from_jwt_token() dictionary = updated_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True def test_update_contact_for_user_no_user(session, monkeypatch): with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.update_contact(TestContactInfo.contact2) assert exception.value.code == Error.DATA_NOT_FOUND.name def test_update_contact_for_user_no_contact(session, monkeypatch): factory_user_model(user_info=TestUserInfo.user_test) with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.update_contact(TestContactInfo.contact2) assert exception.value.code == Error.DATA_NOT_FOUND.name def test_delete_contact_for_user(session, monkeypatch): user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) patch_token_info(TestJwtClaims.user_test, monkeypatch) contact = UserService.add_contact(TestContactInfo.contact1).as_dict() assert contact is not None deleted_contact = UserService.delete_contact().as_dict() assert deleted_contact is not None contacts = UserService.get_contacts() assert contacts.get('contacts') == [] def test_delete_contact_for_user_no_user(session, monkeypatch): with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.delete_contact() assert exception.value.code == Error.DATA_NOT_FOUND.name def test_delete_contact_for_user_no_contact(session, monkeypatch): factory_user_model(user_info=TestUserInfo.user_test) with pytest.raises(BusinessException) as exception: patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.delete_contact() 
assert exception.value.code == Error.DATA_NOT_FOUND.name def test_find_users(session): factory_user_model() factory_user_model(user_info=TestUserInfo.user2) users = UserService.find_users(last_name='User') assert users is not None assert len(users) == 2 def test_user_find_by_token(session, monkeypatch): user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] factory_user_model(user_info=user_with_token) found_user = UserService.find_by_jwt_token() assert found_user is None patch_token_info(TestJwtClaims.user_test, monkeypatch) UserService.update_terms_of_use(True, 1) found_user = UserService.find_by_jwt_token() assert found_user is not None dictionary = found_user.as_dict() assert dictionary['username'] == TestJwtClaims.user_test['preferred_username'] assert dictionary['keycloak_guid'] == TestJwtClaims.user_test['sub'] assert dictionary['user_terms']['isTermsOfUseAccepted'] is False UserService.update_terms_of_use(True, get_tos_latest_version()) found_user = UserService.find_by_jwt_token() dictionary = found_user.as_dict() assert dictionary['user_terms']['isTermsOfUseAccepted'] is True def test_user_find_by_username(session): user_model = factory_user_model() user = UserService(user_model) user = UserService.find_by_username(None) assert user is None user = UserService.find_by_username(TestUserInfo.user1['username']) assert user is not None dictionary = user.as_dict() assert dictionary['username'] == TestUserInfo.user1['username'] def test_user_find_by_username_no_model_object(session): username = TestUserInfo.user_test['username'] user = UserService.find_by_username(username) assert user is None def test_user_find_by_username_missing_username(session): user_model = factory_user_model(user_info=TestUserInfo.user_test) user = UserService(user_model) user = UserService.find_by_username('foo') assert user is None def test_delete_contact_user_link(session, auth_mock, keycloak_mock, monkeypatch): user_with_token = 
TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.public_user_role['sub'] user_model = factory_user_model(user_info=user_with_token) user = UserService(user_model) patch_token_info(TestJwtClaims.public_user_role, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user.identifier) org_dictionary = org.as_dict() org_id = org_dictionary['id'] contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.org = org._model contact_link = contact_link.flush() contact_link.commit() deleted_contact = UserService.delete_contact() assert deleted_contact is None delete_contact_link = ContactLinkModel.find_by_user_id(user.identifier) assert not delete_contact_link exist_contact_link = ContactLinkModel.find_by_org_id(org_id) assert exist_contact_link def test_delete_user(session, auth_mock, keycloak_mock, monkeypatch): user_with_token = TestUserInfo.user_test user_with_token['keycloak_guid'] = TestJwtClaims.user_test['sub'] user_model = factory_user_model(user_info=user_with_token) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.commit() patch_token_info(TestJwtClaims.user_test, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() assert len(updated_user.contacts) == 0 user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'INACTIVE' def test_delete_user_where_org_has_affiliations(session, auth_mock, keycloak_mock, monkeypatch): user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link = contact_link.flush() contact_link.commit() 
patch_token_info(TestJwtClaims.user_test, monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id).as_dict() org_id = org['id'] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id) affiliation.save() with pytest.raises(BusinessException) as exception: UserService.delete_user() assert exception.code == Error.DELETE_FAILED_ONLY_OWNER updated_user = UserModel.find_by_jwt_token() contacts = UserService.get_contacts() assert len(contacts) == 1 user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'ACTIVE' def test_delete_user_where_user_is_member_on_org(session, auth_mock, keycloak_mock, monkeypatch): user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.commit() patch_token_info(TestJwtClaims.get_test_user(user_model.keycloak_guid), monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() org_id = org_dictionary['id'] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id) affiliation.save() user_model2 = factory_user_model(user_info=TestUserInfo.user2) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model2 contact_link.commit() membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='USER', membership_type_status=Status.ACTIVE.value) membership.save() patch_token_info(TestJwtClaims.get_test_user(user_model2.keycloak_guid), monkeypatch) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() assert len(updated_user.contacts) == 0 user_orgs = 
MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'INACTIVE' def test_delete_user_where_org_has_another_owner(session, auth_mock, keycloak_mock, monkeypatch): user_model = factory_user_model(user_info=TestUserInfo.user_test) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model contact_link.commit() patch_token_info(TestJwtClaims.get_test_user(user_model.keycloak_guid), monkeypatch) org = OrgService.create_org(TestOrgInfo.org1, user_id=user_model.id) org_dictionary = org.as_dict() org_id = org_dictionary['id'] entity = factory_entity_model(entity_info=TestEntityInfo.entity_lear_mock) affiliation = AffiliationModel(org_id=org_id, entity_id=entity.id) affiliation.save() user_model2 = factory_user_model(user_info=TestUserInfo.user2) contact = factory_contact_model() contact_link = ContactLinkModel() contact_link.contact = contact contact_link.user = user_model2 contact_link.commit() membership = MembershipModel(org_id=org_id, user_id=user_model2.id, membership_type_code='ADMIN', membership_type_status=Status.ACTIVE.value) membership.save() membership.commit() patch_token_info(TestJwtClaims.get_test_user(user_model2.keycloak_guid), monkeypatch) UserService.delete_user() updated_user = UserModel.find_by_jwt_token() assert len(updated_user.contacts) == 0 user_orgs = MembershipModel.find_orgs_for_user(updated_user.id) for org in user_orgs: assert org.status_code == 'INACTIVE'
true
true
f73bf60adcc79b502502f15afba074b8a5426043
18,617
py
Python
yfinance/base.py
rdiere/yfinance
969eeb66b5bc98635aa0289c15a49246248910e4
[ "Apache-2.0" ]
null
null
null
yfinance/base.py
rdiere/yfinance
969eeb66b5bc98635aa0289c15a49246248910e4
[ "Apache-2.0" ]
null
null
null
yfinance/base.py
rdiere/yfinance
969eeb66b5bc98635aa0289c15a49246248910e4
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Yahoo! Finance market data downloader (+fix for Pandas Datareader) # https://github.com/ranaroussi/yfinance # # Copyright 2017-2019 Ran Aroussi # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import print_function import time as _time import datetime as _datetime import requests as _requests import pandas as _pd import numpy as _np try: from urllib.parse import quote as urlencode except ImportError: from urllib import quote as urlencode from . import utils # import json as _json # import re as _re # import sys as _sys from . 
import shared class TickerBase(): def __init__(self, ticker): self.ticker = ticker.upper() self._history = None self._base_url = 'https://query1.finance.yahoo.com' self._scrape_url = 'https://finance.yahoo.com/quote' self._fundamentals = False self._info = None self._sustainability = None self._recommendations = None self._major_holders = None self._institutional_holders = None self._isin = None self._calendar = None self._expirations = {} self._earnings = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} self._financials = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} self._balancesheet = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} self._cashflow = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} def history(self, period="1mo", interval="1d", start=None, end=None, prepost=False, actions=True, auto_adjust=True, back_adjust=False, proxy=None, rounding=True, tz=None, **kwargs): """ :Parameters: period : str Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max Either Use period parameter or use start and end interval : str Valid intervals: 1m,2m,5m,15m,30m,60m,90m,1h,1d,5d,1wk,1mo,3mo Intraday data cannot extend last 60 days start: str Download start date string (YYYY-MM-DD) or _datetime. Default is 1900-01-01 end: str Download end date string (YYYY-MM-DD) or _datetime. Default is now prepost : bool Include Pre and Post market data in results? Default is False auto_adjust: bool Adjust all OHLC automatically? Default is True back_adjust: bool Back-adjusted data to mimic true historical prices proxy: str Optional. Proxy server URL scheme. Default is None rounding: bool Round values to 2 decimal places? Optional. Default is False = precision suggested by Yahoo! tz: str Optional timezone locale for dates. (default data is returned as non-localized dates) **kwargs: dict debug: bool Optional. If passed as False, will suppress error message printing to console. 
""" if start or period is None or period.lower() == "max": if start is None: start = -2208988800 elif isinstance(start, _datetime.datetime): start = int(_time.mktime(start.timetuple())) else: start = int(_time.mktime( _time.strptime(str(start), '%Y-%m-%d'))) if end is None: end = int(_time.time()) elif isinstance(end, _datetime.datetime): end = int(_time.mktime(end.timetuple())) else: end = int(_time.mktime(_time.strptime(str(end), '%Y-%m-%d'))) params = {"period1": start, "period2": end} else: period = period.lower() params = {"range": period} params["interval"] = interval.lower() params["includePrePost"] = prepost params["events"] = "div,splits" # 1) fix weired bug with Yahoo! - returning 60m for 30m bars if params["interval"] == "30m": params["interval"] = "15m" # setup proxy in requests format if proxy is not None: if isinstance(proxy, dict) and "https" in proxy: proxy = proxy["https"] proxy = {"https": proxy} # Getting data from json url = "{}/v8/finance/chart/{}".format(self._base_url, self.ticker) data = _requests.get(url=url, params=params, proxies=proxy) if "Will be right back" in data.text: raise RuntimeError("*** YAHOO! FINANCE IS CURRENTLY DOWN! ***\n" "Our engineers are working quickly to resolve " "the issue. 
Thank you for your patience.") data = data.json() # Work with errors debug_mode = True if "debug" in kwargs and isinstance(kwargs["debug"], bool): debug_mode = kwargs["debug"] err_msg = "No data found for this date range, symbol may be delisted" if "chart" in data and data["chart"]["error"]: err_msg = data["chart"]["error"]["description"] shared._DFS[self.ticker] = utils.empty_df() shared._ERRORS[self.ticker] = err_msg if "many" not in kwargs and debug_mode: print('- %s: %s' % (self.ticker, err_msg)) return shared._DFS[self.ticker] elif "chart" not in data or data["chart"]["result"] is None or \ not data["chart"]["result"]: shared._DFS[self.ticker] = utils.empty_df() shared._ERRORS[self.ticker] = err_msg if "many" not in kwargs and debug_mode: print('- %s: %s' % (self.ticker, err_msg)) return shared._DFS[self.ticker] # parse quotes try: quotes = utils.parse_quotes(data["chart"]["result"][0], tz) except Exception: shared._DFS[self.ticker] = utils.empty_df() shared._ERRORS[self.ticker] = err_msg if "many" not in kwargs and debug_mode: print('- %s: %s' % (self.ticker, err_msg)) return shared._DFS[self.ticker] # 2) fix weired bug with Yahoo! 
- returning 60m for 30m bars if interval.lower() == "30m": quotes2 = quotes.resample('30T') quotes = _pd.DataFrame(index=quotes2.last().index, data={ 'Open': quotes2['Open'].first(), 'High': quotes2['High'].max(), 'Low': quotes2['Low'].min(), 'Close': quotes2['Close'].last(), 'Adj Close': quotes2['Adj Close'].last(), 'Volume': quotes2['Volume'].sum() }) try: quotes['Dividends'] = quotes2['Dividends'].max() except Exception: pass try: quotes['Stock Splits'] = quotes2['Dividends'].max() except Exception: pass if auto_adjust: quotes = utils.auto_adjust(quotes) elif back_adjust: quotes = utils.back_adjust(quotes) if rounding: quotes = _np.round(quotes, data[ "chart"]["result"][0]["meta"]["priceHint"]) quotes['Volume'] = quotes['Volume'].fillna(0).astype(_np.int64) quotes.dropna(inplace=True) # actions dividends, splits = utils.parse_actions(data["chart"]["result"][0], tz) # combine df = _pd.concat([quotes, dividends, splits], axis=1, sort=True) df["Dividends"].fillna(0, inplace=True) df["Stock Splits"].fillna(0, inplace=True) # index eod/intraday df.index = df.index.tz_localize("UTC").tz_convert( data["chart"]["result"][0]["meta"]["exchangeTimezoneName"]) if params["interval"][-1] in {"m", "h"}: df.index.name = "Datetime" else: df.index = _pd.to_datetime(df.index.date) if tz is not None: df.index = df.index.tz_localize(tz) df.index.name = "Date" self._history = df.copy() if not actions: df.drop(columns=["Dividends", "Stock Splits"], inplace=True) return df # ------------------------ def _get_fundamentals(self, kind=None, proxy=None): def cleanup(data): df = _pd.DataFrame(data).drop(columns=['maxAge']) for col in df.columns: df[col] = _np.where( df[col].astype(str) == '-', _np.nan, df[col]) df.set_index('endDate', inplace=True) try: df.index = _pd.to_datetime(df.index, unit='s') except ValueError: df.index = _pd.to_datetime(df.index) df = df.T df.columns.name = '' df.index.name = 'Breakdown' df.index = utils.camel2title(df.index) return df # setup proxy in requests 
format if proxy is not None: if isinstance(proxy, dict) and "https" in proxy: proxy = proxy["https"] proxy = {"https": proxy} if self._fundamentals: return # get info and sustainability url = '%s/%s' % (self._scrape_url, self.ticker) data = utils.get_json(url, proxy) # holders url = "{}/{}/holders".format(self._scrape_url, self.ticker) holders = _pd.read_html(url) self._major_holders = holders[0] self._institutional_holders = holders[1] if 'Date Reported' in self._institutional_holders: self._institutional_holders['Date Reported'] = _pd.to_datetime( self._institutional_holders['Date Reported']) if '% Out' in self._institutional_holders: self._institutional_holders['% Out'] = self._institutional_holders[ '% Out'].str.replace('%', '').astype(float)/100 # sustainability d = {} if isinstance(data.get('esgScores'), dict): for item in data['esgScores']: if not isinstance(data['esgScores'][item], (dict, list)): d[item] = data['esgScores'][item] s = _pd.DataFrame(index=[0], data=d)[-1:].T s.columns = ['Value'] s.index.name = '%.f-%.f' % ( s[s.index == 'ratingYear']['Value'].values[0], s[s.index == 'ratingMonth']['Value'].values[0]) self._sustainability = s[~s.index.isin( ['maxAge', 'ratingYear', 'ratingMonth'])] # info (be nice to python 2) self._info = {} items = ['summaryProfile', 'summaryDetail', 'quoteType', 'defaultKeyStatistics', 'assetProfile', 'summaryDetail'] for item in items: if isinstance(data.get(item), dict): self._info.update(data[item]) self._info['regularMarketPrice'] = self._info['regularMarketOpen'] self._info['logo_url'] = "" try: domain = self._info['website'].split( '://')[1].split('/')[0].replace('www.', '') self._info['logo_url'] = 'https://logo.clearbit.com/%s' % domain except Exception: pass # events try: cal = _pd.DataFrame( data['calendarEvents']['earnings']) cal['earningsDate'] = _pd.to_datetime( cal['earningsDate'], unit='s') self._calendar = cal.T self._calendar.index = utils.camel2title(self._calendar.index) self._calendar.columns = 
['Value'] except Exception: pass # analyst recommendations try: rec = _pd.DataFrame( data['upgradeDowngradeHistory']['history']) rec['earningsDate'] = _pd.to_datetime( rec['epochGradeDate'], unit='s') rec.set_index('earningsDate', inplace=True) rec.index.name = 'Date' rec.columns = utils.camel2title(rec.columns) self._recommendations = rec[[ 'Firm', 'To Grade', 'From Grade', 'Action']].sort_index() except Exception: pass # get fundamentals data = utils.get_json(url+'/financials', proxy) # generic patterns for key in ( (self._cashflow, 'cashflowStatement', 'cashflowStatements'), (self._balancesheet, 'balanceSheet', 'balanceSheetStatements'), (self._financials, 'incomeStatement', 'incomeStatementHistory') ): item = key[1] + 'History' if isinstance(data.get(item), dict): key[0]['yearly'] = cleanup(data[item][key[2]]) item = key[1]+'HistoryQuarterly' if isinstance(data.get(item), dict): key[0]['quarterly'] = cleanup(data[item][key[2]]) # earnings if isinstance(data.get('earnings'), dict): earnings = data['earnings']['financialsChart'] df = _pd.DataFrame(earnings['yearly']).set_index('date') df.columns = utils.camel2title(df.columns) df.index.name = 'Year' self._earnings['yearly'] = df df = _pd.DataFrame(earnings['quarterly']).set_index('date') df.columns = utils.camel2title(df.columns) df.index.name = 'Quarter' self._earnings['quarterly'] = df self._fundamentals = True def get_recommendations(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._recommendations if as_dict: return data.to_dict() return data def get_calendar(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._calendar if as_dict: return data.to_dict() return data def get_major_holders(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._major_holders if as_dict: return data.to_dict() return data def get_institutional_holders(self, proxy=None, as_dict=False, *args, **kwargs): 
self._get_fundamentals(proxy) data = self._institutional_holders if as_dict: return data.to_dict() return data def get_info(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._info if as_dict: return data.to_dict() return data def get_sustainability(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._sustainability if as_dict: return data.to_dict() return data def get_earnings(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._earnings[freq] if as_dict: return data.to_dict() return data def get_financials(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._financials[freq] if as_dict: return data.to_dict() return data def get_balancesheet(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._balancesheet[freq] if as_dict: return data.to_dict() return data def get_balance_sheet(self, proxy=None, as_dict=False, freq="yearly"): return self.get_balancesheet(proxy, as_dict, freq) def get_cashflow(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._cashflow[freq] if as_dict: return data.to_dict() return data def get_dividends(self, proxy=None): if self._history is None: self.history(period="max", proxy=proxy) dividends = self._history["Dividends"] return dividends[dividends != 0] def get_splits(self, proxy=None): if self._history is None: self.history(period="max", proxy=proxy) splits = self._history["Stock Splits"] return splits[splits != 0] def get_actions(self, proxy=None): if self._history is None: self.history(period="max", proxy=proxy) actions = self._history[["Dividends", "Stock Splits"]] return actions[actions != 0].dropna(how='all').fillna(0) def get_isin(self, proxy=None): # *** experimental *** if self._isin is not None: return self._isin ticker = self.ticker.upper() if "-" in ticker or "^" in ticker: self._isin = '-' 
return self._isin # setup proxy in requests format if proxy is not None: if isinstance(proxy, dict) and "https" in proxy: proxy = proxy["https"] proxy = {"https": proxy} q = ticker self.get_info(proxy=proxy) if "shortName" in self._info: q = self._info['shortName'] url = 'https://markets.businessinsider.com/ajax/' \ 'SearchController_Suggest?max_results=25&query=%s' \ % urlencode(q) data = _requests.get(url=url, proxies=proxy).text search_str = '"{}|'.format(ticker) if search_str not in data: if q.lower() in data.lower(): search_str = '"|' if search_str not in data: self._isin = '-' return self._isin else: self._isin = '-' return self._isin self._isin = data.split(search_str)[1].split('"')[0].split('|')[0] return self._isin
36.009671
84
0.556481
from __future__ import print_function import time as _time import datetime as _datetime import requests as _requests import pandas as _pd import numpy as _np try: from urllib.parse import quote as urlencode except ImportError: from urllib import quote as urlencode from . import utils from . import shared class TickerBase(): def __init__(self, ticker): self.ticker = ticker.upper() self._history = None self._base_url = 'https://query1.finance.yahoo.com' self._scrape_url = 'https://finance.yahoo.com/quote' self._fundamentals = False self._info = None self._sustainability = None self._recommendations = None self._major_holders = None self._institutional_holders = None self._isin = None self._calendar = None self._expirations = {} self._earnings = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} self._financials = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} self._balancesheet = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} self._cashflow = { "yearly": utils.empty_df(), "quarterly": utils.empty_df()} def history(self, period="1mo", interval="1d", start=None, end=None, prepost=False, actions=True, auto_adjust=True, back_adjust=False, proxy=None, rounding=True, tz=None, **kwargs): if start or period is None or period.lower() == "max": if start is None: start = -2208988800 elif isinstance(start, _datetime.datetime): start = int(_time.mktime(start.timetuple())) else: start = int(_time.mktime( _time.strptime(str(start), '%Y-%m-%d'))) if end is None: end = int(_time.time()) elif isinstance(end, _datetime.datetime): end = int(_time.mktime(end.timetuple())) else: end = int(_time.mktime(_time.strptime(str(end), '%Y-%m-%d'))) params = {"period1": start, "period2": end} else: period = period.lower() params = {"range": period} params["interval"] = interval.lower() params["includePrePost"] = prepost params["events"] = "div,splits" if params["interval"] == "30m": params["interval"] = "15m" if proxy is not None: if isinstance(proxy, dict) and 
"https" in proxy: proxy = proxy["https"] proxy = {"https": proxy} url = "{}/v8/finance/chart/{}".format(self._base_url, self.ticker) data = _requests.get(url=url, params=params, proxies=proxy) if "Will be right back" in data.text: raise RuntimeError("*** YAHOO! FINANCE IS CURRENTLY DOWN! ***\n" "Our engineers are working quickly to resolve " "the issue. Thank you for your patience.") data = data.json() debug_mode = True if "debug" in kwargs and isinstance(kwargs["debug"], bool): debug_mode = kwargs["debug"] err_msg = "No data found for this date range, symbol may be delisted" if "chart" in data and data["chart"]["error"]: err_msg = data["chart"]["error"]["description"] shared._DFS[self.ticker] = utils.empty_df() shared._ERRORS[self.ticker] = err_msg if "many" not in kwargs and debug_mode: print('- %s: %s' % (self.ticker, err_msg)) return shared._DFS[self.ticker] elif "chart" not in data or data["chart"]["result"] is None or \ not data["chart"]["result"]: shared._DFS[self.ticker] = utils.empty_df() shared._ERRORS[self.ticker] = err_msg if "many" not in kwargs and debug_mode: print('- %s: %s' % (self.ticker, err_msg)) return shared._DFS[self.ticker] try: quotes = utils.parse_quotes(data["chart"]["result"][0], tz) except Exception: shared._DFS[self.ticker] = utils.empty_df() shared._ERRORS[self.ticker] = err_msg if "many" not in kwargs and debug_mode: print('- %s: %s' % (self.ticker, err_msg)) return shared._DFS[self.ticker] if interval.lower() == "30m": quotes2 = quotes.resample('30T') quotes = _pd.DataFrame(index=quotes2.last().index, data={ 'Open': quotes2['Open'].first(), 'High': quotes2['High'].max(), 'Low': quotes2['Low'].min(), 'Close': quotes2['Close'].last(), 'Adj Close': quotes2['Adj Close'].last(), 'Volume': quotes2['Volume'].sum() }) try: quotes['Dividends'] = quotes2['Dividends'].max() except Exception: pass try: quotes['Stock Splits'] = quotes2['Dividends'].max() except Exception: pass if auto_adjust: quotes = utils.auto_adjust(quotes) elif back_adjust: 
quotes = utils.back_adjust(quotes) if rounding: quotes = _np.round(quotes, data[ "chart"]["result"][0]["meta"]["priceHint"]) quotes['Volume'] = quotes['Volume'].fillna(0).astype(_np.int64) quotes.dropna(inplace=True) dividends, splits = utils.parse_actions(data["chart"]["result"][0], tz) df = _pd.concat([quotes, dividends, splits], axis=1, sort=True) df["Dividends"].fillna(0, inplace=True) df["Stock Splits"].fillna(0, inplace=True) df.index = df.index.tz_localize("UTC").tz_convert( data["chart"]["result"][0]["meta"]["exchangeTimezoneName"]) if params["interval"][-1] in {"m", "h"}: df.index.name = "Datetime" else: df.index = _pd.to_datetime(df.index.date) if tz is not None: df.index = df.index.tz_localize(tz) df.index.name = "Date" self._history = df.copy() if not actions: df.drop(columns=["Dividends", "Stock Splits"], inplace=True) return df def _get_fundamentals(self, kind=None, proxy=None): def cleanup(data): df = _pd.DataFrame(data).drop(columns=['maxAge']) for col in df.columns: df[col] = _np.where( df[col].astype(str) == '-', _np.nan, df[col]) df.set_index('endDate', inplace=True) try: df.index = _pd.to_datetime(df.index, unit='s') except ValueError: df.index = _pd.to_datetime(df.index) df = df.T df.columns.name = '' df.index.name = 'Breakdown' df.index = utils.camel2title(df.index) return df if proxy is not None: if isinstance(proxy, dict) and "https" in proxy: proxy = proxy["https"] proxy = {"https": proxy} if self._fundamentals: return url = '%s/%s' % (self._scrape_url, self.ticker) data = utils.get_json(url, proxy) url = "{}/{}/holders".format(self._scrape_url, self.ticker) holders = _pd.read_html(url) self._major_holders = holders[0] self._institutional_holders = holders[1] if 'Date Reported' in self._institutional_holders: self._institutional_holders['Date Reported'] = _pd.to_datetime( self._institutional_holders['Date Reported']) if '% Out' in self._institutional_holders: self._institutional_holders['% Out'] = self._institutional_holders[ '% 
Out'].str.replace('%', '').astype(float)/100 d = {} if isinstance(data.get('esgScores'), dict): for item in data['esgScores']: if not isinstance(data['esgScores'][item], (dict, list)): d[item] = data['esgScores'][item] s = _pd.DataFrame(index=[0], data=d)[-1:].T s.columns = ['Value'] s.index.name = '%.f-%.f' % ( s[s.index == 'ratingYear']['Value'].values[0], s[s.index == 'ratingMonth']['Value'].values[0]) self._sustainability = s[~s.index.isin( ['maxAge', 'ratingYear', 'ratingMonth'])] self._info = {} items = ['summaryProfile', 'summaryDetail', 'quoteType', 'defaultKeyStatistics', 'assetProfile', 'summaryDetail'] for item in items: if isinstance(data.get(item), dict): self._info.update(data[item]) self._info['regularMarketPrice'] = self._info['regularMarketOpen'] self._info['logo_url'] = "" try: domain = self._info['website'].split( '://')[1].split('/')[0].replace('www.', '') self._info['logo_url'] = 'https://logo.clearbit.com/%s' % domain except Exception: pass try: cal = _pd.DataFrame( data['calendarEvents']['earnings']) cal['earningsDate'] = _pd.to_datetime( cal['earningsDate'], unit='s') self._calendar = cal.T self._calendar.index = utils.camel2title(self._calendar.index) self._calendar.columns = ['Value'] except Exception: pass try: rec = _pd.DataFrame( data['upgradeDowngradeHistory']['history']) rec['earningsDate'] = _pd.to_datetime( rec['epochGradeDate'], unit='s') rec.set_index('earningsDate', inplace=True) rec.index.name = 'Date' rec.columns = utils.camel2title(rec.columns) self._recommendations = rec[[ 'Firm', 'To Grade', 'From Grade', 'Action']].sort_index() except Exception: pass data = utils.get_json(url+'/financials', proxy) for key in ( (self._cashflow, 'cashflowStatement', 'cashflowStatements'), (self._balancesheet, 'balanceSheet', 'balanceSheetStatements'), (self._financials, 'incomeStatement', 'incomeStatementHistory') ): item = key[1] + 'History' if isinstance(data.get(item), dict): key[0]['yearly'] = cleanup(data[item][key[2]]) item = 
key[1]+'HistoryQuarterly' if isinstance(data.get(item), dict): key[0]['quarterly'] = cleanup(data[item][key[2]]) if isinstance(data.get('earnings'), dict): earnings = data['earnings']['financialsChart'] df = _pd.DataFrame(earnings['yearly']).set_index('date') df.columns = utils.camel2title(df.columns) df.index.name = 'Year' self._earnings['yearly'] = df df = _pd.DataFrame(earnings['quarterly']).set_index('date') df.columns = utils.camel2title(df.columns) df.index.name = 'Quarter' self._earnings['quarterly'] = df self._fundamentals = True def get_recommendations(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._recommendations if as_dict: return data.to_dict() return data def get_calendar(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._calendar if as_dict: return data.to_dict() return data def get_major_holders(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._major_holders if as_dict: return data.to_dict() return data def get_institutional_holders(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._institutional_holders if as_dict: return data.to_dict() return data def get_info(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._info if as_dict: return data.to_dict() return data def get_sustainability(self, proxy=None, as_dict=False, *args, **kwargs): self._get_fundamentals(proxy) data = self._sustainability if as_dict: return data.to_dict() return data def get_earnings(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._earnings[freq] if as_dict: return data.to_dict() return data def get_financials(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._financials[freq] if as_dict: return data.to_dict() return data def get_balancesheet(self, proxy=None, as_dict=False, 
freq="yearly"): self._get_fundamentals(proxy) data = self._balancesheet[freq] if as_dict: return data.to_dict() return data def get_balance_sheet(self, proxy=None, as_dict=False, freq="yearly"): return self.get_balancesheet(proxy, as_dict, freq) def get_cashflow(self, proxy=None, as_dict=False, freq="yearly"): self._get_fundamentals(proxy) data = self._cashflow[freq] if as_dict: return data.to_dict() return data def get_dividends(self, proxy=None): if self._history is None: self.history(period="max", proxy=proxy) dividends = self._history["Dividends"] return dividends[dividends != 0] def get_splits(self, proxy=None): if self._history is None: self.history(period="max", proxy=proxy) splits = self._history["Stock Splits"] return splits[splits != 0] def get_actions(self, proxy=None): if self._history is None: self.history(period="max", proxy=proxy) actions = self._history[["Dividends", "Stock Splits"]] return actions[actions != 0].dropna(how='all').fillna(0) def get_isin(self, proxy=None): if self._isin is not None: return self._isin ticker = self.ticker.upper() if "-" in ticker or "^" in ticker: self._isin = '-' return self._isin if proxy is not None: if isinstance(proxy, dict) and "https" in proxy: proxy = proxy["https"] proxy = {"https": proxy} q = ticker self.get_info(proxy=proxy) if "shortName" in self._info: q = self._info['shortName'] url = 'https://markets.businessinsider.com/ajax/' \ 'SearchController_Suggest?max_results=25&query=%s' \ % urlencode(q) data = _requests.get(url=url, proxies=proxy).text search_str = '"{}|'.format(ticker) if search_str not in data: if q.lower() in data.lower(): search_str = '"|' if search_str not in data: self._isin = '-' return self._isin else: self._isin = '-' return self._isin self._isin = data.split(search_str)[1].split('"')[0].split('|')[0] return self._isin
true
true
f73bf6dcdd224c16a88b42698e13671d40775354
53,040
py
Python
test/functional/feature_block.py
JoffreyBourdieux/ApsioCoin
6573a9ab4e6e302f99d203fe0e1e414adb1fd349
[ "MIT" ]
null
null
null
test/functional/feature_block.py
JoffreyBourdieux/ApsioCoin
6573a9ab4e6e302f99d203fe0e1e414adb1fd349
[ "MIT" ]
null
null
null
test/functional/feature_block.py
JoffreyBourdieux/ApsioCoin
6573a9ab4e6e302f99d203fe0e1e414adb1fd349
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) 2015-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test block processing. This reimplements tests from the bitcoinj/FullBlockTestGenerator used by the pull-tester. We use the testing framework in which we expect a particular answer from each test. """ from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.comptool import TestManager, TestInstance, RejectResult from test_framework.blocktools import * import time from test_framework.key import CECKey from test_framework.script import * from test_framework.mininode import network_thread_start import struct class PreviousSpendableOutput(): def __init__(self, tx = CTransaction(), n = -1): self.tx = tx self.n = n # the output we're spending # Use this class for tests that require behavior other than normal "mininode" behavior. # For now, it is used to serialize a bloated varint (b64). class CBrokenBlock(CBlock): def __init__(self, header=None): super(CBrokenBlock, self).__init__(header) def initialize(self, base_block): self.vtx = copy.deepcopy(base_block.vtx) self.hashMerkleRoot = self.calc_merkle_root() def serialize(self, with_witness=False): r = b"" r += super(CBlock, self).serialize() r += struct.pack("<BQ", 255, len(self.vtx)) for tx in self.vtx: if with_witness: r += tx.serialize_with_witness() else: r += tx.serialize_without_witness() return r def normal_serialize(self): r = b"" r += super(CBrokenBlock, self).serialize() return r class FullBlockTest(ComparisonTestFramework): # Can either run this test as 1 node with expected answers, or two and compare them. # Change the "outcome" variable from each TestInstance object to only do the comparison. 
def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.block_heights = {} self.coinbase_key = CECKey() self.coinbase_key.set_secretbytes(b"horsebattery") self.coinbase_pubkey = self.coinbase_key.get_pubkey() self.tip = None self.blocks = {} def add_options(self, parser): super().add_options(parser) parser.add_option("--runbarelyexpensive", dest="runbarelyexpensive", default=True) def run_test(self): self.test = TestManager(self, self.options.tmpdir) self.test.add_all_connections(self.nodes) network_thread_start() self.test.run() def add_transactions_to_block(self, block, tx_list): [ tx.rehash() for tx in tx_list ] block.vtx.extend(tx_list) # this is a little handier to use than the version in blocktools.py def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])): tx = create_transaction(spend_tx, n, b"", value, script) return tx # sign a transaction, using the key we know about # this signs input 0 in tx, which is assumed to be spending output n in spend_tx def sign_tx(self, tx, spend_tx, n): scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey) if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend tx.vin[0].scriptSig = CScript() return (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL) tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))]) def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])): tx = self.create_tx(spend_tx, n, value, script) self.sign_tx(tx, spend_tx, n) tx.rehash() return tx def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True): if self.tip == None: base_block_hash = self.genesis_hash block_time = int(time.time())+1 else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) coinbase.vout[0].nValue += 
additional_coinbase_value coinbase.rehash() if spend == None: block = create_block(base_block_hash, coinbase, block_time) block.nVersion = 0x20000000 else: coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees coinbase.rehash() block = create_block(base_block_hash, coinbase, block_time) block.nVersion = 0x20000000 tx = create_transaction(spend.tx, spend.n, b"", 1, script) # spend 1 satoshi self.sign_tx(tx, spend.tx, spend.n) self.add_transactions_to_block(block, [tx]) block.hashMerkleRoot = block.calc_merkle_root() if solve: block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block def get_tests(self): self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # returns a test case that asserts that the current tip was accepted def accepted(): return TestInstance([[self.tip, True]]) # returns a test case that asserts that the current tip was rejected def rejected(reject = None): if reject is None: return TestInstance([[self.tip, False]]) else: return TestInstance([[self.tip, reject]]) # move the tip back to a previous block def tip(number): self.tip = self.blocks[number] # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] self.add_transactions_to_block(block, new_transactions) old_sha256 = block.sha256 block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[block.sha256] = self.block_heights[old_sha256] 
del self.block_heights[old_sha256] self.blocks[block_number] = block return block # shorthand for functions block = self.next_block create_tx = self.create_tx create_and_sign_tx = self.create_and_sign_transaction # these must be updated if consensus changes MAX_BLOCK_SIGOPS = 20000 # Create a new block block(0) save_spendable_output() yield accepted() # Now we need that block to mature so we can spend the coinbase. test = TestInstance(sync_every_block=False) for i in range(99): block(5000 + i) test.blocks_and_transactions.append([self.tip, True]) save_spendable_output() yield test # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(33): out.append(get_spendable_output()) # Start by building a couple of blocks on top (which output is spent is # in parentheses): # genesis -> b1 (0) -> b2 (1) block(1, spend=out[0]) save_spendable_output() yield accepted() block(2, spend=out[1]) yield accepted() save_spendable_output() # so fork like this: # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) # # Nothing should happen at this point. We saw b2 first so it takes priority. tip(1) b3 = block(3, spend=out[1]) txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0) yield rejected() # Now we add another block to make the alternative chain longer. # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) -> b4 (2) block(4, spend=out[2]) yield accepted() # ... and back to the first chain. 
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b3 (1) -> b4 (2) tip(2) block(5, spend=out[2]) save_spendable_output() yield rejected() block(6, spend=out[3]) yield accepted() # Try to create a fork that double-spends # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b7 (2) -> b8 (4) # \-> b3 (1) -> b4 (2) tip(5) block(7, spend=out[2]) yield rejected() block(8, spend=out[4]) yield rejected() # Try to create a block that has too much fee # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b9 (4) # \-> b3 (1) -> b4 (2) tip(6) block(9, spend=out[4], additional_coinbase_value=1) yield rejected(RejectResult(16, b'bad-cb-amount')) # Create a fork that ends in a block with too much fee (the one that causes the reorg) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b10 (3) -> b11 (4) # \-> b3 (1) -> b4 (2) tip(5) block(10, spend=out[3]) yield rejected() block(11, spend=out[4], additional_coinbase_value=1) yield rejected(RejectResult(16, b'bad-cb-amount')) # Try again, but with a valid fork first # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b14 (5) # (b12 added last) # \-> b3 (1) -> b4 (2) tip(5) b12 = block(12, spend=out[3]) save_spendable_output() b13 = block(13, spend=out[4]) # Deliver the block header for b12, and the block b13. # b13 should be accepted but the tip won't advance until b12 is delivered. yield TestInstance([[CBlockHeader(b12), None], [b13, False]]) save_spendable_output() # b14 is invalid, but the node won't know that until it tries to connect # Tip still can't advance because b12 is missing block(14, spend=out[5], additional_coinbase_value=1) yield rejected() yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13. 
# Add a block with MAX_BLOCK_SIGOPS and one with one more sigop # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6) # \-> b3 (1) -> b4 (2) # Test that a block with a lot of checksigs is okay lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1)) tip(13) block(15, spend=out[5], script=lots_of_checksigs) yield accepted() save_spendable_output() # Test that a block with too many checksigs is rejected too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS)) block(16, spend=out[6], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # Attempt to spend a transaction created on a different fork # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1]) # \-> b3 (1) -> b4 (2) tip(15) block(17, spend=txout_b3) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Attempt to spend a transaction created on a different fork (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b18 (b3.vtx[1]) -> b19 (6) # \-> b3 (1) -> b4 (2) tip(13) block(18, spend=txout_b3) yield rejected() block(19, spend=out[6]) yield rejected() # Attempt to spend a coinbase at depth too low # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7) # \-> b3 (1) -> b4 (2) tip(15) block(20, spend=out[7]) yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase')) # Attempt to spend a coinbase at depth too low (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b21 (6) -> b22 (5) # \-> b3 (1) -> b4 (2) tip(13) block(21, spend=out[6]) yield rejected() block(22, spend=out[5]) yield rejected() # Create a block on either side of MAX_BLOCK_BASE_SIZE and make sure its accepted/rejected # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) # \-> b24 
(6) -> b25 (7) # \-> b3 (1) -> b4 (2) tip(15) b23 = block(23, spend=out[6]) tx = CTransaction() script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0))) b23 = update_block(23, [tx]) # Make sure the math above worked out to produce a max-sized block assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE) yield accepted() save_spendable_output() # Make the next block one byte bigger and check that it fails tip(15) b24 = block(24, spend=out[6]) script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69 script_output = CScript([b'\x00' * (script_length+1)]) tx.vout = [CTxOut(0, script_output)] b24 = update_block(24, [tx]) assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE+1) yield rejected(RejectResult(16, b'bad-blk-length')) block(25, spend=out[7]) yield rejected() # Create blocks with a coinbase input script size out of range # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) # \-> ... (6) -> ... (7) # \-> b3 (1) -> b4 (2) tip(15) b26 = block(26, spend=out[6]) b26.vtx[0].vin[0].scriptSig = b'\x00' b26.vtx[0].rehash() # update_block causes the merkle root to get updated, even with no new # transactions, and updates the required state. b26 = update_block(26, []) yield rejected(RejectResult(16, b'bad-cb-length')) # Extend the b26 chain to make sure bitcoind isn't accepting b26 block(27, spend=out[7]) yield rejected(False) # Now try a too-large-coinbase script tip(15) b28 = block(28, spend=out[6]) b28.vtx[0].vin[0].scriptSig = b'\x00' * 101 b28.vtx[0].rehash() b28 = update_block(28, []) yield rejected(RejectResult(16, b'bad-cb-length')) # Extend the b28 chain to make sure bitcoind isn't accepting b28 block(29, spend=out[7]) yield rejected(False) # b30 has a max-sized coinbase scriptSig. 
tip(23) b30 = block(30) b30.vtx[0].vin[0].scriptSig = b'\x00' * 100 b30.vtx[0].rehash() b30 = update_block(30, []) yield accepted() save_spendable_output() # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY # # genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) # \-> b36 (11) # \-> b34 (10) # \-> b32 (9) # # MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end. lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) b31 = block(31, spend=out[8], script=lots_of_multisigs) assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS) yield accepted() save_spendable_output() # this goes over the limit because the coinbase has one sigop too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20)) b32 = block(32, spend=out[9], script=too_many_multisigs) assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1) yield rejected(RejectResult(16, b'bad-blk-sigops')) # CHECKMULTISIGVERIFY tip(31) lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) block(33, spend=out[9], script=lots_of_multisigs) yield accepted() save_spendable_output() too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20)) block(34, spend=out[10], script=too_many_multisigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # CHECKSIGVERIFY tip(33) lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1)) b35 = block(35, spend=out[10], script=lots_of_checksigs) yield accepted() save_spendable_output() too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS)) block(36, spend=out[11], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # Check spending of a transaction in a block which failed to connect # # b6 (3) # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) # 
\-> b37 (11) # \-> b38 (11/37) # # save 37's spendable output, but then double-spend out11 to invalidate the block tip(35) b37 = block(37, spend=out[11]) txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0) tx = create_and_sign_tx(out[11].tx, out[11].n, 0) b37 = update_block(37, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid tip(35) block(38, spend=txout_b37) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Check P2SH SigOp counting # # # 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) # \-> b40 (12) # # b39 - create some P2SH outputs that will require 6 sigops to spend: # # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL # tip(35) b39 = block(39) b39_outputs = 0 b39_sigops_per_output = 6 # Build the redeem script, hash it, use hash to create the p2sh script redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE # This must be signed because it is spending a coinbase spend = out[11] tx = create_tx(spend.tx, spend.n, 1, p2sh_script) tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE]))) self.sign_tx(tx, spend.tx, spend.n) tx.rehash() b39 = update_block(39, [tx]) b39_outputs += 1 # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE tx_new = None tx_last = tx total_size=len(b39.serialize()) while(total_size < MAX_BLOCK_BASE_SIZE): tx_new = create_tx(tx_last, 1, 1, p2sh_script) tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE]))) tx_new.rehash() total_size += len(tx_new.serialize()) 
if total_size >= MAX_BLOCK_BASE_SIZE: break b39.vtx.append(tx_new) # add tx to block tx_last = tx_new b39_outputs += 1 b39 = update_block(39, []) yield accepted() save_spendable_output() # Test sigops in P2SH redeem scripts # # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops. # The first tx has one sigop and then at the end we add 2 more to put us just over the max. # # b41 does the same, less one, so it has the maximum sigops permitted. # tip(39) b40 = block(40, spend=out[12]) sigops = get_legacy_sigopcount_block(b40) numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output assert_equal(numTxes <= b39_outputs, True) lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) new_txs = [] for i in range(1, numTxes+1): tx = CTransaction() tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) tx.vin.append(CTxIn(lastOutpoint, b'')) # second input is corresponding P2SH output from b39 tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) # Note: must pass the redeem_script (not p2sh_script) to the signature hash function (sighash, err) = SignatureHash(redeem_script, tx, 1, SIGHASH_ALL) sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL])) scriptSig = CScript([sig, redeem_script]) tx.vin[1].scriptSig = scriptSig tx.rehash() new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) tx.rehash() new_txs.append(tx) update_block(40, new_txs) yield rejected(RejectResult(16, b'bad-blk-sigops')) # same as b40, but one less sigop tip(39) block(41, spend=None) update_block(41, b40.vtx[1:-1]) b41_sigops_to_fill = b40_sigops_to_fill - 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) tx.rehash() update_block(41, [tx]) yield 
accepted() # Fork off of b39 to create a constant base again # # b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) # \-> b41 (12) # tip(39) block(42, spend=out[12]) yield rejected() save_spendable_output() block(43, spend=out[13]) yield accepted() save_spendable_output() # Test a number of really invalid scenarios # # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14) # \-> ??? (15) # The next few blocks are going to be created "by hand" since they'll do funky things, such as having # the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works. height = self.block_heights[self.tip.sha256] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) b44 = CBlock() b44.nVersion = 0x20000000 b44.nTime = self.tip.nTime + 1 b44.hashPrevBlock = self.tip.sha256 b44.nBits = 0x207fffff b44.vtx.append(coinbase) b44.hashMerkleRoot = b44.calc_merkle_root() b44.solve() self.tip = b44 self.block_heights[b44.sha256] = height self.blocks[44] = b44 yield accepted() # A block with a non-coinbase as the first tx non_coinbase = create_tx(out[15].tx, out[15].n, 1) b45 = CBlock() b45.nVersion = 0x20000000 b45.nTime = self.tip.nTime + 1 b45.hashPrevBlock = self.tip.sha256 b45.nBits = 0x207fffff b45.vtx.append(non_coinbase) b45.hashMerkleRoot = b45.calc_merkle_root() b45.calc_sha256() b45.solve() self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256]+1 self.tip = b45 self.blocks[45] = b45 yield rejected(RejectResult(16, b'bad-cb-missing')) # A block with no txns tip(44) b46 = CBlock() b46.nVersion = 0x20000000 b46.nTime = b44.nTime+1 b46.hashPrevBlock = b44.sha256 b46.nBits = 0x207fffff b46.vtx = [] b46.hashMerkleRoot = 0 b46.solve() self.block_heights[b46.sha256] = self.block_heights[b44.sha256]+1 self.tip = b46 assert 46 not in self.blocks self.blocks[46] = b46 s = ser_uint256(b46.hashMerkleRoot) yield rejected(RejectResult(16, b'bad-blk-length')) # Apsiocoin: Temporarily 
disable test # A block with invalid work #tip(44) #b47 = block(47, solve=False) #target = uint256_from_compact(b47.nBits) #while b47.scrypt256 < target: #changed > to < # b47.nNonce += 1 # b47.rehash() #yield rejected(RejectResult(16, b'high-hash')) # A block with timestamp > 2 hrs in the future tip(44) b48 = block(48, solve=False) b48.nTime = int(time.time()) + 60 * 60 * 3 b48.solve() yield rejected(RejectResult(16, b'time-too-new')) # A block with an invalid merkle hash tip(44) b49 = block(49) b49.hashMerkleRoot += 1 b49.solve() yield rejected(RejectResult(16, b'bad-txnmrklroot')) # A block with an incorrect POW limit tip(44) b50 = block(50) b50.nBits = b50.nBits - 1 b50.solve() yield rejected(RejectResult(16, b'bad-diffbits')) # A block with two coinbase txns tip(44) b51 = block(51) cb2 = create_coinbase(51, self.coinbase_pubkey) b51 = update_block(51, [cb2]) yield rejected(RejectResult(16, b'bad-cb-multiple')) # A block w/ duplicate txns # Note: txns have to be in the right position in the merkle tree to trigger this error tip(44) b52 = block(52, spend=out[15]) tx = create_tx(b52.vtx[1], 0, 1) b52 = update_block(52, [tx, tx]) yield rejected(RejectResult(16, b'bad-txns-duplicate')) # Test block timestamps # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) # \-> b54 (15) # tip(43) block(53, spend=out[14]) yield rejected() # rejected since b44 is at same height save_spendable_output() # invalid timestamp (b35 is 5 blocks back, so its time is MedianTimePast) b54 = block(54, spend=out[15]) b54.nTime = b35.nTime - 1 b54.solve() yield rejected(RejectResult(16, b'time-too-old')) # valid timestamp tip(53) b55 = block(55, spend=out[15]) b55.nTime = b35.nTime update_block(55, []) yield accepted() save_spendable_output() # Test CVE-2012-2459 # # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16) # \-> b57 (16) # \-> b56p2 (16) # \-> b56 (16) # # Merkle tree malleability (CVE-2012-2459): repeating sequences of 
transactions in a block without # affecting the merkle root of a block, while still invalidating it. # See: src/consensus/merkle.h # # b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx. # Result: OK # # b56 copies b57 but duplicates tx1 and does not recalculate the block hash. So it has a valid merkle # root but duplicate transactions. # Result: Fails # # b57p2 has six transactions in its merkle tree: # - coinbase, tx, tx1, tx2, tx3, tx4 # Merkle root calculation will duplicate as necessary. # Result: OK. # # b56p2 copies b57p2 but adds both tx3 and tx4. The purpose of the test is to make sure the code catches # duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates # that the error was caught early, avoiding a DOS vulnerability.) # b57 - a good block with 2 txs, don't submit until end tip(55) b57 = block(57) tx = create_and_sign_tx(out[16].tx, out[16].n, 1) tx1 = create_tx(tx, 0, 1) b57 = update_block(57, [tx, tx1]) # b56 - copy b57, add a duplicate tx tip(55) b56 = copy.deepcopy(b57) self.blocks[56] = b56 assert_equal(len(b56.vtx),3) b56 = update_block(56, [tx1]) assert_equal(b56.hash, b57.hash) yield rejected(RejectResult(16, b'bad-txns-duplicate')) # b57p2 - a good block with 6 tx'es, don't submit until end tip(55) b57p2 = block("57p2") tx = create_and_sign_tx(out[16].tx, out[16].n, 1) tx1 = create_tx(tx, 0, 1) tx2 = create_tx(tx1, 0, 1) tx3 = create_tx(tx2, 0, 1) tx4 = create_tx(tx3, 0, 1) b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4]) # b56p2 - copy b57p2, duplicate two non-consecutive tx's tip(55) b56p2 = copy.deepcopy(b57p2) self.blocks["b56p2"] = b56p2 assert_equal(b56p2.hash, b57p2.hash) assert_equal(len(b56p2.vtx),6) b56p2 = update_block("b56p2", [tx3, tx4]) yield rejected(RejectResult(16, b'bad-txns-duplicate')) tip("57p2") yield accepted() tip(57) yield rejected() #rejected because 57p2 seen first save_spendable_output() # Test a few invalid tx types # # -> b35 
(10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> ??? (17) # # tx with prevout.n out of range tip(57) b58 = block(58, spend=out[17]) tx = CTransaction() assert(len(out[17].tx.vout) < 42) tx.vin.append(CTxIn(COutPoint(out[17].tx.sha256, 42), CScript([OP_TRUE]), 0xffffffff)) tx.vout.append(CTxOut(0, b"")) tx.calc_sha256() b58 = update_block(58, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # tx with output value > input value out of range tip(57) b59 = block(59) tx = create_and_sign_tx(out[17].tx, out[17].n, 51*COIN) b59 = update_block(59, [tx]) yield rejected(RejectResult(16, b'bad-txns-in-belowout')) # reset to good chain tip(57) b60 = block(60, spend=out[17]) yield accepted() save_spendable_output() # Test BIP30 # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b61 (18) # # Blocks are not allowed to contain a transaction whose id matches that of an earlier, # not-fully-spent transaction in the same chain. To test, make identical coinbases; # the second one should be rejected. 
# tip(60) b61 = block(61, spend=out[18]) b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig #equalize the coinbases b61.vtx[0].rehash() b61 = update_block(61, []) assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize()) yield rejected(RejectResult(16, b'bad-txns-BIP30')) # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests) # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b62 (18) # tip(60) b62 = block(62) tx = CTransaction() tx.nLockTime = 0xffffffff #this locktime is non-final assert(out[18].n < len(out[18].tx.vout)) tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256, out[18].n))) # don't set nSequence tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) assert(tx.vin[0].nSequence < 0xffffffff) tx.calc_sha256() b62 = update_block(62, [tx]) yield rejected(RejectResult(16, b'bad-txns-nonfinal')) # Test a non-final coinbase is also rejected # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b63 (-) # tip(60) b63 = block(63) b63.vtx[0].nLockTime = 0xffffffff b63.vtx[0].vin[0].nSequence = 0xDEADBEEF b63.vtx[0].rehash() b63 = update_block(63, []) yield rejected(RejectResult(16, b'bad-txns-nonfinal')) # This checks that a block with a bloated VARINT between the block_header and the array of tx such that # the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint, # does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not # care whether the bloated block is accepted or rejected; it only cares that the second block is accepted. # # What matters is that the receiving node should not reject the bloated block, and then reject the canonical # block on the basis that it's the same as an already-rejected block (which would be a consensus failure.) 
# # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) # \ # b64a (18) # b64a is a bloated block (non-canonical varint) # b64 is a good block (same as b64 but w/ canonical varint) # tip(60) regular_block = block("64a", spend=out[18]) # make it a "broken_block," with non-canonical serialization b64a = CBrokenBlock(regular_block) b64a.initialize(regular_block) self.blocks["64a"] = b64a self.tip = b64a tx = CTransaction() # use canonical serialization to calculate size script_length = MAX_BLOCK_BASE_SIZE - len(b64a.normal_serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0))) b64a = update_block("64a", [tx]) assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8) yield TestInstance([[self.tip, None]]) # comptool workaround: to make sure b64 is delivered, manually erase b64a from blockstore self.test.block_store.erase(b64a.sha256) tip(60) b64 = CBlock(b64a) b64.vtx = copy.deepcopy(b64a.vtx) assert_equal(b64.hash, b64a.hash) assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE) self.blocks[64] = b64 update_block(64, []) yield accepted() save_spendable_output() # Spend an output created in the block itself # # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) # tip(64) block(65) tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue) tx2 = create_and_sign_tx(tx1, 0, 0) update_block(65, [tx1, tx2]) yield accepted() save_spendable_output() # Attempt to spend an output created later in the same block # # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) # \-> b66 (20) tip(65) block(66) tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue) tx2 = create_and_sign_tx(tx1, 0, 1) update_block(66, [tx2, tx1]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Attempt to double-spend a 
transaction created in a block # # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) # \-> b67 (20) # # tip(65) block(67) tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue) tx2 = create_and_sign_tx(tx1, 0, 1) tx3 = create_and_sign_tx(tx1, 0, 2) update_block(67, [tx1, tx2, tx3]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # More tests of block subsidy # # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) # \-> b68 (20) # # b68 - coinbase with an extra 10 satoshis, # creates a tx that has 9 satoshis from out[20] go to fees # this fails because the coinbase is trying to claim 1 satoshi too much in fees # # b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee # this succeeds # tip(65) block(68, additional_coinbase_value=10) tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9) update_block(68, [tx]) yield rejected(RejectResult(16, b'bad-cb-amount')) tip(65) b69 = block(69, additional_coinbase_value=10) tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-10) update_block(69, [tx]) yield accepted() save_spendable_output() # Test spending the outpoint of a non-existent transaction # # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) # \-> b70 (21) # tip(69) block(70, spend=out[21]) bogus_tx = CTransaction() bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c") tx = CTransaction() tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff)) tx.vout.append(CTxOut(1, b"")) update_block(70, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks) # # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21) # \-> b71 (21) # # b72 is a good 
block. # b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b71. # tip(69) b72 = block(72) tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2) tx2 = create_and_sign_tx(tx1, 0, 1) b72 = update_block(72, [tx1, tx2]) # now tip is 72 b71 = copy.deepcopy(b72) b71.vtx.append(tx2) # add duplicate tx2 self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69 self.blocks[71] = b71 assert_equal(len(b71.vtx), 4) assert_equal(len(b72.vtx), 3) assert_equal(b72.sha256, b71.sha256) tip(71) yield rejected(RejectResult(16, b'bad-txns-duplicate')) tip(72) yield accepted() save_spendable_output() # Test some invalid scripts and MAX_BLOCK_SIGOPS # # -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21) # \-> b** (22) # # b73 - tx with excessive sigops that are placed after an excessively large script element. # The purpose of the test is to make sure those sigops are counted. # # script is a bytearray of size 20,526 # # bytearray[0-19,998] : OP_CHECKSIG # bytearray[19,999] : OP_PUSHDATA4 # bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format) # bytearray[20,004-20,525]: unread data (script_element) # bytearray[20,526] : OP_CHECKSIG (this puts us over the limit) # tip(72) b73 = block(73) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS - 1] = int("4e",16) # OP_PUSHDATA4 element_size = MAX_SCRIPT_ELEMENT_SIZE + 1 a[MAX_BLOCK_SIGOPS] = element_size % 256 a[MAX_BLOCK_SIGOPS+1] = element_size // 256 a[MAX_BLOCK_SIGOPS+2] = 0 a[MAX_BLOCK_SIGOPS+3] = 0 tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) b73 = update_block(73, [tx]) assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS+1) yield rejected(RejectResult(16, b'bad-blk-sigops')) # b74/75 - if we push an invalid script element, all prevous sigops are counted, # but sigops after the element are not counted. 
# # The invalid script element is that the push_data indicates that # there will be a large amount of data (0xffffff bytes), but we only # provide a much smaller number. These bytes are CHECKSIGS so they would # cause b75 to fail for excessive sigops, if those bytes were counted. # # b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element # b75 succeeds because we put MAX_BLOCK_SIGOPS before the element # # tip(72) b74 = block(74) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS] = 0x4e a[MAX_BLOCK_SIGOPS+1] = 0xfe a[MAX_BLOCK_SIGOPS+2] = 0xff a[MAX_BLOCK_SIGOPS+3] = 0xff a[MAX_BLOCK_SIGOPS+4] = 0xff tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) b74 = update_block(74, [tx]) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(72) b75 = block(75) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS-1] = 0x4e a[MAX_BLOCK_SIGOPS] = 0xff a[MAX_BLOCK_SIGOPS+1] = 0xff a[MAX_BLOCK_SIGOPS+2] = 0xff a[MAX_BLOCK_SIGOPS+3] = 0xff tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) b75 = update_block(75, [tx]) yield accepted() save_spendable_output() # Check that if we push an element filled with CHECKSIGs, they are not counted tip(75) b76 = block(76) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS-1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a)) b76 = update_block(76, [tx]) yield accepted() save_spendable_output() # Test transaction resurrection # # -> b77 (24) -> b78 (25) -> b79 (26) # \-> b80 (25) -> b81 (26) -> b82 (27) # # b78 creates a tx, which is spent in b79. After b82, both should be in mempool # # The tx'es must be unsigned and pass the node's mempool policy. 
It is unsigned for the # rather obscure reason that the Python signature code does not distinguish between # Low-S and High-S values (whereas the bitcoin code has custom code which does so); # as a result of which, the odds are 50% that the python code will use the right # value and the transaction will be accepted into the mempool. Until we modify the # test framework to support low-S signing, we are out of luck. # # To get around this issue, we construct transactions which are not signed and which # spend to OP_TRUE. If the standard-ness rules change, this test would need to be # updated. (Perhaps to spend to a P2SH OP_TRUE script) # tip(76) block(77) tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10*COIN) update_block(77, [tx77]) yield accepted() save_spendable_output() block(78) tx78 = create_tx(tx77, 0, 9*COIN) update_block(78, [tx78]) yield accepted() block(79) tx79 = create_tx(tx78, 0, 8*COIN) update_block(79, [tx79]) yield accepted() # mempool should be empty assert_equal(len(self.nodes[0].getrawmempool()), 0) tip(77) block(80, spend=out[25]) yield rejected() save_spendable_output() block(81, spend=out[26]) yield rejected() # other chain is same length save_spendable_output() block(82, spend=out[27]) yield accepted() # now this chain is longer, triggers re-org save_spendable_output() # now check that tx78 and tx79 have been put back into the peer's mempool mempool = self.nodes[0].getrawmempool() assert_equal(len(mempool), 2) assert(tx78.hash in mempool) assert(tx79.hash in mempool) # Test invalid opcodes in dead execution paths. 
# # -> b81 (26) -> b82 (27) -> b83 (28) # block(83) op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF] script = CScript(op_codes) tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script) tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE])) tx2.vin[0].scriptSig = CScript([OP_FALSE]) tx2.rehash() update_block(83, [tx1, tx2]) yield accepted() save_spendable_output() # Reorg on/off blocks that have OP_RETURN in them (and try to spend them) # # -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31) # \-> b85 (29) -> b86 (30) \-> b89a (32) # # block(84) tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN])) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.calc_sha256() self.sign_tx(tx1, out[29].tx, out[29].n) tx1.rehash() tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN])) tx2.vout.append(CTxOut(0, CScript([OP_RETURN]))) tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN])) tx3.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE])) tx4.vout.append(CTxOut(0, CScript([OP_RETURN]))) tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN])) update_block(84, [tx1,tx2,tx3,tx4,tx5]) yield accepted() save_spendable_output() tip(83) block(85, spend=out[29]) yield rejected() block(86, spend=out[30]) yield accepted() tip(84) block(87, spend=out[30]) yield rejected() save_spendable_output() block(88, spend=out[31]) yield accepted() save_spendable_output() # trying to spend the OP_RETURN output is rejected block("89a", spend=out[32]) tx = create_tx(tx1, 0, 0, CScript([OP_TRUE])) update_block("89a", [tx]) yield rejected() # Test re-org of a week's worth of blocks (1088 blocks) # This test takes a minute or two and can be accomplished in memory # if self.options.runbarelyexpensive: tip(88) LARGE_REORG_SIZE = 1088 test1 = TestInstance(sync_every_block=False) 
spend=out[32] for i in range(89, LARGE_REORG_SIZE + 89): b = block(i, spend) tx = CTransaction() script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0))) b = update_block(i, [tx]) assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE) test1.blocks_and_transactions.append([self.tip, True]) save_spendable_output() spend = get_spendable_output() yield test1 chain1_tip = i # now create alt chain of same length tip(88) test2 = TestInstance(sync_every_block=False) for i in range(89, LARGE_REORG_SIZE + 89): block("alt"+str(i)) test2.blocks_and_transactions.append([self.tip, False]) yield test2 # extend alt chain to trigger re-org block("alt" + str(chain1_tip + 1)) yield accepted() # ... and re-org back to the first chain tip(chain1_tip) block(chain1_tip + 1) yield rejected() block(chain1_tip + 2) yield accepted() chain1_tip += 2 if __name__ == '__main__': FullBlockTest().main()
40.8
131
0.5454
from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.comptool import TestManager, TestInstance, RejectResult from test_framework.blocktools import * import time from test_framework.key import CECKey from test_framework.script import * from test_framework.mininode import network_thread_start import struct class PreviousSpendableOutput(): def __init__(self, tx = CTransaction(), n = -1): self.tx = tx self.n = n # Use this class for tests that require behavior other than normal "mininode" behavior. # For now, it is used to serialize a bloated varint (b64). class CBrokenBlock(CBlock): def __init__(self, header=None): super(CBrokenBlock, self).__init__(header) def initialize(self, base_block): self.vtx = copy.deepcopy(base_block.vtx) self.hashMerkleRoot = self.calc_merkle_root() def serialize(self, with_witness=False): r = b"" r += super(CBlock, self).serialize() r += struct.pack("<BQ", 255, len(self.vtx)) for tx in self.vtx: if with_witness: r += tx.serialize_with_witness() else: r += tx.serialize_without_witness() return r def normal_serialize(self): r = b"" r += super(CBrokenBlock, self).serialize() return r class FullBlockTest(ComparisonTestFramework): # Can either run this test as 1 node with expected answers, or two and compare them. # Change the "outcome" variable from each TestInstance object to only do the comparison. 
def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True self.block_heights = {} self.coinbase_key = CECKey() self.coinbase_key.set_secretbytes(b"horsebattery") self.coinbase_pubkey = self.coinbase_key.get_pubkey() self.tip = None self.blocks = {} def add_options(self, parser): super().add_options(parser) parser.add_option("--runbarelyexpensive", dest="runbarelyexpensive", default=True) def run_test(self): self.test = TestManager(self, self.options.tmpdir) self.test.add_all_connections(self.nodes) network_thread_start() self.test.run() def add_transactions_to_block(self, block, tx_list): [ tx.rehash() for tx in tx_list ] block.vtx.extend(tx_list) # this is a little handier to use than the version in blocktools.py def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])): tx = create_transaction(spend_tx, n, b"", value, script) return tx # sign a transaction, using the key we know about # this signs input 0 in tx, which is assumed to be spending output n in spend_tx def sign_tx(self, tx, spend_tx, n): scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey) if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend tx.vin[0].scriptSig = CScript() return (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL) tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))]) def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])): tx = self.create_tx(spend_tx, n, value, script) self.sign_tx(tx, spend_tx, n) tx.rehash() return tx def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True): if self.tip == None: base_block_hash = self.genesis_hash block_time = int(time.time())+1 else: base_block_hash = self.tip.sha256 block_time = self.tip.nTime + 1 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) coinbase.vout[0].nValue += 
additional_coinbase_value coinbase.rehash() if spend == None: block = create_block(base_block_hash, coinbase, block_time) block.nVersion = 0x20000000 else: coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees coinbase.rehash() block = create_block(base_block_hash, coinbase, block_time) block.nVersion = 0x20000000 tx = create_transaction(spend.tx, spend.n, b"", 1, script) # spend 1 satoshi self.sign_tx(tx, spend.tx, spend.n) self.add_transactions_to_block(block, [tx]) block.hashMerkleRoot = block.calc_merkle_root() if solve: block.solve() self.tip = block self.block_heights[block.sha256] = height assert number not in self.blocks self.blocks[number] = block return block def get_tests(self): self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): spendable_outputs.append(self.tip) # get an output that we previously marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # returns a test case that asserts that the current tip was accepted def accepted(): return TestInstance([[self.tip, True]]) # returns a test case that asserts that the current tip was rejected def rejected(reject = None): if reject is None: return TestInstance([[self.tip, False]]) else: return TestInstance([[self.tip, reject]]) # move the tip back to a previous block def tip(number): self.tip = self.blocks[number] # adds transactions to the block and updates state def update_block(block_number, new_transactions): block = self.blocks[block_number] self.add_transactions_to_block(block, new_transactions) old_sha256 = block.sha256 block.hashMerkleRoot = block.calc_merkle_root() block.solve() # Update the internal state just like in next_block self.tip = block if block.sha256 != old_sha256: self.block_heights[block.sha256] = self.block_heights[old_sha256] 
del self.block_heights[old_sha256] self.blocks[block_number] = block return block # shorthand for functions block = self.next_block create_tx = self.create_tx create_and_sign_tx = self.create_and_sign_transaction # these must be updated if consensus changes MAX_BLOCK_SIGOPS = 20000 # Create a new block block(0) save_spendable_output() yield accepted() # Now we need that block to mature so we can spend the coinbase. test = TestInstance(sync_every_block=False) for i in range(99): block(5000 + i) test.blocks_and_transactions.append([self.tip, True]) save_spendable_output() yield test # collect spendable outputs now to avoid cluttering the code later on out = [] for i in range(33): out.append(get_spendable_output()) # Start by building a couple of blocks on top (which output is spent is # in parentheses): # genesis -> b1 (0) -> b2 (1) block(1, spend=out[0]) save_spendable_output() yield accepted() block(2, spend=out[1]) yield accepted() save_spendable_output() # so fork like this: # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) # # Nothing should happen at this point. We saw b2 first so it takes priority. tip(1) b3 = block(3, spend=out[1]) txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0) yield rejected() # Now we add another block to make the alternative chain longer. # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) -> b4 (2) block(4, spend=out[2]) yield accepted() # ... and back to the first chain. 
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b3 (1) -> b4 (2) tip(2) block(5, spend=out[2]) save_spendable_output() yield rejected() block(6, spend=out[3]) yield accepted() # Try to create a fork that double-spends # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b7 (2) -> b8 (4) # \-> b3 (1) -> b4 (2) tip(5) block(7, spend=out[2]) yield rejected() block(8, spend=out[4]) yield rejected() # Try to create a block that has too much fee # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b9 (4) # \-> b3 (1) -> b4 (2) tip(6) block(9, spend=out[4], additional_coinbase_value=1) yield rejected(RejectResult(16, b'bad-cb-amount')) # Create a fork that ends in a block with too much fee (the one that causes the reorg) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b10 (3) -> b11 (4) # \-> b3 (1) -> b4 (2) tip(5) block(10, spend=out[3]) yield rejected() block(11, spend=out[4], additional_coinbase_value=1) yield rejected(RejectResult(16, b'bad-cb-amount')) # Try again, but with a valid fork first # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b14 (5) # (b12 added last) # \-> b3 (1) -> b4 (2) tip(5) b12 = block(12, spend=out[3]) save_spendable_output() b13 = block(13, spend=out[4]) # Deliver the block header for b12, and the block b13. # b13 should be accepted but the tip won't advance until b12 is delivered. 
yield TestInstance([[CBlockHeader(b12), None], [b13, False]]) save_spendable_output() # Tip still can't advance because b12 is missing block(14, spend=out[5], additional_coinbase_value=1) yield rejected() yield TestInstance([[b12, True, b13.sha256]]) lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1)) tip(13) block(15, spend=out[5], script=lots_of_checksigs) yield accepted() save_spendable_output() too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS)) block(16, spend=out[6], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(15) block(17, spend=txout_b3) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) tip(13) block(18, spend=txout_b3) yield rejected() block(19, spend=out[6]) yield rejected() tip(15) block(20, spend=out[7]) yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase')) tip(13) block(21, spend=out[6]) yield rejected() block(22, spend=out[5]) yield rejected() tip(15) b23 = block(23, spend=out[6]) tx = CTransaction() script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0))) b23 = update_block(23, [tx]) assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE) yield accepted() save_spendable_output() tip(15) b24 = block(24, spend=out[6]) script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69 script_output = CScript([b'\x00' * (script_length+1)]) tx.vout = [CTxOut(0, script_output)] b24 = update_block(24, [tx]) assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE+1) yield rejected(RejectResult(16, b'bad-blk-length')) block(25, spend=out[7]) yield rejected() tip(15) b26 = block(26, spend=out[6]) b26.vtx[0].vin[0].scriptSig = b'\x00' b26.vtx[0].rehash() b26 = update_block(26, []) yield rejected(RejectResult(16, b'bad-cb-length')) block(27, spend=out[7]) yield rejected(False) # Now try a too-large-coinbase 
script tip(15) b28 = block(28, spend=out[6]) b28.vtx[0].vin[0].scriptSig = b'\x00' * 101 b28.vtx[0].rehash() b28 = update_block(28, []) yield rejected(RejectResult(16, b'bad-cb-length')) # Extend the b28 chain to make sure bitcoind isn't accepting b28 block(29, spend=out[7]) yield rejected(False) tip(23) b30 = block(30) b30.vtx[0].vin[0].scriptSig = b'\x00' * 100 b30.vtx[0].rehash() b30 = update_block(30, []) yield accepted() save_spendable_output() lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) b31 = block(31, spend=out[8], script=lots_of_multisigs) assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS) yield accepted() save_spendable_output() too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20)) b32 = block(32, spend=out[9], script=too_many_multisigs) assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(31) lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19) block(33, spend=out[9], script=lots_of_multisigs) yield accepted() save_spendable_output() too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20)) block(34, spend=out[10], script=too_many_multisigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(33) lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1)) b35 = block(35, spend=out[10], script=lots_of_checksigs) yield accepted() save_spendable_output() too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS)) block(36, spend=out[11], script=too_many_checksigs) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(35) b37 = block(37, spend=out[11]) txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0) tx = create_and_sign_tx(out[11].tx, out[11].n, 0) b37 = update_block(37, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # attempt to spend b37's first non-coinbase 
tx, at which point b37 was still considered valid tip(35) block(38, spend=txout_b37) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) tip(35) b39 = block(39) b39_outputs = 0 b39_sigops_per_output = 6 redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG]) redeem_script_hash = hash160(redeem_script) p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) spend = out[11] tx = create_tx(spend.tx, spend.n, 1, p2sh_script) tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE]))) self.sign_tx(tx, spend.tx, spend.n) tx.rehash() b39 = update_block(39, [tx]) b39_outputs += 1 tx_new = None tx_last = tx total_size=len(b39.serialize()) while(total_size < MAX_BLOCK_BASE_SIZE): tx_new = create_tx(tx_last, 1, 1, p2sh_script) tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE]))) tx_new.rehash() total_size += len(tx_new.serialize()) if total_size >= MAX_BLOCK_BASE_SIZE: break b39.vtx.append(tx_new) # add tx to block tx_last = tx_new b39_outputs += 1 b39 = update_block(39, []) yield accepted() save_spendable_output() # Test sigops in P2SH redeem scripts # # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops. 
tip(39) b40 = block(40, spend=out[12]) sigops = get_legacy_sigopcount_block(b40) numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output assert_equal(numTxes <= b39_outputs, True) lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) new_txs = [] for i in range(1, numTxes+1): tx = CTransaction() tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) (sighash, err) = SignatureHash(redeem_script, tx, 1, SIGHASH_ALL) sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL])) scriptSig = CScript([sig, redeem_script]) tx.vin[1].scriptSig = scriptSig tx.rehash() new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) tx.rehash() new_txs.append(tx) update_block(40, new_txs) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(39) block(41, spend=None) update_block(41, b40.vtx[1:-1]) b41_sigops_to_fill = b40_sigops_to_fill - 1 tx = CTransaction() tx.vin.append(CTxIn(lastOutpoint, b'')) tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) tx.rehash() update_block(41, [tx]) yield accepted() tip(39) block(42, spend=out[12]) yield rejected() save_spendable_output() block(43, spend=out[13]) yield accepted() save_spendable_output() # the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works. 
height = self.block_heights[self.tip.sha256] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) b44 = CBlock() b44.nVersion = 0x20000000 b44.nTime = self.tip.nTime + 1 b44.hashPrevBlock = self.tip.sha256 b44.nBits = 0x207fffff b44.vtx.append(coinbase) b44.hashMerkleRoot = b44.calc_merkle_root() b44.solve() self.tip = b44 self.block_heights[b44.sha256] = height self.blocks[44] = b44 yield accepted() # A block with a non-coinbase as the first tx non_coinbase = create_tx(out[15].tx, out[15].n, 1) b45 = CBlock() b45.nVersion = 0x20000000 b45.nTime = self.tip.nTime + 1 b45.hashPrevBlock = self.tip.sha256 b45.nBits = 0x207fffff b45.vtx.append(non_coinbase) b45.hashMerkleRoot = b45.calc_merkle_root() b45.calc_sha256() b45.solve() self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256]+1 self.tip = b45 self.blocks[45] = b45 yield rejected(RejectResult(16, b'bad-cb-missing')) # A block with no txns tip(44) b46 = CBlock() b46.nVersion = 0x20000000 b46.nTime = b44.nTime+1 b46.hashPrevBlock = b44.sha256 b46.nBits = 0x207fffff b46.vtx = [] b46.hashMerkleRoot = 0 b46.solve() self.block_heights[b46.sha256] = self.block_heights[b44.sha256]+1 self.tip = b46 assert 46 not in self.blocks self.blocks[46] = b46 s = ser_uint256(b46.hashMerkleRoot) yield rejected(RejectResult(16, b'bad-blk-length')) # Apsiocoin: Temporarily disable test # A block with invalid work #tip(44) #b47 = block(47, solve=False) #target = uint256_from_compact(b47.nBits) #while b47.scrypt256 < target: #changed > to < # b47.nNonce += 1 # b47.rehash() #yield rejected(RejectResult(16, b'high-hash')) # A block with timestamp > 2 hrs in the future tip(44) b48 = block(48, solve=False) b48.nTime = int(time.time()) + 60 * 60 * 3 b48.solve() yield rejected(RejectResult(16, b'time-too-new')) # A block with an invalid merkle hash tip(44) b49 = block(49) b49.hashMerkleRoot += 1 b49.solve() yield rejected(RejectResult(16, b'bad-txnmrklroot')) # A block with an incorrect POW limit tip(44) b50 = block(50) 
b50.nBits = b50.nBits - 1 b50.solve() yield rejected(RejectResult(16, b'bad-diffbits')) # A block with two coinbase txns tip(44) b51 = block(51) cb2 = create_coinbase(51, self.coinbase_pubkey) b51 = update_block(51, [cb2]) yield rejected(RejectResult(16, b'bad-cb-multiple')) # A block w/ duplicate txns # Note: txns have to be in the right position in the merkle tree to trigger this error tip(44) b52 = block(52, spend=out[15]) tx = create_tx(b52.vtx[1], 0, 1) b52 = update_block(52, [tx, tx]) yield rejected(RejectResult(16, b'bad-txns-duplicate')) # Test block timestamps # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) # \-> b54 (15) # tip(43) block(53, spend=out[14]) yield rejected() # rejected since b44 is at same height save_spendable_output() # invalid timestamp (b35 is 5 blocks back, so its time is MedianTimePast) b54 = block(54, spend=out[15]) b54.nTime = b35.nTime - 1 b54.solve() yield rejected(RejectResult(16, b'time-too-old')) # valid timestamp tip(53) b55 = block(55, spend=out[15]) b55.nTime = b35.nTime update_block(55, []) yield accepted() save_spendable_output() # Test CVE-2012-2459 # # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16) # \-> b57 (16) # \-> b56p2 (16) # \-> b56 (16) # # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without # affecting the merkle root of a block, while still invalidating it. # See: src/consensus/merkle.h # # b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx. # Result: OK # # b56 copies b57 but duplicates tx1 and does not recalculate the block hash. So it has a valid merkle # root but duplicate transactions. # Result: Fails # # b57p2 has six transactions in its merkle tree: # - coinbase, tx, tx1, tx2, tx3, tx4 # Merkle root calculation will duplicate as necessary. # Result: OK. # # b56p2 copies b57p2 but adds both tx3 and tx4. 
The purpose of the test is to make sure the code catches # duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates # that the error was caught early, avoiding a DOS vulnerability.) # b57 - a good block with 2 txs, don't submit until end tip(55) b57 = block(57) tx = create_and_sign_tx(out[16].tx, out[16].n, 1) tx1 = create_tx(tx, 0, 1) b57 = update_block(57, [tx, tx1]) tip(55) b56 = copy.deepcopy(b57) self.blocks[56] = b56 assert_equal(len(b56.vtx),3) b56 = update_block(56, [tx1]) assert_equal(b56.hash, b57.hash) yield rejected(RejectResult(16, b'bad-txns-duplicate')) tip(55) b57p2 = block("57p2") tx = create_and_sign_tx(out[16].tx, out[16].n, 1) tx1 = create_tx(tx, 0, 1) tx2 = create_tx(tx1, 0, 1) tx3 = create_tx(tx2, 0, 1) tx4 = create_tx(tx3, 0, 1) b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4]) tip(55) b56p2 = copy.deepcopy(b57p2) self.blocks["b56p2"] = b56p2 assert_equal(b56p2.hash, b57p2.hash) assert_equal(len(b56p2.vtx),6) b56p2 = update_block("b56p2", [tx3, tx4]) yield rejected(RejectResult(16, b'bad-txns-duplicate')) tip("57p2") yield accepted() tip(57) yield rejected() #rejected because 57p2 seen first save_spendable_output() # Test a few invalid tx types # # -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> ??? 
(17) # # tx with prevout.n out of range tip(57) b58 = block(58, spend=out[17]) tx = CTransaction() assert(len(out[17].tx.vout) < 42) tx.vin.append(CTxIn(COutPoint(out[17].tx.sha256, 42), CScript([OP_TRUE]), 0xffffffff)) tx.vout.append(CTxOut(0, b"")) tx.calc_sha256() b58 = update_block(58, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # tx with output value > input value out of range tip(57) b59 = block(59) tx = create_and_sign_tx(out[17].tx, out[17].n, 51*COIN) b59 = update_block(59, [tx]) yield rejected(RejectResult(16, b'bad-txns-in-belowout')) # reset to good chain tip(57) b60 = block(60, spend=out[17]) yield accepted() save_spendable_output() # Test BIP30 # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b61 (18) # # Blocks are not allowed to contain a transaction whose id matches that of an earlier, # not-fully-spent transaction in the same chain. To test, make identical coinbases; # the second one should be rejected. 
# tip(60) b61 = block(61, spend=out[18]) b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig #equalize the coinbases b61.vtx[0].rehash() b61 = update_block(61, []) assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize()) yield rejected(RejectResult(16, b'bad-txns-BIP30')) # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests) # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) # \-> b62 (18) # tip(60) b62 = block(62) tx = CTransaction() tx.nLockTime = 0xffffffff #this locktime is non-final assert(out[18].n < len(out[18].tx.vout)) tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256, out[18].n))) # don't set nSequence tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) assert(tx.vin[0].nSequence < 0xffffffff) tx.calc_sha256() b62 = update_block(62, [tx]) yield rejected(RejectResult(16, b'bad-txns-nonfinal')) tip(60) b63 = block(63) b63.vtx[0].nLockTime = 0xffffffff b63.vtx[0].vin[0].nSequence = 0xDEADBEEF b63.vtx[0].rehash() b63 = update_block(63, []) yield rejected(RejectResult(16, b'bad-txns-nonfinal')) # # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) # \ # b64a (18) # b64a is a bloated block (non-canonical varint) # b64 is a good block (same as b64 but w/ canonical varint) # tip(60) regular_block = block("64a", spend=out[18]) # make it a "broken_block," with non-canonical serialization b64a = CBrokenBlock(regular_block) b64a.initialize(regular_block) self.blocks["64a"] = b64a self.tip = b64a tx = CTransaction() # use canonical serialization to calculate size script_length = MAX_BLOCK_BASE_SIZE - len(b64a.normal_serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0))) b64a = update_block("64a", [tx]) assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8) yield TestInstance([[self.tip, None]]) # 
comptool workaround: to make sure b64 is delivered, manually erase b64a from blockstore self.test.block_store.erase(b64a.sha256) tip(60) b64 = CBlock(b64a) b64.vtx = copy.deepcopy(b64a.vtx) assert_equal(b64.hash, b64a.hash) assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE) self.blocks[64] = b64 update_block(64, []) yield accepted() save_spendable_output() # Spend an output created in the block itself # # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) # tip(64) block(65) tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue) tx2 = create_and_sign_tx(tx1, 0, 0) update_block(65, [tx1, tx2]) yield accepted() save_spendable_output() # Attempt to spend an output created later in the same block # # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) # \-> b66 (20) tip(65) block(66) tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue) tx2 = create_and_sign_tx(tx1, 0, 1) update_block(66, [tx2, tx1]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Attempt to double-spend a transaction created in a block # # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) # \-> b67 (20) # # tip(65) block(67) tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue) tx2 = create_and_sign_tx(tx1, 0, 1) tx3 = create_and_sign_tx(tx1, 0, 2) update_block(67, [tx1, tx2, tx3]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # More tests of block subsidy # # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) # \-> b68 (20) # # b68 - coinbase with an extra 10 satoshis, # creates a tx that has 9 satoshis from out[20] go to fees # this fails because the coinbase is trying to claim 1 satoshi too much in fees # # b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee # this succeeds # tip(65) block(68, 
additional_coinbase_value=10) tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9) update_block(68, [tx]) yield rejected(RejectResult(16, b'bad-cb-amount')) tip(65) b69 = block(69, additional_coinbase_value=10) tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-10) update_block(69, [tx]) yield accepted() save_spendable_output() # Test spending the outpoint of a non-existent transaction # # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) # \-> b70 (21) # tip(69) block(70, spend=out[21]) bogus_tx = CTransaction() bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c") tx = CTransaction() tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff)) tx.vout.append(CTxOut(1, b"")) update_block(70, [tx]) yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) # Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks) # # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21) # \-> b71 (21) # # b72 is a good block. # b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b71. 
# tip(69) b72 = block(72) tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2) tx2 = create_and_sign_tx(tx1, 0, 1) b72 = update_block(72, [tx1, tx2]) # now tip is 72 b71 = copy.deepcopy(b72) b71.vtx.append(tx2) # add duplicate tx2 self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69 self.blocks[71] = b71 assert_equal(len(b71.vtx), 4) assert_equal(len(b72.vtx), 3) assert_equal(b72.sha256, b71.sha256) tip(71) yield rejected(RejectResult(16, b'bad-txns-duplicate')) tip(72) yield accepted() save_spendable_output() # Test some invalid scripts and MAX_BLOCK_SIGOPS # # -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21) # \-> b** (22) # # b73 - tx with excessive sigops that are placed after an excessively large script element. # The purpose of the test is to make sure those sigops are counted. # # script is a bytearray of size 20,526 # # bytearray[0-19,998] : OP_CHECKSIG # bytearray[19,999] : OP_PUSHDATA4 # bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format) # bytearray[20,004-20,525]: unread data (script_element) # bytearray[20,526] : OP_CHECKSIG (this puts us over the limit) # tip(72) b73 = block(73) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS - 1] = int("4e",16) # OP_PUSHDATA4 element_size = MAX_SCRIPT_ELEMENT_SIZE + 1 a[MAX_BLOCK_SIGOPS] = element_size % 256 a[MAX_BLOCK_SIGOPS+1] = element_size // 256 a[MAX_BLOCK_SIGOPS+2] = 0 a[MAX_BLOCK_SIGOPS+3] = 0 tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) b73 = update_block(73, [tx]) assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS+1) yield rejected(RejectResult(16, b'bad-blk-sigops')) # b74/75 - if we push an invalid script element, all prevous sigops are counted, # but sigops after the element are not counted. 
# # The invalid script element is that the push_data indicates that # there will be a large amount of data (0xffffff bytes), but we only # provide a much smaller number. These bytes are CHECKSIGS so they would # cause b75 to fail for excessive sigops, if those bytes were counted. # # b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element # b75 succeeds because we put MAX_BLOCK_SIGOPS before the element # # tip(72) b74 = block(74) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS] = 0x4e a[MAX_BLOCK_SIGOPS+1] = 0xfe a[MAX_BLOCK_SIGOPS+2] = 0xff a[MAX_BLOCK_SIGOPS+3] = 0xff a[MAX_BLOCK_SIGOPS+4] = 0xff tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) b74 = update_block(74, [tx]) yield rejected(RejectResult(16, b'bad-blk-sigops')) tip(72) b75 = block(75) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS-1] = 0x4e a[MAX_BLOCK_SIGOPS] = 0xff a[MAX_BLOCK_SIGOPS+1] = 0xff a[MAX_BLOCK_SIGOPS+2] = 0xff a[MAX_BLOCK_SIGOPS+3] = 0xff tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) b75 = update_block(75, [tx]) yield accepted() save_spendable_output() # Check that if we push an element filled with CHECKSIGs, they are not counted tip(75) b76 = block(76) size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 a = bytearray([OP_CHECKSIG] * size) a[MAX_BLOCK_SIGOPS-1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a)) b76 = update_block(76, [tx]) yield accepted() save_spendable_output() # Test transaction resurrection # # -> b77 (24) -> b78 (25) -> b79 (26) # \-> b80 (25) -> b81 (26) -> b82 (27) # # b78 creates a tx, which is spent in b79. After b82, both should be in mempool # # The tx'es must be unsigned and pass the node's mempool policy. 
It is unsigned for the # rather obscure reason that the Python signature code does not distinguish between # Low-S and High-S values (whereas the bitcoin code has custom code which does so); # as a result of which, the odds are 50% that the python code will use the right # value and the transaction will be accepted into the mempool. Until we modify the # test framework to support low-S signing, we are out of luck. # # To get around this issue, we construct transactions which are not signed and which # spend to OP_TRUE. If the standard-ness rules change, this test would need to be # updated. (Perhaps to spend to a P2SH OP_TRUE script) # tip(76) block(77) tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10*COIN) update_block(77, [tx77]) yield accepted() save_spendable_output() block(78) tx78 = create_tx(tx77, 0, 9*COIN) update_block(78, [tx78]) yield accepted() block(79) tx79 = create_tx(tx78, 0, 8*COIN) update_block(79, [tx79]) yield accepted() # mempool should be empty assert_equal(len(self.nodes[0].getrawmempool()), 0) tip(77) block(80, spend=out[25]) yield rejected() save_spendable_output() block(81, spend=out[26]) yield rejected() # other chain is same length save_spendable_output() block(82, spend=out[27]) yield accepted() # now this chain is longer, triggers re-org save_spendable_output() # now check that tx78 and tx79 have been put back into the peer's mempool mempool = self.nodes[0].getrawmempool() assert_equal(len(mempool), 2) assert(tx78.hash in mempool) assert(tx79.hash in mempool) block(83) op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF] script = CScript(op_codes) tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script) tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE])) tx2.vin[0].scriptSig = CScript([OP_FALSE]) tx2.rehash() update_block(83, [tx1, tx2]) yield accepted() save_spendable_output() block(84) tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN])) tx1.vout.append(CTxOut(0, 
CScript([OP_TRUE]))) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx1.calc_sha256() self.sign_tx(tx1, out[29].tx, out[29].n) tx1.rehash() tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN])) tx2.vout.append(CTxOut(0, CScript([OP_RETURN]))) tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN])) tx3.vout.append(CTxOut(0, CScript([OP_TRUE]))) tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE])) tx4.vout.append(CTxOut(0, CScript([OP_RETURN]))) tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN])) update_block(84, [tx1,tx2,tx3,tx4,tx5]) yield accepted() save_spendable_output() tip(83) block(85, spend=out[29]) yield rejected() block(86, spend=out[30]) yield accepted() tip(84) block(87, spend=out[30]) yield rejected() save_spendable_output() block(88, spend=out[31]) yield accepted() save_spendable_output() block("89a", spend=out[32]) tx = create_tx(tx1, 0, 0, CScript([OP_TRUE])) update_block("89a", [tx]) yield rejected() # This test takes a minute or two and can be accomplished in memory # if self.options.runbarelyexpensive: tip(88) LARGE_REORG_SIZE = 1088 test1 = TestInstance(sync_every_block=False) spend=out[32] for i in range(89, LARGE_REORG_SIZE + 89): b = block(i, spend) tx = CTransaction() script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69 script_output = CScript([b'\x00' * script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0))) b = update_block(i, [tx]) assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE) test1.blocks_and_transactions.append([self.tip, True]) save_spendable_output() spend = get_spendable_output() yield test1 chain1_tip = i # now create alt chain of same length tip(88) test2 = TestInstance(sync_every_block=False) for i in range(89, LARGE_REORG_SIZE + 89): block("alt"+str(i)) test2.blocks_and_transactions.append([self.tip, False]) yield test2 # extend alt chain to trigger re-org block("alt" + str(chain1_tip + 1)) 
yield accepted() # ... and re-org back to the first chain tip(chain1_tip) block(chain1_tip + 1) yield rejected() block(chain1_tip + 2) yield accepted() chain1_tip += 2 if __name__ == '__main__': FullBlockTest().main()
true
true
f73bf710cd9d1456b44da0f6400136e906dc7444
11,494
py
Python
lib/galaxy/files/__init__.py
rhpvorderman/galaxy
178015f8eff0b0c7a59c0d6756658f6428222837
[ "CC-BY-3.0" ]
47
2015-10-21T23:30:30.000Z
2022-03-09T06:51:32.000Z
lib/galaxy/files/__init__.py
rhpvorderman/galaxy
178015f8eff0b0c7a59c0d6756658f6428222837
[ "CC-BY-3.0" ]
20
2015-09-30T18:56:40.000Z
2019-04-12T19:32:59.000Z
lib/galaxy/files/__init__.py
rhpvorderman/galaxy
178015f8eff0b0c7a59c0d6756658f6428222837
[ "CC-BY-3.0" ]
35
2015-10-30T13:09:40.000Z
2021-05-03T23:17:46.000Z
import logging import os from collections import ( defaultdict, namedtuple, ) from galaxy import exceptions from galaxy.util import ( plugin_config ) log = logging.getLogger(__name__) FileSourcePath = namedtuple('FileSourcePath', ['file_source', 'path']) class ConfiguredFileSources: """Load plugins and resolve Galaxy URIs to FileSource objects.""" def __init__(self, file_sources_config, conf_file=None, conf_dict=None, load_stock_plugins=False): self._file_sources_config = file_sources_config self._plugin_classes = self._file_source_plugins_dict() file_sources = [] if conf_file is not None: file_sources = self._load_plugins_from_file(conf_file) elif conf_dict is not None: plugin_source = plugin_config.plugin_source_from_dict(conf_dict) file_sources = self._parse_plugin_source(plugin_source) else: file_sources = [] custom_sources_configured = len(file_sources) > 0 if load_stock_plugins: stock_file_source_conf_dict = [] def _ensure_loaded(plugin_type): for file_source in file_sources: if file_source.plugin_type == plugin_type: return stock_file_source_conf_dict.append({'type': plugin_type}) if file_sources_config.ftp_upload_dir is not None: _ensure_loaded('gxftp') if file_sources_config.library_import_dir is not None: _ensure_loaded('gximport') if file_sources_config.user_library_import_dir is not None: _ensure_loaded('gxuserimport') if stock_file_source_conf_dict: stock_plugin_source = plugin_config.plugin_source_from_dict(stock_file_source_conf_dict) # insert at begining instead of append so FTP and library import appear # at the top of the list (presumably the most common options). Admins can insert # these explicitly for greater control. 
file_sources = self._parse_plugin_source(stock_plugin_source) + file_sources self._file_sources = file_sources self.custom_sources_configured = custom_sources_configured def _load_plugins_from_file(self, conf_file): plugin_source = plugin_config.plugin_source_from_path(conf_file) return self._parse_plugin_source(plugin_source) def _file_source_plugins_dict(self): import galaxy.files.sources return plugin_config.plugins_dict(galaxy.files.sources, 'plugin_type') def _parse_plugin_source(self, plugin_source): extra_kwds = { 'file_sources_config': self._file_sources_config, } return plugin_config.load_plugins( self._plugin_classes, plugin_source, extra_kwds, dict_to_list_key="id", ) def get_file_source_path(self, uri): """Parse uri into a FileSource object and a path relative to its base.""" if "://" not in uri: raise exceptions.RequestParameterInvalidException(f"Invalid uri [{uri}]") scheme, rest = uri.split("://", 1) if scheme not in self.get_schemes(): raise exceptions.RequestParameterInvalidException(f"Unsupported URI scheme [{scheme}]") if scheme != "gxfiles": # prefix unused id_prefix = None path = rest else: if "/" in rest: id_prefix, path = rest.split("/", 1) else: id_prefix, path = rest, "/" file_source = self.get_file_source(id_prefix, scheme) return FileSourcePath(file_source, path) def validate_uri_root(self, uri, user_context): # validate a URI against Galaxy's configuration, environment, and the current # user. Throw appropriate exception if there is a problem with the files source # referenced by the URI. 
if uri.startswith("gxuserimport://"): user_login = user_context.email user_base_dir = self._file_sources_config.user_library_import_dir if user_base_dir is None: raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.') full_import_dir = os.path.join(user_base_dir, user_login) if not os.path.exists(full_import_dir): raise exceptions.ObjectNotFound('Your user import directory does not exist.') elif uri.startswith("gximport://"): base_dir = self._file_sources_config.library_import_dir if base_dir is None: raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow usage of import directory.') elif uri.startswith("gxftp://"): user_ftp_base_dir = self._file_sources_config.ftp_upload_dir if user_ftp_base_dir is None: raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from FTP directories.') user_ftp_dir = user_context.ftp_dir if not user_ftp_dir or not os.path.exists(user_ftp_dir): raise exceptions.ObjectNotFound('Your FTP directory does not exist, attempting to upload files to it may cause it to be created.') def get_file_source(self, id_prefix, scheme): for file_source in self._file_sources: # gxfiles uses prefix to find plugin, other scheme are assumed to have # at most one file_source. 
if scheme != file_source.get_scheme(): continue prefix_match = scheme != "gxfiles" or file_source.get_prefix() == id_prefix if prefix_match: return file_source def looks_like_uri(self, path_or_uri): # is this string a URI this object understands how to realize if path_or_uri.startswith("gx") and "://" in path_or_uri: for scheme in self.get_schemes(): if path_or_uri.startswith(f"{scheme}://"): return True return False def get_schemes(self): schemes = set() for file_source in self._file_sources: schemes.add(file_source.get_scheme()) return schemes def plugins_to_dict(self, for_serialization=False, user_context=None): rval = [] for file_source in self._file_sources: el = file_source.to_dict(for_serialization=for_serialization, user_context=user_context) rval.append(el) return rval def to_dict(self, for_serialization=False, user_context=None): return { 'file_sources': self.plugins_to_dict(for_serialization=for_serialization, user_context=user_context), 'config': self._file_sources_config.to_dict() } @staticmethod def from_app_config(config): config_file = config.file_sources_config_file if not os.path.exists(config_file): config_file = None file_sources_config = ConfiguredFileSourcesConfig.from_app_config(config) return ConfiguredFileSources(file_sources_config, config_file, load_stock_plugins=True) @staticmethod def from_dict(as_dict): if as_dict is not None: sources_as_dict = as_dict["file_sources"] config_as_dict = as_dict["config"] file_sources_config = ConfiguredFileSourcesConfig.from_dict(config_as_dict) else: sources_as_dict = [] file_sources_config = ConfiguredFileSourcesConfig() return ConfiguredFileSources(file_sources_config, conf_dict=sources_as_dict) class ConfiguredFileSourcesConfig: def __init__(self, symlink_allowlist=None, library_import_dir=None, user_library_import_dir=None, ftp_upload_dir=None, ftp_upload_purge=True): symlink_allowlist = symlink_allowlist or [] self.symlink_allowlist = symlink_allowlist self.library_import_dir = library_import_dir 
self.user_library_import_dir = user_library_import_dir self.ftp_upload_dir = ftp_upload_dir self.ftp_upload_purge = ftp_upload_purge @staticmethod def from_app_config(config): # Formalize what we read in from config to create a more clear interface # for this component. kwds = {} kwds["symlink_allowlist"] = getattr(config, "user_library_import_symlink_allowlist", []) kwds["library_import_dir"] = getattr(config, "library_import_dir", None) kwds["user_library_import_dir"] = getattr(config, "user_library_import_dir", None) kwds["ftp_upload_dir"] = getattr(config, "ftp_upload_dir", None) kwds["ftp_upload_purge"] = getattr(config, "ftp_upload_purge", True) return ConfiguredFileSourcesConfig(**kwds) def to_dict(self): return { 'symlink_allowlist': self.symlink_allowlist, 'library_import_dir': self.library_import_dir, 'user_library_import_dir': self.user_library_import_dir, 'ftp_upload_dir': self.ftp_upload_dir, 'ftp_upload_purge': self.ftp_upload_purge, } @staticmethod def from_dict(as_dict): return ConfiguredFileSourcesConfig( symlink_allowlist=as_dict['symlink_allowlist'], library_import_dir=as_dict['library_import_dir'], user_library_import_dir=as_dict['user_library_import_dir'], ftp_upload_dir=as_dict['ftp_upload_dir'], ftp_upload_purge=as_dict['ftp_upload_purge'], ) class ProvidesUserFileSourcesUserContext: """Implement a FileSourcesUserContext from a Galaxy ProvidesUserContext (e.g. 
trans).""" def __init__(self, trans): self.trans = trans @property def email(self): user = self.trans.user return user and user.email @property def username(self): user = self.trans.user return user and user.username @property def ftp_dir(self): return self.trans.user_ftp_dir @property def preferences(self): user = self.trans.user return user and user.extra_preferences or defaultdict(lambda: None) @property def role_names(self): """The set of role names of this user.""" user = self.trans.user return user and set([ura.role.name for ura in user.roles]) @property def group_names(self): """The set of group names to which this user belongs.""" user = self.trans.user return user and set([ugr.group.name for ugr in user.groups]) @property def is_admin(self): """Whether this user is an administrator.""" return self.trans.user_is_admin class DictFileSourcesUserContext: def __init__(self, **kwd): self._kwd = kwd @property def email(self): return self._kwd.get("email") @property def username(self): return self._kwd.get("username") @property def ftp_dir(self): return self._kwd.get("user_ftp_dir") @property def preferences(self): return self._kwd.get("preferences") @property def role_names(self): return self._kwd.get("role_names") @property def group_names(self): return self._kwd.get("group_names") @property def is_admin(self): return self._kwd.get("is_admin")
38.831081
150
0.66339
import logging import os from collections import ( defaultdict, namedtuple, ) from galaxy import exceptions from galaxy.util import ( plugin_config ) log = logging.getLogger(__name__) FileSourcePath = namedtuple('FileSourcePath', ['file_source', 'path']) class ConfiguredFileSources: def __init__(self, file_sources_config, conf_file=None, conf_dict=None, load_stock_plugins=False): self._file_sources_config = file_sources_config self._plugin_classes = self._file_source_plugins_dict() file_sources = [] if conf_file is not None: file_sources = self._load_plugins_from_file(conf_file) elif conf_dict is not None: plugin_source = plugin_config.plugin_source_from_dict(conf_dict) file_sources = self._parse_plugin_source(plugin_source) else: file_sources = [] custom_sources_configured = len(file_sources) > 0 if load_stock_plugins: stock_file_source_conf_dict = [] def _ensure_loaded(plugin_type): for file_source in file_sources: if file_source.plugin_type == plugin_type: return stock_file_source_conf_dict.append({'type': plugin_type}) if file_sources_config.ftp_upload_dir is not None: _ensure_loaded('gxftp') if file_sources_config.library_import_dir is not None: _ensure_loaded('gximport') if file_sources_config.user_library_import_dir is not None: _ensure_loaded('gxuserimport') if stock_file_source_conf_dict: stock_plugin_source = plugin_config.plugin_source_from_dict(stock_file_source_conf_dict) file_sources = self._parse_plugin_source(stock_plugin_source) + file_sources self._file_sources = file_sources self.custom_sources_configured = custom_sources_configured def _load_plugins_from_file(self, conf_file): plugin_source = plugin_config.plugin_source_from_path(conf_file) return self._parse_plugin_source(plugin_source) def _file_source_plugins_dict(self): import galaxy.files.sources return plugin_config.plugins_dict(galaxy.files.sources, 'plugin_type') def _parse_plugin_source(self, plugin_source): extra_kwds = { 'file_sources_config': self._file_sources_config, } return 
plugin_config.load_plugins( self._plugin_classes, plugin_source, extra_kwds, dict_to_list_key="id", ) def get_file_source_path(self, uri): if "://" not in uri: raise exceptions.RequestParameterInvalidException(f"Invalid uri [{uri}]") scheme, rest = uri.split("://", 1) if scheme not in self.get_schemes(): raise exceptions.RequestParameterInvalidException(f"Unsupported URI scheme [{scheme}]") if scheme != "gxfiles": id_prefix = None path = rest else: if "/" in rest: id_prefix, path = rest.split("/", 1) else: id_prefix, path = rest, "/" file_source = self.get_file_source(id_prefix, scheme) return FileSourcePath(file_source, path) def validate_uri_root(self, uri, user_context): # user. Throw appropriate exception if there is a problem with the files source # referenced by the URI. if uri.startswith("gxuserimport://"): user_login = user_context.email user_base_dir = self._file_sources_config.user_library_import_dir if user_base_dir is None: raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.') full_import_dir = os.path.join(user_base_dir, user_login) if not os.path.exists(full_import_dir): raise exceptions.ObjectNotFound('Your user import directory does not exist.') elif uri.startswith("gximport://"): base_dir = self._file_sources_config.library_import_dir if base_dir is None: raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow usage of import directory.') elif uri.startswith("gxftp://"): user_ftp_base_dir = self._file_sources_config.ftp_upload_dir if user_ftp_base_dir is None: raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from FTP directories.') user_ftp_dir = user_context.ftp_dir if not user_ftp_dir or not os.path.exists(user_ftp_dir): raise exceptions.ObjectNotFound('Your FTP directory does not exist, attempting to upload files to it may cause it to be created.') def 
get_file_source(self, id_prefix, scheme): for file_source in self._file_sources: # gxfiles uses prefix to find plugin, other scheme are assumed to have # at most one file_source. if scheme != file_source.get_scheme(): continue prefix_match = scheme != "gxfiles" or file_source.get_prefix() == id_prefix if prefix_match: return file_source def looks_like_uri(self, path_or_uri): # is this string a URI this object understands how to realize if path_or_uri.startswith("gx") and "://" in path_or_uri: for scheme in self.get_schemes(): if path_or_uri.startswith(f"{scheme}://"): return True return False def get_schemes(self): schemes = set() for file_source in self._file_sources: schemes.add(file_source.get_scheme()) return schemes def plugins_to_dict(self, for_serialization=False, user_context=None): rval = [] for file_source in self._file_sources: el = file_source.to_dict(for_serialization=for_serialization, user_context=user_context) rval.append(el) return rval def to_dict(self, for_serialization=False, user_context=None): return { 'file_sources': self.plugins_to_dict(for_serialization=for_serialization, user_context=user_context), 'config': self._file_sources_config.to_dict() } @staticmethod def from_app_config(config): config_file = config.file_sources_config_file if not os.path.exists(config_file): config_file = None file_sources_config = ConfiguredFileSourcesConfig.from_app_config(config) return ConfiguredFileSources(file_sources_config, config_file, load_stock_plugins=True) @staticmethod def from_dict(as_dict): if as_dict is not None: sources_as_dict = as_dict["file_sources"] config_as_dict = as_dict["config"] file_sources_config = ConfiguredFileSourcesConfig.from_dict(config_as_dict) else: sources_as_dict = [] file_sources_config = ConfiguredFileSourcesConfig() return ConfiguredFileSources(file_sources_config, conf_dict=sources_as_dict) class ConfiguredFileSourcesConfig: def __init__(self, symlink_allowlist=None, library_import_dir=None, user_library_import_dir=None, 
ftp_upload_dir=None, ftp_upload_purge=True): symlink_allowlist = symlink_allowlist or [] self.symlink_allowlist = symlink_allowlist self.library_import_dir = library_import_dir self.user_library_import_dir = user_library_import_dir self.ftp_upload_dir = ftp_upload_dir self.ftp_upload_purge = ftp_upload_purge @staticmethod def from_app_config(config): # Formalize what we read in from config to create a more clear interface # for this component. kwds = {} kwds["symlink_allowlist"] = getattr(config, "user_library_import_symlink_allowlist", []) kwds["library_import_dir"] = getattr(config, "library_import_dir", None) kwds["user_library_import_dir"] = getattr(config, "user_library_import_dir", None) kwds["ftp_upload_dir"] = getattr(config, "ftp_upload_dir", None) kwds["ftp_upload_purge"] = getattr(config, "ftp_upload_purge", True) return ConfiguredFileSourcesConfig(**kwds) def to_dict(self): return { 'symlink_allowlist': self.symlink_allowlist, 'library_import_dir': self.library_import_dir, 'user_library_import_dir': self.user_library_import_dir, 'ftp_upload_dir': self.ftp_upload_dir, 'ftp_upload_purge': self.ftp_upload_purge, } @staticmethod def from_dict(as_dict): return ConfiguredFileSourcesConfig( symlink_allowlist=as_dict['symlink_allowlist'], library_import_dir=as_dict['library_import_dir'], user_library_import_dir=as_dict['user_library_import_dir'], ftp_upload_dir=as_dict['ftp_upload_dir'], ftp_upload_purge=as_dict['ftp_upload_purge'], ) class ProvidesUserFileSourcesUserContext: def __init__(self, trans): self.trans = trans @property def email(self): user = self.trans.user return user and user.email @property def username(self): user = self.trans.user return user and user.username @property def ftp_dir(self): return self.trans.user_ftp_dir @property def preferences(self): user = self.trans.user return user and user.extra_preferences or defaultdict(lambda: None) @property def role_names(self): user = self.trans.user return user and set([ura.role.name for ura in 
user.roles]) @property def group_names(self): user = self.trans.user return user and set([ugr.group.name for ugr in user.groups]) @property def is_admin(self): return self.trans.user_is_admin class DictFileSourcesUserContext: def __init__(self, **kwd): self._kwd = kwd @property def email(self): return self._kwd.get("email") @property def username(self): return self._kwd.get("username") @property def ftp_dir(self): return self._kwd.get("user_ftp_dir") @property def preferences(self): return self._kwd.get("preferences") @property def role_names(self): return self._kwd.get("role_names") @property def group_names(self): return self._kwd.get("group_names") @property def is_admin(self): return self._kwd.get("is_admin")
true
true
f73bf8dbbefdfad519cdebbeddc7dbf2a4f595ae
20,759
py
Python
src/sage/rings/polynomial/pbori/gbcore.py
hsm207/sage
020bd59ec28717bfab9af44d2231c53da1ff99f1
[ "BSL-1.0" ]
1
2021-10-18T01:24:04.000Z
2021-10-18T01:24:04.000Z
src/sage/rings/polynomial/pbori/gbcore.py
hsm207/sage
020bd59ec28717bfab9af44d2231c53da1ff99f1
[ "BSL-1.0" ]
null
null
null
src/sage/rings/polynomial/pbori/gbcore.py
hsm207/sage
020bd59ec28717bfab9af44d2231c53da1ff99f1
[ "BSL-1.0" ]
null
null
null
from .nf import GeneratorLimitExceeded, symmGB_F2_C, symmGB_F2_python from .PyPolyBoRi import (Monomial, Polynomial, GroebnerStrategy, OrderCode, ll_red_nf_redsb) from .ll import eliminate, ll_encode from copy import copy from itertools import chain from .statistics import used_vars_set from .heuristics import dense_system, gauss_on_linear from .easy_polynomials import easy_linear_polynomials from .interpolate import lex_groebner_basis_for_polynomial_via_variety from .fglm import _fglm from inspect import getfullargspec as getargspec def get_options_from_function(f): (argnames, varargs, varopts, defaults) = getargspec(f)[:4] return dict( zip( argnames[-len(defaults):], defaults)) def filter_oldstyle_options(**options): filtered = dict() for key in options.keys(): newkey = key for prefix in ['', 'use_', 'opt_allow_', 'opt_']: newkey = newkey.replace(prefix, '') filtered[newkey] = options[key] return filtered def filter_newstyle_options(func, **options): allowed = get_options_from_function(func).keys() filtered = dict() for key in options.keys(): for prefix in ['', 'use_', 'opt_', 'opt_allow_']: if prefix + key in allowed: filtered[prefix + key] = options[key] return filtered def owns_one_constant(I): """ Determine whether I contains the constant one polynomial. 
""" return any(p.is_one() for p in I) def want_interpolation_gb(G): if not G: return False if G[0].ring().get_order_code() != OrderCode.lp: return False if len(G) != 1: return False p = Polynomial(G[0]) if p.lead_deg() <= 1: return False if p.set().n_nodes() > 1000: return False return True def ll_is_good(I): lex_lead = set() for p in I: if not p.is_zero(): m = p.lex_lead() if m.deg() == 1: lex_lead.add(next(iter(m.variables())).index()) if len(lex_lead) >= 0.8 * len(I): uv = used_vars_set(I).deg() # don't use len here, which will yield 1 if len(lex_lead) > 0.9 * uv: if uv - len(lex_lead) > 16: return "llfirstonthefly" else: return "llfirst" return False def ll_heuristic(d): d = copy(d) I = d["I"] if ("llfirstonthefly" not in d) and ("llfirst" not in d): hint = ll_is_good(I) if hint: d[hint] = True return d def change_order_heuristic(d): d_orig = d d = copy(d) I = d["I"] if not I: return d switch_table = {OrderCode.lp: OrderCode.dp_asc, OrderCode.dlex: OrderCode. dp_asc} if "other_ordering_first" not in d: # TODO after ll situation might look much different, so heuristic is on # wrong place code = next(iter(I)).ring().get_order_code() if code in switch_table: max_non_linear = len(I) // 2 non_linear = 0 if code == OrderCode.lp: for p in I: if p.lead_deg() > 1: non_linear = non_linear + 1 if non_linear > max_non_linear: break if (non_linear > max_non_linear) or (code != OrderCode.lp): other_ordering_opts = copy(d_orig) other_ordering_opts["switch_to"] = switch_table[code] d["other_ordering_first"] = other_ordering_opts return d def interpolation_gb_heuristic(d): d = copy(d) I = d["I"] if not d.get("other_ordering_opts", False) and want_interpolation_gb(I): d["interpolation_gb"] = True d["other_ordering_first"] = False return d def linear_algebra_heuristic(d): d = copy(d) I = d["I"] def want_la(): if not I: return False n_used_vars = None bound = None if next(iter(I)).ring().has_degree_order(): new_bound = 200 n_used_vars = used_vars_set(I, bound=new_bound).deg() if 
n_used_vars < new_bound: return True bound = new_bound if dense_system(I): new_bound = 100 if not (bound and new_bound < bound): n_used_vars = used_vars_set(I, bound=new_bound).deg() bound = new_bound if n_used_vars < bound: return True return False if not (("faugere" in d and not d["faugere"]) or ("noro" in d and d["noro"])): if ("faugere" in d and d["faugere"]) or want_la(): d["faugere"] = True if "red_tail" not in d: d["red_tail"] = False if "selection_size" not in d: d["selection_size"] = 10000 if "ll" not in d: d["ll"] = True return d def trivial_heuristic(d): return d class HeuristicalFunction(object): def __call__(self, *args, **kwds): complete_dict = copy(kwds) heuristic = True try: heuristic = complete_dict["heuristic"] except KeyError: pass for (k, v) in zip(self.argnames, args): complete_dict[k] = v if heuristic: complete_dict = self.heuristicFunction(complete_dict) return self.f(**complete_dict) def __init__(self, f, heuristic_function): self.argnames, self.varargs, self.varopts, self.defaults = getargspec(f)[:4] if hasattr(f, "options"): self.options = f.options else: self.options = dict(zip(self.argnames[-len(self.defaults):], self. 
defaults)) self.heuristicFunction = heuristic_function self.f = f self.__doc__ = f.__doc__ def with_heuristic(heuristic_function): def make_wrapper(f): wrapped = HeuristicalFunction(f, heuristic_function) wrapped.__name__ = f.__name__ return wrapped return make_wrapper def clean_polys(I): I = list(set((Polynomial(p) for p in I if not Polynomial(p).is_zero()))) return I def clean_polys_pre(I): return (clean_polys(I), None) def gb_with_pre_post_option(option, pre=None, post=None, if_not_option=tuple(), default=False): def make_wrapper(f): def wrapper(I, **kwds): prot = kwds.get("prot", False) for o in if_not_option: if (o in kwds and kwds[o]) or (o not in kwds and groebner_basis.options[o]): option_set = False if "option_set" not in locals(): if option in kwds: option_set = kwds[option] else: option_set = default kwds = dict(((o, kwds[o]) for o in kwds if o != option)) state = None if option_set: if pre: pre_args = getargspec(pre)[0] if prot: print("preprocessing for option:", option) local_symbols = copy(locals()) (I, state) = pre(**dict([(k, v) for (k, v) in local_symbols.items() if k in pre_args])) I = f(I, **kwds) if option_set: if post: post_args = getargspec(post)[0] if prot: print("postprocessing for option:", option) local_symbols = copy(locals()) I = post(**{k: v for (k, v) in local_symbols.items() if k in post_args}) return I wrapper.__name__ = f.__name__ wrapper.__doc__ = f.__doc__ if hasattr(f, "options"): wrapper.options = copy(f.options) else: wrapper.options = get_options_from_function(f) wrapper.options[option] = default return wrapper return make_wrapper def redsb_post(I, state): if I == []: return [] else: return I.minimalize_and_tail_reduce() def minsb_post(I, state): if I == []: return [] else: return I.minimalize() def invert_all(I): return [p.map_every_x_to_x_plus_one() for p in I] def invert_all_pre(I): return (invert_all(I), None) def invert_all_post(I, state): return invert_all(I) def llfirst_pre(I, prot): (eliminated, llnf, I) = eliminate(I, 
on_the_fly=False, prot=prot) return (I, eliminated) def ll_constants_pre(I): ll_res = [] while len([p for p in I if p.lex_lead_deg() == 1 and (p + p.lex_lead()).constant()]) > 0: I_new = [] ll = [] leads = set() for p in I: if p.lex_lead_deg() == 1: l = p.lead() if not (l in leads) and p.is_singleton_or_pair(): tail = p + l if tail.deg() <= 0: ll.append(p) leads.add(l) continue I_new.append(p) encoded = ll_encode(ll) reduced = [] for p in I_new: p = ll_red_nf_redsb(p, encoded) if not p.is_zero(): reduced.append(p) I = reduced ll_res.extend(ll) return (I, ll_res) def variety_size_from_gb(I): """ TESTS:: sage: from sage.rings.polynomial.pbori.frontend import * sage: from sage.rings.polynomial.pbori.gbcore import variety_size_from_gb sage: r=Ring(100) sage: x = r.variable sage: variety_size_from_gb([]) 1 sage: variety_size_from_gb([Polynomial(0, r)]) 1 sage: variety_size_from_gb([Polynomial(1, r)]) 0.0 sage: variety_size_from_gb([x(1)]) 1.0 sage: variety_size_from_gb([x(1), x(2)]) 1.0 sage: variety_size_from_gb([x(1), x(2)*x(3)]) 3.0 sage: variety_size_from_gb([x(1), x(1)*x(4), x(2)*x(3)]) 6.0 sage: variety_size_from_gb([x(1)*x(2), x(2)*x(3)]) 5.0 sage: mons = [Monomial([r.variable(i) for i in range(100) if i!=j])\ for j in range(100)] sage: variety_size_from_gb(mons) 1.2676506002282294e+30 """ I = [Polynomial(p) for p in I] I = [p for p in I if not p.is_zero()] if len(I) == 0: return 1 # # TODO Here's something wrong! See the example with 5 solutions. 
# # (reverting for now) # number_of_used_vars = used_vars_set(I).deg() # leads = set([p.lead() for p in I]) # minimal_leads = BooleSet(leads).minimal_elements() # number_of_used_vars_minimal_leads =\ # minimal_leads.vars().deg() # standard_monomials =\ # minimal_leads.include_divisors().diff(minimal_leads) # return standard_monomials.size_double()*\ # 2**(number_of_used_vars-number_of_used_vars_minimal_leads) sm = Monomial(used_vars_set(I)).divisors() for p in I: m = p.lead() sm = sm.diff(sm.multiples_of(m)) return sm.size_double() def other_ordering_pre(I, option_set, kwds): """ TESTS:: sage: from sage.rings.polynomial.pbori.blocks import declare_ring sage: r = declare_ring(['x0', 'x1', 'x2', 'x3', 'x4'], globals()) sage: id = [x1*x3 + x1 + x2*x3 + x3 + x4, x0*x3 + x0 + x1*x2 + x2 + 1, x1*x3 + x1*x4 + x3*x4 + x4 + 1, x0*x2 + x0*x4 + x1 + x3 + x4] sage: from sage.rings.polynomial.pbori.gbcore import groebner_basis sage: groebner_basis(id) [1] """ if not I: return (I, None) main_kwds = kwds options = option_set old_ring = next(iter(I)).ring() try: new_ring = old_ring.clone(ordering=options["switch_to"]) kwds = {k: options[k] for k in options if k not in ("other_ordering_first", "switch_to", "I")} kwds["redsb"] = True I = groebner_basis([new_ring(poly) for poly in I], **kwds) variety_size = variety_size_from_gb(I) fglm_bound = options.get("fglm_bound") or groebner_basis.options["fglm_bound"] if variety_size < fglm_bound: main_kwds["convert_with_fglm_from_ring"] = new_ring main_kwds["convert_with_fglm_to_ring"] = old_ring else: I = [old_ring(poly) for poly in I] finally: pass return (I, None) def llfirstonthefly_pre(I, prot): (eliminated, llnf, I) = eliminate(I, on_the_fly=True) return (I, eliminated) def gauss_on_linear_pre(I, prot): return (gauss_on_linear(I), None) def easy_linear_polynomials_pre(I): res = [] for p in I: res.append(p) res.extend(easy_linear_polynomials(p)) return (list(set(res)), None) def llfirst_post(I, state, prot, kwds): eliminated = state for 
p in I: if p.is_one(): return [p] else: if len(eliminated) > 0: I = list(chain(I, eliminated)) # redsb just for safety, as don't know how option is set kwds = copy(kwds) kwds.update( dict(llfirst=False, llfirstonthefly=False, ll_constants=False, deg_bound=False, other_ordering_first=False, eliminate_identical_variables=False, redsb=True)) I = groebner_basis( I, **kwds ) return I def ll_constants_post(I, state): eliminated = state for p in I: if p.is_one(): return [p] else: if len(eliminated) > 0: I = list(chain(I, eliminated)) # redsb just for safety, as don't know how option is set return I def result_to_list_post(I, state): return list(I) def fix_deg_bound_post(I, state): if isinstance(I, GroebnerStrategy): return I.all_generators() else: return I def incremental_pre(I, prot, kwds): def sort_key(p): p = Polynomial(p) return (p.navigation().value(), -p.deg()) I = sorted(I, key=sort_key) inc_sys = [] kwds = copy(kwds) kwds['incremental'] = False for p in I[:-1]: inc_sys.append(p) inc_sys = groebner_basis(inc_sys, **kwds) if prot: print("incrementally calculating GB, adding generator:", p) inc_sys.append(I[:-1]) return (inc_sys, None) def eliminate_identical_variables_pre(I, prot): changed = True ll_system = [] treated_linears = set() while changed: changed = False rules = dict() for p in I: t = p + p.lead() if p.lead_deg() == 1: l = p.lead() if l in treated_linears: continue else: treated_linears.add(l) if t.deg() > 0: rules.setdefault(t, []) leads = rules[t] leads.append(l) def my_sort_key(l): return l.navigation().value() for (t, leads) in rules.items(): if len(leads) > 1: changed = True leads = sorted(leads, key=my_sort_key, reverse=True) chosen = leads[0] for v in leads[1:]: ll_system.append(chosen + v) if len(ll_system) > 0: ll_encoded = ll_encode(ll_system, reduce=True) I = set([ll_red_nf_redsb(p, ll_encoded) for p in I]) return (I, ll_system) @gb_with_pre_post_option("clean_arguments", pre=clean_polys_pre, default=True) 
@gb_with_pre_post_option("easy_linear_polynomials", pre=easy_linear_polynomials_pre, default=True) @gb_with_pre_post_option("result_to_list", post=result_to_list_post, default=True) @with_heuristic(interpolation_gb_heuristic) @gb_with_pre_post_option("invert", pre=invert_all_pre, post=invert_all_post, default=False) @gb_with_pre_post_option("gauss_on_linear", pre=gauss_on_linear_pre, default=True) @gb_with_pre_post_option("ll_constants", pre=ll_constants_pre, post=ll_constants_post, default=True) @gb_with_pre_post_option("eliminate_identical_variables", pre=eliminate_identical_variables_pre, post=llfirst_post, default=True) @with_heuristic(ll_heuristic) @gb_with_pre_post_option("llfirst", if_not_option=["llfirstonthefly"], pre=llfirst_pre, post=llfirst_post, default=False) @gb_with_pre_post_option("llfirstonthefly", pre=llfirstonthefly_pre, post=llfirst_post, default=False) @gb_with_pre_post_option("incremental", pre=incremental_pre) @with_heuristic(change_order_heuristic) @gb_with_pre_post_option("other_ordering_first", if_not_option=[ "interpolation_gb"], pre=other_ordering_pre, default=False) @with_heuristic(linear_algebra_heuristic) @gb_with_pre_post_option("fix_deg_bound", if_not_option=["interpolation_gb"], post=fix_deg_bound_post, default=True) @gb_with_pre_post_option("minsb", post=minsb_post, if_not_option=["redsb", "deg_bound", "interpolation_gb", "convert_with_fglm_from_ring"], default=True) @gb_with_pre_post_option("redsb", post=redsb_post, if_not_option=["deg_bound", "interpolation_gb", "convert_with_fglm_from_ring"], default=True) def groebner_basis(I, heuristic=True, unique_ideal_generator=False, interpolation_gb=False, clean_and_restart_algorithm=False, convert_with_fglm_from_ring=None, convert_with_fglm_to_ring=None, fglm_bound=40000, modified_linear_algebra=True, preprocessor=None, deg_bound=False, implementation="Python", full_prot=False, prot=False, draw_matrices=False, preprocess_only=False, **impl_options): """Computes a Groebner basis of a 
given ideal I, w.r.t options.""" if not I: return I if full_prot: prot = True if prot: print("number of passed generators:", len(I)) if convert_with_fglm_from_ring is not None: from_ring = convert_with_fglm_from_ring to_ring = convert_with_fglm_to_ring return _fglm(I, from_ring, to_ring) if interpolation_gb: first = next(iter(I)) if len(I) != 1 or first.ring().get_order_code() != OrderCode.lp: raise ValueError return lex_groebner_basis_for_polynomial_via_variety(first) if deg_bound is False: deg_bound = 100000000 I = [Polynomial(p) for p in I if not p.is_zero()] if unique_ideal_generator and I: prod = 1 for p in I: prod = (p + 1) * prod I = [prod + 1] if implementation == "Python": implementation = symmGB_F2_python else: implementation = symmGB_F2_C # custom preprocessing if preprocessor: I = preprocessor(I) if preprocess_only: for p in I: print(p) import sys sys.exit(0) def call_algorithm(I, max_generators=None): return implementation(I, deg_bound=deg_bound, full_prot=full_prot, prot=prot, max_generators=max_generators, draw_matrices=draw_matrices, **filter_newstyle_options(implementation, **impl_options)) if clean_and_restart_algorithm: for max_generators in [1000, 10000, 50000, 100000, 200000, 300000, 400000, None]: try: return call_algorithm(I, max_generators=max_generators) except GeneratorLimitExceeded as e: I = list(e.strat.all_generators()) del e.strat if prot: print("generator limit exceeded:", max_generators, "restarting algorithm") else: return call_algorithm(I) def build_groebner_basis_doc_string(): additional_options_from_buchberger = filter_oldstyle_options(** get_options_from_function(symmGB_F2_python)) for k in list(additional_options_from_buchberger): if k in groebner_basis.options: del additional_options_from_buchberger[k] groebner_basis.__doc__ = (groebner_basis.__doc__ + "\nOptions are:\n" + "\n".join((k + " : " + repr(groebner_basis.options[k]) for k in groebner_basis.options)) + """ Turn off heuristic by setting heuristic=False Additional 
options come from the actual buchberger implementation. In case of our standard Python implementation these are the following: """ + "\n".join((k + " : " + repr(additional_options_from_buchberger[k]) for k in additional_options_from_buchberger))) build_groebner_basis_doc_string() def _test(): import doctest doctest.testmod() if __name__ == "__main__": _test()
30.983582
141
0.58736
from .nf import GeneratorLimitExceeded, symmGB_F2_C, symmGB_F2_python from .PyPolyBoRi import (Monomial, Polynomial, GroebnerStrategy, OrderCode, ll_red_nf_redsb) from .ll import eliminate, ll_encode from copy import copy from itertools import chain from .statistics import used_vars_set from .heuristics import dense_system, gauss_on_linear from .easy_polynomials import easy_linear_polynomials from .interpolate import lex_groebner_basis_for_polynomial_via_variety from .fglm import _fglm from inspect import getfullargspec as getargspec def get_options_from_function(f): (argnames, varargs, varopts, defaults) = getargspec(f)[:4] return dict( zip( argnames[-len(defaults):], defaults)) def filter_oldstyle_options(**options): filtered = dict() for key in options.keys(): newkey = key for prefix in ['', 'use_', 'opt_allow_', 'opt_']: newkey = newkey.replace(prefix, '') filtered[newkey] = options[key] return filtered def filter_newstyle_options(func, **options): allowed = get_options_from_function(func).keys() filtered = dict() for key in options.keys(): for prefix in ['', 'use_', 'opt_', 'opt_allow_']: if prefix + key in allowed: filtered[prefix + key] = options[key] return filtered def owns_one_constant(I): return any(p.is_one() for p in I) def want_interpolation_gb(G): if not G: return False if G[0].ring().get_order_code() != OrderCode.lp: return False if len(G) != 1: return False p = Polynomial(G[0]) if p.lead_deg() <= 1: return False if p.set().n_nodes() > 1000: return False return True def ll_is_good(I): lex_lead = set() for p in I: if not p.is_zero(): m = p.lex_lead() if m.deg() == 1: lex_lead.add(next(iter(m.variables())).index()) if len(lex_lead) >= 0.8 * len(I): uv = used_vars_set(I).deg() if len(lex_lead) > 0.9 * uv: if uv - len(lex_lead) > 16: return "llfirstonthefly" else: return "llfirst" return False def ll_heuristic(d): d = copy(d) I = d["I"] if ("llfirstonthefly" not in d) and ("llfirst" not in d): hint = ll_is_good(I) if hint: d[hint] = True return d def 
change_order_heuristic(d): d_orig = d d = copy(d) I = d["I"] if not I: return d switch_table = {OrderCode.lp: OrderCode.dp_asc, OrderCode.dlex: OrderCode. dp_asc} if "other_ordering_first" not in d: # TODO after ll situation might look much different, so heuristic is on # wrong place code = next(iter(I)).ring().get_order_code() if code in switch_table: max_non_linear = len(I) // 2 non_linear = 0 if code == OrderCode.lp: for p in I: if p.lead_deg() > 1: non_linear = non_linear + 1 if non_linear > max_non_linear: break if (non_linear > max_non_linear) or (code != OrderCode.lp): other_ordering_opts = copy(d_orig) other_ordering_opts["switch_to"] = switch_table[code] d["other_ordering_first"] = other_ordering_opts return d def interpolation_gb_heuristic(d): d = copy(d) I = d["I"] if not d.get("other_ordering_opts", False) and want_interpolation_gb(I): d["interpolation_gb"] = True d["other_ordering_first"] = False return d def linear_algebra_heuristic(d): d = copy(d) I = d["I"] def want_la(): if not I: return False n_used_vars = None bound = None if next(iter(I)).ring().has_degree_order(): new_bound = 200 n_used_vars = used_vars_set(I, bound=new_bound).deg() if n_used_vars < new_bound: return True bound = new_bound if dense_system(I): new_bound = 100 if not (bound and new_bound < bound): n_used_vars = used_vars_set(I, bound=new_bound).deg() bound = new_bound if n_used_vars < bound: return True return False if not (("faugere" in d and not d["faugere"]) or ("noro" in d and d["noro"])): if ("faugere" in d and d["faugere"]) or want_la(): d["faugere"] = True if "red_tail" not in d: d["red_tail"] = False if "selection_size" not in d: d["selection_size"] = 10000 if "ll" not in d: d["ll"] = True return d def trivial_heuristic(d): return d class HeuristicalFunction(object): def __call__(self, *args, **kwds): complete_dict = copy(kwds) heuristic = True try: heuristic = complete_dict["heuristic"] except KeyError: pass for (k, v) in zip(self.argnames, args): complete_dict[k] = v if 
heuristic: complete_dict = self.heuristicFunction(complete_dict) return self.f(**complete_dict) def __init__(self, f, heuristic_function): self.argnames, self.varargs, self.varopts, self.defaults = getargspec(f)[:4] if hasattr(f, "options"): self.options = f.options else: self.options = dict(zip(self.argnames[-len(self.defaults):], self. defaults)) self.heuristicFunction = heuristic_function self.f = f self.__doc__ = f.__doc__ def with_heuristic(heuristic_function): def make_wrapper(f): wrapped = HeuristicalFunction(f, heuristic_function) wrapped.__name__ = f.__name__ return wrapped return make_wrapper def clean_polys(I): I = list(set((Polynomial(p) for p in I if not Polynomial(p).is_zero()))) return I def clean_polys_pre(I): return (clean_polys(I), None) def gb_with_pre_post_option(option, pre=None, post=None, if_not_option=tuple(), default=False): def make_wrapper(f): def wrapper(I, **kwds): prot = kwds.get("prot", False) for o in if_not_option: if (o in kwds and kwds[o]) or (o not in kwds and groebner_basis.options[o]): option_set = False if "option_set" not in locals(): if option in kwds: option_set = kwds[option] else: option_set = default kwds = dict(((o, kwds[o]) for o in kwds if o != option)) state = None if option_set: if pre: pre_args = getargspec(pre)[0] if prot: print("preprocessing for option:", option) local_symbols = copy(locals()) (I, state) = pre(**dict([(k, v) for (k, v) in local_symbols.items() if k in pre_args])) I = f(I, **kwds) if option_set: if post: post_args = getargspec(post)[0] if prot: print("postprocessing for option:", option) local_symbols = copy(locals()) I = post(**{k: v for (k, v) in local_symbols.items() if k in post_args}) return I wrapper.__name__ = f.__name__ wrapper.__doc__ = f.__doc__ if hasattr(f, "options"): wrapper.options = copy(f.options) else: wrapper.options = get_options_from_function(f) wrapper.options[option] = default return wrapper return make_wrapper def redsb_post(I, state): if I == []: return [] else: return 
I.minimalize_and_tail_reduce() def minsb_post(I, state): if I == []: return [] else: return I.minimalize() def invert_all(I): return [p.map_every_x_to_x_plus_one() for p in I] def invert_all_pre(I): return (invert_all(I), None) def invert_all_post(I, state): return invert_all(I) def llfirst_pre(I, prot): (eliminated, llnf, I) = eliminate(I, on_the_fly=False, prot=prot) return (I, eliminated) def ll_constants_pre(I): ll_res = [] while len([p for p in I if p.lex_lead_deg() == 1 and (p + p.lex_lead()).constant()]) > 0: I_new = [] ll = [] leads = set() for p in I: if p.lex_lead_deg() == 1: l = p.lead() if not (l in leads) and p.is_singleton_or_pair(): tail = p + l if tail.deg() <= 0: ll.append(p) leads.add(l) continue I_new.append(p) encoded = ll_encode(ll) reduced = [] for p in I_new: p = ll_red_nf_redsb(p, encoded) if not p.is_zero(): reduced.append(p) I = reduced ll_res.extend(ll) return (I, ll_res) def variety_size_from_gb(I): I = [Polynomial(p) for p in I] I = [p for p in I if not p.is_zero()] if len(I) == 0: return 1 # # TODO Here's something wrong! See the example with 5 solutions. 
onomial(used_vars_set(I)).divisors() for p in I: m = p.lead() sm = sm.diff(sm.multiples_of(m)) return sm.size_double() def other_ordering_pre(I, option_set, kwds): if not I: return (I, None) main_kwds = kwds options = option_set old_ring = next(iter(I)).ring() try: new_ring = old_ring.clone(ordering=options["switch_to"]) kwds = {k: options[k] for k in options if k not in ("other_ordering_first", "switch_to", "I")} kwds["redsb"] = True I = groebner_basis([new_ring(poly) for poly in I], **kwds) variety_size = variety_size_from_gb(I) fglm_bound = options.get("fglm_bound") or groebner_basis.options["fglm_bound"] if variety_size < fglm_bound: main_kwds["convert_with_fglm_from_ring"] = new_ring main_kwds["convert_with_fglm_to_ring"] = old_ring else: I = [old_ring(poly) for poly in I] finally: pass return (I, None) def llfirstonthefly_pre(I, prot): (eliminated, llnf, I) = eliminate(I, on_the_fly=True) return (I, eliminated) def gauss_on_linear_pre(I, prot): return (gauss_on_linear(I), None) def easy_linear_polynomials_pre(I): res = [] for p in I: res.append(p) res.extend(easy_linear_polynomials(p)) return (list(set(res)), None) def llfirst_post(I, state, prot, kwds): eliminated = state for p in I: if p.is_one(): return [p] else: if len(eliminated) > 0: I = list(chain(I, eliminated)) kwds = copy(kwds) kwds.update( dict(llfirst=False, llfirstonthefly=False, ll_constants=False, deg_bound=False, other_ordering_first=False, eliminate_identical_variables=False, redsb=True)) I = groebner_basis( I, **kwds ) return I def ll_constants_post(I, state): eliminated = state for p in I: if p.is_one(): return [p] else: if len(eliminated) > 0: I = list(chain(I, eliminated)) # redsb just for safety, as don't know how option is set return I def result_to_list_post(I, state): return list(I) def fix_deg_bound_post(I, state): if isinstance(I, GroebnerStrategy): return I.all_generators() else: return I def incremental_pre(I, prot, kwds): def sort_key(p): p = Polynomial(p) return 
(p.navigation().value(), -p.deg()) I = sorted(I, key=sort_key) inc_sys = [] kwds = copy(kwds) kwds['incremental'] = False for p in I[:-1]: inc_sys.append(p) inc_sys = groebner_basis(inc_sys, **kwds) if prot: print("incrementally calculating GB, adding generator:", p) inc_sys.append(I[:-1]) return (inc_sys, None) def eliminate_identical_variables_pre(I, prot): changed = True ll_system = [] treated_linears = set() while changed: changed = False rules = dict() for p in I: t = p + p.lead() if p.lead_deg() == 1: l = p.lead() if l in treated_linears: continue else: treated_linears.add(l) if t.deg() > 0: rules.setdefault(t, []) leads = rules[t] leads.append(l) def my_sort_key(l): return l.navigation().value() for (t, leads) in rules.items(): if len(leads) > 1: changed = True leads = sorted(leads, key=my_sort_key, reverse=True) chosen = leads[0] for v in leads[1:]: ll_system.append(chosen + v) if len(ll_system) > 0: ll_encoded = ll_encode(ll_system, reduce=True) I = set([ll_red_nf_redsb(p, ll_encoded) for p in I]) return (I, ll_system) @gb_with_pre_post_option("clean_arguments", pre=clean_polys_pre, default=True) @gb_with_pre_post_option("easy_linear_polynomials", pre=easy_linear_polynomials_pre, default=True) @gb_with_pre_post_option("result_to_list", post=result_to_list_post, default=True) @with_heuristic(interpolation_gb_heuristic) @gb_with_pre_post_option("invert", pre=invert_all_pre, post=invert_all_post, default=False) @gb_with_pre_post_option("gauss_on_linear", pre=gauss_on_linear_pre, default=True) @gb_with_pre_post_option("ll_constants", pre=ll_constants_pre, post=ll_constants_post, default=True) @gb_with_pre_post_option("eliminate_identical_variables", pre=eliminate_identical_variables_pre, post=llfirst_post, default=True) @with_heuristic(ll_heuristic) @gb_with_pre_post_option("llfirst", if_not_option=["llfirstonthefly"], pre=llfirst_pre, post=llfirst_post, default=False) @gb_with_pre_post_option("llfirstonthefly", pre=llfirstonthefly_pre, post=llfirst_post, 
default=False) @gb_with_pre_post_option("incremental", pre=incremental_pre) @with_heuristic(change_order_heuristic) @gb_with_pre_post_option("other_ordering_first", if_not_option=[ "interpolation_gb"], pre=other_ordering_pre, default=False) @with_heuristic(linear_algebra_heuristic) @gb_with_pre_post_option("fix_deg_bound", if_not_option=["interpolation_gb"], post=fix_deg_bound_post, default=True) @gb_with_pre_post_option("minsb", post=minsb_post, if_not_option=["redsb", "deg_bound", "interpolation_gb", "convert_with_fglm_from_ring"], default=True) @gb_with_pre_post_option("redsb", post=redsb_post, if_not_option=["deg_bound", "interpolation_gb", "convert_with_fglm_from_ring"], default=True) def groebner_basis(I, heuristic=True, unique_ideal_generator=False, interpolation_gb=False, clean_and_restart_algorithm=False, convert_with_fglm_from_ring=None, convert_with_fglm_to_ring=None, fglm_bound=40000, modified_linear_algebra=True, preprocessor=None, deg_bound=False, implementation="Python", full_prot=False, prot=False, draw_matrices=False, preprocess_only=False, **impl_options): if not I: return I if full_prot: prot = True if prot: print("number of passed generators:", len(I)) if convert_with_fglm_from_ring is not None: from_ring = convert_with_fglm_from_ring to_ring = convert_with_fglm_to_ring return _fglm(I, from_ring, to_ring) if interpolation_gb: first = next(iter(I)) if len(I) != 1 or first.ring().get_order_code() != OrderCode.lp: raise ValueError return lex_groebner_basis_for_polynomial_via_variety(first) if deg_bound is False: deg_bound = 100000000 I = [Polynomial(p) for p in I if not p.is_zero()] if unique_ideal_generator and I: prod = 1 for p in I: prod = (p + 1) * prod I = [prod + 1] if implementation == "Python": implementation = symmGB_F2_python else: implementation = symmGB_F2_C if preprocessor: I = preprocessor(I) if preprocess_only: for p in I: print(p) import sys sys.exit(0) def call_algorithm(I, max_generators=None): return implementation(I, 
deg_bound=deg_bound, full_prot=full_prot, prot=prot, max_generators=max_generators, draw_matrices=draw_matrices, **filter_newstyle_options(implementation, **impl_options)) if clean_and_restart_algorithm: for max_generators in [1000, 10000, 50000, 100000, 200000, 300000, 400000, None]: try: return call_algorithm(I, max_generators=max_generators) except GeneratorLimitExceeded as e: I = list(e.strat.all_generators()) del e.strat if prot: print("generator limit exceeded:", max_generators, "restarting algorithm") else: return call_algorithm(I) def build_groebner_basis_doc_string(): additional_options_from_buchberger = filter_oldstyle_options(** get_options_from_function(symmGB_F2_python)) for k in list(additional_options_from_buchberger): if k in groebner_basis.options: del additional_options_from_buchberger[k] groebner_basis.__doc__ = (groebner_basis.__doc__ + "\nOptions are:\n" + "\n".join((k + " : " + repr(groebner_basis.options[k]) for k in groebner_basis.options)) + """ Turn off heuristic by setting heuristic=False Additional options come from the actual buchberger implementation. In case of our standard Python implementation these are the following: """ + "\n".join((k + " : " + repr(additional_options_from_buchberger[k]) for k in additional_options_from_buchberger))) build_groebner_basis_doc_string() def _test(): import doctest doctest.testmod() if __name__ == "__main__": _test()
true
true
f73bfaa83b6b251ff6deddf45ecfca51a29d7b1e
289
py
Python
admin_helper/urls.py
truetug/django-admin-helper
a760725610243dedd36c903d4c7d30141e241890
[ "MIT" ]
null
null
null
admin_helper/urls.py
truetug/django-admin-helper
a760725610243dedd36c903d4c7d30141e241890
[ "MIT" ]
null
null
null
admin_helper/urls.py
truetug/django-admin-helper
a760725610243dedd36c903d4c7d30141e241890
[ "MIT" ]
null
null
null
# encoding: utf-8 from django.conf.urls import patterns, url from admin_helper.views import PopupView, SuggestView urlpatterns = patterns( 'admin_helper.views', url(r'^suggest/$', SuggestView.as_view(), name='suggest'), url(r'^popup/$', PopupView.as_view(), name='popup') )
24.083333
62
0.705882
from django.conf.urls import patterns, url from admin_helper.views import PopupView, SuggestView urlpatterns = patterns( 'admin_helper.views', url(r'^suggest/$', SuggestView.as_view(), name='suggest'), url(r'^popup/$', PopupView.as_view(), name='popup') )
true
true
f73bfb19701981ec4b479660a9438d906fc3faea
1,137
py
Python
pandasread_csv_addheader.py
SecTraversl/python_pandas_Tools
4b818c993e1587c150d8f94e0c8c6437516deee4
[ "MIT" ]
null
null
null
pandasread_csv_addheader.py
SecTraversl/python_pandas_Tools
4b818c993e1587c150d8f94e0c8c6437516deee4
[ "MIT" ]
null
null
null
pandasread_csv_addheader.py
SecTraversl/python_pandas_Tools
4b818c993e1587c150d8f94e0c8c6437516deee4
[ "MIT" ]
null
null
null
# %% ####################################### def pandasread_csv_addheader(csv_file: str, addheader: list): """Returns a pandas dataframe from the given .csv file. Assumes there is no header in the .csv and requires a header to be given as an argument to the 'addheader'. Example: >>> myheader = ['NAME','AGE','JOB','DEPARTMENT','PAY']\n >>> pandasread_csv_addheader('test.csv', addheader=myheader)\n NAME AGE JOB DEPARTMENT PAY\n 0 bob 21 janitor sanitization team 2\n 1 alice 22 secretary admin team 3\n 2 chuck 23 plumber construction team 4\n Reference: https://stackoverflow.com/questions/36828348/pandas-read-csv-reading-a-csv-file-with-a-missing-header-element Args: csv_file (str): Reference an existing .csv file. addheader (list): Reference the header you want to use for the columns. Returns: pandas.core.frame.DataFrame: Returns a pandas dataframe. """ import pandas df = pandas.read_csv(csv_file, header=None, names=addheader) return df
40.607143
168
0.614776
true
true
f73bfb9915a9103181b4cb12b36cafe4c81fcb2b
1,803
py
Python
test.py
hreshtaksensei/forexpy
706f9f9ba7dd6ec83edd268b1b287d96db59b2cf
[ "MIT" ]
1
2020-03-21T03:19:36.000Z
2020-03-21T03:19:36.000Z
test.py
hreshtaksensei/forexpy
706f9f9ba7dd6ec83edd268b1b287d96db59b2cf
[ "MIT" ]
null
null
null
test.py
hreshtaksensei/forexpy
706f9f9ba7dd6ec83edd268b1b287d96db59b2cf
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 """Usage: test.py FILE test.py -h --help """ import sys import os.path from docopt import docopt from backtest.oanda_backtest import OandaBacktest from logic.strategy import Strategy from settings import CANDLES_MINUTES, MAX_PERCENTAGE_ACCOUNT_AT_RISK,\ STOP_LOSS, TRAILING_PERIOD, TAKE_PROFIT, PLOT_RESULTS from util.plot import Strategyplot def plot_results(plot_data): if not plot_data: return splot = Strategyplot(plot_data, 2) splot.plot("RawPrice", 0, "m-") splot.plot("Sell", 0, "ro") splot.plot("Buy", 0, "g^") splot.plot("Close", 0, "b*") splot.plot("StopLoss", 0, "_") #splot.plot("TrailingStop", 0, "g-") splot.plot("TakeProfit", 0, "g-") #splot.plot("short", 0, "r--") #splot.plot("medium", 0, "g--") #splot.plot("long", 0, "b--") #splot.plot("NetWorth", 1, "r-") splot.show() def main(argv): arguments = docopt( __doc__, argv, help=True, version=None, options_first=False) if os.path.isfile(arguments['FILE']) is not True: print('File not found') return oanda_backtest = OandaBacktest(arguments['FILE']) strategy = Strategy( oanda_backtest, CANDLES_MINUTES, email=None, risk=MAX_PERCENTAGE_ACCOUNT_AT_RISK, stoploss=STOP_LOSS, trailing_period=TRAILING_PERIOD, take_profit=TAKE_PROFIT) print('Starting backtest on', argv[0]) strategy.Start() while oanda_backtest.is_running(): oanda_backtest.update_subscribers() if PLOT_RESULTS: print('plotting results...') plot_data = oanda_backtest.get_plot_data() plot_results(plot_data) if __name__ == "__main__": try: main(sys.argv[1:]) except KeyboardInterrupt: sys.exit(0)
25.394366
70
0.645036
import sys import os.path from docopt import docopt from backtest.oanda_backtest import OandaBacktest from logic.strategy import Strategy from settings import CANDLES_MINUTES, MAX_PERCENTAGE_ACCOUNT_AT_RISK,\ STOP_LOSS, TRAILING_PERIOD, TAKE_PROFIT, PLOT_RESULTS from util.plot import Strategyplot def plot_results(plot_data): if not plot_data: return splot = Strategyplot(plot_data, 2) splot.plot("RawPrice", 0, "m-") splot.plot("Sell", 0, "ro") splot.plot("Buy", 0, "g^") splot.plot("Close", 0, "b*") splot.plot("StopLoss", 0, "_") splot.plot("TakeProfit", 0, "g-") splot.show() def main(argv): arguments = docopt( __doc__, argv, help=True, version=None, options_first=False) if os.path.isfile(arguments['FILE']) is not True: print('File not found') return oanda_backtest = OandaBacktest(arguments['FILE']) strategy = Strategy( oanda_backtest, CANDLES_MINUTES, email=None, risk=MAX_PERCENTAGE_ACCOUNT_AT_RISK, stoploss=STOP_LOSS, trailing_period=TRAILING_PERIOD, take_profit=TAKE_PROFIT) print('Starting backtest on', argv[0]) strategy.Start() while oanda_backtest.is_running(): oanda_backtest.update_subscribers() if PLOT_RESULTS: print('plotting results...') plot_data = oanda_backtest.get_plot_data() plot_results(plot_data) if __name__ == "__main__": try: main(sys.argv[1:]) except KeyboardInterrupt: sys.exit(0)
true
true
f73bfce54e54d377a8a966317f99d18fbadd3fa0
2,839
py
Python
generative_models/lstm_hc/distribution_learner.py
MorganCThomas/MolScore
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
[ "MIT" ]
28
2020-12-11T22:10:16.000Z
2022-02-25T05:00:51.000Z
generative_models/lstm_hc/distribution_learner.py
MorganCThomas/MolScore
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
[ "MIT" ]
3
2021-08-31T22:50:41.000Z
2021-11-04T15:41:01.000Z
generative_models/lstm_hc/distribution_learner.py
MorganCThomas/MolScore
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
[ "MIT" ]
9
2021-03-03T12:10:10.000Z
2022-02-15T06:53:11.000Z
""" Adapted from guacamol_baselines https://github.com/BenevolentAI/guacamol_baselines """ import logging from typing import List import torch #from guacamol.distribution_matching_generator import DistributionMatchingGenerator from model import SmilesRnn from trainer import SmilesRnnTrainer from utils import get_tensor_dataset, load_smiles_from_list, set_random_seed from smiles_char_dict import SmilesCharDictionary logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) class SmilesRnnDistributionLearner: def __init__(self, output_dir: str, n_epochs=10, hidden_size=512, n_layers=3, max_len=100, batch_size=64, rnn_dropout=0.2, lr=1e-3, valid_every=100) -> None: self.n_epochs = n_epochs self.output_dir = output_dir self.hidden_size = hidden_size self.n_layers = n_layers self.max_len = max_len self.batch_size = batch_size self.rnn_dropout = rnn_dropout self.lr = lr self.valid_every = valid_every self.print_every = 10 self.seed = 42 def train(self, training_set: List[str], validation_set: List[str]):# -> DistributionMatchingGenerator: # GPU if available cuda_available = torch.cuda.is_available() device_str = 'cuda' if cuda_available else 'cpu' device = torch.device(device_str) logger.info(f'CUDA enabled:\t{cuda_available}') set_random_seed(self.seed, device) # load data train_seqs, _ = load_smiles_from_list(training_set, self.max_len) valid_seqs, _ = load_smiles_from_list(validation_set, self.max_len) train_set = get_tensor_dataset(train_seqs) test_set = get_tensor_dataset(valid_seqs) sd = SmilesCharDictionary() n_characters = sd.get_char_num() # build network smiles_model = SmilesRnn(input_size=n_characters, hidden_size=self.hidden_size, output_size=n_characters, n_layers=self.n_layers, rnn_dropout=self.rnn_dropout) # wire network for training optimizer = torch.optim.Adam(smiles_model.parameters(), lr=self.lr) criterion = torch.nn.CrossEntropyLoss(ignore_index=sd.pad_idx) trainer = SmilesRnnTrainer(model=smiles_model, criteria=[criterion], 
optimizer=optimizer, device=device, log_dir=self.output_dir) trainer.fit(train_set, test_set, batch_size=self.batch_size, print_every=self.print_every, valid_every=self.valid_every, n_epochs=self.n_epochs)
37.355263
107
0.638253
import logging from typing import List import torch from model import SmilesRnn from trainer import SmilesRnnTrainer from utils import get_tensor_dataset, load_smiles_from_list, set_random_seed from smiles_char_dict import SmilesCharDictionary logger = logging.getLogger(__name__) logger.addHandler(logging.NullHandler()) class SmilesRnnDistributionLearner: def __init__(self, output_dir: str, n_epochs=10, hidden_size=512, n_layers=3, max_len=100, batch_size=64, rnn_dropout=0.2, lr=1e-3, valid_every=100) -> None: self.n_epochs = n_epochs self.output_dir = output_dir self.hidden_size = hidden_size self.n_layers = n_layers self.max_len = max_len self.batch_size = batch_size self.rnn_dropout = rnn_dropout self.lr = lr self.valid_every = valid_every self.print_every = 10 self.seed = 42 def train(self, training_set: List[str], validation_set: List[str]): cuda_available = torch.cuda.is_available() device_str = 'cuda' if cuda_available else 'cpu' device = torch.device(device_str) logger.info(f'CUDA enabled:\t{cuda_available}') set_random_seed(self.seed, device) train_seqs, _ = load_smiles_from_list(training_set, self.max_len) valid_seqs, _ = load_smiles_from_list(validation_set, self.max_len) train_set = get_tensor_dataset(train_seqs) test_set = get_tensor_dataset(valid_seqs) sd = SmilesCharDictionary() n_characters = sd.get_char_num() smiles_model = SmilesRnn(input_size=n_characters, hidden_size=self.hidden_size, output_size=n_characters, n_layers=self.n_layers, rnn_dropout=self.rnn_dropout) optimizer = torch.optim.Adam(smiles_model.parameters(), lr=self.lr) criterion = torch.nn.CrossEntropyLoss(ignore_index=sd.pad_idx) trainer = SmilesRnnTrainer(model=smiles_model, criteria=[criterion], optimizer=optimizer, device=device, log_dir=self.output_dir) trainer.fit(train_set, test_set, batch_size=self.batch_size, print_every=self.print_every, valid_every=self.valid_every, n_epochs=self.n_epochs)
true
true
f73bfdd51aa47da6aa7a3fd44829b3bad58f6c35
780
py
Python
db/migrations/0023_auto_20210208_0838.py
matchd-ch/matchd-backend
84be4aab1b4708cae50a8988301b15df877c8db0
[ "Apache-2.0" ]
1
2022-03-03T09:55:57.000Z
2022-03-03T09:55:57.000Z
db/migrations/0023_auto_20210208_0838.py
matchd-ch/matchd-backend
84be4aab1b4708cae50a8988301b15df877c8db0
[ "Apache-2.0" ]
7
2022-02-09T10:44:53.000Z
2022-03-28T03:29:43.000Z
db/migrations/0023_auto_20210208_0838.py
matchd-ch/matchd-backend
84be4aab1b4708cae50a8988301b15df877c8db0
[ "Apache-2.0" ]
null
null
null
# Generated by Django 3.1.5 on 2021-02-08 08:38 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('db', '0022_student_nickname'), ] operations = [ migrations.AddField( model_name='student', name='field_of_study', field=models.CharField(default='', max_length=255), preserve_default=False, ), migrations.AddField( model_name='student', name='graduation', field=models.DateField(blank=True, null=True), ), migrations.AddField( model_name='student', name='school_name', field=models.CharField(blank=True, max_length=255, null=True), ), ]
26
74
0.571795
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('db', '0022_student_nickname'), ] operations = [ migrations.AddField( model_name='student', name='field_of_study', field=models.CharField(default='', max_length=255), preserve_default=False, ), migrations.AddField( model_name='student', name='graduation', field=models.DateField(blank=True, null=True), ), migrations.AddField( model_name='student', name='school_name', field=models.CharField(blank=True, max_length=255, null=True), ), ]
true
true
f73bfe76a3fb3b909b501dcb198eb297cef9e9fc
5,979
py
Python
vulnerabilities/tests/test_npm.py
InLaw/vulnerablecode
e93154ce15f577430dda18cabd1feb1dabc7230a
[ "Apache-2.0" ]
null
null
null
vulnerabilities/tests/test_npm.py
InLaw/vulnerablecode
e93154ce15f577430dda18cabd1feb1dabc7230a
[ "Apache-2.0" ]
null
null
null
vulnerabilities/tests/test_npm.py
InLaw/vulnerablecode
e93154ce15f577430dda18cabd1feb1dabc7230a
[ "Apache-2.0" ]
null
null
null
# Author: Navonil Das (@NavonilDas) # Copyright (c) 2017 nexB Inc. and others. All rights reserved. # http://nexb.com and https://github.com/nexB/vulnerablecode/ # The VulnerableCode software is licensed under the Apache License version 2.0. # Data generated with VulnerableCode require an acknowledgment. # # You may not use this software except in compliance with the License. # You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. # # When you publish or redistribute any data created with VulnerableCode or any VulnerableCode # derivative work, you must accompany this data with the following acknowledgment: # # Generated with VulnerableCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES # OR CONDITIONS OF ANY KIND, either express or implied. No content created from # VulnerableCode should be considered or used as legal advice. Consult an Attorney # for any legal advice. # VulnerableCode is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/vulnerablecode/ for support and download. 
import json import os import shutil import tempfile from unittest.mock import patch import zipfile from django.test import TestCase from vulnerabilities import models from vulnerabilities.import_runner import ImportRunner from vulnerabilities.package_managers import NpmVersionAPI from vulnerabilities.importers.npm import categorize_versions BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/") MOCK_VERSION_API = NpmVersionAPI( cache={ "jquery": {"3.4", "3.8"}, "kerberos": {"0.5.8", "1.2"}, "@hapi/subtext": {"3.7", "4.1.1", "6.1.3", "7.0.0", "7.0.5"}, } ) @patch("vulnerabilities.importers.NpmDataSource._update_from_remote") class NpmImportTest(TestCase): tempdir = None @classmethod def setUpClass(cls) -> None: cls.tempdir = tempfile.mkdtemp() zip_path = os.path.join(TEST_DATA, "npm.zip") with zipfile.ZipFile(zip_path, "r") as zip_ref: zip_ref.extractall(cls.tempdir) cls.importer = models.Importer.objects.create( name="npm_unittests", license="", last_run=None, data_source="NpmDataSource", data_source_cfg={ "repository_url": "https://example.git", "working_directory": os.path.join(cls.tempdir, "npm_test"), "create_working_directory": False, "remove_working_directory": False, }, ) @classmethod def tearDownClass(cls) -> None: # Make sure no requests for unexpected package names have been made during the tests. 
shutil.rmtree(cls.tempdir) assert len(MOCK_VERSION_API.cache) == 3, MOCK_VERSION_API.cache def test_import(self, _): runner = ImportRunner(self.importer, 5) with patch("vulnerabilities.importers.NpmDataSource.versions", new=MOCK_VERSION_API): with patch("vulnerabilities.importers.NpmDataSource.set_api"): runner.run() assert models.Vulnerability.objects.count() == 3 assert models.VulnerabilityReference.objects.count() == 3 assert models.PackageRelatedVulnerability.objects.filter(is_vulnerable=False).count() == 5 assert models.PackageRelatedVulnerability.objects.filter(is_vulnerable=True).count() == 4 expected_package_count = sum([len(v) for v in MOCK_VERSION_API.cache.values()]) assert models.Package.objects.count() == expected_package_count self.assert_for_package( "jquery", {"3.4"}, {"3.8"}, "1518", vulnerability_id="CVE-2020-11022" ) # nopep8 self.assert_for_package("kerberos", {"0.5.8"}, {"1.2"}, "1514") self.assert_for_package("subtext", {"4.1.1", "7.0.0"}, {"3.7", "6.1.3", "7.0.5"}, "1476") def assert_for_package( self, package_name, impacted_versions, resolved_versions, vuln_id, vulnerability_id=None, ): vuln = None for version in impacted_versions: pkg = models.Package.objects.get(name=package_name, version=version) assert pkg.vulnerabilities.count() == 1 vuln = pkg.vulnerabilities.first() if vulnerability_id: assert vuln.vulnerability_id == vulnerability_id ref_url = f"https://registry.npmjs.org/-/npm/v1/advisories/{vuln_id}" assert models.VulnerabilityReference.objects.get(url=ref_url, vulnerability=vuln) for version in resolved_versions: pkg = models.Package.objects.get(name=package_name, version=version) assert models.PackageRelatedVulnerability.objects.filter( package=pkg, vulnerability=vuln, is_vulnerable=False ) def test_categorize_versions_simple_ranges(): all_versions = {"3.4", "3.8"} impacted_ranges = "<3.5.0" resolved_ranges = ">=3.5.0" impacted_versions, resolved_versions = categorize_versions( all_versions, impacted_ranges, resolved_ranges ) 
assert impacted_versions == {"3.4"} assert resolved_versions == {"3.8"} def test_categorize_versions_complex_ranges(): all_versions = {"3.7", "4.1.1", "6.1.3", "7.0.0", "7.0.5"} impacted_ranges = ">=4.1.0 <6.1.3 || >= 7.0.0 <7.0.3" resolved_ranges = ">=6.1.3 <7.0.0 || >=7.0.3" impacted_versions, resolved_versions = categorize_versions( all_versions, impacted_ranges, resolved_ranges ) assert impacted_versions == {"4.1.1", "7.0.0"} assert resolved_versions == {"3.7", "6.1.3", "7.0.5"}
38.326923
98
0.676367
import json import os import shutil import tempfile from unittest.mock import patch import zipfile from django.test import TestCase from vulnerabilities import models from vulnerabilities.import_runner import ImportRunner from vulnerabilities.package_managers import NpmVersionAPI from vulnerabilities.importers.npm import categorize_versions BASE_DIR = os.path.dirname(os.path.abspath(__file__)) TEST_DATA = os.path.join(BASE_DIR, "test_data/") MOCK_VERSION_API = NpmVersionAPI( cache={ "jquery": {"3.4", "3.8"}, "kerberos": {"0.5.8", "1.2"}, "@hapi/subtext": {"3.7", "4.1.1", "6.1.3", "7.0.0", "7.0.5"}, } ) @patch("vulnerabilities.importers.NpmDataSource._update_from_remote") class NpmImportTest(TestCase): tempdir = None @classmethod def setUpClass(cls) -> None: cls.tempdir = tempfile.mkdtemp() zip_path = os.path.join(TEST_DATA, "npm.zip") with zipfile.ZipFile(zip_path, "r") as zip_ref: zip_ref.extractall(cls.tempdir) cls.importer = models.Importer.objects.create( name="npm_unittests", license="", last_run=None, data_source="NpmDataSource", data_source_cfg={ "repository_url": "https://example.git", "working_directory": os.path.join(cls.tempdir, "npm_test"), "create_working_directory": False, "remove_working_directory": False, }, ) @classmethod def tearDownClass(cls) -> None: shutil.rmtree(cls.tempdir) assert len(MOCK_VERSION_API.cache) == 3, MOCK_VERSION_API.cache def test_import(self, _): runner = ImportRunner(self.importer, 5) with patch("vulnerabilities.importers.NpmDataSource.versions", new=MOCK_VERSION_API): with patch("vulnerabilities.importers.NpmDataSource.set_api"): runner.run() assert models.Vulnerability.objects.count() == 3 assert models.VulnerabilityReference.objects.count() == 3 assert models.PackageRelatedVulnerability.objects.filter(is_vulnerable=False).count() == 5 assert models.PackageRelatedVulnerability.objects.filter(is_vulnerable=True).count() == 4 expected_package_count = sum([len(v) for v in MOCK_VERSION_API.cache.values()]) assert 
models.Package.objects.count() == expected_package_count self.assert_for_package( "jquery", {"3.4"}, {"3.8"}, "1518", vulnerability_id="CVE-2020-11022" ) self.assert_for_package("kerberos", {"0.5.8"}, {"1.2"}, "1514") self.assert_for_package("subtext", {"4.1.1", "7.0.0"}, {"3.7", "6.1.3", "7.0.5"}, "1476") def assert_for_package( self, package_name, impacted_versions, resolved_versions, vuln_id, vulnerability_id=None, ): vuln = None for version in impacted_versions: pkg = models.Package.objects.get(name=package_name, version=version) assert pkg.vulnerabilities.count() == 1 vuln = pkg.vulnerabilities.first() if vulnerability_id: assert vuln.vulnerability_id == vulnerability_id ref_url = f"https://registry.npmjs.org/-/npm/v1/advisories/{vuln_id}" assert models.VulnerabilityReference.objects.get(url=ref_url, vulnerability=vuln) for version in resolved_versions: pkg = models.Package.objects.get(name=package_name, version=version) assert models.PackageRelatedVulnerability.objects.filter( package=pkg, vulnerability=vuln, is_vulnerable=False ) def test_categorize_versions_simple_ranges(): all_versions = {"3.4", "3.8"} impacted_ranges = "<3.5.0" resolved_ranges = ">=3.5.0" impacted_versions, resolved_versions = categorize_versions( all_versions, impacted_ranges, resolved_ranges ) assert impacted_versions == {"3.4"} assert resolved_versions == {"3.8"} def test_categorize_versions_complex_ranges(): all_versions = {"3.7", "4.1.1", "6.1.3", "7.0.0", "7.0.5"} impacted_ranges = ">=4.1.0 <6.1.3 || >= 7.0.0 <7.0.3" resolved_ranges = ">=6.1.3 <7.0.0 || >=7.0.3" impacted_versions, resolved_versions = categorize_versions( all_versions, impacted_ranges, resolved_ranges ) assert impacted_versions == {"4.1.1", "7.0.0"} assert resolved_versions == {"3.7", "6.1.3", "7.0.5"}
true
true
f73bff3f87ec54b155cbd3be7d85764e39c7a12b
2,724
py
Python
purge_files.py
darkrilin/gamemaker-slack-bot
623489dddde37bad906dedb40ceda1335da9c90d
[ "MIT" ]
1
2016-08-09T21:28:31.000Z
2016-08-09T21:28:31.000Z
purge_files.py
darkrilin/gamemaker-slack-bot
623489dddde37bad906dedb40ceda1335da9c90d
[ "MIT" ]
null
null
null
purge_files.py
darkrilin/gamemaker-slack-bot
623489dddde37bad906dedb40ceda1335da9c90d
[ "MIT" ]
null
null
null
from urllib.parse import urlencode from urllib.request import urlopen from time import time from json import load from codecs import getreader from os import environ reader = getreader("utf-8") token = environ['SLACK_TEST_TOKEN'] # Uses legacy test API token - TODO: This will need to be updated days = 14 # Purge files older than 14 days timestamp = int(time()) - days * 24 * 60 * 60 def list_files(slack_token, ts_to): """ Fetches a list of all the public files on the slack server :param slack_token: :param ts_to: Files created before this timestamp :return: List of public files """ params = { 'token': slack_token, 'ts_to': ts_to, 'count': 500, } response = reader(urlopen('https://slack.com/api/files.list?' + urlencode(params))) file_list = load(response)['files'] return file_list def delete_files(file_ids, slack_token, verbose=False): """ Deletes all files with IDs matching the given list :param file_ids: :param slack_token: :param verbose: """ size = 0 count = 0 num_files = len(file_ids) for file_id in file_ids: count += 1 params = { 'token': slack_token, 'file': file_id } response = reader(urlopen('https://slack.com/api/files.info?' + urlencode(params))) size += load(response)['file']['size'] response = reader(urlopen('https://slack.com/api/files.delete?' + urlencode(params))) ok = load(response)['ok'] mb = size / 1048576 if verbose: print("{0} of {1} - {2} {3} ... {4:.2f} MB saved".format(count, num_files, file_id, ok, mb)) def total_file_size(slack_token, verbose=False): """ Finds the total size of all files on the slack server :param slack_token: :param verbose: :return: """ params = { 'token': slack_token, 'count': 500, } response = reader(urlopen('https://slack.com/api/files.list?' + urlencode(params))) size = 0 file_ids = [f['id'] for f in load(response)['files']] for file_id in file_ids: params = { 'token': token, 'file': file_id } response = reader(urlopen('https://slack.com/api/files.info?' 
+ urlencode(params))) size += load(response)['file']['size'] mb = size / 1048576 if verbose: print('{0:.2f} MB total'.format(mb)) mb = size / 1048576 return '{0:.2f} MB'.format(mb) if __name__ == '__main__': files = [f['id'] for f in list_files(token, timestamp)] delete_files(files, token, verbose=True) print("{} files deleted".format(len(files))) print(total_file_size(token))
27.24
104
0.605727
from urllib.parse import urlencode from urllib.request import urlopen from time import time from json import load from codecs import getreader from os import environ reader = getreader("utf-8") token = environ['SLACK_TEST_TOKEN'] days = 14 timestamp = int(time()) - days * 24 * 60 * 60 def list_files(slack_token, ts_to): params = { 'token': slack_token, 'ts_to': ts_to, 'count': 500, } response = reader(urlopen('https://slack.com/api/files.list?' + urlencode(params))) file_list = load(response)['files'] return file_list def delete_files(file_ids, slack_token, verbose=False): size = 0 count = 0 num_files = len(file_ids) for file_id in file_ids: count += 1 params = { 'token': slack_token, 'file': file_id } response = reader(urlopen('https://slack.com/api/files.info?' + urlencode(params))) size += load(response)['file']['size'] response = reader(urlopen('https://slack.com/api/files.delete?' + urlencode(params))) ok = load(response)['ok'] mb = size / 1048576 if verbose: print("{0} of {1} - {2} {3} ... {4:.2f} MB saved".format(count, num_files, file_id, ok, mb)) def total_file_size(slack_token, verbose=False): params = { 'token': slack_token, 'count': 500, } response = reader(urlopen('https://slack.com/api/files.list?' + urlencode(params))) size = 0 file_ids = [f['id'] for f in load(response)['files']] for file_id in file_ids: params = { 'token': token, 'file': file_id } response = reader(urlopen('https://slack.com/api/files.info?' + urlencode(params))) size += load(response)['file']['size'] mb = size / 1048576 if verbose: print('{0:.2f} MB total'.format(mb)) mb = size / 1048576 return '{0:.2f} MB'.format(mb) if __name__ == '__main__': files = [f['id'] for f in list_files(token, timestamp)] delete_files(files, token, verbose=True) print("{} files deleted".format(len(files))) print(total_file_size(token))
true
true
f73c01726f9d59c2094688ab60f9d16fd7fb8794
2,035
py
Python
unix/linux/gnu/redhat.py
fmenabe/python-unix
738907eddcdeece4be8c82f1d5604c296c94e49f
[ "MIT" ]
7
2015-09-17T13:33:10.000Z
2021-09-30T15:07:59.000Z
unix/linux/gnu/redhat.py
fmenabe/python-unix
738907eddcdeece4be8c82f1d5604c296c94e49f
[ "MIT" ]
3
2015-09-17T12:53:17.000Z
2016-09-27T21:10:48.000Z
unix/linux/gnu/redhat.py
fmenabe/python-unix
738907eddcdeece4be8c82f1d5604c296c94e49f
[ "MIT" ]
4
2017-01-16T15:29:22.000Z
2019-08-28T21:11:25.000Z
# -*- coding: utf-8 -*- import re import os import unix import weakref from .. import Linux, Chroot, LinuxError from unix.linux.services import Initd, Upstart, Systemd DISTRIBS = ('RedHat', 'CentOS') _CONFDIR = '/etc/sysconfig' _NETFILE = os.path.join(_CONFDIR, 'network') def RedHat(host, force=False): unix.isvalid(host) root = host.__dict__.get('root', None) instances = unix.instances(host) if len(instances) >= 1: host = Linux(getattr(unix, instances[0]).clone(host)) if root: host = Chroot(host, root) if host.distrib[0] not in DISTRIBS and not force: raise LinuxError('invalid distrib') class RedHatHost(host.__class__): def __init__(self): kwargs = {'root': root} if root else {} host.__class__.__init__(self, **kwargs) self.__dict__.update(host.__dict__) def list_packages(self): return self.execute('dpkg -l') @property def hostname(self): with self.open(_NETFILE) as fhandler: for line in fhandler.read().splitlines(): attr, value = line.split('=') if attr == 'HOSTNAME': return value @hostname.setter def hostname(self, value): contnet = '' with self.open(_NETFILE) as fhandler: content = re.sub('HOSTNAME=[^\n]*', 'HOSTNAME=%s\n' % value, fhandler.read()) with self.open(_NETFILE, 'w') as fhandler: fhandler.write(content) @property def services(self): major_version = int(self.distrib[1][0]) if major_version <= 5: service_handler = Initd elif major_version == 6: service_handler = Upstart elif major_version >= 7: service_handler = Systemd return service_handler(weakref.ref(self)()) return RedHatHost()
29.926471
61
0.55086
import re import os import unix import weakref from .. import Linux, Chroot, LinuxError from unix.linux.services import Initd, Upstart, Systemd DISTRIBS = ('RedHat', 'CentOS') _CONFDIR = '/etc/sysconfig' _NETFILE = os.path.join(_CONFDIR, 'network') def RedHat(host, force=False): unix.isvalid(host) root = host.__dict__.get('root', None) instances = unix.instances(host) if len(instances) >= 1: host = Linux(getattr(unix, instances[0]).clone(host)) if root: host = Chroot(host, root) if host.distrib[0] not in DISTRIBS and not force: raise LinuxError('invalid distrib') class RedHatHost(host.__class__): def __init__(self): kwargs = {'root': root} if root else {} host.__class__.__init__(self, **kwargs) self.__dict__.update(host.__dict__) def list_packages(self): return self.execute('dpkg -l') @property def hostname(self): with self.open(_NETFILE) as fhandler: for line in fhandler.read().splitlines(): attr, value = line.split('=') if attr == 'HOSTNAME': return value @hostname.setter def hostname(self, value): contnet = '' with self.open(_NETFILE) as fhandler: content = re.sub('HOSTNAME=[^\n]*', 'HOSTNAME=%s\n' % value, fhandler.read()) with self.open(_NETFILE, 'w') as fhandler: fhandler.write(content) @property def services(self): major_version = int(self.distrib[1][0]) if major_version <= 5: service_handler = Initd elif major_version == 6: service_handler = Upstart elif major_version >= 7: service_handler = Systemd return service_handler(weakref.ref(self)()) return RedHatHost()
true
true
f73c01b1bfd8189c29eab7f24401720eb6d2b7c0
507
py
Python
src/_dependencies/lazy.py
dry-python/dependencies
1a8bba41ab42d0b5249b36471f5300d9faba81e7
[ "BSD-2-Clause" ]
175
2018-07-21T13:04:44.000Z
2020-05-27T15:31:06.000Z
src/_dependencies/lazy.py
proofit404/dependencies
204e0cfadca801d64857f24aa4c74e7939ed9af0
[ "BSD-2-Clause" ]
325
2016-05-16T11:16:11.000Z
2022-03-04T00:45:57.000Z
src/_dependencies/lazy.py
dry-python/dependencies
1a8bba41ab42d0b5249b36471f5300d9faba81e7
[ "BSD-2-Clause" ]
18
2018-06-17T09:33:16.000Z
2020-05-20T18:12:30.000Z
from _dependencies.graph import _Graph class _LazyGraph: def __init__(self, attrname, namespace): self.attrname = attrname self.namespace = namespace def __get__(self, instance, owner): graph = _Graph() for base in reversed(owner.__bases__): graph.update(base.__dependencies__) for name, dependency in self.namespace.items(): graph.assign(name, dependency) type.__setattr__(owner, self.attrname, graph) return graph
29.823529
55
0.656805
from _dependencies.graph import _Graph class _LazyGraph: def __init__(self, attrname, namespace): self.attrname = attrname self.namespace = namespace def __get__(self, instance, owner): graph = _Graph() for base in reversed(owner.__bases__): graph.update(base.__dependencies__) for name, dependency in self.namespace.items(): graph.assign(name, dependency) type.__setattr__(owner, self.attrname, graph) return graph
true
true
f73c025048313646ffa657c41d4c35ef79bc7325
6,699
py
Python
pageplot/plotmodel.py
JBorrow/pageplot
8abad574fda476d26a59fc8b7d36da2838f2c11e
[ "MIT" ]
null
null
null
pageplot/plotmodel.py
JBorrow/pageplot
8abad574fda476d26a59fc8b7d36da2838f2c11e
[ "MIT" ]
null
null
null
pageplot/plotmodel.py
JBorrow/pageplot
8abad574fda476d26a59fc8b7d36da2838f2c11e
[ "MIT" ]
null
null
null
""" The base top-level plot model class. From this all data and plotting flow. """ from pageplot.exceptions import PagePlotParserError from pathlib import Path from typing import Any, Optional, Dict, List, Union from pageplot.extensionmodel import PlotExtension from pageplot.extensions import built_in_extensions from pageplot.io.spec import IOSpecification from pageplot.config import GlobalConfig from pageplot.mask import get_mask import matplotlib.pyplot as plt import numpy as np import unyt import attr @attr.s(auto_attribs=True) class PlotModel: """ Model describing an individual plot. De-serializes the input json describing an individual figure's extension values. To use this, you'll need to initialise it with the configuration (for all the extensions!), and then associate the data with the appropraite method. The plots can then be created using the methods in the following order: ``setup_figures`` - creates Figure and Axes objects ``run_extensions`` - runs all of the extensions' ``preprocess`` steps ``perform_blitting`` - runs the extensions' ``blit`` functions ``save`` - writes out the figures to disk ``finalize`` - closes the Figure object You can also serialize the contents of the whole figure to a dictionary with the ``serialize`` object. Parameters ---------- name: str Plot name. This is the filename of the plot (without file extension). config: GlobalConfig Global configuration object. plot_spec: Dict[str, Any] Data controlling the behaviour of each extension. The keys should be the same as the used extensions. Mis-matches will raise a ``PagePlotParserError``. x, y, z: str, optional Strings to be passed to the data to load appropriate x, y, and z data. Here only x is required. x_units, y_units, z_units: Union[str, None, unyt.unyt_quantity] Expected output units for the plot, to be parsed. mask: str, optional Mask text (see :func:`get_mask`). 
""" name: str config: GlobalConfig plot_spec: Dict[str, Any] x: str y: Optional[str] = None z: Optional[str] = None # Output units for the plot. x_units: Union[str, None, unyt.unyt_quantity] = None y_units: Union[str, None, unyt.unyt_quantity] = None z_units: Union[str, None, unyt.unyt_quantity] = None mask: Optional[str] = None data: IOSpecification = attr.ib(init=False) fig: plt.Figure = attr.ib(init=False) axes: plt.Axes = attr.ib(init=False) extensions: Dict[str, PlotExtension] = attr.ib(init=False) def associate_data(self, data: IOSpecification): """ Associates the data file (which conforms to the ``IOSpecification``) with the plot. data: IOSpecification Any data file that conforms to the specification. """ self.data = data def setup_figures(self): """ Sets up the internal figure and axes. """ self.fig, self.axes = plt.subplots() return def run_extensions( self, additional_extensions: Optional[Dict[str, PlotExtension]] = None ): """ Run the figure extensions (these provide all data for the figures, excluding the plotting). Internal extensions are performed first, then any additional extensions are executed. additional_extensions: Dict[str, PlotExtension] Any additional extensions conforming to the specification. 
""" # First, sort out units and masking units = { "x_units": self.x_units, "y_units": self.y_units, "z_units": self.z_units, } for name, value in units.items(): if value is None: if (associated_data := getattr(self, name[0])) is None: units[name] = unyt.unyt_quantity(1.0, None) else: units[name] = unyt.unyt_quantity( 1.0, associated_data.split(" ", 1)[1] ) else: units[name] = unyt.unyt_quantity(1.0, value) mask = get_mask(data=self.data, mask_text=self.mask) self.extensions = {} if additional_extensions is None: additional_extensions = {} combined_extensions = {**built_in_extensions, **additional_extensions} for name in self.plot_spec.keys(): try: Extension = combined_extensions[name] except KeyError: raise PagePlotParserError( name, "Unable to find matching extension for configuration value." ) extension = Extension( name=name, config=self.config, metadata=self.data.metadata, x=self.data.data_from_string(self.x, mask=mask), y=self.data.data_from_string(self.y, mask=mask), z=self.data.data_from_string(self.z, mask=mask), **units, **self.plot_spec.get(name, {}), ) extension.preprocess() self.extensions[name] = extension return def perform_blitting(self): """ Performs the blitting (creating the figure). Without this, the extensions are just 'created' and pre-processed without affecting or creating the figure. """ for extension in self.extensions.values(): extension.blit(fig=self.fig, axes=self.axes) def save(self, filename: Path): """ Saves the figure to file. filename: Path Filename that you would like to save the figure to. Can have any matplotlib-compatible file extension. Notes ----- It's suggested that you run finalzie() after this function, otherwise there will be lots of figures open at one time causing potential slowdowns. """ self.fig.savefig(filename) return def serialize(self) -> Dict[str, Any]: """ Serializes the contents of the extensions to a dictionary. 
Note that you do not have to have 'created' the figure to run this, if you just want the data you should be able to just request the serialized data. """ serialized = {name: ext.serialize() for name, ext in self.extensions.items()} return serialized def finalize(self): """ Closes figures and cleans up. """ plt.close(self.fig) class Config: arbitrary_types_allowed = True
29.641593
86
0.618749
from pageplot.exceptions import PagePlotParserError from pathlib import Path from typing import Any, Optional, Dict, List, Union from pageplot.extensionmodel import PlotExtension from pageplot.extensions import built_in_extensions from pageplot.io.spec import IOSpecification from pageplot.config import GlobalConfig from pageplot.mask import get_mask import matplotlib.pyplot as plt import numpy as np import unyt import attr @attr.s(auto_attribs=True) class PlotModel: name: str config: GlobalConfig plot_spec: Dict[str, Any] x: str y: Optional[str] = None z: Optional[str] = None x_units: Union[str, None, unyt.unyt_quantity] = None y_units: Union[str, None, unyt.unyt_quantity] = None z_units: Union[str, None, unyt.unyt_quantity] = None mask: Optional[str] = None data: IOSpecification = attr.ib(init=False) fig: plt.Figure = attr.ib(init=False) axes: plt.Axes = attr.ib(init=False) extensions: Dict[str, PlotExtension] = attr.ib(init=False) def associate_data(self, data: IOSpecification): self.data = data def setup_figures(self): self.fig, self.axes = plt.subplots() return def run_extensions( self, additional_extensions: Optional[Dict[str, PlotExtension]] = None ): units = { "x_units": self.x_units, "y_units": self.y_units, "z_units": self.z_units, } for name, value in units.items(): if value is None: if (associated_data := getattr(self, name[0])) is None: units[name] = unyt.unyt_quantity(1.0, None) else: units[name] = unyt.unyt_quantity( 1.0, associated_data.split(" ", 1)[1] ) else: units[name] = unyt.unyt_quantity(1.0, value) mask = get_mask(data=self.data, mask_text=self.mask) self.extensions = {} if additional_extensions is None: additional_extensions = {} combined_extensions = {**built_in_extensions, **additional_extensions} for name in self.plot_spec.keys(): try: Extension = combined_extensions[name] except KeyError: raise PagePlotParserError( name, "Unable to find matching extension for configuration value." 
) extension = Extension( name=name, config=self.config, metadata=self.data.metadata, x=self.data.data_from_string(self.x, mask=mask), y=self.data.data_from_string(self.y, mask=mask), z=self.data.data_from_string(self.z, mask=mask), **units, **self.plot_spec.get(name, {}), ) extension.preprocess() self.extensions[name] = extension return def perform_blitting(self): for extension in self.extensions.values(): extension.blit(fig=self.fig, axes=self.axes) def save(self, filename: Path): self.fig.savefig(filename) return def serialize(self) -> Dict[str, Any]: serialized = {name: ext.serialize() for name, ext in self.extensions.items()} return serialized def finalize(self): plt.close(self.fig) class Config: arbitrary_types_allowed = True
true
true
f73c033c208b084d389d14b26d72c13c43b1378a
1,148
py
Python
jdxapi/routes/preview.py
jobdataexchange/jdx-api
7815a6463de56423c3b4196648607c4ebe56828c
[ "Apache-2.0" ]
null
null
null
jdxapi/routes/preview.py
jobdataexchange/jdx-api
7815a6463de56423c3b4196648607c4ebe56828c
[ "Apache-2.0" ]
9
2019-12-26T17:39:58.000Z
2022-01-13T01:59:49.000Z
jdxapi/routes/preview.py
jobdataexchange/jdx-api
7815a6463de56423c3b4196648607c4ebe56828c
[ "Apache-2.0" ]
null
null
null
from flask import request, jsonify from jdxapi.utils.logger_resource import LoggerResource from jdxapi.app import api, DB from jdxapi.models import Pipeline from jdxapi.utils.functions import RequestHandler, ResponseHandler from jdxapi.utils.error import ApiError from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound import datetime import jdxapi.utils.constants as c from jdxapi.services.competensor import get_preview @api.resource("/preview") class Preview(LoggerResource): def post(self): req = request.get_json() pipeline_id = RequestHandler.get_pipeline_id(req, True) _ = Pipeline.get_pipeline_from_id(pipeline_id) preview_data = get_preview(pipeline_id) resp_data = self.create_response_data(pipeline_id, preview_data, req) response = ResponseHandler.create_response(resp_data, 200) return response def create_response_data(self, pipeline_id, preview_data, req): resp_data = { c.PIPELINE_ID: str(pipeline_id), c.TIMESTAMP: str(datetime.datetime.now()), "preview": preview_data } return resp_data
32.8
77
0.736063
from flask import request, jsonify from jdxapi.utils.logger_resource import LoggerResource from jdxapi.app import api, DB from jdxapi.models import Pipeline from jdxapi.utils.functions import RequestHandler, ResponseHandler from jdxapi.utils.error import ApiError from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound import datetime import jdxapi.utils.constants as c from jdxapi.services.competensor import get_preview @api.resource("/preview") class Preview(LoggerResource): def post(self): req = request.get_json() pipeline_id = RequestHandler.get_pipeline_id(req, True) _ = Pipeline.get_pipeline_from_id(pipeline_id) preview_data = get_preview(pipeline_id) resp_data = self.create_response_data(pipeline_id, preview_data, req) response = ResponseHandler.create_response(resp_data, 200) return response def create_response_data(self, pipeline_id, preview_data, req): resp_data = { c.PIPELINE_ID: str(pipeline_id), c.TIMESTAMP: str(datetime.datetime.now()), "preview": preview_data } return resp_data
true
true
f73c03827b72ef05b689c636777a89325d1163bc
43,779
py
Python
moto/sns/responses.py
thomassross/moto
407d5c853dbee9b9e132d97b41414b7dca475765
[ "Apache-2.0" ]
1
2021-12-12T04:23:06.000Z
2021-12-12T04:23:06.000Z
moto/sns/responses.py
thomassross/moto
407d5c853dbee9b9e132d97b41414b7dca475765
[ "Apache-2.0" ]
4
2017-09-30T07:52:52.000Z
2021-12-13T06:56:55.000Z
moto/sns/responses.py
thomassross/moto
407d5c853dbee9b9e132d97b41414b7dca475765
[ "Apache-2.0" ]
2
2021-11-24T08:05:43.000Z
2021-11-25T16:18:48.000Z
from __future__ import unicode_literals import json import re from collections import defaultdict from moto.core.responses import BaseResponse from moto.core.utils import camelcase_to_underscores from .models import sns_backends from .exceptions import InvalidParameterValue, SNSNotFoundError from .utils import is_e164 class SNSResponse(BaseResponse): SMS_ATTR_REGEX = re.compile( r"^attributes\.entry\.(?P<index>\d+)\.(?P<type>key|value)$" ) OPT_OUT_PHONE_NUMBER_REGEX = re.compile(r"^\+?\d+$") @property def backend(self): return sns_backends[self.region] def _error(self, code, message, sender="Sender"): template = self.response_template(ERROR_RESPONSE) return template.render(code=code, message=message, sender=sender) def _get_attributes(self): attributes = self._get_list_prefix("Attributes.entry") return dict((attribute["key"], attribute["value"]) for attribute in attributes) def _get_tags(self): tags = self._get_list_prefix("Tags.member") return {tag["key"]: tag["value"] for tag in tags} def _parse_message_attributes(self, prefix="", value_namespace="Value."): message_attributes = self._get_object_map( "MessageAttributes.entry", name="Name", value="Value" ) # SNS converts some key names before forwarding messages # DataType -> Type, StringValue -> Value, BinaryValue -> Value transformed_message_attributes = {} for name, value in message_attributes.items(): # validation data_type = value["DataType"] if not data_type: raise InvalidParameterValue( "The message attribute '{0}' must contain non-empty " "message attribute value.".format(name) ) data_type_parts = data_type.split(".") if len(data_type_parts) > 2 or data_type_parts[0] not in [ "String", "Binary", "Number", ]: raise InvalidParameterValue( "The message attribute '{0}' has an invalid message " "attribute type, the set of supported type prefixes is " "Binary, Number, and String.".format(name) ) transform_value = None if "StringValue" in value: if data_type == "Number": try: transform_value = 
float(value["StringValue"]) except ValueError: raise InvalidParameterValue( "An error occurred (ParameterValueInvalid) " "when calling the Publish operation: " "Could not cast message attribute '{0}' value to number.".format( name ) ) else: transform_value = value["StringValue"] elif "BinaryValue" in value: transform_value = value["BinaryValue"] if transform_value == "": raise InvalidParameterValue( "The message attribute '{0}' must contain non-empty " "message attribute value for message attribute " "type '{1}'.".format(name, data_type[0]) ) # transformation transformed_message_attributes[name] = { "Type": data_type, "Value": transform_value, } return transformed_message_attributes def create_topic(self): name = self._get_param("Name") attributes = self._get_attributes() tags = self._get_tags() topic = self.backend.create_topic(name, attributes, tags) if self.request_json: return json.dumps( { "CreateTopicResponse": { "CreateTopicResult": {"TopicArn": topic.arn}, "ResponseMetadata": { "RequestId": "a8dec8b3-33a4-11df-8963-01868b7c937a" }, } } ) template = self.response_template(CREATE_TOPIC_TEMPLATE) return template.render(topic=topic) def list_topics(self): next_token = self._get_param("NextToken") topics, next_token = self.backend.list_topics(next_token=next_token) if self.request_json: return json.dumps( { "ListTopicsResponse": { "ListTopicsResult": { "Topics": [{"TopicArn": topic.arn} for topic in topics], "NextToken": next_token, } }, "ResponseMetadata": { "RequestId": "a8dec8b3-33a4-11df-8963-01868b7c937a" }, } ) template = self.response_template(LIST_TOPICS_TEMPLATE) return template.render(topics=topics, next_token=next_token) def delete_topic(self): topic_arn = self._get_param("TopicArn") self.backend.delete_topic(topic_arn) if self.request_json: return json.dumps( { "DeleteTopicResponse": { "ResponseMetadata": { "RequestId": "a8dec8b3-33a4-11df-8963-01868b7c937a" } } } ) template = self.response_template(DELETE_TOPIC_TEMPLATE) return template.render() def 
get_topic_attributes(self): topic_arn = self._get_param("TopicArn") topic = self.backend.get_topic(topic_arn) if self.request_json: attributes = { "Owner": topic.account_id, "Policy": topic.policy, "TopicArn": topic.arn, "DisplayName": topic.display_name, "SubscriptionsPending": topic.subscriptions_pending, "SubscriptionsConfirmed": topic.subscriptions_confimed, "SubscriptionsDeleted": topic.subscriptions_deleted, "DeliveryPolicy": topic.delivery_policy, "EffectiveDeliveryPolicy": topic.effective_delivery_policy, } if topic.kms_master_key_id: attributes["KmsMasterKeyId"] = topic.kms_master_key_id response = { "GetTopicAttributesResponse": { "GetTopicAttributesResult": {"Attributes": attributes}, "ResponseMetadata": { "RequestId": "057f074c-33a7-11df-9540-99d0768312d3" }, } } return json.dumps(response) template = self.response_template(GET_TOPIC_ATTRIBUTES_TEMPLATE) return template.render(topic=topic) def set_topic_attributes(self): topic_arn = self._get_param("TopicArn") attribute_name = self._get_param("AttributeName") attribute_name = camelcase_to_underscores(attribute_name) attribute_value = self._get_param("AttributeValue") self.backend.set_topic_attribute(topic_arn, attribute_name, attribute_value) if self.request_json: return json.dumps( { "SetTopicAttributesResponse": { "ResponseMetadata": { "RequestId": "a8763b99-33a7-11df-a9b7-05d48da6f042" } } } ) template = self.response_template(SET_TOPIC_ATTRIBUTES_TEMPLATE) return template.render() def subscribe(self): topic_arn = self._get_param("TopicArn") endpoint = self._get_param("Endpoint") protocol = self._get_param("Protocol") attributes = self._get_attributes() subscription = self.backend.subscribe(topic_arn, endpoint, protocol) if attributes is not None: for attr_name, attr_value in attributes.items(): self.backend.set_subscription_attributes( subscription.arn, attr_name, attr_value ) if self.request_json: return json.dumps( { "SubscribeResponse": { "SubscribeResult": {"SubscriptionArn": subscription.arn}, 
"ResponseMetadata": { "RequestId": "a8763b99-33a7-11df-a9b7-05d48da6f042" }, } } ) template = self.response_template(SUBSCRIBE_TEMPLATE) return template.render(subscription=subscription) def unsubscribe(self): subscription_arn = self._get_param("SubscriptionArn") self.backend.unsubscribe(subscription_arn) if self.request_json: return json.dumps( { "UnsubscribeResponse": { "ResponseMetadata": { "RequestId": "a8763b99-33a7-11df-a9b7-05d48da6f042" } } } ) template = self.response_template(UNSUBSCRIBE_TEMPLATE) return template.render() def list_subscriptions(self): next_token = self._get_param("NextToken") subscriptions, next_token = self.backend.list_subscriptions( next_token=next_token ) if self.request_json: return json.dumps( { "ListSubscriptionsResponse": { "ListSubscriptionsResult": { "Subscriptions": [ { "TopicArn": subscription.topic.arn, "Protocol": subscription.protocol, "SubscriptionArn": subscription.arn, "Owner": subscription.topic.account_id, "Endpoint": subscription.endpoint, } for subscription in subscriptions ], "NextToken": next_token, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template(LIST_SUBSCRIPTIONS_TEMPLATE) return template.render(subscriptions=subscriptions, next_token=next_token) def list_subscriptions_by_topic(self): topic_arn = self._get_param("TopicArn") next_token = self._get_param("NextToken") subscriptions, next_token = self.backend.list_subscriptions( topic_arn, next_token=next_token ) if self.request_json: return json.dumps( { "ListSubscriptionsByTopicResponse": { "ListSubscriptionsByTopicResult": { "Subscriptions": [ { "TopicArn": subscription.topic.arn, "Protocol": subscription.protocol, "SubscriptionArn": subscription.arn, "Owner": subscription.topic.account_id, "Endpoint": subscription.endpoint, } for subscription in subscriptions ], "NextToken": next_token, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = 
self.response_template(LIST_SUBSCRIPTIONS_BY_TOPIC_TEMPLATE) return template.render(subscriptions=subscriptions, next_token=next_token) def publish(self): target_arn = self._get_param("TargetArn") topic_arn = self._get_param("TopicArn") phone_number = self._get_param("PhoneNumber") subject = self._get_param("Subject") message_attributes = self._parse_message_attributes() arn = None if phone_number is not None: # Check phone is correct syntax (e164) if not is_e164(phone_number): return ( self._error( "InvalidParameter", "Phone number does not meet the E164 format" ), dict(status=400), ) elif target_arn is not None: arn = target_arn else: arn = topic_arn message = self._get_param("Message") try: message_id = self.backend.publish( message, arn=arn, phone_number=phone_number, subject=subject, message_attributes=message_attributes, ) except ValueError as err: error_response = self._error("InvalidParameter", str(err)) return error_response, dict(status=400) if self.request_json: return json.dumps( { "PublishResponse": { "PublishResult": {"MessageId": message_id}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template(PUBLISH_TEMPLATE) return template.render(message_id=message_id) def create_platform_application(self): name = self._get_param("Name") platform = self._get_param("Platform") attributes = self._get_attributes() platform_application = self.backend.create_platform_application( self.region, name, platform, attributes ) if self.request_json: return json.dumps( { "CreatePlatformApplicationResponse": { "CreatePlatformApplicationResult": { "PlatformApplicationArn": platform_application.arn }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937b" }, } } ) template = self.response_template(CREATE_PLATFORM_APPLICATION_TEMPLATE) return template.render(platform_application=platform_application) def get_platform_application_attributes(self): arn = self._get_param("PlatformApplicationArn") 
application = self.backend.get_application(arn) if self.request_json: return json.dumps( { "GetPlatformApplicationAttributesResponse": { "GetPlatformApplicationAttributesResult": { "Attributes": application.attributes }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937f" }, } } ) template = self.response_template(GET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE) return template.render(application=application) def set_platform_application_attributes(self): arn = self._get_param("PlatformApplicationArn") attributes = self._get_attributes() self.backend.set_application_attributes(arn, attributes) if self.request_json: return json.dumps( { "SetPlatformApplicationAttributesResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-12df-8963-01868b7c937f" } } } ) template = self.response_template(SET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE) return template.render() def list_platform_applications(self): applications = self.backend.list_platform_applications() if self.request_json: return json.dumps( { "ListPlatformApplicationsResponse": { "ListPlatformApplicationsResult": { "PlatformApplications": [ { "PlatformApplicationArn": application.arn, "attributes": application.attributes, } for application in applications ], "NextToken": None, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937c" }, } } ) template = self.response_template(LIST_PLATFORM_APPLICATIONS_TEMPLATE) return template.render(applications=applications) def delete_platform_application(self): platform_arn = self._get_param("PlatformApplicationArn") self.backend.delete_platform_application(platform_arn) if self.request_json: return json.dumps( { "DeletePlatformApplicationResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937e" } } } ) template = self.response_template(DELETE_PLATFORM_APPLICATION_TEMPLATE) return template.render() def create_platform_endpoint(self): application_arn = self._get_param("PlatformApplicationArn") application 
= self.backend.get_application(application_arn) custom_user_data = self._get_param("CustomUserData") token = self._get_param("Token") attributes = self._get_attributes() platform_endpoint = self.backend.create_platform_endpoint( self.region, application, custom_user_data, token, attributes ) if self.request_json: return json.dumps( { "CreatePlatformEndpointResponse": { "CreatePlatformEndpointResult": { "EndpointArn": platform_endpoint.arn }, "ResponseMetadata": { "RequestId": "384ac68d-3779-11df-8963-01868b7c937b" }, } } ) template = self.response_template(CREATE_PLATFORM_ENDPOINT_TEMPLATE) return template.render(platform_endpoint=platform_endpoint) def list_endpoints_by_platform_application(self): application_arn = self._get_param("PlatformApplicationArn") endpoints = self.backend.list_endpoints_by_platform_application(application_arn) if self.request_json: return json.dumps( { "ListEndpointsByPlatformApplicationResponse": { "ListEndpointsByPlatformApplicationResult": { "Endpoints": [ { "Attributes": endpoint.attributes, "EndpointArn": endpoint.arn, } for endpoint in endpoints ], "NextToken": None, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template( LIST_ENDPOINTS_BY_PLATFORM_APPLICATION_TEMPLATE ) return template.render(endpoints=endpoints) def get_endpoint_attributes(self): arn = self._get_param("EndpointArn") try: endpoint = self.backend.get_endpoint(arn) if self.request_json: return json.dumps( { "GetEndpointAttributesResponse": { "GetEndpointAttributesResult": { "Attributes": endpoint.attributes }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937f" }, } } ) template = self.response_template(GET_ENDPOINT_ATTRIBUTES_TEMPLATE) return template.render(endpoint=endpoint) except SNSNotFoundError: error_response = self._error("NotFound", "Endpoint does not exist") return error_response, dict(status=404) def set_endpoint_attributes(self): arn = self._get_param("EndpointArn") 
attributes = self._get_attributes() self.backend.set_endpoint_attributes(arn, attributes) if self.request_json: return json.dumps( { "SetEndpointAttributesResponse": { "ResponseMetadata": { "RequestId": "384bc68d-3775-12df-8963-01868b7c937f" } } } ) template = self.response_template(SET_ENDPOINT_ATTRIBUTES_TEMPLATE) return template.render() def delete_endpoint(self): arn = self._get_param("EndpointArn") self.backend.delete_endpoint(arn) if self.request_json: return json.dumps( { "DeleteEndpointResponse": { "ResponseMetadata": { "RequestId": "384bc68d-3775-12df-8963-01868b7c937f" } } } ) template = self.response_template(DELETE_ENDPOINT_TEMPLATE) return template.render() def get_subscription_attributes(self): arn = self._get_param("SubscriptionArn") attributes = self.backend.get_subscription_attributes(arn) template = self.response_template(GET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE) return template.render(attributes=attributes) def set_subscription_attributes(self): arn = self._get_param("SubscriptionArn") attr_name = self._get_param("AttributeName") attr_value = self._get_param("AttributeValue") self.backend.set_subscription_attributes(arn, attr_name, attr_value) template = self.response_template(SET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE) return template.render() def set_sms_attributes(self): # attributes.entry.1.key # attributes.entry.1.value # to # 1: {key:X, value:Y} temp_dict = defaultdict(dict) for key, value in self.querystring.items(): match = self.SMS_ATTR_REGEX.match(key) if match is not None: temp_dict[match.group("index")][match.group("type")] = value[0] # 1: {key:X, value:Y} # to # X: Y # All of this, just to take into account when people provide invalid stuff. 
result = {} for item in temp_dict.values(): if "key" in item and "value" in item: result[item["key"]] = item["value"] self.backend.update_sms_attributes(result) template = self.response_template(SET_SMS_ATTRIBUTES_TEMPLATE) return template.render() def get_sms_attributes(self): filter_list = set() for key, value in self.querystring.items(): if key.startswith("attributes.member.1"): filter_list.add(value[0]) if len(filter_list) > 0: result = { k: v for k, v in self.backend.sms_attributes.items() if k in filter_list } else: result = self.backend.sms_attributes template = self.response_template(GET_SMS_ATTRIBUTES_TEMPLATE) return template.render(attributes=result) def check_if_phone_number_is_opted_out(self): number = self._get_param("phoneNumber") if self.OPT_OUT_PHONE_NUMBER_REGEX.match(number) is None: error_response = self._error( code="InvalidParameter", message="Invalid parameter: PhoneNumber Reason: input incorrectly formatted", ) return error_response, dict(status=400) # There should be a nicer way to set if a nubmer has opted out template = self.response_template(CHECK_IF_OPTED_OUT_TEMPLATE) return template.render(opt_out=str(number.endswith("99")).lower()) def list_phone_numbers_opted_out(self): template = self.response_template(LIST_OPTOUT_TEMPLATE) return template.render(opt_outs=self.backend.opt_out_numbers) def opt_in_phone_number(self): number = self._get_param("phoneNumber") try: self.backend.opt_out_numbers.remove(number) except ValueError: pass template = self.response_template(OPT_IN_NUMBER_TEMPLATE) return template.render() def add_permission(self): topic_arn = self._get_param("TopicArn") label = self._get_param("Label") aws_account_ids = self._get_multi_param("AWSAccountId.member.") action_names = self._get_multi_param("ActionName.member.") self.backend.add_permission(topic_arn, label, aws_account_ids, action_names) template = self.response_template(ADD_PERMISSION_TEMPLATE) return template.render() def remove_permission(self): topic_arn = 
self._get_param("TopicArn") label = self._get_param("Label") self.backend.remove_permission(topic_arn, label) template = self.response_template(DEL_PERMISSION_TEMPLATE) return template.render() def confirm_subscription(self): arn = self._get_param("TopicArn") if arn not in self.backend.topics: error_response = self._error("NotFound", "Topic does not exist") return error_response, dict(status=404) # Once Tokens are stored by the `subscribe` endpoint and distributed # to the client somehow, then we can check validity of tokens # presented to this method. The following code works, all thats # needed is to perform a token check and assign that value to the # `already_subscribed` variable. # # token = self._get_param('Token') # auth = self._get_param('AuthenticateOnUnsubscribe') # if already_subscribed: # error_response = self._error( # code='AuthorizationError', # message='Subscription already confirmed' # ) # return error_response, dict(status=400) template = self.response_template(CONFIRM_SUBSCRIPTION_TEMPLATE) return template.render( sub_arn="{0}:68762e72-e9b1-410a-8b3b-903da69ee1d5".format(arn) ) def list_tags_for_resource(self): arn = self._get_param("ResourceArn") result = self.backend.list_tags_for_resource(arn) template = self.response_template(LIST_TAGS_FOR_RESOURCE_TEMPLATE) return template.render(tags=result) def tag_resource(self): arn = self._get_param("ResourceArn") tags = self._get_tags() self.backend.tag_resource(arn, tags) return self.response_template(TAG_RESOURCE_TEMPLATE).render() def untag_resource(self): arn = self._get_param("ResourceArn") tag_keys = self._get_multi_param("TagKeys.member") self.backend.untag_resource(arn, tag_keys) return self.response_template(UNTAG_RESOURCE_TEMPLATE).render() CREATE_TOPIC_TEMPLATE = """<CreateTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CreateTopicResult> <TopicArn>{{ topic.arn }}</TopicArn> </CreateTopicResult> <ResponseMetadata> <RequestId>a8dec8b3-33a4-11df-8963-01868b7c937a</RequestId> 
</ResponseMetadata> </CreateTopicResponse>""" LIST_TOPICS_TEMPLATE = """<ListTopicsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListTopicsResult> <Topics> {% for topic in topics %} <member> <TopicArn>{{ topic.arn }}</TopicArn> </member> {% endfor %} </Topics> {% if next_token %} <NextToken>{{ next_token }}</NextToken> {% endif %} </ListTopicsResult> <ResponseMetadata> <RequestId>3f1478c7-33a9-11df-9540-99d0768312d3</RequestId> </ResponseMetadata> </ListTopicsResponse>""" DELETE_TOPIC_TEMPLATE = """<DeleteTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>f3aa9ac9-3c3d-11df-8235-9dab105e9c32</RequestId> </ResponseMetadata> </DeleteTopicResponse>""" GET_TOPIC_ATTRIBUTES_TEMPLATE = """<GetTopicAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetTopicAttributesResult> <Attributes> <entry> <key>Owner</key> <value>{{ topic.account_id }}</value> </entry> <entry> <key>Policy</key> <value>{{ topic.policy }}</value> </entry> <entry> <key>TopicArn</key> <value>{{ topic.arn }}</value> </entry> <entry> <key>DisplayName</key> <value>{{ topic.display_name }}</value> </entry> <entry> <key>SubscriptionsPending</key> <value>{{ topic.subscriptions_pending }}</value> </entry> <entry> <key>SubscriptionsConfirmed</key> <value>{{ topic.subscriptions_confimed }}</value> </entry> <entry> <key>SubscriptionsDeleted</key> <value>{{ topic.subscriptions_deleted }}</value> </entry> <entry> <key>DeliveryPolicy</key> <value>{{ topic.delivery_policy }}</value> </entry> <entry> <key>EffectiveDeliveryPolicy</key> <value>{{ topic.effective_delivery_policy }}</value> </entry> {% if topic.kms_master_key_id %} <entry> <key>KmsMasterKeyId</key> <value>{{ topic.kms_master_key_id }}</value> </entry> {% endif %} </Attributes> </GetTopicAttributesResult> <ResponseMetadata> <RequestId>057f074c-33a7-11df-9540-99d0768312d3</RequestId> </ResponseMetadata> </GetTopicAttributesResponse>""" SET_TOPIC_ATTRIBUTES_TEMPLATE = 
"""<SetTopicAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>a8763b99-33a7-11df-a9b7-05d48da6f042</RequestId> </ResponseMetadata> </SetTopicAttributesResponse>""" CREATE_PLATFORM_APPLICATION_TEMPLATE = """<CreatePlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CreatePlatformApplicationResult> <PlatformApplicationArn>{{ platform_application.arn }}</PlatformApplicationArn> </CreatePlatformApplicationResult> <ResponseMetadata> <RequestId>b6f0e78b-e9d4-5a0e-b973-adc04e8a4ff9</RequestId> </ResponseMetadata> </CreatePlatformApplicationResponse>""" CREATE_PLATFORM_ENDPOINT_TEMPLATE = """<CreatePlatformEndpointResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CreatePlatformEndpointResult> <EndpointArn>{{ platform_endpoint.arn }}</EndpointArn> </CreatePlatformEndpointResult> <ResponseMetadata> <RequestId>6613341d-3e15-53f7-bf3c-7e56994ba278</RequestId> </ResponseMetadata> </CreatePlatformEndpointResponse>""" LIST_PLATFORM_APPLICATIONS_TEMPLATE = """<ListPlatformApplicationsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListPlatformApplicationsResult> <PlatformApplications> {% for application in applications %} <member> <PlatformApplicationArn>{{ application.arn }}</PlatformApplicationArn> <Attributes> {% for attribute in application.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ application.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </member> {% endfor %} </PlatformApplications> </ListPlatformApplicationsResult> <ResponseMetadata> <RequestId>315a335e-85d8-52df-9349-791283cbb529</RequestId> </ResponseMetadata> </ListPlatformApplicationsResponse>""" DELETE_PLATFORM_APPLICATION_TEMPLATE = """<DeletePlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>097dac18-7a77-5823-a8dd-e65476dcb037</RequestId> </ResponseMetadata> </DeletePlatformApplicationResponse>""" 
GET_ENDPOINT_ATTRIBUTES_TEMPLATE = """<GetEndpointAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetEndpointAttributesResult> <Attributes> {% for attribute in endpoint.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ endpoint.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </GetEndpointAttributesResult> <ResponseMetadata> <RequestId>6c725a19-a142-5b77-94f9-1055a9ea04e7</RequestId> </ResponseMetadata> </GetEndpointAttributesResponse>""" LIST_ENDPOINTS_BY_PLATFORM_APPLICATION_TEMPLATE = """<ListEndpointsByPlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListEndpointsByPlatformApplicationResult> <Endpoints> {% for endpoint in endpoints %} <member> <EndpointArn>{{ endpoint.arn }}</EndpointArn> <Attributes> {% for attribute in endpoint.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ endpoint.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </member> {% endfor %} </Endpoints> </ListEndpointsByPlatformApplicationResult> <ResponseMetadata> <RequestId>9a48768c-dac8-5a60-aec0-3cc27ea08d96</RequestId> </ResponseMetadata> </ListEndpointsByPlatformApplicationResponse>""" GET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE = """<GetPlatformApplicationAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetPlatformApplicationAttributesResult> <Attributes> {% for attribute in application.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ application.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </GetPlatformApplicationAttributesResult> <ResponseMetadata> <RequestId>74848df2-87f6-55ed-890c-c7be80442462</RequestId> </ResponseMetadata> </GetPlatformApplicationAttributesResponse>""" PUBLISH_TEMPLATE = """<PublishResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <PublishResult> <MessageId>{{ message_id }}</MessageId> </PublishResult> <ResponseMetadata> <RequestId>f187a3c1-376f-11df-8963-01868b7c937a</RequestId> 
</ResponseMetadata> </PublishResponse>""" SET_ENDPOINT_ATTRIBUTES_TEMPLATE = """<SetEndpointAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>2fe0bfc7-3e85-5ee5-a9e2-f58b35e85f6a</RequestId> </ResponseMetadata> </SetEndpointAttributesResponse>""" DELETE_ENDPOINT_TEMPLATE = """<DeleteEndpointResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>c1d2b191-353c-5a5f-8969-fbdd3900afa8</RequestId> </ResponseMetadata> </DeleteEndpointResponse>""" SET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE = """<SetPlatformApplicationAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>cf577bcc-b3dc-5463-88f1-3180b9412395</RequestId> </ResponseMetadata> </SetPlatformApplicationAttributesResponse>""" SUBSCRIBE_TEMPLATE = """<SubscribeResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <SubscribeResult> <SubscriptionArn>{{ subscription.arn }}</SubscriptionArn> </SubscribeResult> <ResponseMetadata> <RequestId>c4407779-24a4-56fa-982c-3d927f93a775</RequestId> </ResponseMetadata> </SubscribeResponse>""" UNSUBSCRIBE_TEMPLATE = """<UnsubscribeResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>18e0ac39-3776-11df-84c0-b93cc1666b84</RequestId> </ResponseMetadata> </UnsubscribeResponse>""" LIST_SUBSCRIPTIONS_TEMPLATE = """<ListSubscriptionsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListSubscriptionsResult> <Subscriptions> {% for subscription in subscriptions %} <member> <TopicArn>{{ subscription.topic.arn }}</TopicArn> <Protocol>{{ subscription.protocol }}</Protocol> <SubscriptionArn>{{ subscription.arn }}</SubscriptionArn> <Owner>{{ subscription.account_id }}</Owner> <Endpoint>{{ subscription.endpoint }}</Endpoint> </member> {% endfor %} </Subscriptions> {% if next_token %} <NextToken>{{ next_token }}</NextToken> {% endif %} </ListSubscriptionsResult> <ResponseMetadata> 
<RequestId>384ac68d-3775-11df-8963-01868b7c937a</RequestId> </ResponseMetadata> </ListSubscriptionsResponse>""" LIST_SUBSCRIPTIONS_BY_TOPIC_TEMPLATE = """<ListSubscriptionsByTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListSubscriptionsByTopicResult> <Subscriptions> {% for subscription in subscriptions %} <member> <TopicArn>{{ subscription.topic.arn }}</TopicArn> <Protocol>{{ subscription.protocol }}</Protocol> <SubscriptionArn>{{ subscription.arn }}</SubscriptionArn> <Owner>{{ subscription.account_id }}</Owner> <Endpoint>{{ subscription.endpoint }}</Endpoint> </member> {% endfor %} </Subscriptions> {% if next_token %} <NextToken>{{ next_token }}</NextToken> {% endif %} </ListSubscriptionsByTopicResult> <ResponseMetadata> <RequestId>384ac68d-3775-11df-8963-01868b7c937a</RequestId> </ResponseMetadata> </ListSubscriptionsByTopicResponse>""" # Not responding aws system attribetus like 'Owner' and 'SubscriptionArn' GET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE = """<GetSubscriptionAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetSubscriptionAttributesResult> <Attributes> {% for name, value in attributes.items() %} <entry> <key>{{ name }}</key> <value>{{ value }}</value> </entry> {% endfor %} </Attributes> </GetSubscriptionAttributesResult> <ResponseMetadata> <RequestId>057f074c-33a7-11df-9540-99d0768312d3</RequestId> </ResponseMetadata> </GetSubscriptionAttributesResponse>""" SET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE = """<SetSubscriptionAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>a8763b99-33a7-11df-a9b7-05d48da6f042</RequestId> </ResponseMetadata> </SetSubscriptionAttributesResponse>""" SET_SMS_ATTRIBUTES_TEMPLATE = """<SetSMSAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <SetSMSAttributesResult/> <ResponseMetadata> <RequestId>26332069-c04a-5428-b829-72524b56a364</RequestId> </ResponseMetadata> </SetSMSAttributesResponse>""" GET_SMS_ATTRIBUTES_TEMPLATE = 
"""<GetSMSAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetSMSAttributesResult> <attributes> {% for name, value in attributes.items() %} <entry> <key>{{ name }}</key> <value>{{ value }}</value> </entry> {% endfor %} </attributes> </GetSMSAttributesResult> <ResponseMetadata> <RequestId>287f9554-8db3-5e66-8abc-c76f0186db7e</RequestId> </ResponseMetadata> </GetSMSAttributesResponse>""" CHECK_IF_OPTED_OUT_TEMPLATE = """<CheckIfPhoneNumberIsOptedOutResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CheckIfPhoneNumberIsOptedOutResult> <isOptedOut>{{ opt_out }}</isOptedOut> </CheckIfPhoneNumberIsOptedOutResult> <ResponseMetadata> <RequestId>287f9554-8db3-5e66-8abc-c76f0186db7e</RequestId> </ResponseMetadata> </CheckIfPhoneNumberIsOptedOutResponse>""" ERROR_RESPONSE = """<ErrorResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <Error> <Type>{{ sender }}</Type> <Code>{{ code }}</Code> <Message>{{ message }}</Message> </Error> <RequestId>9dd01905-5012-5f99-8663-4b3ecd0dfaef</RequestId> </ErrorResponse>""" LIST_OPTOUT_TEMPLATE = """<ListPhoneNumbersOptedOutResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListPhoneNumbersOptedOutResult> <phoneNumbers> {% for item in opt_outs %} <member>{{ item }}</member> {% endfor %} </phoneNumbers> </ListPhoneNumbersOptedOutResult> <ResponseMetadata> <RequestId>985e196d-a237-51b6-b33a-4b5601276b38</RequestId> </ResponseMetadata> </ListPhoneNumbersOptedOutResponse>""" OPT_IN_NUMBER_TEMPLATE = """<OptInPhoneNumberResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <OptInPhoneNumberResult/> <ResponseMetadata> <RequestId>4c61842c-0796-50ef-95ac-d610c0bc8cf8</RequestId> </ResponseMetadata> </OptInPhoneNumberResponse>""" ADD_PERMISSION_TEMPLATE = """<AddPermissionResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>c046e713-c5ff-5888-a7bc-b52f0e4f1299</RequestId> </ResponseMetadata> </AddPermissionResponse>""" DEL_PERMISSION_TEMPLATE = 
"""<RemovePermissionResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>e767cc9f-314b-5e1b-b283-9ea3fd4e38a3</RequestId> </ResponseMetadata> </RemovePermissionResponse>""" CONFIRM_SUBSCRIPTION_TEMPLATE = """<ConfirmSubscriptionResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ConfirmSubscriptionResult> <SubscriptionArn>{{ sub_arn }}</SubscriptionArn> </ConfirmSubscriptionResult> <ResponseMetadata> <RequestId>16eb4dde-7b3c-5b3e-a22a-1fe2a92d3293</RequestId> </ResponseMetadata> </ConfirmSubscriptionResponse>""" LIST_TAGS_FOR_RESOURCE_TEMPLATE = """<ListTagsForResourceResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListTagsForResourceResult> <Tags> {% for name, value in tags.items() %} <member> <Key>{{ name }}</Key> <Value>{{ value }}</Value> </member> {% endfor %} </Tags> </ListTagsForResourceResult> <ResponseMetadata> <RequestId>97fa763f-861b-5223-a946-20251f2a42e2</RequestId> </ResponseMetadata> </ListTagsForResourceResponse>""" TAG_RESOURCE_TEMPLATE = """<TagResourceResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <TagResourceResult/> <ResponseMetadata> <RequestId>fd4ab1da-692f-50a7-95ad-e7c665877d98</RequestId> </ResponseMetadata> </TagResourceResponse>""" UNTAG_RESOURCE_TEMPLATE = """<UntagResourceResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <UntagResourceResult/> <ResponseMetadata> <RequestId>14eb7b1a-4cbd-5a56-80db-2d06412df769</RequestId> </ResponseMetadata> </UntagResourceResponse>"""
37.163837
146
0.595856
from __future__ import unicode_literals import json import re from collections import defaultdict from moto.core.responses import BaseResponse from moto.core.utils import camelcase_to_underscores from .models import sns_backends from .exceptions import InvalidParameterValue, SNSNotFoundError from .utils import is_e164 class SNSResponse(BaseResponse): SMS_ATTR_REGEX = re.compile( r"^attributes\.entry\.(?P<index>\d+)\.(?P<type>key|value)$" ) OPT_OUT_PHONE_NUMBER_REGEX = re.compile(r"^\+?\d+$") @property def backend(self): return sns_backends[self.region] def _error(self, code, message, sender="Sender"): template = self.response_template(ERROR_RESPONSE) return template.render(code=code, message=message, sender=sender) def _get_attributes(self): attributes = self._get_list_prefix("Attributes.entry") return dict((attribute["key"], attribute["value"]) for attribute in attributes) def _get_tags(self): tags = self._get_list_prefix("Tags.member") return {tag["key"]: tag["value"] for tag in tags} def _parse_message_attributes(self, prefix="", value_namespace="Value."): message_attributes = self._get_object_map( "MessageAttributes.entry", name="Name", value="Value" ) transformed_message_attributes = {} for name, value in message_attributes.items(): data_type = value["DataType"] if not data_type: raise InvalidParameterValue( "The message attribute '{0}' must contain non-empty " "message attribute value.".format(name) ) data_type_parts = data_type.split(".") if len(data_type_parts) > 2 or data_type_parts[0] not in [ "String", "Binary", "Number", ]: raise InvalidParameterValue( "The message attribute '{0}' has an invalid message " "attribute type, the set of supported type prefixes is " "Binary, Number, and String.".format(name) ) transform_value = None if "StringValue" in value: if data_type == "Number": try: transform_value = float(value["StringValue"]) except ValueError: raise InvalidParameterValue( "An error occurred (ParameterValueInvalid) " "when calling the Publish 
operation: " "Could not cast message attribute '{0}' value to number.".format( name ) ) else: transform_value = value["StringValue"] elif "BinaryValue" in value: transform_value = value["BinaryValue"] if transform_value == "": raise InvalidParameterValue( "The message attribute '{0}' must contain non-empty " "message attribute value for message attribute " "type '{1}'.".format(name, data_type[0]) ) transformed_message_attributes[name] = { "Type": data_type, "Value": transform_value, } return transformed_message_attributes def create_topic(self): name = self._get_param("Name") attributes = self._get_attributes() tags = self._get_tags() topic = self.backend.create_topic(name, attributes, tags) if self.request_json: return json.dumps( { "CreateTopicResponse": { "CreateTopicResult": {"TopicArn": topic.arn}, "ResponseMetadata": { "RequestId": "a8dec8b3-33a4-11df-8963-01868b7c937a" }, } } ) template = self.response_template(CREATE_TOPIC_TEMPLATE) return template.render(topic=topic) def list_topics(self): next_token = self._get_param("NextToken") topics, next_token = self.backend.list_topics(next_token=next_token) if self.request_json: return json.dumps( { "ListTopicsResponse": { "ListTopicsResult": { "Topics": [{"TopicArn": topic.arn} for topic in topics], "NextToken": next_token, } }, "ResponseMetadata": { "RequestId": "a8dec8b3-33a4-11df-8963-01868b7c937a" }, } ) template = self.response_template(LIST_TOPICS_TEMPLATE) return template.render(topics=topics, next_token=next_token) def delete_topic(self): topic_arn = self._get_param("TopicArn") self.backend.delete_topic(topic_arn) if self.request_json: return json.dumps( { "DeleteTopicResponse": { "ResponseMetadata": { "RequestId": "a8dec8b3-33a4-11df-8963-01868b7c937a" } } } ) template = self.response_template(DELETE_TOPIC_TEMPLATE) return template.render() def get_topic_attributes(self): topic_arn = self._get_param("TopicArn") topic = self.backend.get_topic(topic_arn) if self.request_json: attributes = { "Owner": 
topic.account_id, "Policy": topic.policy, "TopicArn": topic.arn, "DisplayName": topic.display_name, "SubscriptionsPending": topic.subscriptions_pending, "SubscriptionsConfirmed": topic.subscriptions_confimed, "SubscriptionsDeleted": topic.subscriptions_deleted, "DeliveryPolicy": topic.delivery_policy, "EffectiveDeliveryPolicy": topic.effective_delivery_policy, } if topic.kms_master_key_id: attributes["KmsMasterKeyId"] = topic.kms_master_key_id response = { "GetTopicAttributesResponse": { "GetTopicAttributesResult": {"Attributes": attributes}, "ResponseMetadata": { "RequestId": "057f074c-33a7-11df-9540-99d0768312d3" }, } } return json.dumps(response) template = self.response_template(GET_TOPIC_ATTRIBUTES_TEMPLATE) return template.render(topic=topic) def set_topic_attributes(self): topic_arn = self._get_param("TopicArn") attribute_name = self._get_param("AttributeName") attribute_name = camelcase_to_underscores(attribute_name) attribute_value = self._get_param("AttributeValue") self.backend.set_topic_attribute(topic_arn, attribute_name, attribute_value) if self.request_json: return json.dumps( { "SetTopicAttributesResponse": { "ResponseMetadata": { "RequestId": "a8763b99-33a7-11df-a9b7-05d48da6f042" } } } ) template = self.response_template(SET_TOPIC_ATTRIBUTES_TEMPLATE) return template.render() def subscribe(self): topic_arn = self._get_param("TopicArn") endpoint = self._get_param("Endpoint") protocol = self._get_param("Protocol") attributes = self._get_attributes() subscription = self.backend.subscribe(topic_arn, endpoint, protocol) if attributes is not None: for attr_name, attr_value in attributes.items(): self.backend.set_subscription_attributes( subscription.arn, attr_name, attr_value ) if self.request_json: return json.dumps( { "SubscribeResponse": { "SubscribeResult": {"SubscriptionArn": subscription.arn}, "ResponseMetadata": { "RequestId": "a8763b99-33a7-11df-a9b7-05d48da6f042" }, } } ) template = self.response_template(SUBSCRIBE_TEMPLATE) return 
template.render(subscription=subscription) def unsubscribe(self): subscription_arn = self._get_param("SubscriptionArn") self.backend.unsubscribe(subscription_arn) if self.request_json: return json.dumps( { "UnsubscribeResponse": { "ResponseMetadata": { "RequestId": "a8763b99-33a7-11df-a9b7-05d48da6f042" } } } ) template = self.response_template(UNSUBSCRIBE_TEMPLATE) return template.render() def list_subscriptions(self): next_token = self._get_param("NextToken") subscriptions, next_token = self.backend.list_subscriptions( next_token=next_token ) if self.request_json: return json.dumps( { "ListSubscriptionsResponse": { "ListSubscriptionsResult": { "Subscriptions": [ { "TopicArn": subscription.topic.arn, "Protocol": subscription.protocol, "SubscriptionArn": subscription.arn, "Owner": subscription.topic.account_id, "Endpoint": subscription.endpoint, } for subscription in subscriptions ], "NextToken": next_token, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template(LIST_SUBSCRIPTIONS_TEMPLATE) return template.render(subscriptions=subscriptions, next_token=next_token) def list_subscriptions_by_topic(self): topic_arn = self._get_param("TopicArn") next_token = self._get_param("NextToken") subscriptions, next_token = self.backend.list_subscriptions( topic_arn, next_token=next_token ) if self.request_json: return json.dumps( { "ListSubscriptionsByTopicResponse": { "ListSubscriptionsByTopicResult": { "Subscriptions": [ { "TopicArn": subscription.topic.arn, "Protocol": subscription.protocol, "SubscriptionArn": subscription.arn, "Owner": subscription.topic.account_id, "Endpoint": subscription.endpoint, } for subscription in subscriptions ], "NextToken": next_token, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template(LIST_SUBSCRIPTIONS_BY_TOPIC_TEMPLATE) return template.render(subscriptions=subscriptions, next_token=next_token) def publish(self): 
target_arn = self._get_param("TargetArn") topic_arn = self._get_param("TopicArn") phone_number = self._get_param("PhoneNumber") subject = self._get_param("Subject") message_attributes = self._parse_message_attributes() arn = None if phone_number is not None: if not is_e164(phone_number): return ( self._error( "InvalidParameter", "Phone number does not meet the E164 format" ), dict(status=400), ) elif target_arn is not None: arn = target_arn else: arn = topic_arn message = self._get_param("Message") try: message_id = self.backend.publish( message, arn=arn, phone_number=phone_number, subject=subject, message_attributes=message_attributes, ) except ValueError as err: error_response = self._error("InvalidParameter", str(err)) return error_response, dict(status=400) if self.request_json: return json.dumps( { "PublishResponse": { "PublishResult": {"MessageId": message_id}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template(PUBLISH_TEMPLATE) return template.render(message_id=message_id) def create_platform_application(self): name = self._get_param("Name") platform = self._get_param("Platform") attributes = self._get_attributes() platform_application = self.backend.create_platform_application( self.region, name, platform, attributes ) if self.request_json: return json.dumps( { "CreatePlatformApplicationResponse": { "CreatePlatformApplicationResult": { "PlatformApplicationArn": platform_application.arn }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937b" }, } } ) template = self.response_template(CREATE_PLATFORM_APPLICATION_TEMPLATE) return template.render(platform_application=platform_application) def get_platform_application_attributes(self): arn = self._get_param("PlatformApplicationArn") application = self.backend.get_application(arn) if self.request_json: return json.dumps( { "GetPlatformApplicationAttributesResponse": { "GetPlatformApplicationAttributesResult": { "Attributes": 
application.attributes }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937f" }, } } ) template = self.response_template(GET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE) return template.render(application=application) def set_platform_application_attributes(self): arn = self._get_param("PlatformApplicationArn") attributes = self._get_attributes() self.backend.set_application_attributes(arn, attributes) if self.request_json: return json.dumps( { "SetPlatformApplicationAttributesResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-12df-8963-01868b7c937f" } } } ) template = self.response_template(SET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE) return template.render() def list_platform_applications(self): applications = self.backend.list_platform_applications() if self.request_json: return json.dumps( { "ListPlatformApplicationsResponse": { "ListPlatformApplicationsResult": { "PlatformApplications": [ { "PlatformApplicationArn": application.arn, "attributes": application.attributes, } for application in applications ], "NextToken": None, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937c" }, } } ) template = self.response_template(LIST_PLATFORM_APPLICATIONS_TEMPLATE) return template.render(applications=applications) def delete_platform_application(self): platform_arn = self._get_param("PlatformApplicationArn") self.backend.delete_platform_application(platform_arn) if self.request_json: return json.dumps( { "DeletePlatformApplicationResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937e" } } } ) template = self.response_template(DELETE_PLATFORM_APPLICATION_TEMPLATE) return template.render() def create_platform_endpoint(self): application_arn = self._get_param("PlatformApplicationArn") application = self.backend.get_application(application_arn) custom_user_data = self._get_param("CustomUserData") token = self._get_param("Token") attributes = self._get_attributes() platform_endpoint = 
self.backend.create_platform_endpoint( self.region, application, custom_user_data, token, attributes ) if self.request_json: return json.dumps( { "CreatePlatformEndpointResponse": { "CreatePlatformEndpointResult": { "EndpointArn": platform_endpoint.arn }, "ResponseMetadata": { "RequestId": "384ac68d-3779-11df-8963-01868b7c937b" }, } } ) template = self.response_template(CREATE_PLATFORM_ENDPOINT_TEMPLATE) return template.render(platform_endpoint=platform_endpoint) def list_endpoints_by_platform_application(self): application_arn = self._get_param("PlatformApplicationArn") endpoints = self.backend.list_endpoints_by_platform_application(application_arn) if self.request_json: return json.dumps( { "ListEndpointsByPlatformApplicationResponse": { "ListEndpointsByPlatformApplicationResult": { "Endpoints": [ { "Attributes": endpoint.attributes, "EndpointArn": endpoint.arn, } for endpoint in endpoints ], "NextToken": None, }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) template = self.response_template( LIST_ENDPOINTS_BY_PLATFORM_APPLICATION_TEMPLATE ) return template.render(endpoints=endpoints) def get_endpoint_attributes(self): arn = self._get_param("EndpointArn") try: endpoint = self.backend.get_endpoint(arn) if self.request_json: return json.dumps( { "GetEndpointAttributesResponse": { "GetEndpointAttributesResult": { "Attributes": endpoint.attributes }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937f" }, } } ) template = self.response_template(GET_ENDPOINT_ATTRIBUTES_TEMPLATE) return template.render(endpoint=endpoint) except SNSNotFoundError: error_response = self._error("NotFound", "Endpoint does not exist") return error_response, dict(status=404) def set_endpoint_attributes(self): arn = self._get_param("EndpointArn") attributes = self._get_attributes() self.backend.set_endpoint_attributes(arn, attributes) if self.request_json: return json.dumps( { "SetEndpointAttributesResponse": { "ResponseMetadata": { 
"RequestId": "384bc68d-3775-12df-8963-01868b7c937f" } } } ) template = self.response_template(SET_ENDPOINT_ATTRIBUTES_TEMPLATE) return template.render() def delete_endpoint(self): arn = self._get_param("EndpointArn") self.backend.delete_endpoint(arn) if self.request_json: return json.dumps( { "DeleteEndpointResponse": { "ResponseMetadata": { "RequestId": "384bc68d-3775-12df-8963-01868b7c937f" } } } ) template = self.response_template(DELETE_ENDPOINT_TEMPLATE) return template.render() def get_subscription_attributes(self): arn = self._get_param("SubscriptionArn") attributes = self.backend.get_subscription_attributes(arn) template = self.response_template(GET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE) return template.render(attributes=attributes) def set_subscription_attributes(self): arn = self._get_param("SubscriptionArn") attr_name = self._get_param("AttributeName") attr_value = self._get_param("AttributeValue") self.backend.set_subscription_attributes(arn, attr_name, attr_value) template = self.response_template(SET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE) return template.render() def set_sms_attributes(self): temp_dict = defaultdict(dict) for key, value in self.querystring.items(): match = self.SMS_ATTR_REGEX.match(key) if match is not None: temp_dict[match.group("index")][match.group("type")] = value[0] result = {} for item in temp_dict.values(): if "key" in item and "value" in item: result[item["key"]] = item["value"] self.backend.update_sms_attributes(result) template = self.response_template(SET_SMS_ATTRIBUTES_TEMPLATE) return template.render() def get_sms_attributes(self): filter_list = set() for key, value in self.querystring.items(): if key.startswith("attributes.member.1"): filter_list.add(value[0]) if len(filter_list) > 0: result = { k: v for k, v in self.backend.sms_attributes.items() if k in filter_list } else: result = self.backend.sms_attributes template = self.response_template(GET_SMS_ATTRIBUTES_TEMPLATE) return template.render(attributes=result) def 
check_if_phone_number_is_opted_out(self): number = self._get_param("phoneNumber") if self.OPT_OUT_PHONE_NUMBER_REGEX.match(number) is None: error_response = self._error( code="InvalidParameter", message="Invalid parameter: PhoneNumber Reason: input incorrectly formatted", ) return error_response, dict(status=400) template = self.response_template(CHECK_IF_OPTED_OUT_TEMPLATE) return template.render(opt_out=str(number.endswith("99")).lower()) def list_phone_numbers_opted_out(self): template = self.response_template(LIST_OPTOUT_TEMPLATE) return template.render(opt_outs=self.backend.opt_out_numbers) def opt_in_phone_number(self): number = self._get_param("phoneNumber") try: self.backend.opt_out_numbers.remove(number) except ValueError: pass template = self.response_template(OPT_IN_NUMBER_TEMPLATE) return template.render() def add_permission(self): topic_arn = self._get_param("TopicArn") label = self._get_param("Label") aws_account_ids = self._get_multi_param("AWSAccountId.member.") action_names = self._get_multi_param("ActionName.member.") self.backend.add_permission(topic_arn, label, aws_account_ids, action_names) template = self.response_template(ADD_PERMISSION_TEMPLATE) return template.render() def remove_permission(self): topic_arn = self._get_param("TopicArn") label = self._get_param("Label") self.backend.remove_permission(topic_arn, label) template = self.response_template(DEL_PERMISSION_TEMPLATE) return template.render() def confirm_subscription(self): arn = self._get_param("TopicArn") if arn not in self.backend.topics: error_response = self._error("NotFound", "Topic does not exist") return error_response, dict(status=404) template = self.response_template(CONFIRM_SUBSCRIPTION_TEMPLATE) return template.render( sub_arn="{0}:68762e72-e9b1-410a-8b3b-903da69ee1d5".format(arn) ) def list_tags_for_resource(self): arn = self._get_param("ResourceArn") result = self.backend.list_tags_for_resource(arn) template = self.response_template(LIST_TAGS_FOR_RESOURCE_TEMPLATE) 
return template.render(tags=result) def tag_resource(self): arn = self._get_param("ResourceArn") tags = self._get_tags() self.backend.tag_resource(arn, tags) return self.response_template(TAG_RESOURCE_TEMPLATE).render() def untag_resource(self): arn = self._get_param("ResourceArn") tag_keys = self._get_multi_param("TagKeys.member") self.backend.untag_resource(arn, tag_keys) return self.response_template(UNTAG_RESOURCE_TEMPLATE).render() CREATE_TOPIC_TEMPLATE = """<CreateTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CreateTopicResult> <TopicArn>{{ topic.arn }}</TopicArn> </CreateTopicResult> <ResponseMetadata> <RequestId>a8dec8b3-33a4-11df-8963-01868b7c937a</RequestId> </ResponseMetadata> </CreateTopicResponse>""" LIST_TOPICS_TEMPLATE = """<ListTopicsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListTopicsResult> <Topics> {% for topic in topics %} <member> <TopicArn>{{ topic.arn }}</TopicArn> </member> {% endfor %} </Topics> {% if next_token %} <NextToken>{{ next_token }}</NextToken> {% endif %} </ListTopicsResult> <ResponseMetadata> <RequestId>3f1478c7-33a9-11df-9540-99d0768312d3</RequestId> </ResponseMetadata> </ListTopicsResponse>""" DELETE_TOPIC_TEMPLATE = """<DeleteTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>f3aa9ac9-3c3d-11df-8235-9dab105e9c32</RequestId> </ResponseMetadata> </DeleteTopicResponse>""" GET_TOPIC_ATTRIBUTES_TEMPLATE = """<GetTopicAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetTopicAttributesResult> <Attributes> <entry> <key>Owner</key> <value>{{ topic.account_id }}</value> </entry> <entry> <key>Policy</key> <value>{{ topic.policy }}</value> </entry> <entry> <key>TopicArn</key> <value>{{ topic.arn }}</value> </entry> <entry> <key>DisplayName</key> <value>{{ topic.display_name }}</value> </entry> <entry> <key>SubscriptionsPending</key> <value>{{ topic.subscriptions_pending }}</value> </entry> <entry> <key>SubscriptionsConfirmed</key> 
<value>{{ topic.subscriptions_confimed }}</value> </entry> <entry> <key>SubscriptionsDeleted</key> <value>{{ topic.subscriptions_deleted }}</value> </entry> <entry> <key>DeliveryPolicy</key> <value>{{ topic.delivery_policy }}</value> </entry> <entry> <key>EffectiveDeliveryPolicy</key> <value>{{ topic.effective_delivery_policy }}</value> </entry> {% if topic.kms_master_key_id %} <entry> <key>KmsMasterKeyId</key> <value>{{ topic.kms_master_key_id }}</value> </entry> {% endif %} </Attributes> </GetTopicAttributesResult> <ResponseMetadata> <RequestId>057f074c-33a7-11df-9540-99d0768312d3</RequestId> </ResponseMetadata> </GetTopicAttributesResponse>""" SET_TOPIC_ATTRIBUTES_TEMPLATE = """<SetTopicAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>a8763b99-33a7-11df-a9b7-05d48da6f042</RequestId> </ResponseMetadata> </SetTopicAttributesResponse>""" CREATE_PLATFORM_APPLICATION_TEMPLATE = """<CreatePlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CreatePlatformApplicationResult> <PlatformApplicationArn>{{ platform_application.arn }}</PlatformApplicationArn> </CreatePlatformApplicationResult> <ResponseMetadata> <RequestId>b6f0e78b-e9d4-5a0e-b973-adc04e8a4ff9</RequestId> </ResponseMetadata> </CreatePlatformApplicationResponse>""" CREATE_PLATFORM_ENDPOINT_TEMPLATE = """<CreatePlatformEndpointResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CreatePlatformEndpointResult> <EndpointArn>{{ platform_endpoint.arn }}</EndpointArn> </CreatePlatformEndpointResult> <ResponseMetadata> <RequestId>6613341d-3e15-53f7-bf3c-7e56994ba278</RequestId> </ResponseMetadata> </CreatePlatformEndpointResponse>""" LIST_PLATFORM_APPLICATIONS_TEMPLATE = """<ListPlatformApplicationsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListPlatformApplicationsResult> <PlatformApplications> {% for application in applications %} <member> <PlatformApplicationArn>{{ application.arn }}</PlatformApplicationArn> 
<Attributes> {% for attribute in application.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ application.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </member> {% endfor %} </PlatformApplications> </ListPlatformApplicationsResult> <ResponseMetadata> <RequestId>315a335e-85d8-52df-9349-791283cbb529</RequestId> </ResponseMetadata> </ListPlatformApplicationsResponse>""" DELETE_PLATFORM_APPLICATION_TEMPLATE = """<DeletePlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>097dac18-7a77-5823-a8dd-e65476dcb037</RequestId> </ResponseMetadata> </DeletePlatformApplicationResponse>""" GET_ENDPOINT_ATTRIBUTES_TEMPLATE = """<GetEndpointAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetEndpointAttributesResult> <Attributes> {% for attribute in endpoint.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ endpoint.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </GetEndpointAttributesResult> <ResponseMetadata> <RequestId>6c725a19-a142-5b77-94f9-1055a9ea04e7</RequestId> </ResponseMetadata> </GetEndpointAttributesResponse>""" LIST_ENDPOINTS_BY_PLATFORM_APPLICATION_TEMPLATE = """<ListEndpointsByPlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListEndpointsByPlatformApplicationResult> <Endpoints> {% for endpoint in endpoints %} <member> <EndpointArn>{{ endpoint.arn }}</EndpointArn> <Attributes> {% for attribute in endpoint.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ endpoint.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </member> {% endfor %} </Endpoints> </ListEndpointsByPlatformApplicationResult> <ResponseMetadata> <RequestId>9a48768c-dac8-5a60-aec0-3cc27ea08d96</RequestId> </ResponseMetadata> </ListEndpointsByPlatformApplicationResponse>""" GET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE = """<GetPlatformApplicationAttributesResponse 
xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetPlatformApplicationAttributesResult> <Attributes> {% for attribute in application.attributes %} <entry> <key>{{ attribute }}</key> <value>{{ application.attributes[attribute] }}</value> </entry> {% endfor %} </Attributes> </GetPlatformApplicationAttributesResult> <ResponseMetadata> <RequestId>74848df2-87f6-55ed-890c-c7be80442462</RequestId> </ResponseMetadata> </GetPlatformApplicationAttributesResponse>""" PUBLISH_TEMPLATE = """<PublishResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <PublishResult> <MessageId>{{ message_id }}</MessageId> </PublishResult> <ResponseMetadata> <RequestId>f187a3c1-376f-11df-8963-01868b7c937a</RequestId> </ResponseMetadata> </PublishResponse>""" SET_ENDPOINT_ATTRIBUTES_TEMPLATE = """<SetEndpointAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>2fe0bfc7-3e85-5ee5-a9e2-f58b35e85f6a</RequestId> </ResponseMetadata> </SetEndpointAttributesResponse>""" DELETE_ENDPOINT_TEMPLATE = """<DeleteEndpointResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>c1d2b191-353c-5a5f-8969-fbdd3900afa8</RequestId> </ResponseMetadata> </DeleteEndpointResponse>""" SET_PLATFORM_APPLICATION_ATTRIBUTES_TEMPLATE = """<SetPlatformApplicationAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>cf577bcc-b3dc-5463-88f1-3180b9412395</RequestId> </ResponseMetadata> </SetPlatformApplicationAttributesResponse>""" SUBSCRIBE_TEMPLATE = """<SubscribeResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <SubscribeResult> <SubscriptionArn>{{ subscription.arn }}</SubscriptionArn> </SubscribeResult> <ResponseMetadata> <RequestId>c4407779-24a4-56fa-982c-3d927f93a775</RequestId> </ResponseMetadata> </SubscribeResponse>""" UNSUBSCRIBE_TEMPLATE = """<UnsubscribeResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> 
<RequestId>18e0ac39-3776-11df-84c0-b93cc1666b84</RequestId> </ResponseMetadata> </UnsubscribeResponse>""" LIST_SUBSCRIPTIONS_TEMPLATE = """<ListSubscriptionsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListSubscriptionsResult> <Subscriptions> {% for subscription in subscriptions %} <member> <TopicArn>{{ subscription.topic.arn }}</TopicArn> <Protocol>{{ subscription.protocol }}</Protocol> <SubscriptionArn>{{ subscription.arn }}</SubscriptionArn> <Owner>{{ subscription.account_id }}</Owner> <Endpoint>{{ subscription.endpoint }}</Endpoint> </member> {% endfor %} </Subscriptions> {% if next_token %} <NextToken>{{ next_token }}</NextToken> {% endif %} </ListSubscriptionsResult> <ResponseMetadata> <RequestId>384ac68d-3775-11df-8963-01868b7c937a</RequestId> </ResponseMetadata> </ListSubscriptionsResponse>""" LIST_SUBSCRIPTIONS_BY_TOPIC_TEMPLATE = """<ListSubscriptionsByTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListSubscriptionsByTopicResult> <Subscriptions> {% for subscription in subscriptions %} <member> <TopicArn>{{ subscription.topic.arn }}</TopicArn> <Protocol>{{ subscription.protocol }}</Protocol> <SubscriptionArn>{{ subscription.arn }}</SubscriptionArn> <Owner>{{ subscription.account_id }}</Owner> <Endpoint>{{ subscription.endpoint }}</Endpoint> </member> {% endfor %} </Subscriptions> {% if next_token %} <NextToken>{{ next_token }}</NextToken> {% endif %} </ListSubscriptionsByTopicResult> <ResponseMetadata> <RequestId>384ac68d-3775-11df-8963-01868b7c937a</RequestId> </ResponseMetadata> </ListSubscriptionsByTopicResponse>""" GET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE = """<GetSubscriptionAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetSubscriptionAttributesResult> <Attributes> {% for name, value in attributes.items() %} <entry> <key>{{ name }}</key> <value>{{ value }}</value> </entry> {% endfor %} </Attributes> </GetSubscriptionAttributesResult> <ResponseMetadata> 
<RequestId>057f074c-33a7-11df-9540-99d0768312d3</RequestId> </ResponseMetadata> </GetSubscriptionAttributesResponse>""" SET_SUBSCRIPTION_ATTRIBUTES_TEMPLATE = """<SetSubscriptionAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>a8763b99-33a7-11df-a9b7-05d48da6f042</RequestId> </ResponseMetadata> </SetSubscriptionAttributesResponse>""" SET_SMS_ATTRIBUTES_TEMPLATE = """<SetSMSAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <SetSMSAttributesResult/> <ResponseMetadata> <RequestId>26332069-c04a-5428-b829-72524b56a364</RequestId> </ResponseMetadata> </SetSMSAttributesResponse>""" GET_SMS_ATTRIBUTES_TEMPLATE = """<GetSMSAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <GetSMSAttributesResult> <attributes> {% for name, value in attributes.items() %} <entry> <key>{{ name }}</key> <value>{{ value }}</value> </entry> {% endfor %} </attributes> </GetSMSAttributesResult> <ResponseMetadata> <RequestId>287f9554-8db3-5e66-8abc-c76f0186db7e</RequestId> </ResponseMetadata> </GetSMSAttributesResponse>""" CHECK_IF_OPTED_OUT_TEMPLATE = """<CheckIfPhoneNumberIsOptedOutResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <CheckIfPhoneNumberIsOptedOutResult> <isOptedOut>{{ opt_out }}</isOptedOut> </CheckIfPhoneNumberIsOptedOutResult> <ResponseMetadata> <RequestId>287f9554-8db3-5e66-8abc-c76f0186db7e</RequestId> </ResponseMetadata> </CheckIfPhoneNumberIsOptedOutResponse>""" ERROR_RESPONSE = """<ErrorResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <Error> <Type>{{ sender }}</Type> <Code>{{ code }}</Code> <Message>{{ message }}</Message> </Error> <RequestId>9dd01905-5012-5f99-8663-4b3ecd0dfaef</RequestId> </ErrorResponse>""" LIST_OPTOUT_TEMPLATE = """<ListPhoneNumbersOptedOutResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListPhoneNumbersOptedOutResult> <phoneNumbers> {% for item in opt_outs %} <member>{{ item }}</member> {% endfor %} </phoneNumbers> 
</ListPhoneNumbersOptedOutResult> <ResponseMetadata> <RequestId>985e196d-a237-51b6-b33a-4b5601276b38</RequestId> </ResponseMetadata> </ListPhoneNumbersOptedOutResponse>""" OPT_IN_NUMBER_TEMPLATE = """<OptInPhoneNumberResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <OptInPhoneNumberResult/> <ResponseMetadata> <RequestId>4c61842c-0796-50ef-95ac-d610c0bc8cf8</RequestId> </ResponseMetadata> </OptInPhoneNumberResponse>""" ADD_PERMISSION_TEMPLATE = """<AddPermissionResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>c046e713-c5ff-5888-a7bc-b52f0e4f1299</RequestId> </ResponseMetadata> </AddPermissionResponse>""" DEL_PERMISSION_TEMPLATE = """<RemovePermissionResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ResponseMetadata> <RequestId>e767cc9f-314b-5e1b-b283-9ea3fd4e38a3</RequestId> </ResponseMetadata> </RemovePermissionResponse>""" CONFIRM_SUBSCRIPTION_TEMPLATE = """<ConfirmSubscriptionResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ConfirmSubscriptionResult> <SubscriptionArn>{{ sub_arn }}</SubscriptionArn> </ConfirmSubscriptionResult> <ResponseMetadata> <RequestId>16eb4dde-7b3c-5b3e-a22a-1fe2a92d3293</RequestId> </ResponseMetadata> </ConfirmSubscriptionResponse>""" LIST_TAGS_FOR_RESOURCE_TEMPLATE = """<ListTagsForResourceResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <ListTagsForResourceResult> <Tags> {% for name, value in tags.items() %} <member> <Key>{{ name }}</Key> <Value>{{ value }}</Value> </member> {% endfor %} </Tags> </ListTagsForResourceResult> <ResponseMetadata> <RequestId>97fa763f-861b-5223-a946-20251f2a42e2</RequestId> </ResponseMetadata> </ListTagsForResourceResponse>""" TAG_RESOURCE_TEMPLATE = """<TagResourceResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <TagResourceResult/> <ResponseMetadata> <RequestId>fd4ab1da-692f-50a7-95ad-e7c665877d98</RequestId> </ResponseMetadata> </TagResourceResponse>""" UNTAG_RESOURCE_TEMPLATE = """<UntagResourceResponse 
xmlns="http://sns.amazonaws.com/doc/2010-03-31/"> <UntagResourceResult/> <ResponseMetadata> <RequestId>14eb7b1a-4cbd-5a56-80db-2d06412df769</RequestId> </ResponseMetadata> </UntagResourceResponse>"""
true
true
f73c03fdca9ef3303bbf4e974345b850baa55803
17,383
py
Python
sdk/python/pulumi_azure_native/network/latest/vpn_server_configuration.py
pulumi-bot/pulumi-azure-native
f7b9490b5211544318e455e5cceafe47b628e12c
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/network/latest/vpn_server_configuration.py
pulumi-bot/pulumi-azure-native
f7b9490b5211544318e455e5cceafe47b628e12c
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/network/latest/vpn_server_configuration.py
pulumi-bot/pulumi-azure-native
f7b9490b5211544318e455e5cceafe47b628e12c
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._enums import * from ._inputs import * __all__ = ['VpnServerConfiguration'] warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:network:VpnServerConfiguration'.""", DeprecationWarning) class VpnServerConfiguration(pulumi.CustomResource): warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:network:VpnServerConfiguration'.""", DeprecationWarning) def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, aad_authentication_parameters: Optional[pulumi.Input[pulumi.InputType['AadAuthenticationParametersArgs']]] = None, id: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, radius_client_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigRadiusClientRootCertificateArgs']]]]] = None, radius_server_address: Optional[pulumi.Input[str]] = None, radius_server_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigRadiusServerRootCertificateArgs']]]]] = None, radius_server_secret: Optional[pulumi.Input[str]] = None, radius_servers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RadiusServerArgs']]]]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, vpn_authentication_types: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'VpnAuthenticationType']]]]] = None, vpn_client_ipsec_policies: 
Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpsecPolicyArgs']]]]] = None, vpn_client_revoked_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigVpnClientRevokedCertificateArgs']]]]] = None, vpn_client_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigVpnClientRootCertificateArgs']]]]] = None, vpn_protocols: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'VpnGatewayTunnelingProtocol']]]]] = None, vpn_server_configuration_name: Optional[pulumi.Input[str]] = None, __props__=None, __name__=None, __opts__=None): """ VpnServerConfiguration Resource. Latest API Version: 2020-08-01. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['AadAuthenticationParametersArgs']] aad_authentication_parameters: The set of aad vpn authentication parameters. :param pulumi.Input[str] id: Resource ID. :param pulumi.Input[str] location: Resource location. :param pulumi.Input[str] name: The name of the VpnServerConfiguration that is unique within a resource group. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigRadiusClientRootCertificateArgs']]]] radius_client_root_certificates: Radius client root certificate of VpnServerConfiguration. :param pulumi.Input[str] radius_server_address: The radius server address property of the VpnServerConfiguration resource for point to site client connection. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigRadiusServerRootCertificateArgs']]]] radius_server_root_certificates: Radius Server root certificate of VpnServerConfiguration. :param pulumi.Input[str] radius_server_secret: The radius secret property of the VpnServerConfiguration resource for point to site client connection. 
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RadiusServerArgs']]]] radius_servers: Multiple Radius Server configuration for VpnServerConfiguration. :param pulumi.Input[str] resource_group_name: The resource group name of the VpnServerConfiguration. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags. :param pulumi.Input[Sequence[pulumi.Input[Union[str, 'VpnAuthenticationType']]]] vpn_authentication_types: VPN authentication types for the VpnServerConfiguration. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpsecPolicyArgs']]]] vpn_client_ipsec_policies: VpnClientIpsecPolicies for VpnServerConfiguration. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigVpnClientRevokedCertificateArgs']]]] vpn_client_revoked_certificates: VPN client revoked certificate of VpnServerConfiguration. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigVpnClientRootCertificateArgs']]]] vpn_client_root_certificates: VPN client root certificate of VpnServerConfiguration. :param pulumi.Input[Sequence[pulumi.Input[Union[str, 'VpnGatewayTunnelingProtocol']]]] vpn_protocols: VPN protocols for the VpnServerConfiguration. :param pulumi.Input[str] vpn_server_configuration_name: The name of the VpnServerConfiguration being created or updated. """ pulumi.log.warn("""VpnServerConfiguration is deprecated: The 'latest' version is deprecated. 
Please migrate to the resource in the top-level module: 'azure-native:network:VpnServerConfiguration'.""") if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['aad_authentication_parameters'] = aad_authentication_parameters __props__['id'] = id __props__['location'] = location __props__['name'] = name __props__['radius_client_root_certificates'] = radius_client_root_certificates __props__['radius_server_address'] = radius_server_address __props__['radius_server_root_certificates'] = radius_server_root_certificates __props__['radius_server_secret'] = radius_server_secret __props__['radius_servers'] = radius_servers if resource_group_name is None and not opts.urn: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name __props__['tags'] = tags __props__['vpn_authentication_types'] = vpn_authentication_types __props__['vpn_client_ipsec_policies'] = vpn_client_ipsec_policies __props__['vpn_client_revoked_certificates'] = vpn_client_revoked_certificates __props__['vpn_client_root_certificates'] = vpn_client_root_certificates __props__['vpn_protocols'] = vpn_protocols __props__['vpn_server_configuration_name'] = vpn_server_configuration_name __props__['etag'] = None __props__['p2_s_vpn_gateways'] = None __props__['provisioning_state'] = None __props__['type'] = None 
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20190801:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20190801:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20190901:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20190901:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20191101:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20191101:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20191201:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20191201:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200301:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200301:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200401:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200401:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200501:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200501:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200601:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200601:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200701:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200701:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200801:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200801:VpnServerConfiguration")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(VpnServerConfiguration, __self__).__init__( 'azure-native:network/latest:VpnServerConfiguration', resource_name, __props__, opts) 
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'VpnServerConfiguration': """ Get an existing VpnServerConfiguration resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__["aad_authentication_parameters"] = None __props__["etag"] = None __props__["location"] = None __props__["name"] = None __props__["p2_s_vpn_gateways"] = None __props__["provisioning_state"] = None __props__["radius_client_root_certificates"] = None __props__["radius_server_address"] = None __props__["radius_server_root_certificates"] = None __props__["radius_server_secret"] = None __props__["radius_servers"] = None __props__["tags"] = None __props__["type"] = None __props__["vpn_authentication_types"] = None __props__["vpn_client_ipsec_policies"] = None __props__["vpn_client_revoked_certificates"] = None __props__["vpn_client_root_certificates"] = None __props__["vpn_protocols"] = None return VpnServerConfiguration(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="aadAuthenticationParameters") def aad_authentication_parameters(self) -> pulumi.Output[Optional['outputs.AadAuthenticationParametersResponse']]: """ The set of aad vpn authentication parameters. """ return pulumi.get(self, "aad_authentication_parameters") @property @pulumi.getter def etag(self) -> pulumi.Output[str]: """ A unique read-only string that changes whenever the resource is updated. """ return pulumi.get(self, "etag") @property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: """ Resource location. 
""" return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ Resource name. """ return pulumi.get(self, "name") @property @pulumi.getter(name="p2SVpnGateways") def p2_s_vpn_gateways(self) -> pulumi.Output[Sequence['outputs.P2SVpnGatewayResponse']]: """ List of references to P2SVpnGateways. """ return pulumi.get(self, "p2_s_vpn_gateways") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> pulumi.Output[str]: """ The provisioning state of the VpnServerConfiguration resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. """ return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="radiusClientRootCertificates") def radius_client_root_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigRadiusClientRootCertificateResponse']]]: """ Radius client root certificate of VpnServerConfiguration. """ return pulumi.get(self, "radius_client_root_certificates") @property @pulumi.getter(name="radiusServerAddress") def radius_server_address(self) -> pulumi.Output[Optional[str]]: """ The radius server address property of the VpnServerConfiguration resource for point to site client connection. """ return pulumi.get(self, "radius_server_address") @property @pulumi.getter(name="radiusServerRootCertificates") def radius_server_root_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigRadiusServerRootCertificateResponse']]]: """ Radius Server root certificate of VpnServerConfiguration. """ return pulumi.get(self, "radius_server_root_certificates") @property @pulumi.getter(name="radiusServerSecret") def radius_server_secret(self) -> pulumi.Output[Optional[str]]: """ The radius secret property of the VpnServerConfiguration resource for point to site client connection. 
""" return pulumi.get(self, "radius_server_secret") @property @pulumi.getter(name="radiusServers") def radius_servers(self) -> pulumi.Output[Optional[Sequence['outputs.RadiusServerResponse']]]: """ Multiple Radius Server configuration for VpnServerConfiguration. """ return pulumi.get(self, "radius_servers") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ Resource tags. """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: """ Resource type. """ return pulumi.get(self, "type") @property @pulumi.getter(name="vpnAuthenticationTypes") def vpn_authentication_types(self) -> pulumi.Output[Optional[Sequence[str]]]: """ VPN authentication types for the VpnServerConfiguration. """ return pulumi.get(self, "vpn_authentication_types") @property @pulumi.getter(name="vpnClientIpsecPolicies") def vpn_client_ipsec_policies(self) -> pulumi.Output[Optional[Sequence['outputs.IpsecPolicyResponse']]]: """ VpnClientIpsecPolicies for VpnServerConfiguration. """ return pulumi.get(self, "vpn_client_ipsec_policies") @property @pulumi.getter(name="vpnClientRevokedCertificates") def vpn_client_revoked_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigVpnClientRevokedCertificateResponse']]]: """ VPN client revoked certificate of VpnServerConfiguration. """ return pulumi.get(self, "vpn_client_revoked_certificates") @property @pulumi.getter(name="vpnClientRootCertificates") def vpn_client_root_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigVpnClientRootCertificateResponse']]]: """ VPN client root certificate of VpnServerConfiguration. """ return pulumi.get(self, "vpn_client_root_certificates") @property @pulumi.getter(name="vpnProtocols") def vpn_protocols(self) -> pulumi.Output[Optional[Sequence[str]]]: """ VPN protocols for the VpnServerConfiguration. 
""" return pulumi.get(self, "vpn_protocols") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
57.180921
1,813
0.712075
import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._enums import * from ._inputs import * __all__ = ['VpnServerConfiguration'] warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:network:VpnServerConfiguration'.""", DeprecationWarning) class VpnServerConfiguration(pulumi.CustomResource): warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:network:VpnServerConfiguration'.""", DeprecationWarning) def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, aad_authentication_parameters: Optional[pulumi.Input[pulumi.InputType['AadAuthenticationParametersArgs']]] = None, id: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, radius_client_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigRadiusClientRootCertificateArgs']]]]] = None, radius_server_address: Optional[pulumi.Input[str]] = None, radius_server_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigRadiusServerRootCertificateArgs']]]]] = None, radius_server_secret: Optional[pulumi.Input[str]] = None, radius_servers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RadiusServerArgs']]]]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, vpn_authentication_types: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'VpnAuthenticationType']]]]] = None, vpn_client_ipsec_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IpsecPolicyArgs']]]]] = None, vpn_client_revoked_certificates: 
Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigVpnClientRevokedCertificateArgs']]]]] = None, vpn_client_root_certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VpnServerConfigVpnClientRootCertificateArgs']]]]] = None, vpn_protocols: Optional[pulumi.Input[Sequence[pulumi.Input[Union[str, 'VpnGatewayTunnelingProtocol']]]]] = None, vpn_server_configuration_name: Optional[pulumi.Input[str]] = None, __props__=None, __name__=None, __opts__=None): pulumi.log.warn("""VpnServerConfiguration is deprecated: The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:network:VpnServerConfiguration'.""") if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['aad_authentication_parameters'] = aad_authentication_parameters __props__['id'] = id __props__['location'] = location __props__['name'] = name __props__['radius_client_root_certificates'] = radius_client_root_certificates __props__['radius_server_address'] = radius_server_address __props__['radius_server_root_certificates'] = radius_server_root_certificates __props__['radius_server_secret'] = radius_server_secret __props__['radius_servers'] = radius_servers if resource_group_name is None and not opts.urn: raise TypeError("Missing required property 'resource_group_name'") 
__props__['resource_group_name'] = resource_group_name __props__['tags'] = tags __props__['vpn_authentication_types'] = vpn_authentication_types __props__['vpn_client_ipsec_policies'] = vpn_client_ipsec_policies __props__['vpn_client_revoked_certificates'] = vpn_client_revoked_certificates __props__['vpn_client_root_certificates'] = vpn_client_root_certificates __props__['vpn_protocols'] = vpn_protocols __props__['vpn_server_configuration_name'] = vpn_server_configuration_name __props__['etag'] = None __props__['p2_s_vpn_gateways'] = None __props__['provisioning_state'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20190801:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20190801:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20190901:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20190901:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20191101:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20191101:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20191201:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20191201:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200301:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200301:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200401:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200401:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200501:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200501:VpnServerConfiguration"), 
pulumi.Alias(type_="azure-native:network/v20200601:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200601:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200701:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200701:VpnServerConfiguration"), pulumi.Alias(type_="azure-native:network/v20200801:VpnServerConfiguration"), pulumi.Alias(type_="azure-nextgen:network/v20200801:VpnServerConfiguration")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(VpnServerConfiguration, __self__).__init__( 'azure-native:network/latest:VpnServerConfiguration', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'VpnServerConfiguration': opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() __props__["aad_authentication_parameters"] = None __props__["etag"] = None __props__["location"] = None __props__["name"] = None __props__["p2_s_vpn_gateways"] = None __props__["provisioning_state"] = None __props__["radius_client_root_certificates"] = None __props__["radius_server_address"] = None __props__["radius_server_root_certificates"] = None __props__["radius_server_secret"] = None __props__["radius_servers"] = None __props__["tags"] = None __props__["type"] = None __props__["vpn_authentication_types"] = None __props__["vpn_client_ipsec_policies"] = None __props__["vpn_client_revoked_certificates"] = None __props__["vpn_client_root_certificates"] = None __props__["vpn_protocols"] = None return VpnServerConfiguration(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="aadAuthenticationParameters") def aad_authentication_parameters(self) -> pulumi.Output[Optional['outputs.AadAuthenticationParametersResponse']]: return pulumi.get(self, "aad_authentication_parameters") @property @pulumi.getter def etag(self) -> pulumi.Output[str]: return pulumi.get(self, "etag") 
@property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: return pulumi.get(self, "name") @property @pulumi.getter(name="p2SVpnGateways") def p2_s_vpn_gateways(self) -> pulumi.Output[Sequence['outputs.P2SVpnGatewayResponse']]: return pulumi.get(self, "p2_s_vpn_gateways") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> pulumi.Output[str]: return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="radiusClientRootCertificates") def radius_client_root_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigRadiusClientRootCertificateResponse']]]: return pulumi.get(self, "radius_client_root_certificates") @property @pulumi.getter(name="radiusServerAddress") def radius_server_address(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "radius_server_address") @property @pulumi.getter(name="radiusServerRootCertificates") def radius_server_root_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigRadiusServerRootCertificateResponse']]]: return pulumi.get(self, "radius_server_root_certificates") @property @pulumi.getter(name="radiusServerSecret") def radius_server_secret(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "radius_server_secret") @property @pulumi.getter(name="radiusServers") def radius_servers(self) -> pulumi.Output[Optional[Sequence['outputs.RadiusServerResponse']]]: return pulumi.get(self, "radius_servers") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: return pulumi.get(self, "type") @property @pulumi.getter(name="vpnAuthenticationTypes") def vpn_authentication_types(self) -> pulumi.Output[Optional[Sequence[str]]]: return pulumi.get(self, "vpn_authentication_types") @property 
@pulumi.getter(name="vpnClientIpsecPolicies") def vpn_client_ipsec_policies(self) -> pulumi.Output[Optional[Sequence['outputs.IpsecPolicyResponse']]]: return pulumi.get(self, "vpn_client_ipsec_policies") @property @pulumi.getter(name="vpnClientRevokedCertificates") def vpn_client_revoked_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigVpnClientRevokedCertificateResponse']]]: return pulumi.get(self, "vpn_client_revoked_certificates") @property @pulumi.getter(name="vpnClientRootCertificates") def vpn_client_root_certificates(self) -> pulumi.Output[Optional[Sequence['outputs.VpnServerConfigVpnClientRootCertificateResponse']]]: return pulumi.get(self, "vpn_client_root_certificates") @property @pulumi.getter(name="vpnProtocols") def vpn_protocols(self) -> pulumi.Output[Optional[Sequence[str]]]: return pulumi.get(self, "vpn_protocols") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
true
true
f73c04c3a1d478e20e01cd911a0674b41ed26288
18,969
py
Python
unrestricted-advex/unrestricted_advex/attacks.py
fdcck/unrestricted-adversarial-examples
125a21d2073308df05ad2dfaf4f9c58ec749b3e4
[ "Apache-2.0" ]
1
2018-10-08T10:11:51.000Z
2018-10-08T10:11:51.000Z
unrestricted-advex/unrestricted_advex/attacks.py
fdcck/unrestricted-adversarial-examples
125a21d2073308df05ad2dfaf4f9c58ec749b3e4
[ "Apache-2.0" ]
null
null
null
unrestricted-advex/unrestricted_advex/attacks.py
fdcck/unrestricted-adversarial-examples
125a21d2073308df05ad2dfaf4f9c58ec749b3e4
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import from __future__ import division from __future__ import print_function import multiprocessing import random from itertools import product, repeat import numpy as np import tensorflow as tf from cleverhans.attacks import SPSA from cleverhans.model import Model from foolbox.attacks import BoundaryAttack as FoolboxBoundaryAttack from imagenet_c import corrupt from six.moves import xrange from unrestricted_advex.cleverhans_fast_spatial_attack import SpatialTransformationMethod class Attack(object): name = None # TODO: Refactor this out of this object _stop_after_n_datapoints = None # An attack can optionally run on only a subset of the dataset def __call__(self, *args, **kwargs): raise NotImplementedError() class CleanData(Attack): """Also known as the "null attack". Just returns the unaltered clean image""" name = 'clean' def __call__(self, model_fn, images_batch_nhwc, y_np): del y_np, model_fn # unused return images_batch_nhwc class SpsaAttack(Attack): name = 'spsa' def __init__(self, model, image_shape_hwc, epsilon=(16. / 255), num_steps=200, batch_size=32, is_debug=False): self.graph = tf.Graph() with self.graph.as_default(): self.sess = tf.Session(graph=self.graph) self.x_input = tf.placeholder(tf.float32, shape=(1,) + image_shape_hwc) self.y_label = tf.placeholder(tf.int32, shape=(1,)) self.model = model attack = SPSA(CleverhansPyfuncModelWrapper(self.model), sess=self.sess) self.x_adv = attack.generate( self.x_input, y=self.y_label, epsilon=epsilon, num_steps=num_steps, early_stop_loss_threshold=-1., batch_size=batch_size, is_debug=is_debug) self.graph.finalize() def __call__(self, model, x_np, y_np): # (4. 
/ 255)): if model != self.model: raise ValueError('Cannot call spsa attack on different models') del model # unused except to check that we already wired it up right with self.graph.as_default(): all_x_adv_np = [] for i in xrange(len(x_np)): x_adv_np = self.sess.run(self.x_adv, feed_dict={ self.x_input: np.expand_dims(x_np[i], axis=0), self.y_label: np.expand_dims(y_np[i], axis=0), }) all_x_adv_np.append(x_adv_np) return np.concatenate(all_x_adv_np) def corrupt_float32_image(x, corruption_name, severity): """Convert to uint8 and back to conform to corruption API""" x = np.copy(x) # We make a copy to avoid changing things in-place x = (x * 255).astype(np.uint8) corrupt_x = corrupt( x, corruption_name=corruption_name, severity=severity) return corrupt_x.astype(np.float32) / 255. def _corrupt_float32_image_star(args): return corrupt_float32_image(*args) class CommonCorruptionsAttack(Attack): name = "common_corruptions" def __init__(self, severity=1): self.corruption_names = [ 'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur', # 'snow', # Snow does not work in python 2.7 # 'frost', # Frost is not working correctly 'fog', 'brightness', 'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur', 'spatter', 'saturate'] self.severity = severity self.pool = multiprocessing.Pool(len(self.corruption_names)) def __call__(self, model_fn, images_batch_nhwc, y_np): assert images_batch_nhwc.shape[1:] == (224, 224, 3), \ "Image shape must equal (N, 224, 224, 3)" batch_size = len(images_batch_nhwc) # Keep track of the worst corruption for each image worst_corruption = np.copy(images_batch_nhwc) worst_loss = [np.NINF] * batch_size # Iterate through each image in the batch for batch_idx, x in enumerate(images_batch_nhwc): corrupt_args = [(x, corruption_name, self.severity) for corruption_name in self.corruption_names] corrupt_x_batch = self.pool.map(_corrupt_float32_image_star, corrupt_args) 
logits_batch = model_fn(np.array(corrupt_x_batch)) label = y_np[batch_idx] # This is left un-vectorized for readability for (logits, corrupt_x) in zip(logits_batch, corrupt_x_batch): correct_logit, wrong_logit = logits[label], logits[1 - label] # We can choose different loss functions to optimize in the # attack. For now, optimize the magnitude of the wrong logit # because we use this as our confidence threshold loss = wrong_logit # loss = wrong_logit - correct_logit if loss > worst_loss[batch_idx]: worst_corruption[batch_idx] = corrupt_x worst_loss[batch_idx] = loss return worst_corruption class BoundaryAttack(Attack): name = "boundary" def __init__(self, model, image_shape_hwc, max_l2_distortion=4, label_to_examples=None): if label_to_examples is None: label_to_examples = {} self.max_l2_distortion = max_l2_distortion class Model: def bounds(self): return [0, 1] def predictions(self, img): return model(img[np.newaxis, :, :, :])[0] def batch_predictions(self, img): return model(img) self.label_to_examples = label_to_examples h, w, c = image_shape_hwc mse_threshold = max_l2_distortion ** 2 / (h * w * c) try: # Foolbox 1.5 allows us to use a threshold the attack will abort after # reaching. Because we only care about a distortion of less than 4, as soon # as we reach it, we can just abort and move on to the next image. self.attack = FoolboxBoundaryAttack(model=Model(), threshold=mse_threshold) except: # Fall back to the original implementation. print("WARNING: Using foolbox version < 1.5 will cuase the " "boundary attack to perform more work than is required. 
" "Please upgrade to version 1.5") self.attack = FoolboxBoundaryAttack(model=Model()) def __call__(self, model, x_np, y_np): r = [] for i in range(len(x_np)): other = 1 - y_np[i] initial_adv = random.choice(self.label_to_examples[other]) try: adv = self.attack(x_np[i], y_np[i], log_every_n_steps=100, # Reduce verbosity of the attack starting_point=initial_adv ) distortion = np.sum((x_np[i] - adv) ** 2) ** .5 if distortion > self.max_l2_distortion: # project to the surface of the L2 ball adv = x_np[i] + (adv - x_np[i]) / distortion * self.max_l2_distortion except AssertionError as error: if str(error).startswith("Invalid starting point provided."): print("WARNING: The model misclassified the starting point (the target) " "from BoundaryAttack. This means that the attack will fail on this " "specific point (but is likely to succeed on other points.") adv = x_np[i] # Just return the non-adversarial point else: raise error r.append(adv) return np.array(r) class FastSpatialGridAttack(Attack): """Fast attack from "A Rotation and a Translation Suffice: Fooling CNNs with Simple Transformations", Engstrom et al. 
2018 https://arxiv.org/pdf/1712.02779.pdf """ name = 'spatial_grid' def __init__(self, model, image_shape_hwc, spatial_limits, grid_granularity, black_border_size, ): self.graph = tf.Graph() with self.graph.as_default(): self.sess = tf.Session(graph=self.graph) self.x_input = tf.placeholder( tf.float32, shape=[None] + list(image_shape_hwc)) self.y_input = tf.placeholder(tf.float32, shape=(None, 2)) self.model = model attack = SpatialTransformationMethod( CleverhansPyfuncModelWrapper(self.model), sess=self.sess) self.x_adv = attack.generate( self.x_input, y=self.y_input, n_samples=None, dx_min=-float(spatial_limits[0]) / image_shape_hwc[0], dx_max=float(spatial_limits[0]) / image_shape_hwc[0], n_dxs=grid_granularity[0], dy_min=-float(spatial_limits[1]) / image_shape_hwc[1], dy_max=float(spatial_limits[1]) / image_shape_hwc[1], n_dys=grid_granularity[1], angle_min=-spatial_limits[2], angle_max=spatial_limits[2], n_angles=grid_granularity[2], black_border_size=black_border_size, ) self.graph.finalize() def __call__(self, model_fn, x_np, y_np): if model_fn != self.model: raise ValueError('Cannot call spatial attack on different models') del model_fn # unused except to check that we already wired it up right y_np_one_hot = np.zeros([len(y_np), 2], np.float32) y_np_one_hot[np.arange(len(y_np)), y_np] = 1.0 # Reduce the batch size to 1 to avoid OOM errors with self.graph.as_default(): all_x_adv_np = [] for i in xrange(len(x_np)): x_adv_np = self.sess.run(self.x_adv, feed_dict={ self.x_input: np.expand_dims(x_np[i], axis=0), self.y_input: np.expand_dims(y_np_one_hot[i], axis=0), }) all_x_adv_np.append(x_adv_np) return np.concatenate(all_x_adv_np) class SpatialGridAttack(Attack): """Attack from "A Rotation and a Translation Suffice: Fooling CNNs with Simple Transformations", Engstrom et al. 
2018 https://arxiv.org/pdf/1712.02779.pdf """ name = 'spatial_grid' def __init__(self, image_shape_hwc, spatial_limits, grid_granularity, black_border_size, valid_check=None, ): """ :param model_fn: a callable: batch-input -> batch-probability in [0, 1] :param spatial_limits: :param grid_granularity: """ self.limits = spatial_limits self.granularity = grid_granularity self.valid_check = valid_check # Construct graph for spatial attack self.graph = tf.Graph() with self.graph.as_default(): self._x_for_trans = tf.placeholder(tf.float32, shape=[None] + list(image_shape_hwc)) self._t_for_trans = tf.placeholder(tf.float32, shape=[None, 3]) x = apply_black_border( self._x_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], border_size=black_border_size ) self._tranformed_x_op = apply_transformation( x, transform=self._t_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], ) self.session = tf.Session() self.grid_store = [] def __call__(self, model_fn, x_np, y_np): n = len(x_np) grid = product(*list(np.linspace(-l, l, num=g) for l, g in zip(self.limits, self.granularity))) worst_x = np.copy(x_np) max_xent = np.zeros(n) all_correct = np.ones(n).astype(bool) trans_np = np.stack( repeat([0, 0, 0], n)) with self.graph.as_default(): x_downsize_np = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) for horizontal_trans, vertical_trans, rotation in grid: trans_np = np.stack( repeat([horizontal_trans, vertical_trans, rotation], n)) # Apply the spatial attack with self.graph.as_default(): x_np_trans = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) # See how the model_fn performs on the perturbed input logits = model_fn(x_np_trans) preds = np.argmax(logits, axis=1) cur_xent = _sparse_softmax_cross_entropy_with_logits_from_numpy( logits, y_np, self.graph, self.session) cur_xent = np.asarray(cur_xent) cur_correct = 
np.equal(y_np, preds) if self.valid_check: is_valid = self.valid_check(x_downsize_np, x_np_trans) cur_correct |= ~is_valid cur_xent -= is_valid * 1e9 # Select indices to update: we choose the misclassified transformation # of maximum xent (or just highest xent if everything else if correct). idx = (cur_xent > max_xent) & (cur_correct == all_correct) idx = idx | (cur_correct < all_correct) max_xent = np.maximum(cur_xent, max_xent) all_correct = cur_correct & all_correct idx = np.expand_dims(idx, axis=-1) # shape (bsize, 1) idx = np.expand_dims(idx, axis=-1) idx = np.expand_dims(idx, axis=-1) # shape (bsize, 1, 1, 1) worst_x = np.where(idx, x_np_trans, worst_x, ) # shape (bsize, 32, 32, 3) return worst_x def _sparse_softmax_cross_entropy_with_logits_from_numpy(logits_np, labels_np, graph, sess): """Helper that calls the TF sparse_softmax_cross_entropy_with_logits function""" with graph.as_default(): labels_tf = tf.placeholder(tf.int32, [None]) logits_tf = tf.placeholder(tf.float32, [None, None]) xent_tf = tf.nn.sparse_softmax_cross_entropy_with_logits( labels=labels_tf, logits=logits_tf) return sess.run(xent_tf, feed_dict={ labels_tf: labels_np, logits_tf: logits_np}) def apply_black_border(x, image_height, image_width, border_size): x = tf.image.resize_images(x, (image_width - border_size, image_height - border_size)) x = tf.pad(x, [[0, 0], [border_size, border_size], [border_size, border_size], [0, 0]], 'CONSTANT') return x def apply_transformation(x, transform, image_height, image_width): # Map a transformation onto the input trans_x, trans_y, rot = tf.unstack(transform, axis=1) rot *= np.pi / 180 # convert degrees to radians # Pad the image to prevent two-step rotation / translation # resulting in a cropped image x = tf.pad(x, [[0, 0], [50, 50], [50, 50], [0, 0]], 'CONSTANT') # rotate and translate image ones = tf.ones(shape=tf.shape(trans_x)) zeros = tf.zeros(shape=tf.shape(trans_x)) trans = tf.stack([ones, zeros, -trans_x, zeros, ones, -trans_y, zeros, zeros], 
axis=1) x = tf.contrib.image.rotate(x, rot, interpolation='BILINEAR') x = tf.contrib.image.transform(x, trans, interpolation='BILINEAR') return tf.image.resize_image_with_crop_or_pad( x, image_height, image_width) class CleverhansPyfuncModelWrapper(Model): nb_classes = 2 num_classes = 2 def __init__(self, model_fn): """ Wrap a callable function that takes a numpy array of shape (N, C, H, W), and outputs a numpy vector of length N, with each element in range [0, 1]. """ self.model_fn = model_fn def fprop(self, x, **kwargs): logits_op = tf.py_func(self.model_fn, [x], tf.float32) return {'logits': logits_op} class RandomSpatialAttack(Attack): """Apply a single random rotation and translation as in "A Rotation and a Translation Suffice: Fooling CNNs with Simple Transformations", Engstrom et al. 2018 https://arxiv.org/pdf/1712.02779.pdf """ name = 'random_spatial' def __init__(self, image_shape_hwc, spatial_limits, black_border_size, valid_check=None): self.limits = spatial_limits self.valid_check = valid_check # Construct graph for spatial attack self.graph = tf.Graph() with self.graph.as_default(): self._x_for_trans = tf.placeholder(tf.float32, shape=[None] + list(image_shape_hwc)) self._t_for_trans = tf.placeholder(tf.float32, shape=[None, 3]) x = apply_black_border( self._x_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], border_size=black_border_size ) self._tranformed_x_op = apply_transformation( x, transform=self._t_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], ) self.session = tf.Session() def __call__(self, model_fn, x_np, y_np): # randomize each example separately with self.graph.as_default(): result = np.zeros(x_np.shape, dtype=x_np.dtype) did = np.zeros(x_np.shape[0], dtype=np.bool) trans_np = np.stack( repeat([0, 0, 0], x_np.shape[0])) x_downsize_np = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) while True: random_transforms = 
(np.random.uniform(-lim, lim, len(x_np)) for lim in self.limits) trans_np = np.stack(random_transforms, axis=1) out = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) if self.valid_check is None: return out else: ok = self.valid_check(x_downsize_np, out) result[ok] = out[ok] did[ok] = True if np.all(did): return result class SpsaWithRandomSpatialAttack(Attack): """Apply a single random rotation and translation and then apply SPSA to the resulting image """ name = "spsa_with_random_spatial" def __init__(self, model, image_shape_hwc, spatial_limits, black_border_size, epsilon=(16. / 255), num_steps=32, is_debug=False, valid_check=None): self.random_spatial_attack = RandomSpatialAttack( image_shape_hwc, valid_check=valid_check, spatial_limits=spatial_limits, black_border_size=black_border_size) self.spsa_attack = SpsaAttack( model, image_shape_hwc, epsilon=epsilon, num_steps=num_steps, batch_size=64, # this is number of samples in the new cleverhans is_debug=is_debug) def __call__(self, model, x_np, y_np): x_after_spatial_np = self.random_spatial_attack(model, x_np, y_np) x_adv = self.spsa_attack(model, x_after_spatial_np, y_np) return x_adv class BoundaryWithRandomSpatialAttack(Attack): """Apply a single random rotation and translation and then apply SPSA to the resulting image """ name = "boundary_with_random_spatial" def __init__(self, model, image_shape_hwc, spatial_limits, black_border_size, max_l2_distortion=4, label_to_examples=None, valid_check=None): self.random_spatial_attack = RandomSpatialAttack( image_shape_hwc, valid_check=valid_check, spatial_limits=spatial_limits, black_border_size=black_border_size) self.boundary_attack = BoundaryAttack( model, max_l2_distortion=max_l2_distortion, image_shape_hwc=image_shape_hwc, label_to_examples=label_to_examples) def __call__(self, model, x_np, y_np): x_after_spatial_np = self.random_spatial_attack(model, x_np, y_np) x_adv = self.boundary_attack(model, 
x_after_spatial_np, y_np) return x_adv
33.455026
97
0.664611
from __future__ import absolute_import from __future__ import division from __future__ import print_function import multiprocessing import random from itertools import product, repeat import numpy as np import tensorflow as tf from cleverhans.attacks import SPSA from cleverhans.model import Model from foolbox.attacks import BoundaryAttack as FoolboxBoundaryAttack from imagenet_c import corrupt from six.moves import xrange from unrestricted_advex.cleverhans_fast_spatial_attack import SpatialTransformationMethod class Attack(object): name = None _stop_after_n_datapoints = None def __call__(self, *args, **kwargs): raise NotImplementedError() class CleanData(Attack): name = 'clean' def __call__(self, model_fn, images_batch_nhwc, y_np): del y_np, model_fn return images_batch_nhwc class SpsaAttack(Attack): name = 'spsa' def __init__(self, model, image_shape_hwc, epsilon=(16. / 255), num_steps=200, batch_size=32, is_debug=False): self.graph = tf.Graph() with self.graph.as_default(): self.sess = tf.Session(graph=self.graph) self.x_input = tf.placeholder(tf.float32, shape=(1,) + image_shape_hwc) self.y_label = tf.placeholder(tf.int32, shape=(1,)) self.model = model attack = SPSA(CleverhansPyfuncModelWrapper(self.model), sess=self.sess) self.x_adv = attack.generate( self.x_input, y=self.y_label, epsilon=epsilon, num_steps=num_steps, early_stop_loss_threshold=-1., batch_size=batch_size, is_debug=is_debug) self.graph.finalize() def __call__(self, model, x_np, y_np): if model != self.model: raise ValueError('Cannot call spsa attack on different models') del model with self.graph.as_default(): all_x_adv_np = [] for i in xrange(len(x_np)): x_adv_np = self.sess.run(self.x_adv, feed_dict={ self.x_input: np.expand_dims(x_np[i], axis=0), self.y_label: np.expand_dims(y_np[i], axis=0), }) all_x_adv_np.append(x_adv_np) return np.concatenate(all_x_adv_np) def corrupt_float32_image(x, corruption_name, severity): x = np.copy(x) x = (x * 255).astype(np.uint8) corrupt_x = corrupt( x, 
corruption_name=corruption_name, severity=severity) return corrupt_x.astype(np.float32) / 255. def _corrupt_float32_image_star(args): return corrupt_float32_image(*args) class CommonCorruptionsAttack(Attack): name = "common_corruptions" def __init__(self, severity=1): self.corruption_names = [ 'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur', elastic_transform', 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur', 'spatter', 'saturate'] self.severity = severity self.pool = multiprocessing.Pool(len(self.corruption_names)) def __call__(self, model_fn, images_batch_nhwc, y_np): assert images_batch_nhwc.shape[1:] == (224, 224, 3), \ "Image shape must equal (N, 224, 224, 3)" batch_size = len(images_batch_nhwc) worst_corruption = np.copy(images_batch_nhwc) worst_loss = [np.NINF] * batch_size for batch_idx, x in enumerate(images_batch_nhwc): corrupt_args = [(x, corruption_name, self.severity) for corruption_name in self.corruption_names] corrupt_x_batch = self.pool.map(_corrupt_float32_image_star, corrupt_args) logits_batch = model_fn(np.array(corrupt_x_batch)) label = y_np[batch_idx] for (logits, corrupt_x) in zip(logits_batch, corrupt_x_batch): correct_logit, wrong_logit = logits[label], logits[1 - label] loss = wrong_logit if loss > worst_loss[batch_idx]: worst_corruption[batch_idx] = corrupt_x worst_loss[batch_idx] = loss return worst_corruption class BoundaryAttack(Attack): name = "boundary" def __init__(self, model, image_shape_hwc, max_l2_distortion=4, label_to_examples=None): if label_to_examples is None: label_to_examples = {} self.max_l2_distortion = max_l2_distortion class Model: def bounds(self): return [0, 1] def predictions(self, img): return model(img[np.newaxis, :, :, :])[0] def batch_predictions(self, img): return model(img) self.label_to_examples = label_to_examples h, w, c = image_shape_hwc mse_threshold = max_l2_distortion ** 2 / (h * w * c) try: self.attack = 
FoolboxBoundaryAttack(model=Model(), threshold=mse_threshold) except: print("WARNING: Using foolbox version < 1.5 will cuase the " "boundary attack to perform more work than is required. " "Please upgrade to version 1.5") self.attack = FoolboxBoundaryAttack(model=Model()) def __call__(self, model, x_np, y_np): r = [] for i in range(len(x_np)): other = 1 - y_np[i] initial_adv = random.choice(self.label_to_examples[other]) try: adv = self.attack(x_np[i], y_np[i], log_every_n_steps=100, starting_point=initial_adv ) distortion = np.sum((x_np[i] - adv) ** 2) ** .5 if distortion > self.max_l2_distortion: adv = x_np[i] + (adv - x_np[i]) / distortion * self.max_l2_distortion except AssertionError as error: if str(error).startswith("Invalid starting point provided."): print("WARNING: The model misclassified the starting point (the target) " "from BoundaryAttack. This means that the attack will fail on this " "specific point (but is likely to succeed on other points.") adv = x_np[i] else: raise error r.append(adv) return np.array(r) class FastSpatialGridAttack(Attack): name = 'spatial_grid' def __init__(self, model, image_shape_hwc, spatial_limits, grid_granularity, black_border_size, ): self.graph = tf.Graph() with self.graph.as_default(): self.sess = tf.Session(graph=self.graph) self.x_input = tf.placeholder( tf.float32, shape=[None] + list(image_shape_hwc)) self.y_input = tf.placeholder(tf.float32, shape=(None, 2)) self.model = model attack = SpatialTransformationMethod( CleverhansPyfuncModelWrapper(self.model), sess=self.sess) self.x_adv = attack.generate( self.x_input, y=self.y_input, n_samples=None, dx_min=-float(spatial_limits[0]) / image_shape_hwc[0], dx_max=float(spatial_limits[0]) / image_shape_hwc[0], n_dxs=grid_granularity[0], dy_min=-float(spatial_limits[1]) / image_shape_hwc[1], dy_max=float(spatial_limits[1]) / image_shape_hwc[1], n_dys=grid_granularity[1], angle_min=-spatial_limits[2], angle_max=spatial_limits[2], n_angles=grid_granularity[2], 
black_border_size=black_border_size, ) self.graph.finalize() def __call__(self, model_fn, x_np, y_np): if model_fn != self.model: raise ValueError('Cannot call spatial attack on different models') del model_fn y_np_one_hot = np.zeros([len(y_np), 2], np.float32) y_np_one_hot[np.arange(len(y_np)), y_np] = 1.0 with self.graph.as_default(): all_x_adv_np = [] for i in xrange(len(x_np)): x_adv_np = self.sess.run(self.x_adv, feed_dict={ self.x_input: np.expand_dims(x_np[i], axis=0), self.y_input: np.expand_dims(y_np_one_hot[i], axis=0), }) all_x_adv_np.append(x_adv_np) return np.concatenate(all_x_adv_np) class SpatialGridAttack(Attack): name = 'spatial_grid' def __init__(self, image_shape_hwc, spatial_limits, grid_granularity, black_border_size, valid_check=None, ): self.limits = spatial_limits self.granularity = grid_granularity self.valid_check = valid_check self.graph = tf.Graph() with self.graph.as_default(): self._x_for_trans = tf.placeholder(tf.float32, shape=[None] + list(image_shape_hwc)) self._t_for_trans = tf.placeholder(tf.float32, shape=[None, 3]) x = apply_black_border( self._x_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], border_size=black_border_size ) self._tranformed_x_op = apply_transformation( x, transform=self._t_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], ) self.session = tf.Session() self.grid_store = [] def __call__(self, model_fn, x_np, y_np): n = len(x_np) grid = product(*list(np.linspace(-l, l, num=g) for l, g in zip(self.limits, self.granularity))) worst_x = np.copy(x_np) max_xent = np.zeros(n) all_correct = np.ones(n).astype(bool) trans_np = np.stack( repeat([0, 0, 0], n)) with self.graph.as_default(): x_downsize_np = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) for horizontal_trans, vertical_trans, rotation in grid: trans_np = np.stack( repeat([horizontal_trans, vertical_trans, rotation], n)) with 
self.graph.as_default(): x_np_trans = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) logits = model_fn(x_np_trans) preds = np.argmax(logits, axis=1) cur_xent = _sparse_softmax_cross_entropy_with_logits_from_numpy( logits, y_np, self.graph, self.session) cur_xent = np.asarray(cur_xent) cur_correct = np.equal(y_np, preds) if self.valid_check: is_valid = self.valid_check(x_downsize_np, x_np_trans) cur_correct |= ~is_valid cur_xent -= is_valid * 1e9 idx = (cur_xent > max_xent) & (cur_correct == all_correct) idx = idx | (cur_correct < all_correct) max_xent = np.maximum(cur_xent, max_xent) all_correct = cur_correct & all_correct idx = np.expand_dims(idx, axis=-1) idx = np.expand_dims(idx, axis=-1) idx = np.expand_dims(idx, axis=-1) worst_x = np.where(idx, x_np_trans, worst_x, ) return worst_x def _sparse_softmax_cross_entropy_with_logits_from_numpy(logits_np, labels_np, graph, sess): with graph.as_default(): labels_tf = tf.placeholder(tf.int32, [None]) logits_tf = tf.placeholder(tf.float32, [None, None]) xent_tf = tf.nn.sparse_softmax_cross_entropy_with_logits( labels=labels_tf, logits=logits_tf) return sess.run(xent_tf, feed_dict={ labels_tf: labels_np, logits_tf: logits_np}) def apply_black_border(x, image_height, image_width, border_size): x = tf.image.resize_images(x, (image_width - border_size, image_height - border_size)) x = tf.pad(x, [[0, 0], [border_size, border_size], [border_size, border_size], [0, 0]], 'CONSTANT') return x def apply_transformation(x, transform, image_height, image_width): trans_x, trans_y, rot = tf.unstack(transform, axis=1) rot *= np.pi / 180 x = tf.pad(x, [[0, 0], [50, 50], [50, 50], [0, 0]], 'CONSTANT') ones = tf.ones(shape=tf.shape(trans_x)) zeros = tf.zeros(shape=tf.shape(trans_x)) trans = tf.stack([ones, zeros, -trans_x, zeros, ones, -trans_y, zeros, zeros], axis=1) x = tf.contrib.image.rotate(x, rot, interpolation='BILINEAR') x = tf.contrib.image.transform(x, trans, 
interpolation='BILINEAR') return tf.image.resize_image_with_crop_or_pad( x, image_height, image_width) class CleverhansPyfuncModelWrapper(Model): nb_classes = 2 num_classes = 2 def __init__(self, model_fn): self.model_fn = model_fn def fprop(self, x, **kwargs): logits_op = tf.py_func(self.model_fn, [x], tf.float32) return {'logits': logits_op} class RandomSpatialAttack(Attack): name = 'random_spatial' def __init__(self, image_shape_hwc, spatial_limits, black_border_size, valid_check=None): self.limits = spatial_limits self.valid_check = valid_check self.graph = tf.Graph() with self.graph.as_default(): self._x_for_trans = tf.placeholder(tf.float32, shape=[None] + list(image_shape_hwc)) self._t_for_trans = tf.placeholder(tf.float32, shape=[None, 3]) x = apply_black_border( self._x_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], border_size=black_border_size ) self._tranformed_x_op = apply_transformation( x, transform=self._t_for_trans, image_height=image_shape_hwc[0], image_width=image_shape_hwc[1], ) self.session = tf.Session() def __call__(self, model_fn, x_np, y_np): with self.graph.as_default(): result = np.zeros(x_np.shape, dtype=x_np.dtype) did = np.zeros(x_np.shape[0], dtype=np.bool) trans_np = np.stack( repeat([0, 0, 0], x_np.shape[0])) x_downsize_np = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) while True: random_transforms = (np.random.uniform(-lim, lim, len(x_np)) for lim in self.limits) trans_np = np.stack(random_transforms, axis=1) out = self.session.run(self._tranformed_x_op, feed_dict={ self._x_for_trans: x_np, self._t_for_trans: trans_np, }) if self.valid_check is None: return out else: ok = self.valid_check(x_downsize_np, out) result[ok] = out[ok] did[ok] = True if np.all(did): return result class SpsaWithRandomSpatialAttack(Attack): name = "spsa_with_random_spatial" def __init__(self, model, image_shape_hwc, spatial_limits, black_border_size, epsilon=(16. 
/ 255), num_steps=32, is_debug=False, valid_check=None): self.random_spatial_attack = RandomSpatialAttack( image_shape_hwc, valid_check=valid_check, spatial_limits=spatial_limits, black_border_size=black_border_size) self.spsa_attack = SpsaAttack( model, image_shape_hwc, epsilon=epsilon, num_steps=num_steps, batch_size=64, is_debug=is_debug) def __call__(self, model, x_np, y_np): x_after_spatial_np = self.random_spatial_attack(model, x_np, y_np) x_adv = self.spsa_attack(model, x_after_spatial_np, y_np) return x_adv class BoundaryWithRandomSpatialAttack(Attack): name = "boundary_with_random_spatial" def __init__(self, model, image_shape_hwc, spatial_limits, black_border_size, max_l2_distortion=4, label_to_examples=None, valid_check=None): self.random_spatial_attack = RandomSpatialAttack( image_shape_hwc, valid_check=valid_check, spatial_limits=spatial_limits, black_border_size=black_border_size) self.boundary_attack = BoundaryAttack( model, max_l2_distortion=max_l2_distortion, image_shape_hwc=image_shape_hwc, label_to_examples=label_to_examples) def __call__(self, model, x_np, y_np): x_after_spatial_np = self.random_spatial_attack(model, x_np, y_np) x_adv = self.boundary_attack(model, x_after_spatial_np, y_np) return x_adv
true
true
f73c04f5cde18f43d9effdd9b7c06fb277ff3e0a
400
py
Python
sdl2/api.py
vaiorabbit/python-sdl2
77b4ce73d83d19dedc943ae7f56ee6f73f62ecad
[ "Zlib" ]
1
2018-03-12T01:54:40.000Z
2018-03-12T01:54:40.000Z
sdl2/api.py
vaiorabbit/python-sdl2
77b4ce73d83d19dedc943ae7f56ee6f73f62ecad
[ "Zlib" ]
null
null
null
sdl2/api.py
vaiorabbit/python-sdl2
77b4ce73d83d19dedc943ae7f56ee6f73f62ecad
[ "Zlib" ]
null
null
null
SDL2_LOADER = None SDL2_TTF_LOADER = None SDL2_IMG_LOADER = None SDL2_GFX_LOADER = None SDL2_API_NAMES = [] SDL2_API_ARGS_MAP = {} SDL2_API_RETVAL_MAP = {} SDL2_TTF_API_NAMES = [] SDL2_TTF_API_ARGS_MAP = {} SDL2_TTF_API_RETVAL_MAP = {} SDL2_IMG_API_NAMES = [] SDL2_IMG_API_ARGS_MAP = {} SDL2_IMG_API_RETVAL_MAP = {} SDL2_GFX_API_NAMES = [] SDL2_GFX_API_ARGS_MAP = {} SDL2_GFX_API_RETVAL_MAP = {}
19.047619
28
0.77
SDL2_LOADER = None SDL2_TTF_LOADER = None SDL2_IMG_LOADER = None SDL2_GFX_LOADER = None SDL2_API_NAMES = [] SDL2_API_ARGS_MAP = {} SDL2_API_RETVAL_MAP = {} SDL2_TTF_API_NAMES = [] SDL2_TTF_API_ARGS_MAP = {} SDL2_TTF_API_RETVAL_MAP = {} SDL2_IMG_API_NAMES = [] SDL2_IMG_API_ARGS_MAP = {} SDL2_IMG_API_RETVAL_MAP = {} SDL2_GFX_API_NAMES = [] SDL2_GFX_API_ARGS_MAP = {} SDL2_GFX_API_RETVAL_MAP = {}
true
true
f73c05c915497a74bf280bc0b4d01be5e6526e40
780
py
Python
hikvision.py
simpletask1/video_stream
984f24feb38c778e225d24771c6246577b94e614
[ "Apache-2.0" ]
null
null
null
hikvision.py
simpletask1/video_stream
984f24feb38c778e225d24771c6246577b94e614
[ "Apache-2.0" ]
null
null
null
hikvision.py
simpletask1/video_stream
984f24feb38c778e225d24771c6246577b94e614
[ "Apache-2.0" ]
null
null
null
import threading import traceback from camera.sdk_gige_hikvision.GrabImage import MVS_Cam # 工业相机SDK读流 class hikCamera(threading.Thread): def __init__(self, ip_name): threading.Thread.__init__(self) self.ip_name = ip_name # 初始化摄像头 self.device_camera = MVS_Cam(self.ip_name) def run(self): i = 0 while i < 100: try: # 获取图像 # 该读流方式不会缓存,读到的一定是最新帧 frame = self.device_camera.Get_Frame() except: print(traceback.format_exc()) else: if type(frame) != type(None): print(type(frame), self.ip_name, frame.shape[:2]) i += 1 # 关闭摄像头 self.device_camera.Close_Cam()
24.375
69
0.544872
import threading import traceback from camera.sdk_gige_hikvision.GrabImage import MVS_Cam class hikCamera(threading.Thread): def __init__(self, ip_name): threading.Thread.__init__(self) self.ip_name = ip_name self.device_camera = MVS_Cam(self.ip_name) def run(self): i = 0 while i < 100: try: frame = self.device_camera.Get_Frame() except: print(traceback.format_exc()) else: if type(frame) != type(None): print(type(frame), self.ip_name, frame.shape[:2]) i += 1 self.device_camera.Close_Cam()
true
true
f73c06db0b64a1edcd6aa27780845c92273e5b41
3,273
py
Python
test/travis_test_motors.py
Kageken/pimouse_ros
97f289755506c53e238d8aa9b331ad449aec0301
[ "BSD-3-Clause" ]
null
null
null
test/travis_test_motors.py
Kageken/pimouse_ros
97f289755506c53e238d8aa9b331ad449aec0301
[ "BSD-3-Clause" ]
null
null
null
test/travis_test_motors.py
Kageken/pimouse_ros
97f289755506c53e238d8aa9b331ad449aec0301
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python #encoding: utf8 import unittest, rostest import rosnode, rospy import time from pimouse_ros.msg import MotorFreqs from geometry_msgs.msg import Twist from std_srvs.srv import Trigger, TriggerResponse #追加 from pimouse_ros.srv import TimedMotion class MotorTest(unittest.TestCase): def setUp(self): #このメソッドを追加 rospy.wait_for_service('/motor_on') rospy.wait_for_service('/motor_off') rospy.wait_for_service('/timed_motion') on = rospy.ServiceProxy('/motor_on', Trigger) ret = on() def file_check(self,dev,value,message): with open("/dev/" + dev,"r") as f: self.assertEqual(f.readline(),str(value)+"\n",message) def test_node_exist(self): nodes = rosnode.get_node_names() self.assertIn('/motors', nodes, "node does not exist") def test_put_freq(self): pub = rospy.Publisher('/motor_raw', MotorFreqs) m = MotorFreqs() m.left_hz = 123 m.right_hz = 456 for i in range(10): pub.publish(m) time.sleep(0.1) self.file_check("rtmotor_raw_l0",m.left_hz,"wrong left value from motor_raw") self.file_check("rtmotor_raw_r0",m.right_hz,"wrong left value from motor_raw") def test_put_cmd_vel(self): pub = rospy.Publisher('/cmd_vel', Twist) m = Twist() m.linear.x = 0.1414 m.angular.z = 1.57 for i in range(10): pub.publish(m) time.sleep(0.1) self.file_check("rtmotor_raw_l0",200,"wrong left value from cmd_vel") self.file_check("rtmotor_raw_r0",600,"wrong right value from cmd_vel") time.sleep(1.1) self.file_check("rtmotor_raw_r0",0,"don't stop after 1[s]") self.file_check("rtmotor_raw_l0",0,"don't stop after 1[s]") def test_on_off(self): #このメソッドも追加 off = rospy.ServiceProxy('/motor_off', Trigger) ret = off() self.assertEqual(ret.success, True, "motor off does not succeeded") self.assertEqual(ret.message, "OFF", "motor off wrong message") with open("/dev/rtmotoren0","r") as f: data = f.readline() self.assertEqual(data,"0\n","wrong value in rtmotor0 at motor off") on = rospy.ServiceProxy('/motor_on', Trigger) ret = on() self.assertEqual(ret.success, True, "motor on does not 
succeeded") self.assertEqual(ret.message, "ON", "motor on wrong message") with open("/dev/rtmotoren0","r") as f: data = f.readline() self.assertEqual(data,"1\n","wrong value in rtmotor0 at motor on") def test_put_value_timed(self): tm = rospy.ServiceProxy('/timed_motion', TimedMotion) tm(-321,654,1500) with open("/dev/rtmotor0","r") as f: data = f.readline() self.assertEqual(data,"-321 654 1500\n","value does not written to rtmotor0") if __name__ == '__main__': rospy.init_node('travis_test_motors') rostest.rosrun('pimouse_ros','travis_test_motors', MotorTest) # Copyright 2016 Ryuichi Ueda # Released under the BSD License. # To make line numbers be identical with the book, this statement is written here. Don't move it to the header.
38.05814
111
0.629086
import unittest, rostest import rosnode, rospy import time from pimouse_ros.msg import MotorFreqs from geometry_msgs.msg import Twist from std_srvs.srv import Trigger, TriggerResponse from pimouse_ros.srv import TimedMotion class MotorTest(unittest.TestCase): def setUp(self): rospy.wait_for_service('/motor_on') rospy.wait_for_service('/motor_off') rospy.wait_for_service('/timed_motion') on = rospy.ServiceProxy('/motor_on', Trigger) ret = on() def file_check(self,dev,value,message): with open("/dev/" + dev,"r") as f: self.assertEqual(f.readline(),str(value)+"\n",message) def test_node_exist(self): nodes = rosnode.get_node_names() self.assertIn('/motors', nodes, "node does not exist") def test_put_freq(self): pub = rospy.Publisher('/motor_raw', MotorFreqs) m = MotorFreqs() m.left_hz = 123 m.right_hz = 456 for i in range(10): pub.publish(m) time.sleep(0.1) self.file_check("rtmotor_raw_l0",m.left_hz,"wrong left value from motor_raw") self.file_check("rtmotor_raw_r0",m.right_hz,"wrong left value from motor_raw") def test_put_cmd_vel(self): pub = rospy.Publisher('/cmd_vel', Twist) m = Twist() m.linear.x = 0.1414 m.angular.z = 1.57 for i in range(10): pub.publish(m) time.sleep(0.1) self.file_check("rtmotor_raw_l0",200,"wrong left value from cmd_vel") self.file_check("rtmotor_raw_r0",600,"wrong right value from cmd_vel") time.sleep(1.1) self.file_check("rtmotor_raw_r0",0,"don't stop after 1[s]") self.file_check("rtmotor_raw_l0",0,"don't stop after 1[s]") def test_on_off(self): off = rospy.ServiceProxy('/motor_off', Trigger) ret = off() self.assertEqual(ret.success, True, "motor off does not succeeded") self.assertEqual(ret.message, "OFF", "motor off wrong message") with open("/dev/rtmotoren0","r") as f: data = f.readline() self.assertEqual(data,"0\n","wrong value in rtmotor0 at motor off") on = rospy.ServiceProxy('/motor_on', Trigger) ret = on() self.assertEqual(ret.success, True, "motor on does not succeeded") self.assertEqual(ret.message, "ON", "motor on wrong message") 
with open("/dev/rtmotoren0","r") as f: data = f.readline() self.assertEqual(data,"1\n","wrong value in rtmotor0 at motor on") def test_put_value_timed(self): tm = rospy.ServiceProxy('/timed_motion', TimedMotion) tm(-321,654,1500) with open("/dev/rtmotor0","r") as f: data = f.readline() self.assertEqual(data,"-321 654 1500\n","value does not written to rtmotor0") if __name__ == '__main__': rospy.init_node('travis_test_motors') rostest.rosrun('pimouse_ros','travis_test_motors', MotorTest)
true
true
f73c0705e1774ed3975ef5197b448f467103f19d
13,332
py
Python
source/winKernel.py
SWEN-712/screen-reader-brandonp728
e30c25ad2d10ce632fac0548696a61a872328f59
[ "bzip2-1.0.6" ]
null
null
null
source/winKernel.py
SWEN-712/screen-reader-brandonp728
e30c25ad2d10ce632fac0548696a61a872328f59
[ "bzip2-1.0.6" ]
null
null
null
source/winKernel.py
SWEN-712/screen-reader-brandonp728
e30c25ad2d10ce632fac0548696a61a872328f59
[ "bzip2-1.0.6" ]
null
null
null
#winKernel.py #A part of NonVisual Desktop Access (NVDA) #Copyright (C) 2006-2019 NV Access Limited, Rui Batista, Aleksey Sadovoy, Peter Vagner, Mozilla Corporation, Babbage B.V., Joseph Lee #This file is covered by the GNU General Public License. #See the file COPYING for more details. """Functions that wrap Windows API functions from kernel32.dll and advapi32.dll""" import contextlib import ctypes import ctypes.wintypes from ctypes import WinError from ctypes import * from ctypes.wintypes import * kernel32=ctypes.windll.kernel32 advapi32 = windll.advapi32 #Constants INFINITE = 0xffffffff #Process control PROCESS_ALL_ACCESS=0x1F0FFF PROCESS_TERMINATE=0x1 PROCESS_VM_OPERATION=0x8 PROCESS_VM_READ=0x10 PROCESS_VM_WRITE=0X20 SYNCHRONIZE=0x100000 PROCESS_QUERY_INFORMATION=0x400 READ_CONTROL=0x20000 MEM_COMMIT=0x1000 MEM_RELEASE=0x8000 PAGE_READWRITE=0x4 MAXIMUM_ALLOWED = 0x2000000 STARTF_USESTDHANDLES = 0x00000100 #Console handles STD_INPUT_HANDLE=-10 STD_OUTPUT_HANDLE=-11 STD_ERROR_HANDLE=-12 LOCALE_USER_DEFAULT=0x0400 LOCALE_NAME_USER_DEFAULT=None DATE_LONGDATE=0x00000002 TIME_NOSECONDS=0x00000002 # Wait return types WAIT_ABANDONED = 0x00000080 WAIT_IO_COMPLETION = 0x000000c0 WAIT_OBJECT_0 = 0x00000000 WAIT_TIMEOUT = 0x00000102 WAIT_FAILED = 0xffffffff # Image file machine constants IMAGE_FILE_MACHINE_UNKNOWN = 0 def GetStdHandle(handleID): h=kernel32.GetStdHandle(handleID) if h==0: raise WinError() return h GENERIC_READ=0x80000000 GENERIC_WRITE=0x40000000 FILE_SHARE_READ=1 FILE_SHARE_WRITE=2 FILE_SHARE_DELETE=4 OPEN_EXISTING=3 def CreateFile(fileName,desiredAccess,shareMode,securityAttributes,creationDisposition,flags,templateFile): res=kernel32.CreateFileW(fileName,desiredAccess,shareMode,securityAttributes,creationDisposition,flags,templateFile) if res==0: raise ctypes.WinError() return res def createEvent(eventAttributes=None, manualReset=False, initialState=False, name=None): res = kernel32.CreateEventW(eventAttributes, manualReset, initialState, name) if 
res==0: raise ctypes.WinError() return res def createWaitableTimer(securityAttributes=None, manualReset=False, name=None): """Wrapper to the kernel32 CreateWaitableTimer function. Consult https://msdn.microsoft.com/en-us/library/windows/desktop/ms682492.aspx for Microsoft's documentation. In contrast with the original function, this wrapper assumes the following defaults. @param securityAttributes: Defaults to C{None}; The timer object gets a default security descriptor and the handle cannot be inherited. The ACLs in the default security descriptor for a timer come from the primary or impersonation token of the creator. @type securityAttributes: pointer to L{SECURITY_ATTRIBUTES} @param manualReset: Defaults to C{False} which means the timer is a synchronization timer. If C{True}, the timer is a manual-reset notification timer. @type manualReset: bool @param name: Defaults to C{None}, the timer object is created without a name. @type name: str """ res = kernel32.CreateWaitableTimerW(securityAttributes, manualReset, name) if res==0: raise ctypes.WinError() return res def setWaitableTimer(handle, dueTime, period=0, completionRoutine=None, arg=None, resume=False): """Wrapper to the kernel32 SETWaitableTimer function. Consult https://msdn.microsoft.com/en-us/library/windows/desktop/ms686289.aspx for Microsoft's documentation. @param handle: A handle to the timer object. @type handle: int @param dueTime: Relative time (in miliseconds). Note that the original function requires relative time to be supplied as a negative nanoseconds value. @type dueTime: int @param period: Defaults to 0, timer is only executed once. Value should be supplied in miliseconds. @type period: int @param completionRoutine: The function to be executed when the timer elapses. @type completionRoutine: L{PAPCFUNC} @param arg: Defaults to C{None}; a pointer to a structure that is passed to the completion routine. 
@type arg: L{ctypes.c_void_p} @param resume: Defaults to C{False}; the system is not restored. If this parameter is TRUE, restores a system in suspended power conservation mode when the timer state is set to signaled. @type resume: bool """ res = kernel32.SetWaitableTimer( handle, # due time is in 100 nanosecond intervals, relative time should be negated. byref(LARGE_INTEGER(dueTime*-10000)), period, completionRoutine, arg, resume ) if res==0: raise ctypes.WinError() return True def openProcess(*args): return kernel32.OpenProcess(*args) def closeHandle(*args): return kernel32.CloseHandle(*args) #added by Rui Batista to use on Say_battery_status script #copied from platform sdk documentation (with required changes to work in python) class SYSTEM_POWER_STATUS(ctypes.Structure): _fields_ = [("ACLineStatus", ctypes.c_byte), ("BatteryFlag", ctypes.c_byte), ("BatteryLifePercent", ctypes.c_byte), ("Reserved1", ctypes.c_byte), ("BatteryLifeTime", ctypes.wintypes.DWORD), ("BatteryFullLiveTime", ctypes.wintypes.DWORD)] def GetSystemPowerStatus(sps): return kernel32.GetSystemPowerStatus(ctypes.byref(sps)) def getThreadLocale(): return kernel32.GetThreadLocale() class SYSTEMTIME(ctypes.Structure): _fields_ = ( ("wYear", WORD), ("wMonth", WORD), ("wDayOfWeek", WORD), ("wDay", WORD), ("wHour", WORD), ("wMinute", WORD), ("wSecond", WORD), ("wMilliseconds", WORD) ) def GetDateFormat(Locale,dwFlags,date,lpFormat): """@Deprecated: use GetDateFormatEx instead.""" if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpDate=byref(date) else: lpDate=None bufferLength=kernel32.GetDateFormatW(Locale, dwFlags, lpDate, lpFormat, None, 0) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetDateFormatW(Locale, dwFlags, lpDate, lpFormat, buf, bufferLength) return buf.value def GetDateFormatEx(Locale,dwFlags,date,lpFormat): if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) 
lpDate=byref(date) else: lpDate=None bufferLength=kernel32.GetDateFormatEx(Locale, dwFlags, lpDate, lpFormat, None, 0, None) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetDateFormatEx(Locale, dwFlags, lpDate, lpFormat, buf, bufferLength, None) return buf.value def GetTimeFormat(Locale,dwFlags,date,lpFormat): """@Deprecated: use GetTimeFormatEx instead.""" if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpTime=byref(date) else: lpTime=None bufferLength=kernel32.GetTimeFormatW(Locale,dwFlags,lpTime,lpFormat, None, 0) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetTimeFormatW(Locale,dwFlags,lpTime,lpFormat, buf, bufferLength) return buf.value def GetTimeFormatEx(Locale,dwFlags,date,lpFormat): if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpTime=byref(date) else: lpTime=None bufferLength=kernel32.GetTimeFormatEx(Locale,dwFlags,lpTime,lpFormat, None, 0) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetTimeFormatEx(Locale,dwFlags,lpTime,lpFormat, buf, bufferLength) return buf.value def openProcess(*args): return kernel32.OpenProcess(*args) def virtualAllocEx(*args): res = kernel32.VirtualAllocEx(*args) if res == 0: raise WinError() return res def virtualFreeEx(*args): return kernel32.VirtualFreeEx(*args) def readProcessMemory(*args): return kernel32.ReadProcessMemory(*args) def writeProcessMemory(*args): return kernel32.WriteProcessMemory(*args) def waitForSingleObject(handle,timeout): res = kernel32.WaitForSingleObject(handle,timeout) if res==WAIT_FAILED: raise ctypes.WinError() return res def waitForSingleObjectEx(handle,timeout, alertable): res = kernel32.WaitForSingleObjectEx(handle,timeout, alertable) if res==WAIT_FAILED: raise ctypes.WinError() return res SHUTDOWN_NORETRY = 0x00000001 def SetProcessShutdownParameters(level, flags): res = kernel32.SetProcessShutdownParameters(level, flags) if res == 0: 
raise ctypes.WinError() def GetExitCodeProcess(process): exitCode = ctypes.wintypes.DWORD() if not kernel32.GetExitCodeProcess(process, ctypes.byref(exitCode)): raise ctypes.WinError() return exitCode.value def TerminateProcess(process, exitCode): if not kernel32.TerminateProcess(process, exitCode): raise ctypes.WinError() DRIVE_UNKNOWN = 0 DRIVE_NO_ROOT_DIR = 1 DRIVE_REMOVABLE = 2 DRIVE_FIXED = 3 DRIVE_REMOTE = 4 DRIVE_CDROM = 5 DRIVE_RAMDISK = 6 def GetDriveType(rootPathName): return kernel32.GetDriveTypeW(rootPathName) class SECURITY_ATTRIBUTES(Structure): _fields_ = ( ("nLength", DWORD), ("lpSecurityDescriptor", LPVOID), ("bInheritHandle", BOOL) ) def __init__(self, **kwargs): super(SECURITY_ATTRIBUTES, self).__init__(nLength=sizeof(self), **kwargs) def CreatePipe(pipeAttributes, size): read = ctypes.wintypes.HANDLE() write = ctypes.wintypes.HANDLE() if kernel32.CreatePipe(ctypes.byref(read), ctypes.byref(write), byref(pipeAttributes) if pipeAttributes else None, ctypes.wintypes.DWORD(size)) == 0: raise ctypes.WinError() return read.value, write.value class STARTUPINFOW(Structure): _fields_=( ('cb',DWORD), ('lpReserved',LPWSTR), ('lpDesktop',LPWSTR), ('lpTitle',LPWSTR), ('dwX',DWORD), ('dwY',DWORD), ('dwXSize',DWORD), ('dwYSize',DWORD), ('dwXCountChars',DWORD), ('dwYCountChars',DWORD), ('dwFillAttribute',DWORD), ('dwFlags',DWORD), ('wShowWindow',WORD), ('cbReserved2',WORD), ('lpReserved2',POINTER(c_byte)), ('hSTDInput',HANDLE), ('hSTDOutput',HANDLE), ('hSTDError',HANDLE), ) def __init__(self, **kwargs): super(STARTUPINFOW, self).__init__(cb=sizeof(self), **kwargs) STARTUPINFO = STARTUPINFOW class PROCESS_INFORMATION(Structure): _fields_=( ('hProcess',HANDLE), ('hThread',HANDLE), ('dwProcessID',DWORD), ('dwThreadID',DWORD), ) def CreateProcessAsUser(token, applicationName, commandLine, processAttributes, threadAttributes, inheritHandles, creationFlags, environment, currentDirectory, startupInfo, processInformation): if advapi32.CreateProcessAsUserW(token, 
applicationName, commandLine, processAttributes, threadAttributes, inheritHandles, creationFlags, environment, currentDirectory, byref(startupInfo), byref(processInformation)) == 0: raise WinError() def GetCurrentProcess(): return kernel32.GetCurrentProcess() def OpenProcessToken(ProcessHandle, DesiredAccess): token = HANDLE() if advapi32.OpenProcessToken(ProcessHandle, DesiredAccess, byref(token)) == 0: raise WinError() return token.value DUPLICATE_SAME_ACCESS = 0x00000002 def DuplicateHandle(sourceProcessHandle, sourceHandle, targetProcessHandle, desiredAccess, inheritHandle, options): targetHandle = HANDLE() if kernel32.DuplicateHandle(sourceProcessHandle, sourceHandle, targetProcessHandle, byref(targetHandle), desiredAccess, inheritHandle, options) == 0: raise WinError() return targetHandle.value PAPCFUNC = ctypes.WINFUNCTYPE(None, ctypes.wintypes.ULONG) THREAD_SET_CONTEXT = 16 GMEM_MOVEABLE=2 class HGLOBAL(HANDLE): """ A class for the HGLOBAL Windows handle type. This class can auto-free the handle when it goes out of scope, and also contains a classmethod for alloc, And a context manager compatible method for locking. """ def __init__(self,h,autoFree=True): """ @param h: the raw Windows HGLOBAL handle @param autoFree: True by default, the handle will automatically be freed with GlobalFree when this object goes out of scope. """ super(HGLOBAL,self).__init__(h) self._autoFree=autoFree def __del__(self): if self and self._autoFree: windll.kernel32.GlobalFree(self) @classmethod def alloc(cls,flags,size): """ Allocates global memory with GlobalAlloc providing it as an instance of this class. This method Takes the same arguments as GlobalAlloc. """ h=windll.kernel32.GlobalAlloc(flags,size) return cls(h) @contextlib.contextmanager def lock(self): """ Used as a context manager, This method locks the global memory with GlobalLock, providing the usable memory address to the body of the 'with' statement. When the body completes, GlobalUnlock is automatically called. 
""" try: yield windll.kernel32.GlobalLock(self) finally: windll.kernel32.GlobalUnlock(self) def forget(self): """ Sets this HGLOBAL value to NULL, forgetting the existing value. Necessary if you pass this HGLOBAL to an API that takes ownership and therefore will handle freeing itself. """ self.value=None MOVEFILE_COPY_ALLOWED = 0x2 MOVEFILE_CREATE_HARDLINK = 0x10 MOVEFILE_DELAY_UNTIL_REBOOT = 0x4 MOVEFILE_FAIL_IF_NOT_TRACKABLE = 0x20 MOVEFILE_REPLACE_EXISTING = 0x1 MOVEFILE_WRITE_THROUGH = 0x8 def moveFileEx(lpExistingFileName: str, lpNewFileName: str, dwFlags: int): # If MoveFileExW fails, Windows will raise appropriate errors. if not kernel32.MoveFileExW(lpExistingFileName, lpNewFileName, dwFlags): raise ctypes.WinError() # Thread execution states ES_CONTINUOUS = 0x80000000 ES_DISPLAY_REQUIRED = 0x2 ES_SYSTEM_REQUIRED = 0x1 kernel32.SetThreadExecutionState.restype = ctypes.wintypes.DWORD def SetThreadExecutionState(esFlags): res = kernel32.SetThreadExecutionState(esFlags) if not res: raise WinError() return res
32.048077
238
0.779853
import contextlib import ctypes import ctypes.wintypes from ctypes import WinError from ctypes import * from ctypes.wintypes import * kernel32=ctypes.windll.kernel32 advapi32 = windll.advapi32 INFINITE = 0xffffffff PROCESS_ALL_ACCESS=0x1F0FFF PROCESS_TERMINATE=0x1 PROCESS_VM_OPERATION=0x8 PROCESS_VM_READ=0x10 PROCESS_VM_WRITE=0X20 SYNCHRONIZE=0x100000 PROCESS_QUERY_INFORMATION=0x400 READ_CONTROL=0x20000 MEM_COMMIT=0x1000 MEM_RELEASE=0x8000 PAGE_READWRITE=0x4 MAXIMUM_ALLOWED = 0x2000000 STARTF_USESTDHANDLES = 0x00000100 STD_INPUT_HANDLE=-10 STD_OUTPUT_HANDLE=-11 STD_ERROR_HANDLE=-12 LOCALE_USER_DEFAULT=0x0400 LOCALE_NAME_USER_DEFAULT=None DATE_LONGDATE=0x00000002 TIME_NOSECONDS=0x00000002 WAIT_ABANDONED = 0x00000080 WAIT_IO_COMPLETION = 0x000000c0 WAIT_OBJECT_0 = 0x00000000 WAIT_TIMEOUT = 0x00000102 WAIT_FAILED = 0xffffffff IMAGE_FILE_MACHINE_UNKNOWN = 0 def GetStdHandle(handleID): h=kernel32.GetStdHandle(handleID) if h==0: raise WinError() return h GENERIC_READ=0x80000000 GENERIC_WRITE=0x40000000 FILE_SHARE_READ=1 FILE_SHARE_WRITE=2 FILE_SHARE_DELETE=4 OPEN_EXISTING=3 def CreateFile(fileName,desiredAccess,shareMode,securityAttributes,creationDisposition,flags,templateFile): res=kernel32.CreateFileW(fileName,desiredAccess,shareMode,securityAttributes,creationDisposition,flags,templateFile) if res==0: raise ctypes.WinError() return res def createEvent(eventAttributes=None, manualReset=False, initialState=False, name=None): res = kernel32.CreateEventW(eventAttributes, manualReset, initialState, name) if res==0: raise ctypes.WinError() return res def createWaitableTimer(securityAttributes=None, manualReset=False, name=None): res = kernel32.CreateWaitableTimerW(securityAttributes, manualReset, name) if res==0: raise ctypes.WinError() return res def setWaitableTimer(handle, dueTime, period=0, completionRoutine=None, arg=None, resume=False): res = kernel32.SetWaitableTimer( handle, byref(LARGE_INTEGER(dueTime*-10000)), period, completionRoutine, arg, resume ) if res==0: 
raise ctypes.WinError() return True def openProcess(*args): return kernel32.OpenProcess(*args) def closeHandle(*args): return kernel32.CloseHandle(*args) class SYSTEM_POWER_STATUS(ctypes.Structure): _fields_ = [("ACLineStatus", ctypes.c_byte), ("BatteryFlag", ctypes.c_byte), ("BatteryLifePercent", ctypes.c_byte), ("Reserved1", ctypes.c_byte), ("BatteryLifeTime", ctypes.wintypes.DWORD), ("BatteryFullLiveTime", ctypes.wintypes.DWORD)] def GetSystemPowerStatus(sps): return kernel32.GetSystemPowerStatus(ctypes.byref(sps)) def getThreadLocale(): return kernel32.GetThreadLocale() class SYSTEMTIME(ctypes.Structure): _fields_ = ( ("wYear", WORD), ("wMonth", WORD), ("wDayOfWeek", WORD), ("wDay", WORD), ("wHour", WORD), ("wMinute", WORD), ("wSecond", WORD), ("wMilliseconds", WORD) ) def GetDateFormat(Locale,dwFlags,date,lpFormat): if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpDate=byref(date) else: lpDate=None bufferLength=kernel32.GetDateFormatW(Locale, dwFlags, lpDate, lpFormat, None, 0) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetDateFormatW(Locale, dwFlags, lpDate, lpFormat, buf, bufferLength) return buf.value def GetDateFormatEx(Locale,dwFlags,date,lpFormat): if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpDate=byref(date) else: lpDate=None bufferLength=kernel32.GetDateFormatEx(Locale, dwFlags, lpDate, lpFormat, None, 0, None) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetDateFormatEx(Locale, dwFlags, lpDate, lpFormat, buf, bufferLength, None) return buf.value def GetTimeFormat(Locale,dwFlags,date,lpFormat): if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpTime=byref(date) else: lpTime=None bufferLength=kernel32.GetTimeFormatW(Locale,dwFlags,lpTime,lpFormat, None, 0) buf=ctypes.create_unicode_buffer("", bufferLength) 
kernel32.GetTimeFormatW(Locale,dwFlags,lpTime,lpFormat, buf, bufferLength) return buf.value def GetTimeFormatEx(Locale,dwFlags,date,lpFormat): if date is not None: date=SYSTEMTIME(date.year,date.month,0,date.day,date.hour,date.minute,date.second,0) lpTime=byref(date) else: lpTime=None bufferLength=kernel32.GetTimeFormatEx(Locale,dwFlags,lpTime,lpFormat, None, 0) buf=ctypes.create_unicode_buffer("", bufferLength) kernel32.GetTimeFormatEx(Locale,dwFlags,lpTime,lpFormat, buf, bufferLength) return buf.value def openProcess(*args): return kernel32.OpenProcess(*args) def virtualAllocEx(*args): res = kernel32.VirtualAllocEx(*args) if res == 0: raise WinError() return res def virtualFreeEx(*args): return kernel32.VirtualFreeEx(*args) def readProcessMemory(*args): return kernel32.ReadProcessMemory(*args) def writeProcessMemory(*args): return kernel32.WriteProcessMemory(*args) def waitForSingleObject(handle,timeout): res = kernel32.WaitForSingleObject(handle,timeout) if res==WAIT_FAILED: raise ctypes.WinError() return res def waitForSingleObjectEx(handle,timeout, alertable): res = kernel32.WaitForSingleObjectEx(handle,timeout, alertable) if res==WAIT_FAILED: raise ctypes.WinError() return res SHUTDOWN_NORETRY = 0x00000001 def SetProcessShutdownParameters(level, flags): res = kernel32.SetProcessShutdownParameters(level, flags) if res == 0: raise ctypes.WinError() def GetExitCodeProcess(process): exitCode = ctypes.wintypes.DWORD() if not kernel32.GetExitCodeProcess(process, ctypes.byref(exitCode)): raise ctypes.WinError() return exitCode.value def TerminateProcess(process, exitCode): if not kernel32.TerminateProcess(process, exitCode): raise ctypes.WinError() DRIVE_UNKNOWN = 0 DRIVE_NO_ROOT_DIR = 1 DRIVE_REMOVABLE = 2 DRIVE_FIXED = 3 DRIVE_REMOTE = 4 DRIVE_CDROM = 5 DRIVE_RAMDISK = 6 def GetDriveType(rootPathName): return kernel32.GetDriveTypeW(rootPathName) class SECURITY_ATTRIBUTES(Structure): _fields_ = ( ("nLength", DWORD), ("lpSecurityDescriptor", LPVOID), 
("bInheritHandle", BOOL) ) def __init__(self, **kwargs): super(SECURITY_ATTRIBUTES, self).__init__(nLength=sizeof(self), **kwargs) def CreatePipe(pipeAttributes, size): read = ctypes.wintypes.HANDLE() write = ctypes.wintypes.HANDLE() if kernel32.CreatePipe(ctypes.byref(read), ctypes.byref(write), byref(pipeAttributes) if pipeAttributes else None, ctypes.wintypes.DWORD(size)) == 0: raise ctypes.WinError() return read.value, write.value class STARTUPINFOW(Structure): _fields_=( ('cb',DWORD), ('lpReserved',LPWSTR), ('lpDesktop',LPWSTR), ('lpTitle',LPWSTR), ('dwX',DWORD), ('dwY',DWORD), ('dwXSize',DWORD), ('dwYSize',DWORD), ('dwXCountChars',DWORD), ('dwYCountChars',DWORD), ('dwFillAttribute',DWORD), ('dwFlags',DWORD), ('wShowWindow',WORD), ('cbReserved2',WORD), ('lpReserved2',POINTER(c_byte)), ('hSTDInput',HANDLE), ('hSTDOutput',HANDLE), ('hSTDError',HANDLE), ) def __init__(self, **kwargs): super(STARTUPINFOW, self).__init__(cb=sizeof(self), **kwargs) STARTUPINFO = STARTUPINFOW class PROCESS_INFORMATION(Structure): _fields_=( ('hProcess',HANDLE), ('hThread',HANDLE), ('dwProcessID',DWORD), ('dwThreadID',DWORD), ) def CreateProcessAsUser(token, applicationName, commandLine, processAttributes, threadAttributes, inheritHandles, creationFlags, environment, currentDirectory, startupInfo, processInformation): if advapi32.CreateProcessAsUserW(token, applicationName, commandLine, processAttributes, threadAttributes, inheritHandles, creationFlags, environment, currentDirectory, byref(startupInfo), byref(processInformation)) == 0: raise WinError() def GetCurrentProcess(): return kernel32.GetCurrentProcess() def OpenProcessToken(ProcessHandle, DesiredAccess): token = HANDLE() if advapi32.OpenProcessToken(ProcessHandle, DesiredAccess, byref(token)) == 0: raise WinError() return token.value DUPLICATE_SAME_ACCESS = 0x00000002 def DuplicateHandle(sourceProcessHandle, sourceHandle, targetProcessHandle, desiredAccess, inheritHandle, options): targetHandle = HANDLE() if 
kernel32.DuplicateHandle(sourceProcessHandle, sourceHandle, targetProcessHandle, byref(targetHandle), desiredAccess, inheritHandle, options) == 0: raise WinError() return targetHandle.value PAPCFUNC = ctypes.WINFUNCTYPE(None, ctypes.wintypes.ULONG) THREAD_SET_CONTEXT = 16 GMEM_MOVEABLE=2 class HGLOBAL(HANDLE): def __init__(self,h,autoFree=True): super(HGLOBAL,self).__init__(h) self._autoFree=autoFree def __del__(self): if self and self._autoFree: windll.kernel32.GlobalFree(self) @classmethod def alloc(cls,flags,size): h=windll.kernel32.GlobalAlloc(flags,size) return cls(h) @contextlib.contextmanager def lock(self): try: yield windll.kernel32.GlobalLock(self) finally: windll.kernel32.GlobalUnlock(self) def forget(self): self.value=None MOVEFILE_COPY_ALLOWED = 0x2 MOVEFILE_CREATE_HARDLINK = 0x10 MOVEFILE_DELAY_UNTIL_REBOOT = 0x4 MOVEFILE_FAIL_IF_NOT_TRACKABLE = 0x20 MOVEFILE_REPLACE_EXISTING = 0x1 MOVEFILE_WRITE_THROUGH = 0x8 def moveFileEx(lpExistingFileName: str, lpNewFileName: str, dwFlags: int): if not kernel32.MoveFileExW(lpExistingFileName, lpNewFileName, dwFlags): raise ctypes.WinError() ES_CONTINUOUS = 0x80000000 ES_DISPLAY_REQUIRED = 0x2 ES_SYSTEM_REQUIRED = 0x1 kernel32.SetThreadExecutionState.restype = ctypes.wintypes.DWORD def SetThreadExecutionState(esFlags): res = kernel32.SetThreadExecutionState(esFlags) if not res: raise WinError() return res
true
true
f73c075a61d79912547acd84cbcd6608b85abf86
7,856
py
Python
tests/katsu_tests/test_authx_diagnoses.py
CanDIG/rego_development_playground
75ba1aa9895f6d949e4ab80f51bfc474895eb304
[ "Apache-2.0" ]
null
null
null
tests/katsu_tests/test_authx_diagnoses.py
CanDIG/rego_development_playground
75ba1aa9895f6d949e4ab80f51bfc474895eb304
[ "Apache-2.0" ]
4
2021-06-04T21:34:18.000Z
2021-11-17T19:23:46.000Z
tests/katsu_tests/test_authx_diagnoses.py
CanDIG/rego_development_playground
75ba1aa9895f6d949e4ab80f51bfc474895eb304
[ "Apache-2.0" ]
null
null
null
from test_helpers import helper_get_katsu_response from test_helpers import helper_get_user_token import pytest """ This test suite will cover the manual testsfor KATSU in README.md, ensuring that authorization happens correctly - beacon permissions - registered/controlled access - modified but live token """ KATSU_URL="http://localhost:8001" OIDC1_NAME="oidc1" OIDC2_NAME="oidc2" @pytest.fixture(scope="session") def user1_token(): """ Return the token for user1 """ return helper_get_user_token("user1", "pass1") def test_user1_diagnoses_access(user1_token): """" Make sure user1 has access to open1, open2, registered3 and controlled4 """ response = helper_get_katsu_response(user1_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 4 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" in diagnoses_dscps assert "controlled4" in diagnoses_dscps ''' Make sure user1 has access to open1, open2, registered3 and controlled4 by id ''' for id in diagnoses_ids: response = helper_get_katsu_response(user1_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user1_diagnoses_invalid(user1_token): """ Make sure invalid token will not have access to datasets other than open datasets """ invalid_token = 'A' + user1_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in 
diagnoses_dscps assert "registered3" not in diagnoses_dscps assert "controlled4" not in diagnoses_dscps @pytest.fixture(scope="session") def user2_token(): """ Return the token for user2 """ return helper_get_user_token("user2", "pass2") def test_user2_diagnoses_access(user2_token): """" Make sure user2 has access to open1, open2, registered3 and controlled 4 """ response = helper_get_katsu_response(user2_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 3 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) """" Make sure user2 has access to open1, open2, and controlled5 """ for id in diagnoses_ids: response = helper_get_katsu_response(user2_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user2_diagnoses_invalid(user2_token): """ Make sure invalid token will not have access to datasets other than open datasets """ invalid_token = 'A' + user2_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" not in diagnoses_dscps assert "controlled5" not in diagnoses_dscps @pytest.fixture(scope="session") def user3_token(): """ Return the token for user3 """ return helper_get_user_token("user3", "pass3", OIDC2_NAME) def test_user3_diagnoses_access(user3_token): """" Make sure user3 has access to open1, open2, registered3, controlled4, and controlled6 """ response = helper_get_katsu_response(user3_token, 
f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 5 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" in diagnoses_dscps assert "controlled4" in diagnoses_dscps assert "controlled6" in diagnoses_dscps """" Make sure user3 has access to open1, open2, registered3, controlled4, and controlled6 by id """ for id in diagnoses_ids: response = helper_get_katsu_response(user3_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user3_diagnoses_invalid(user3_token): """ Make sure invalid token will not have access to datasets other than open datasets """ invalid_token = 'A' + user3_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" not in diagnoses_dscps assert "controlled4" not in diagnoses_dscps assert "controlled6" not in diagnoses_dscps @pytest.fixture(scope="session") def user4_token(): """ Return the token for user4 """ return helper_get_user_token("user4", "pass4", OIDC2_NAME) def test_user4_diagnoses_access(user4_token): """" Make sure user3 has access to open1, open2, and controlled5 """ response = helper_get_katsu_response(user4_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 3 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis 
in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "controlled5" in diagnoses_dscps """" Make sure user4 has access to open1, open2, and controlled4 by id """ for id in diagnoses_ids: response = helper_get_katsu_response(user4_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user4_diagnoses_invalid(user4_token): """ Make sure invalid token will not have access to datasets other than open datasets """ invalid_token = 'A' + user4_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "controlled5" not in diagnoses_dscps
36.37037
95
0.710412
from test_helpers import helper_get_katsu_response from test_helpers import helper_get_user_token import pytest KATSU_URL="http://localhost:8001" OIDC1_NAME="oidc1" OIDC2_NAME="oidc2" @pytest.fixture(scope="session") def user1_token(): return helper_get_user_token("user1", "pass1") def test_user1_diagnoses_access(user1_token): response = helper_get_katsu_response(user1_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 4 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" in diagnoses_dscps assert "controlled4" in diagnoses_dscps for id in diagnoses_ids: response = helper_get_katsu_response(user1_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user1_diagnoses_invalid(user1_token): invalid_token = 'A' + user1_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" not in diagnoses_dscps assert "controlled4" not in diagnoses_dscps @pytest.fixture(scope="session") def user2_token(): return helper_get_user_token("user2", "pass2") def test_user2_diagnoses_access(user2_token): response = helper_get_katsu_response(user2_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 3 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in 
response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) for id in diagnoses_ids: response = helper_get_katsu_response(user2_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user2_diagnoses_invalid(user2_token): invalid_token = 'A' + user2_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" not in diagnoses_dscps assert "controlled5" not in diagnoses_dscps @pytest.fixture(scope="session") def user3_token(): return helper_get_user_token("user3", "pass3", OIDC2_NAME) def test_user3_diagnoses_access(user3_token): response = helper_get_katsu_response(user3_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 5 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" in diagnoses_dscps assert "controlled4" in diagnoses_dscps assert "controlled6" in diagnoses_dscps for id in diagnoses_ids: response = helper_get_katsu_response(user3_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user3_diagnoses_invalid(user3_token): invalid_token = 'A' + user3_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = 
response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "registered3" not in diagnoses_dscps assert "controlled4" not in diagnoses_dscps assert "controlled6" not in diagnoses_dscps @pytest.fixture(scope="session") def user4_token(): return helper_get_user_token("user4", "pass4", OIDC2_NAME) def test_user4_diagnoses_access(user4_token): response = helper_get_katsu_response(user4_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 3 diagnoses_ids = list() diagnoses_dscps = list() for diagnosis in response_json["results"]: diagnoses_ids.append(diagnosis["id"]) diagnoses_dscps.append(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "controlled5" in diagnoses_dscps for id in diagnoses_ids: response = helper_get_katsu_response(user4_token, f"{KATSU_URL}/api/diagnoses/{id}") assert response.status_code == 200 assert "id" in response.json().keys() def test_user4_diagnoses_invalid(user4_token): invalid_token = 'A' + user4_token[1:] response = helper_get_katsu_response(invalid_token, f"{KATSU_URL}/api/diagnoses") assert response.status_code == 200 response_json = response.json() assert response_json["count"] == 2 diagnoses_dscps = set() for diagnosis in response_json["results"]: diagnoses_dscps.add(diagnosis["extra_properties"]["description"]) assert "open1" in diagnoses_dscps assert "open2" in diagnoses_dscps assert "controlled5" not in diagnoses_dscps
true
true
f73c076c09cd5d3e41014423f1a1b4021464aba5
7,344
py
Python
settings/common.py
TroJan/EvalAI
e378c5d6fd6a2f5d1ec020198eeae5d17793952f
[ "BSD-3-Clause" ]
null
null
null
settings/common.py
TroJan/EvalAI
e378c5d6fd6a2f5d1ec020198eeae5d17793952f
[ "BSD-3-Clause" ]
null
null
null
settings/common.py
TroJan/EvalAI
e378c5d6fd6a2f5d1ec020198eeae5d17793952f
[ "BSD-3-Clause" ]
1
2020-01-15T17:27:02.000Z
2020-01-15T17:27:02.000Z
""" Django settings for evalai project. Generated by 'django-admin startproject' using Django 1.10.2. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ import datetime import os import sys # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) APPS_DIR = os.path.join(BASE_DIR, 'apps') sys.path.append(APPS_DIR) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.environ.get('SECRET_KEY', 'random_secret_key') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition DEFAULT_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.sites', ] OUR_APPS = [ 'accounts', 'analytics', 'base', 'challenges', 'hosts', 'jobs', 'participants', 'web', ] THIRD_PARTY_APPS = [ 'allauth', 'allauth.account', 'corsheaders', 'import_export', 'rest_auth', 'rest_auth.registration', 'rest_framework.authtoken', 'rest_framework', 'rest_framework_docs', 'rest_framework_expiring_authtoken', ] INSTALLED_APPS = DEFAULT_APPS + OUR_APPS + THIRD_PARTY_APPS MIDDLEWARE = [ 'corsheaders.middleware.CorsMiddleware', 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'evalai.urls' 
TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'evalai.wsgi.application' # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, 'static') MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = "/media/" SITE_ID = 1 REST_FRAMEWORK = { 'DEFAULT_PAGINATION_CLASS': ( 'rest_framework.pagination.LimitOffsetPagination'), 'PAGE_SIZE': 10, 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.IsAuthenticatedOrReadOnly' ], 'DEFAULT_AUTHENTICATION_CLASSES': [ 'rest_framework_expiring_authtoken.authentication.ExpiringTokenAuthentication', ], 'TEST_REQUEST_DEFAULT_FORMAT': 'json', 'DEFAULT_THROTTLE_CLASSES': ( 'rest_framework.throttling.AnonRateThrottle', 'rest_framework.throttling.UserRateThrottle' ), 'DEFAULT_THROTTLE_RATES': { 'anon': '100/minute', 'user': '100/minute' }, 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', ) } # ALLAUTH SETTINGS ACCOUNT_EMAIL_REQUIRED = True 
OLD_PASSWORD_FIELD_ENABLED = True ACCOUNT_CONFIRM_EMAIL_ON_GET = True ACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = '/api/auth/email-confirmed/' ACCOUNT_EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL = '/api/auth/email-confirmed/' AUTHENTICATION_BACKENDS = ( # Needed to login by username in Django admin, regardless of `allauth` 'django.contrib.auth.backends.ModelBackend', # `allauth` specific authentication methods, such as login by e-mail 'allauth.account.auth_backends.AuthenticationBackend', ) # CORS Settings CORS_ORIGIN_ALLOW_ALL = True # REST Framework Expiring Tokens Configuration EXPIRING_TOKEN_LIFESPAN = datetime.timedelta(days=7) # Logging LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'root': { 'level': 'INFO', 'handlers': ['console'], }, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse', }, 'require_debug_true': { '()': 'django.utils.log.RequireDebugTrue', } }, 'formatters': { 'simple': { 'format': '[%(asctime)s] %(levelname)s %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'verbose': { 'format': '[%(asctime)s] %(levelname)s %(module)s %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' } }, 'handlers': { 'console': { 'level': 'INFO', 'filters': ['require_debug_true'], 'class': 'logging.StreamHandler', 'formatter': 'simple' }, 'logfile': { 'level': 'DEBUG', 'class': 'logging.handlers.RotatingFileHandler', 'filename': "/tmp/logfile", 'maxBytes': 50000, 'backupCount': 10, 'formatter': 'verbose' }, 'mail_admins': { 'level': 'ERROR', 'class': 'django.utils.log.AdminEmailHandler', 'filters': ['require_debug_false'], } }, 'loggers': { 'django': { 'handlers': ['console'], 'propagate': False, }, 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, }, 'django.security': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, }, 'django.db.backends': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, } } } CACHES = { 'default': { 'BACKEND': 
'django.core.cache.backends.memcached.MemcachedCache', } } RABBITMQ_PARAMETERS = { 'HOST': 'localhost', 'EVALAI_EXCHANGE': { 'NAME': 'evalai_submissions', 'TYPE': 'topic', }, 'SUBMISSION_QUEUE': 'submission_task_queue', }
26.608696
99
0.6314
import datetime import os import sys BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) APPS_DIR = os.path.join(BASE_DIR, 'apps') sys.path.append(APPS_DIR) SECRET_KEY = os.environ.get('SECRET_KEY', 'random_secret_key') DEBUG = True ALLOWED_HOSTS = [] # Application definition DEFAULT_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.sites', ] OUR_APPS = [ 'accounts', 'analytics', 'base', 'challenges', 'hosts', 'jobs', 'participants', 'web', ] THIRD_PARTY_APPS = [ 'allauth', 'allauth.account', 'corsheaders', 'import_export', 'rest_auth', 'rest_auth.registration', 'rest_framework.authtoken', 'rest_framework', 'rest_framework_docs', 'rest_framework_expiring_authtoken', ] INSTALLED_APPS = DEFAULT_APPS + OUR_APPS + THIRD_PARTY_APPS MIDDLEWARE = [ 'corsheaders.middleware.CorsMiddleware', 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'evalai.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'evalai.wsgi.application' # Password validation # https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa }, { 'NAME': 
'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa }, ] # Internationalization # https://docs.djangoproject.com/en/1.10/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, 'static') MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = "/media/" SITE_ID = 1 REST_FRAMEWORK = { 'DEFAULT_PAGINATION_CLASS': ( 'rest_framework.pagination.LimitOffsetPagination'), 'PAGE_SIZE': 10, 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.IsAuthenticatedOrReadOnly' ], 'DEFAULT_AUTHENTICATION_CLASSES': [ 'rest_framework_expiring_authtoken.authentication.ExpiringTokenAuthentication', ], 'TEST_REQUEST_DEFAULT_FORMAT': 'json', 'DEFAULT_THROTTLE_CLASSES': ( 'rest_framework.throttling.AnonRateThrottle', 'rest_framework.throttling.UserRateThrottle' ), 'DEFAULT_THROTTLE_RATES': { 'anon': '100/minute', 'user': '100/minute' }, 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', ) } # ALLAUTH SETTINGS ACCOUNT_EMAIL_REQUIRED = True OLD_PASSWORD_FIELD_ENABLED = True ACCOUNT_CONFIRM_EMAIL_ON_GET = True ACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = '/api/auth/email-confirmed/' ACCOUNT_EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL = '/api/auth/email-confirmed/' AUTHENTICATION_BACKENDS = ( # Needed to login by username in Django admin, regardless of `allauth` 'django.contrib.auth.backends.ModelBackend', # `allauth` specific authentication methods, such as login by e-mail 'allauth.account.auth_backends.AuthenticationBackend', ) # CORS Settings CORS_ORIGIN_ALLOW_ALL = True # REST Framework Expiring Tokens Configuration EXPIRING_TOKEN_LIFESPAN = 
datetime.timedelta(days=7) # Logging LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'root': { 'level': 'INFO', 'handlers': ['console'], }, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse', }, 'require_debug_true': { '()': 'django.utils.log.RequireDebugTrue', } }, 'formatters': { 'simple': { 'format': '[%(asctime)s] %(levelname)s %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'verbose': { 'format': '[%(asctime)s] %(levelname)s %(module)s %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' } }, 'handlers': { 'console': { 'level': 'INFO', 'filters': ['require_debug_true'], 'class': 'logging.StreamHandler', 'formatter': 'simple' }, 'logfile': { 'level': 'DEBUG', 'class': 'logging.handlers.RotatingFileHandler', 'filename': "/tmp/logfile", 'maxBytes': 50000, 'backupCount': 10, 'formatter': 'verbose' }, 'mail_admins': { 'level': 'ERROR', 'class': 'django.utils.log.AdminEmailHandler', 'filters': ['require_debug_false'], } }, 'loggers': { 'django': { 'handlers': ['console'], 'propagate': False, }, 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, }, 'django.security': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, }, 'django.db.backends': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': False, } } } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', } } RABBITMQ_PARAMETERS = { 'HOST': 'localhost', 'EVALAI_EXCHANGE': { 'NAME': 'evalai_submissions', 'TYPE': 'topic', }, 'SUBMISSION_QUEUE': 'submission_task_queue', }
true
true
f73c07bc944c352b0b9d234240f626e277e39eaf
3,705
py
Python
openstack_dashboard/theme_settings.py
ankur-gupta91/block_storage
938548a3d4507dc56c1c26b442767eb41aa2e610
[ "Apache-2.0" ]
9
2016-06-03T03:53:24.000Z
2017-05-20T16:53:23.000Z
openstack_dashboard/theme_settings.py
ankur-gupta91/block_storage
938548a3d4507dc56c1c26b442767eb41aa2e610
[ "Apache-2.0" ]
12
2022-03-22T07:28:29.000Z
2022-03-22T07:29:55.000Z
openstack_dashboard/theme_settings.py
ankur-gupta91/block_storage
938548a3d4507dc56c1c26b442767eb41aa2e610
[ "Apache-2.0" ]
4
2016-08-01T10:50:15.000Z
2017-02-22T12:11:19.000Z
# Copyright 2016 Hewlett Packard Enterprise Software, LLC # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import os from django.utils.translation import pgettext_lazy def get_theme_static_dirs(available_themes, collection_dir, root): static_dirs = [] # Collect and expose the themes that have been configured for theme in available_themes: theme_name, theme_label, theme_path = theme theme_url = os.path.join(collection_dir, theme_name) theme_path = os.path.join(root, theme_path) if os.path.exists(os.path.join(theme_path, 'static')): # Only expose the subdirectory 'static' if it exists from a custom # theme, allowing other logic to live with a theme that we might # not want to expose statically theme_path = os.path.join(theme_path, 'static') static_dirs.append( (theme_url, theme_path), ) return static_dirs def get_available_themes(available_themes, custom_path, default_path, default_theme): new_theme_list = [] # We can only support one path at a time, because of static file # collection. 
custom_ndx = -1 default_ndx = -1 default_theme_ndx = -1 for ndx, each_theme in enumerate(available_themes): # Maintain Backward Compatibility for CUSTOM_THEME_PATH if custom_path: if each_theme[2] == custom_path: custom_ndx = ndx # Maintain Backward Compatibility for DEFAULT_THEME_PATH if default_path: if each_theme[0] == 'default': default_ndx = ndx each_theme = ( 'default', pgettext_lazy('Default style theme', 'Default'), default_path ) # Make sure that DEFAULT_THEME is configured for use if each_theme[0] == default_theme: default_theme_ndx = ndx new_theme_list.append(each_theme) if custom_ndx != -1: # If CUSTOM_THEME_PATH is set, then we should set that as the default # theme to make sure that upgrading Horizon doesn't jostle anyone default_theme = available_themes[custom_ndx][0] logging.warning("Your AVAILABLE_THEMES already contains your " "CUSTOM_THEME_PATH, therefore using configuration in " "AVAILABLE_THEMES for %s." % custom_path) elif custom_path is not None: new_theme_list.append( ('custom', pgettext_lazy('Custom style theme', 'Custom'), custom_path) ) default_theme = 'custom' # If 'default' isn't present at all, add it with the default_path if default_ndx == -1 and default_path is not None: new_theme_list.append( ('default', pgettext_lazy('Default style theme', 'Default'), default_path) ) # If default is not configured, we have to set one, # just grab the first theme if default_theme_ndx == -1 and custom_ndx == -1: default_theme = available_themes[0][0] return new_theme_list, default_theme
35.625
78
0.647233
import logging import os from django.utils.translation import pgettext_lazy def get_theme_static_dirs(available_themes, collection_dir, root): static_dirs = [] for theme in available_themes: theme_name, theme_label, theme_path = theme theme_url = os.path.join(collection_dir, theme_name) theme_path = os.path.join(root, theme_path) if os.path.exists(os.path.join(theme_path, 'static')): theme_path = os.path.join(theme_path, 'static') static_dirs.append( (theme_url, theme_path), ) return static_dirs def get_available_themes(available_themes, custom_path, default_path, default_theme): new_theme_list = [] custom_ndx = -1 default_ndx = -1 default_theme_ndx = -1 for ndx, each_theme in enumerate(available_themes): if custom_path: if each_theme[2] == custom_path: custom_ndx = ndx if default_path: if each_theme[0] == 'default': default_ndx = ndx each_theme = ( 'default', pgettext_lazy('Default style theme', 'Default'), default_path ) if each_theme[0] == default_theme: default_theme_ndx = ndx new_theme_list.append(each_theme) if custom_ndx != -1: default_theme = available_themes[custom_ndx][0] logging.warning("Your AVAILABLE_THEMES already contains your " "CUSTOM_THEME_PATH, therefore using configuration in " "AVAILABLE_THEMES for %s." % custom_path) elif custom_path is not None: new_theme_list.append( ('custom', pgettext_lazy('Custom style theme', 'Custom'), custom_path) ) default_theme = 'custom' # If 'default' isn't present at all, add it with the default_path if default_ndx == -1 and default_path is not None: new_theme_list.append( ('default', pgettext_lazy('Default style theme', 'Default'), default_path) ) if default_theme_ndx == -1 and custom_ndx == -1: default_theme = available_themes[0][0] return new_theme_list, default_theme
true
true
f73c07caec4ae8a0fc91ef1c9848cf2ff5d8a5d1
120
py
Python
test_output/for_loop.py
roshangol/executed-path-visualize
1759c12b0048fe117205990b151d2f5f57ad9616
[ "MIT" ]
null
null
null
test_output/for_loop.py
roshangol/executed-path-visualize
1759c12b0048fe117205990b151d2f5f57ad9616
[ "MIT" ]
null
null
null
test_output/for_loop.py
roshangol/executed-path-visualize
1759c12b0048fe117205990b151d2f5f57ad9616
[ "MIT" ]
null
null
null
lst = [1, 2, 3, 4, 5] for i in range(len(lst)): print(lst[i], end=" ") for j in range(0, 10): print(j, end=" ")
20
26
0.5
lst = [1, 2, 3, 4, 5] for i in range(len(lst)): print(lst[i], end=" ") for j in range(0, 10): print(j, end=" ")
true
true
f73c09d1298a19e2c65689de3b32ae95532ab4b1
516
py
Python
alg_insertion_sort.py
lukes1582/algoritmi
3313c9ae3cb5f9f0c410ca86ea29e23cb1c3c8fd
[ "Apache-2.0" ]
null
null
null
alg_insertion_sort.py
lukes1582/algoritmi
3313c9ae3cb5f9f0c410ca86ea29e23cb1c3c8fd
[ "Apache-2.0" ]
null
null
null
alg_insertion_sort.py
lukes1582/algoritmi
3313c9ae3cb5f9f0c410ca86ea29e23cb1c3c8fd
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 """ l0m1s lukes1582@gmail.com algoritmo insertion sort sviluppato per Python """ arr = [5,4,3,1,2,11,9,8,0] print("Array in origine ") print(arr) print ("Lunghezza dell'array "+ str(len(arr))) print(50*"x") for i in range(1, len(arr)): key = arr[i] j = i-1 while j >=0 and key < arr[j] : arr[j+1] = arr[j] j -= 1 arr[j+1] = key print ("l'array ordinato risulta essere: ") for x in range(len(arr)): print ("%d" %arr[x])
18.428571
47
0.546512
arr = [5,4,3,1,2,11,9,8,0] print("Array in origine ") print(arr) print ("Lunghezza dell'array "+ str(len(arr))) print(50*"x") for i in range(1, len(arr)): key = arr[i] j = i-1 while j >=0 and key < arr[j] : arr[j+1] = arr[j] j -= 1 arr[j+1] = key print ("l'array ordinato risulta essere: ") for x in range(len(arr)): print ("%d" %arr[x])
true
true
f73c0a14a8a12842479a4ecfabd7ba1529fc1b0a
19,243
py
Python
py/test/fixture/whale/host/interrupt_handler.py
arccode/factory
a1b0fccd68987d8cd9c89710adc3c04b868347ec
[ "BSD-3-Clause" ]
3
2022-01-06T16:52:52.000Z
2022-03-07T11:30:47.000Z
py/test/fixture/whale/host/interrupt_handler.py
arccode/factory
a1b0fccd68987d8cd9c89710adc3c04b868347ec
[ "BSD-3-Clause" ]
null
null
null
py/test/fixture/whale/host/interrupt_handler.py
arccode/factory
a1b0fccd68987d8cd9c89710adc3c04b868347ec
[ "BSD-3-Clause" ]
1
2021-10-24T01:47:22.000Z
2021-10-24T01:47:22.000Z
#!/usr/bin/env python3 # # Copyright 2014 The Chromium OS Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Handles Whale's button click event.""" import argparse import functools import logging import os import re import sys import time from cros.factory.test.fixture.whale import keyboard_emulator from cros.factory.test.fixture.whale import serial_client from cros.factory.test.fixture.whale import servo_client from cros.factory.utils import gpio_utils from cros.factory.utils import process_utils from cros.factory.utils import ssh_utils from cros.factory.utils import type_utils ActionType = type_utils.Enum(['PUSH_NEEDLE', 'FIXTURE_STARTED']) def TimeClassMethodDebug(func): """A decorator to log method running time on debug level.""" @functools.wraps(func) def Wrapped(*args, **kwargs): logging.debug('Invoking %s()', func.__name__) start_time = time.time() result = func(*args, **kwargs) logging.debug('%s() finished in %.4f secs', func.__name__, time.time() - start_time) return result return Wrapped class InterruptHandler: """Waits for Whale's I/O expanders' interrupt and dispatches it. It connects to BeagleBone's servod and polld, where servod is used to get I/O expanders' input status and reset SR latches; polld is used to wait GPIO 7, the interrupt pin from Whale's I/O expanders. """ # Shortcuts to Whale's button and control dict. _BUTTON = servo_client.WHALE_BUTTON _CONTROL = servo_client.WHALE_CONTROL _FIXTURE_FEEDBACK = servo_client.FIXTURE_FEEDBACK _PLANKTON_FEEDBACK = servo_client.PLANKTON_FEEDBACK _WHALE_DEBUG_MODE_EN = servo_client.WHALE_DEBUG_MODE_EN # List of buttons and feedbacks to scan. # Difference between button and feedback is: button is latched; # no latch for feedback. _BUTTON_LIST = servo_client.WHALE_BUTTONS _FEEDBACK_LIST = servo_client.WHALE_FEEDBACKS # Buttons that operator can use (non debug mode). 
_OPERATOR_BUTTON_LIST = (_BUTTON.FIXTURE_START, _BUTTON.FIXTURE_STOP) # DUT sensor check list, add (FEEDBACK, Bool) to check if MLB exists. # example: # _DUT_SENSOR_CHECK_LIST = dict([ # (_FIXTURE_FEEDBACK.FB8, True), # (_FIXTURE_FEEDBACK.FB9, False)]) _DUT_SENSOR_CHECK_LIST = dict() _INPUT_LIST = _BUTTON_LIST + _FEEDBACK_LIST _INPUT_INTERRUPT_GPIO = 7 # Used to avoid toggle battery too fast. _BATTERY_CEASE_TOGGLE_SECS = 1.0 _FixtureState = type_utils.Enum( ['WAIT', 'CLOSED', 'ERR_CLOSING', 'CLOSING', 'OPENING']) # Fixture state to LED light and LCD message (green, red, message). _FixtureStateParams = { _FixtureState.WAIT: ('on', 'on', 'ready'), _FixtureState.CLOSED: ('off', 'off', 'closed'), _FixtureState.ERR_CLOSING: ('off', 'on', '!!no board inside!!'), _FixtureState.CLOSING: ('off', 'on', 'closing'), _FixtureState.OPENING: ('off', 'on', 'opening')} def __init__(self, host, polld_port, servod_port, dolphin_port, rpc_debug, polling_wait_secs): """Constructor. Args: host: BeagleBone's hostname or IP address. polld_port: port that polld listens. Set to None if not using polld. servod_port: port that servod listens. dolphin_port: port that dolphin server listens. Set to None if not using dolphin server. rpc_debug: True to enable XMLRPC debug message. polling_wait_secs: # seconds for polling button clicking event. """ self._poll = gpio_utils.GpioManager( use_polld=polld_port is not None, host=host, tcp_port=polld_port, verbose=rpc_debug) self._dolphin = None if dolphin_port: self._dolphin = serial_client.SerialClient( host=host, tcp_port=dolphin_port, verbose=rpc_debug) self._servo = servo_client.ServoClient(host=host, port=servod_port, verbose=rpc_debug) self._polling_wait_secs = polling_wait_secs # Store last feedback value. The value is initialzed in the very first # ScanFeedback call. self._last_feedback = {} self._starting_fixture_action = None # Used to avoid toggle battery too fast. 
self._last_battery_toggle_time = time.time() @TimeClassMethodDebug def Init(self): """Resets button latch and records feedback value.""" self._last_feedback = self._servo.MultipleIsOn(self._FEEDBACK_LIST) self._servo.MultipleSet([(self._CONTROL.LCM_CMD, 'clear'), (self._CONTROL.LCM_TEXT, 'Initializing...')]) self.ResetLatch() self.ResetInterrupt() self.ResetKeyboard() # Initial fixture state: cover open. self._HandleStopFixture(show_state=False) self._SetState(self._FixtureState.WAIT) def ResetKeyboard(self): keyboard = keyboard_emulator.KeyboardEmulator(self._servo) keyboard.SimulateKeystrokes() def _SetState(self, state): green, red, message = self._FixtureStateParams[state] self._servo.MultipleSet([(self._CONTROL.PASS_LED, green), (self._CONTROL.FAIL_LED, red), (self._CONTROL.LCM_CMD, 'clear'), (self._CONTROL.LCM_TEXT, message)]) self.ShowNucIpOnLED() def _IsMLBInFixture(self): """Checks MLB(s) is(are) inside the fixture. If the project has only one board, check DUT_SENSOR is enough. For two boards project, ex. lid and base boards, check DUT_SENSOR and BASE_SENSOR. Returns: True if MLB(s) is(are) inside the fixture; otherwise False. """ if not self._DUT_SENSOR_CHECK_LIST: logging.info('No dut sensor...') return True dut_sensor_list = list(self._DUT_SENSOR_CHECK_LIST) dut_sensor_status = self._servo.MultipleIsOn(dut_sensor_list) return dut_sensor_status == self._DUT_SENSOR_CHECK_LIST @TimeClassMethodDebug def _HandleStopFixture(self, show_state=True): """Stop Fixture Step""" logging.info('Stopping fixture...') if show_state: self._SetState(self._FixtureState.OPENING) # Disable battery first for safety. 
self._servo.Disable(self._CONTROL.BATTERY) while True: feedback_status = self._servo.MultipleIsOn(self._FEEDBACK_LIST) if (not feedback_status[self._FIXTURE_FEEDBACK.FB1] or not feedback_status[self._FIXTURE_FEEDBACK.FB3]): self._servo.Disable(self._CONTROL.FIXTURE_PUSH_NEEDLE) continue self._starting_fixture_action = None logging.info('[Fixture stopped]') break self._SetState(self._FixtureState.WAIT) @TimeClassMethodDebug def _HandleStartFixtureFeedbackChange(self, feedback_status): """Processing Start Fixture feedback information""" if (self._starting_fixture_action is not None and self._starting_fixture_action != ActionType.FIXTURE_STARTED): # we are closing the fixture, check if we detect a hand if feedback_status[self._FIXTURE_FEEDBACK.FB5]: # detect hand, abort self._HandleStopFixture() return if self._servo.IsOn(self._BUTTON.FIXTURE_START): if (self._starting_fixture_action == ActionType.PUSH_NEEDLE and feedback_status[self._FIXTURE_FEEDBACK.FB2] and feedback_status[self._FIXTURE_FEEDBACK.FB4]): logging.info('[HandleStartFixture] fixture closed') self._starting_fixture_action = ActionType.FIXTURE_STARTED self._SetState(self._FixtureState.CLOSED) @TimeClassMethodDebug def _HandleStartFixture(self): """Start Fixture Step""" logging.info('[Fixture Start ...]') if self._starting_fixture_action == ActionType.FIXTURE_STARTED: logging.info('[HandleStartFixture] ACTION = FIXTURE_STARTED') return if self._last_feedback[self._FIXTURE_FEEDBACK.FB5]: logging.info('[HandleStartFixture] Detect Hands, stop..') return if self._starting_fixture_action is None: if not self._IsMLBInFixture(): logging.info( '[HandleStartFixture] OOPS! 
Cannot close cover without MLBs') self._SetState(self._FixtureState.ERR_CLOSING) return self._ResetWhaleDeviceBeforeClosing() self._ResetDolphinDeviceBeforeClosing() self._starting_fixture_action = ActionType.PUSH_NEEDLE self._SetState(self._FixtureState.CLOSING) if self._starting_fixture_action == ActionType.PUSH_NEEDLE: logging.info('[HandleStartFixture] pushing needle') self._servo.Enable(self._CONTROL.FIXTURE_PUSH_NEEDLE) @TimeClassMethodDebug def _ResetWhaleDeviceBeforeClosing(self): """Resets devices on Whale if necessary before closing fixture.""" # Release DUT CC2 pull-high self._servo.Disable(self._CONTROL.DC) self._servo.Disable(self._CONTROL.OUTPUT_RESERVE_1) @TimeClassMethodDebug def _ResetDolphinDeviceBeforeClosing(self): """Resets Dolphin if necessary before closing fixture.""" if self._dolphin is None: return # Set dolphin to discharging mode, if dolphin is charging, DUT will fail to # boot up after battery connection. # Assuming all serial connections are connected to Dolphin. serial_amount = self._dolphin.GetSerialAmount() for serial_index in range(serial_amount): self._dolphin.Send(serial_index, 'usbc_action dev') @TimeClassMethodDebug def _ToggleBattery(self): """Toggles battery status. If battery is on, switches it to off and vise versa. """ if (time.time() - self._last_battery_toggle_time < self._BATTERY_CEASE_TOGGLE_SECS): logging.debug('Toggle too fast, cease toggle for %f second.', self._BATTERY_CEASE_TOGGLE_SECS) return new_battery_status = ('off' if self._servo.IsOn(self._CONTROL.BATTERY) else 'on') logging.info('[Toggle battery to %s]', new_battery_status) self._servo.Set(self._CONTROL.BATTERY, new_battery_status) self._last_battery_toggle_time = time.time() @TimeClassMethodDebug def ScanButton(self): """Scans all buttons and invokes button click handler for clicked buttons. Returns: True if a button is clicked. 
""" logging.debug('[Scanning button....]') status = self._servo.MultipleIsOn(self._BUTTON_LIST) if status[self._BUTTON.FIXTURE_STOP]: logging.info('Calling _HandleStopFixture because FIXTURE_STOP is True.') self._HandleStopFixture() # Disable stop button, and use 'i2cset' to set it back to input mode. self._servo.Disable(self._BUTTON.FIXTURE_STOP) process_utils.Spawn(['i2cset', '-y', '1', '0x77', '0x07', '0xff']) return True if (self._starting_fixture_action != ActionType.FIXTURE_STARTED and self._starting_fixture_action is not None and not status[self._BUTTON.FIXTURE_START]): logging.info('Calling _HandleStopFixture because FIXTURE_START is False.') self._HandleStopFixture() return False button_clicked = any(status.values()) if not button_clicked: return False operator_mode = not self._servo.IsOn(self._WHALE_DEBUG_MODE_EN) for button, clicked in status.items(): if not clicked: continue if operator_mode and button not in self._OPERATOR_BUTTON_LIST: logging.debug('Supress button %s click because debug mode is off.', button) continue if button == self._BUTTON.FIXTURE_START: if self._starting_fixture_action == ActionType.FIXTURE_STARTED: logging.info('[START] ACTION = FIXTURE_STARTED') else: self._HandleStartFixture() elif button == self._BUTTON.RESERVE_1: self._ToggleBattery() logging.info('Button %s clicked', button) return button_clicked @TimeClassMethodDebug def ScanFeedback(self): """Scans all feedback and invokes handler for those changed feedback. Returns: True if any feedback value is clicked. 
""" logging.debug('[Scanning feedback....]') feedback_status = self._servo.MultipleIsOn(self._FEEDBACK_LIST) feedback_changed = False for name, value in feedback_status.items(): if self._last_feedback[name] == value: continue self._HandleStartFixtureFeedbackChange(feedback_status) logging.info('Feedback %s value changed to %r', name, value) self._last_feedback[name] = value feedback_changed = True return feedback_changed @TimeClassMethodDebug def ResetLatch(self): """Resets SR latch for buttons.""" self._servo.Click(self._CONTROL.INPUT_RESET) @TimeClassMethodDebug def WaitForInterrupt(self): logging.debug('Polling interrupt (GPIO %d %s) for %r seconds', self._INPUT_INTERRUPT_GPIO, self._poll.GPIO_EDGE_FALLING, self._polling_wait_secs) if self._poll.Poll(self._INPUT_INTERRUPT_GPIO, self._poll.GPIO_EDGE_FALLING, self._polling_wait_secs): logging.debug('Interrupt polled') else: logging.debug('Polling interrupt timeout') @TimeClassMethodDebug def ResetInterrupt(self): """Resets I/O expanders' interrupt. We have four I/O expanders (TCA9539), three of them have inputs. As BeagleBone can only accept one interrupt, we cascade two expanders' (0x75, 0x77) INT to 0x76 input pins. So any input changes from 0x75, 0x76, 0x77 will generate interrupt to BeagleBone. According to TCA9539 manual: "resetting the interrupt circuit is achieved when data on the port is changed to the original setting or data is read from the port that generated the interrupt. ... Because each 8-bit port is read independently, the interrupt caused by port 0 is not cleared by a read of port 1, or vice versa", to reset interrupt, we need to read each changing bit. However, as servod reads a byte each time we read an input pin, so we only need to read 0x77 byte-0, byte-1, 0x75 byte-1, and 0x76 byte-0, byte-1, in sequence to reset INT. The reason to read in sequence is that we need to read 0x76 at last as 0x77 and 0x75 INT reset could change P02 and P03 pin in 0x76. 
""" # Touch I/O expander 0x77 byte 0 & 1, 0x75 byte 1, 0x76 byte 0 & 1. # Note that we skip I/O expander 0x75 byte-0 as it contains no input # pin, won't trigger interrupt. self._servo.MultipleGet([ self._FIXTURE_FEEDBACK.FB1, self._BUTTON.FIXTURE_START, self._PLANKTON_FEEDBACK.FB1, self._WHALE_DEBUG_MODE_EN, self._BUTTON.RESERVE_1]) def Run(self): """Waits for Whale's button click interrupt and dispatches it.""" while True: button_clicked = self.ScanButton() feedback_changed = self.ScanFeedback() # The reason why we don't poll interrupt right after reset latch is that # it might be possible that a button is clicked after latch is cleared # but before I/O expander is touched. In this case, the button is latched # but the interrupt is consumed (after touching I/O expander) so that the # following click of that button won't trigger interrupt again, and # polling is blocked. # # The solution is to read button again without waiting for interrupt. if button_clicked or feedback_changed: if button_clicked: self.ResetLatch() self.ResetInterrupt() continue self.WaitForInterrupt() def ShowNucIpOnLED(self): """Shows NUC dongle IP on LED second line""" nuc_host = '192.168.234.1' testing_rsa_path = '/usr/local/factory/misc/sshkeys/testing_rsa' get_dongle_eth_script = ( 'timeout 1s /usr/local/factory/py/test/fixture/get_dongle_eth.sh') # Make identity file less open to make ssh happy os.chmod(testing_rsa_path, 0o600) ssh_command_base = ssh_utils.BuildSSHCommand( identity_file=testing_rsa_path) try: interface = process_utils.SpawnOutput( ssh_command_base + [nuc_host, get_dongle_eth_script]).strip() except BaseException: interface = None if not interface: ip_address = 'dongle not found...' else: ifconfig_command = 'ifconfig %s' % interface ifconfig_result = process_utils.SpawnOutput( ssh_command_base + [nuc_host, ifconfig_command]).strip() ip_matcher = re.search(r'inet (\d+\.\d+\.\d+\.\d+)', ifconfig_result, re.MULTILINE) if not ip_matcher: ip_address = 'dongle not found...' 
else: ip_address = ip_matcher.group(1) self._servo.MultipleSet([(self._CONTROL.LCM_ROW, 'r1'), (self._CONTROL.LCM_TEXT, ip_address)]) def ParseArgs(): """Parses command line arguments. Returns: args from argparse.parse_args(). """ description = ( 'Handle Whale button click event.' ) parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter, description=description) parser.add_argument('-d', '--debug', action='store_true', default=False, help='enable debug messages') parser.add_argument('--rpc_debug', action='store_true', default=False, help='enable debug messages for XMLRPC call') parser.add_argument('--nouse_dolphin', action='store_false', default=True, dest='use_dolphin', help='whether to skip dolphin control' ' (remote server). default: %(default)s') parser.add_argument('--use_polld', action='store_true', default=False, help='whether to use polld (for polling GPIO port on ' 'remote server) or poll local GPIO port, default: ' '%(default)s') parser.add_argument('--host', default='127.0.0.1', type=str, help='hostname of server, default: %(default)s') parser.add_argument('--dolphin_port', default=9997, type=int, help='port that dolphin_server listens, default: ' '%(default)d') parser.add_argument('--polld_port', default=9998, type=int, help='port that polld listens, default: %(default)d') parser.add_argument('--servod_port', default=9999, type=int, help='port that servod listens, default: %(default)d') parser.add_argument('--polling_wait_secs', default=5, type=int, help=('# seconds for polling button clicking event, ' 'default: %(default)d')) return parser.parse_args() def main(): args = ParseArgs() logging.basicConfig( level=logging.DEBUG if args.debug else logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') polld_port = args.polld_port if args.use_polld else None dolphin_port = args.dolphin_port if args.use_dolphin else None handler = InterruptHandler(args.host, polld_port, args.servod_port, dolphin_port, args.rpc_debug, 
args.polling_wait_secs) handler.Init() handler.Run() if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit(0) except gpio_utils.GpioManagerError as e: sys.stderr.write(str(e) + '\n') sys.exit(1)
37.657534
80
0.686276
import argparse import functools import logging import os import re import sys import time from cros.factory.test.fixture.whale import keyboard_emulator from cros.factory.test.fixture.whale import serial_client from cros.factory.test.fixture.whale import servo_client from cros.factory.utils import gpio_utils from cros.factory.utils import process_utils from cros.factory.utils import ssh_utils from cros.factory.utils import type_utils ActionType = type_utils.Enum(['PUSH_NEEDLE', 'FIXTURE_STARTED']) def TimeClassMethodDebug(func): @functools.wraps(func) def Wrapped(*args, **kwargs): logging.debug('Invoking %s()', func.__name__) start_time = time.time() result = func(*args, **kwargs) logging.debug('%s() finished in %.4f secs', func.__name__, time.time() - start_time) return result return Wrapped class InterruptHandler: _BUTTON = servo_client.WHALE_BUTTON _CONTROL = servo_client.WHALE_CONTROL _FIXTURE_FEEDBACK = servo_client.FIXTURE_FEEDBACK _PLANKTON_FEEDBACK = servo_client.PLANKTON_FEEDBACK _WHALE_DEBUG_MODE_EN = servo_client.WHALE_DEBUG_MODE_EN # List of buttons and feedbacks to scan. # Difference between button and feedback is: button is latched; # no latch for feedback. _BUTTON_LIST = servo_client.WHALE_BUTTONS _FEEDBACK_LIST = servo_client.WHALE_FEEDBACKS # Buttons that operator can use (non debug mode). _OPERATOR_BUTTON_LIST = (_BUTTON.FIXTURE_START, _BUTTON.FIXTURE_STOP) # DUT sensor check list, add (FEEDBACK, Bool) to check if MLB exists. # example: # _DUT_SENSOR_CHECK_LIST = dict([ # (_FIXTURE_FEEDBACK.FB8, True), # (_FIXTURE_FEEDBACK.FB9, False)]) _DUT_SENSOR_CHECK_LIST = dict() _INPUT_LIST = _BUTTON_LIST + _FEEDBACK_LIST _INPUT_INTERRUPT_GPIO = 7 # Used to avoid toggle battery too fast. _BATTERY_CEASE_TOGGLE_SECS = 1.0 _FixtureState = type_utils.Enum( ['WAIT', 'CLOSED', 'ERR_CLOSING', 'CLOSING', 'OPENING']) # Fixture state to LED light and LCD message (green, red, message). 
_FixtureStateParams = { _FixtureState.WAIT: ('on', 'on', 'ready'), _FixtureState.CLOSED: ('off', 'off', 'closed'), _FixtureState.ERR_CLOSING: ('off', 'on', '!!no board inside!!'), _FixtureState.CLOSING: ('off', 'on', 'closing'), _FixtureState.OPENING: ('off', 'on', 'opening')} def __init__(self, host, polld_port, servod_port, dolphin_port, rpc_debug, polling_wait_secs): self._poll = gpio_utils.GpioManager( use_polld=polld_port is not None, host=host, tcp_port=polld_port, verbose=rpc_debug) self._dolphin = None if dolphin_port: self._dolphin = serial_client.SerialClient( host=host, tcp_port=dolphin_port, verbose=rpc_debug) self._servo = servo_client.ServoClient(host=host, port=servod_port, verbose=rpc_debug) self._polling_wait_secs = polling_wait_secs # Store last feedback value. The value is initialzed in the very first # ScanFeedback call. self._last_feedback = {} self._starting_fixture_action = None # Used to avoid toggle battery too fast. self._last_battery_toggle_time = time.time() @TimeClassMethodDebug def Init(self): self._last_feedback = self._servo.MultipleIsOn(self._FEEDBACK_LIST) self._servo.MultipleSet([(self._CONTROL.LCM_CMD, 'clear'), (self._CONTROL.LCM_TEXT, 'Initializing...')]) self.ResetLatch() self.ResetInterrupt() self.ResetKeyboard() # Initial fixture state: cover open. 
self._HandleStopFixture(show_state=False) self._SetState(self._FixtureState.WAIT) def ResetKeyboard(self): keyboard = keyboard_emulator.KeyboardEmulator(self._servo) keyboard.SimulateKeystrokes() def _SetState(self, state): green, red, message = self._FixtureStateParams[state] self._servo.MultipleSet([(self._CONTROL.PASS_LED, green), (self._CONTROL.FAIL_LED, red), (self._CONTROL.LCM_CMD, 'clear'), (self._CONTROL.LCM_TEXT, message)]) self.ShowNucIpOnLED() def _IsMLBInFixture(self): if not self._DUT_SENSOR_CHECK_LIST: logging.info('No dut sensor...') return True dut_sensor_list = list(self._DUT_SENSOR_CHECK_LIST) dut_sensor_status = self._servo.MultipleIsOn(dut_sensor_list) return dut_sensor_status == self._DUT_SENSOR_CHECK_LIST @TimeClassMethodDebug def _HandleStopFixture(self, show_state=True): logging.info('Stopping fixture...') if show_state: self._SetState(self._FixtureState.OPENING) # Disable battery first for safety. self._servo.Disable(self._CONTROL.BATTERY) while True: feedback_status = self._servo.MultipleIsOn(self._FEEDBACK_LIST) if (not feedback_status[self._FIXTURE_FEEDBACK.FB1] or not feedback_status[self._FIXTURE_FEEDBACK.FB3]): self._servo.Disable(self._CONTROL.FIXTURE_PUSH_NEEDLE) continue self._starting_fixture_action = None logging.info('[Fixture stopped]') break self._SetState(self._FixtureState.WAIT) @TimeClassMethodDebug def _HandleStartFixtureFeedbackChange(self, feedback_status): if (self._starting_fixture_action is not None and self._starting_fixture_action != ActionType.FIXTURE_STARTED): # we are closing the fixture, check if we detect a hand if feedback_status[self._FIXTURE_FEEDBACK.FB5]: # detect hand, abort self._HandleStopFixture() return if self._servo.IsOn(self._BUTTON.FIXTURE_START): if (self._starting_fixture_action == ActionType.PUSH_NEEDLE and feedback_status[self._FIXTURE_FEEDBACK.FB2] and feedback_status[self._FIXTURE_FEEDBACK.FB4]): logging.info('[HandleStartFixture] fixture closed') self._starting_fixture_action = 
ActionType.FIXTURE_STARTED self._SetState(self._FixtureState.CLOSED) @TimeClassMethodDebug def _HandleStartFixture(self): logging.info('[Fixture Start ...]') if self._starting_fixture_action == ActionType.FIXTURE_STARTED: logging.info('[HandleStartFixture] ACTION = FIXTURE_STARTED') return if self._last_feedback[self._FIXTURE_FEEDBACK.FB5]: logging.info('[HandleStartFixture] Detect Hands, stop..') return if self._starting_fixture_action is None: if not self._IsMLBInFixture(): logging.info( '[HandleStartFixture] OOPS! Cannot close cover without MLBs') self._SetState(self._FixtureState.ERR_CLOSING) return self._ResetWhaleDeviceBeforeClosing() self._ResetDolphinDeviceBeforeClosing() self._starting_fixture_action = ActionType.PUSH_NEEDLE self._SetState(self._FixtureState.CLOSING) if self._starting_fixture_action == ActionType.PUSH_NEEDLE: logging.info('[HandleStartFixture] pushing needle') self._servo.Enable(self._CONTROL.FIXTURE_PUSH_NEEDLE) @TimeClassMethodDebug def _ResetWhaleDeviceBeforeClosing(self): # Release DUT CC2 pull-high self._servo.Disable(self._CONTROL.DC) self._servo.Disable(self._CONTROL.OUTPUT_RESERVE_1) @TimeClassMethodDebug def _ResetDolphinDeviceBeforeClosing(self): if self._dolphin is None: return # Set dolphin to discharging mode, if dolphin is charging, DUT will fail to # boot up after battery connection. # Assuming all serial connections are connected to Dolphin. 
serial_amount = self._dolphin.GetSerialAmount() for serial_index in range(serial_amount): self._dolphin.Send(serial_index, 'usbc_action dev') @TimeClassMethodDebug def _ToggleBattery(self): if (time.time() - self._last_battery_toggle_time < self._BATTERY_CEASE_TOGGLE_SECS): logging.debug('Toggle too fast, cease toggle for %f second.', self._BATTERY_CEASE_TOGGLE_SECS) return new_battery_status = ('off' if self._servo.IsOn(self._CONTROL.BATTERY) else 'on') logging.info('[Toggle battery to %s]', new_battery_status) self._servo.Set(self._CONTROL.BATTERY, new_battery_status) self._last_battery_toggle_time = time.time() @TimeClassMethodDebug def ScanButton(self): logging.debug('[Scanning button....]') status = self._servo.MultipleIsOn(self._BUTTON_LIST) if status[self._BUTTON.FIXTURE_STOP]: logging.info('Calling _HandleStopFixture because FIXTURE_STOP is True.') self._HandleStopFixture() # Disable stop button, and use 'i2cset' to set it back to input mode. self._servo.Disable(self._BUTTON.FIXTURE_STOP) process_utils.Spawn(['i2cset', '-y', '1', '0x77', '0x07', '0xff']) return True if (self._starting_fixture_action != ActionType.FIXTURE_STARTED and self._starting_fixture_action is not None and not status[self._BUTTON.FIXTURE_START]): logging.info('Calling _HandleStopFixture because FIXTURE_START is False.') self._HandleStopFixture() return False button_clicked = any(status.values()) if not button_clicked: return False operator_mode = not self._servo.IsOn(self._WHALE_DEBUG_MODE_EN) for button, clicked in status.items(): if not clicked: continue if operator_mode and button not in self._OPERATOR_BUTTON_LIST: logging.debug('Supress button %s click because debug mode is off.', button) continue if button == self._BUTTON.FIXTURE_START: if self._starting_fixture_action == ActionType.FIXTURE_STARTED: logging.info('[START] ACTION = FIXTURE_STARTED') else: self._HandleStartFixture() elif button == self._BUTTON.RESERVE_1: self._ToggleBattery() logging.info('Button %s clicked', button) 
return button_clicked @TimeClassMethodDebug def ScanFeedback(self): logging.debug('[Scanning feedback....]') feedback_status = self._servo.MultipleIsOn(self._FEEDBACK_LIST) feedback_changed = False for name, value in feedback_status.items(): if self._last_feedback[name] == value: continue self._HandleStartFixtureFeedbackChange(feedback_status) logging.info('Feedback %s value changed to %r', name, value) self._last_feedback[name] = value feedback_changed = True return feedback_changed @TimeClassMethodDebug def ResetLatch(self): self._servo.Click(self._CONTROL.INPUT_RESET) @TimeClassMethodDebug def WaitForInterrupt(self): logging.debug('Polling interrupt (GPIO %d %s) for %r seconds', self._INPUT_INTERRUPT_GPIO, self._poll.GPIO_EDGE_FALLING, self._polling_wait_secs) if self._poll.Poll(self._INPUT_INTERRUPT_GPIO, self._poll.GPIO_EDGE_FALLING, self._polling_wait_secs): logging.debug('Interrupt polled') else: logging.debug('Polling interrupt timeout') @TimeClassMethodDebug def ResetInterrupt(self): # Touch I/O expander 0x77 byte 0 & 1, 0x75 byte 1, 0x76 byte 0 & 1. # Note that we skip I/O expander 0x75 byte-0 as it contains no input # pin, won't trigger interrupt. self._servo.MultipleGet([ self._FIXTURE_FEEDBACK.FB1, self._BUTTON.FIXTURE_START, self._PLANKTON_FEEDBACK.FB1, self._WHALE_DEBUG_MODE_EN, self._BUTTON.RESERVE_1]) def Run(self): while True: button_clicked = self.ScanButton() feedback_changed = self.ScanFeedback() # it might be possible that a button is clicked after latch is cleared # but before I/O expander is touched. 
In this case, the button is latched # but the interrupt is consumed (after touching I/O expander) so that the # following click of that button won't trigger interrupt again, and if button_clicked or feedback_changed: if button_clicked: self.ResetLatch() self.ResetInterrupt() continue self.WaitForInterrupt() def ShowNucIpOnLED(self): nuc_host = '192.168.234.1' testing_rsa_path = '/usr/local/factory/misc/sshkeys/testing_rsa' get_dongle_eth_script = ( 'timeout 1s /usr/local/factory/py/test/fixture/get_dongle_eth.sh') os.chmod(testing_rsa_path, 0o600) ssh_command_base = ssh_utils.BuildSSHCommand( identity_file=testing_rsa_path) try: interface = process_utils.SpawnOutput( ssh_command_base + [nuc_host, get_dongle_eth_script]).strip() except BaseException: interface = None if not interface: ip_address = 'dongle not found...' else: ifconfig_command = 'ifconfig %s' % interface ifconfig_result = process_utils.SpawnOutput( ssh_command_base + [nuc_host, ifconfig_command]).strip() ip_matcher = re.search(r'inet (\d+\.\d+\.\d+\.\d+)', ifconfig_result, re.MULTILINE) if not ip_matcher: ip_address = 'dongle not found...' else: ip_address = ip_matcher.group(1) self._servo.MultipleSet([(self._CONTROL.LCM_ROW, 'r1'), (self._CONTROL.LCM_TEXT, ip_address)]) def ParseArgs(): description = ( 'Handle Whale button click event.' ) parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter, description=description) parser.add_argument('-d', '--debug', action='store_true', default=False, help='enable debug messages') parser.add_argument('--rpc_debug', action='store_true', default=False, help='enable debug messages for XMLRPC call') parser.add_argument('--nouse_dolphin', action='store_false', default=True, dest='use_dolphin', help='whether to skip dolphin control' ' (remote server). 
default: %(default)s') parser.add_argument('--use_polld', action='store_true', default=False, help='whether to use polld (for polling GPIO port on ' 'remote server) or poll local GPIO port, default: ' '%(default)s') parser.add_argument('--host', default='127.0.0.1', type=str, help='hostname of server, default: %(default)s') parser.add_argument('--dolphin_port', default=9997, type=int, help='port that dolphin_server listens, default: ' '%(default)d') parser.add_argument('--polld_port', default=9998, type=int, help='port that polld listens, default: %(default)d') parser.add_argument('--servod_port', default=9999, type=int, help='port that servod listens, default: %(default)d') parser.add_argument('--polling_wait_secs', default=5, type=int, help=('# seconds for polling button clicking event, ' 'default: %(default)d')) return parser.parse_args() def main(): args = ParseArgs() logging.basicConfig( level=logging.DEBUG if args.debug else logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') polld_port = args.polld_port if args.use_polld else None dolphin_port = args.dolphin_port if args.use_dolphin else None handler = InterruptHandler(args.host, polld_port, args.servod_port, dolphin_port, args.rpc_debug, args.polling_wait_secs) handler.Init() handler.Run() if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit(0) except gpio_utils.GpioManagerError as e: sys.stderr.write(str(e) + '\n') sys.exit(1)
true
true
f73c0a586407cfff97d64e2d1de431ef07e72d15
1,071
py
Python
proof_of_work/multiagent/turn_based/v4/selfishagentv4.py
michaelneuder/parkes_lab_fa19
18d9f564e0df9c17ac5d54619ed869d778d4f6a4
[ "MIT" ]
null
null
null
proof_of_work/multiagent/turn_based/v4/selfishagentv4.py
michaelneuder/parkes_lab_fa19
18d9f564e0df9c17ac5d54619ed869d778d4f6a4
[ "MIT" ]
null
null
null
proof_of_work/multiagent/turn_based/v4/selfishagentv4.py
michaelneuder/parkes_lab_fa19
18d9f564e0df9c17ac5d54619ed869d778d4f6a4
[ "MIT" ]
null
null
null
import numpy as np class SelfishAgent(object): def __init__(self, T): self.T = T self.policy = np.asarray([ [0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 2, 2, 0, 0, 0, 0, 0, 0], [2, 1, 2, 2, 2, 0, 0, 0, 0], [2, 2, 1, 2, 2, 2, 0, 0, 0], [2, 2, 2, 1, 2, 2, 2, 0, 0], [2, 2, 2, 2, 1, 2, 2, 2, 0], [2, 2, 2, 2, 2, 1, 2, 2, 0], [2, 2, 2, 2, 2, 2, 1, 2, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1] ]) def act(self, state): a, h = state if h == self.T: return 'adopt' if a == self.T: return 'override' if h > a: return 'adopt' # if (h == a) and (h == 1): # return 'match' if (h == a-1) and (h >= 1): return 'override' return 'wait' def act2(self, state): action = self.policy[state] if action == 0: return 'adopt' if action == 1: return 'override' return 'wait'
26.775
41
0.356676
import numpy as np class SelfishAgent(object): def __init__(self, T): self.T = T self.policy = np.asarray([ [0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 2, 2, 0, 0, 0, 0, 0, 0], [2, 1, 2, 2, 2, 0, 0, 0, 0], [2, 2, 1, 2, 2, 2, 0, 0, 0], [2, 2, 2, 1, 2, 2, 2, 0, 0], [2, 2, 2, 2, 1, 2, 2, 2, 0], [2, 2, 2, 2, 2, 1, 2, 2, 0], [2, 2, 2, 2, 2, 2, 1, 2, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1] ]) def act(self, state): a, h = state if h == self.T: return 'adopt' if a == self.T: return 'override' if h > a: return 'adopt' if (h == a-1) and (h >= 1): return 'override' return 'wait' def act2(self, state): action = self.policy[state] if action == 0: return 'adopt' if action == 1: return 'override' return 'wait'
true
true
f73c0a90b7cbe4e9e76241af7bc20068d063d698
2,829
py
Python
openpnm/geometry/_generic.py
lixuekai2001/OpenPNM
9026f0fed427d37f4caf1a79e4a7684490d52cf6
[ "MIT" ]
null
null
null
openpnm/geometry/_generic.py
lixuekai2001/OpenPNM
9026f0fed427d37f4caf1a79e4a7684490d52cf6
[ "MIT" ]
null
null
null
openpnm/geometry/_generic.py
lixuekai2001/OpenPNM
9026f0fed427d37f4caf1a79e4a7684490d52cf6
[ "MIT" ]
null
null
null
from copy import deepcopy from openpnm.core import Subdomain, ModelsMixin, ParamMixin from openpnm.utils import Docorator, SettingsAttr from openpnm.utils import Workspace, logging logger = logging.getLogger(__name__) ws = Workspace() docstr = Docorator() @docstr.get_sections(base='GeometrySettings', sections=['Parameters']) @docstr.dedent class GeometrySettings: r""" Parameters ---------- %(BaseSettings.parameters)s """ prefix = 'geo' @docstr.get_sections(base='GenericGeometry', sections=['Parameters', 'Examples']) @docstr.dedent class GenericGeometry(ParamMixin, Subdomain, ModelsMixin): r""" This generic class is meant as a starter for custom Geometry objects It has no pore-scale models assigned to it, so is a blank slate. Note that all OpenPNM Geometry sub-classes are just GenericGeometry instances with a assortment of models added. Parameters ---------- pores : array_like The list of pores where this geometry applies. throats : array_like The list of throats where this Geometry applies. name : str A unique name to apply to the object. This name will also be used as a label to identify where this Geometry applies. Examples -------- .. 
plot:: import openpnm as op import matplotlib.pyplot as plt pn = op.network.Cubic(shape=[5, 5, 5]) Ps = pn.pores('all') # Get all pores Ts = pn.throats('all') # Get all throats geom = op.geometry.GenericGeometry(network=pn, pores=Ps, throats=Ts) # Now assign pore-scale models to the empty object geom.add_model(propname='pore.size', model=op.models.misc.random, element='pore', num_range=[0.01, 0.1]) # Confirm that the object has one added model print(geom.models) # The results of the model can be seen using the ``show_hist`` function: geom.show_hist('pore.size') plt.show() """ def __init__(self, network, pores=[], throats=[], settings=None, **kwargs): self.settings = SettingsAttr(GeometrySettings, settings) super().__init__(network=network, settings=self.settings, **kwargs) network[f'pore.{self.name}'] = False network[f'throat.{self.name}'] = False try: self.set_locations(pores=pores, throats=throats, mode='add') except Exception as e: logger.error(f'{e}, instantiation cancelled') network.project.purge_object(self) raise e def _get_phys(self): """A shortcut to get a handle to the associated physics.""" return self.project.find_physics(geometry=self) physics = property(fget=_get_phys)
32.517241
79
0.634853
from copy import deepcopy from openpnm.core import Subdomain, ModelsMixin, ParamMixin from openpnm.utils import Docorator, SettingsAttr from openpnm.utils import Workspace, logging logger = logging.getLogger(__name__) ws = Workspace() docstr = Docorator() @docstr.get_sections(base='GeometrySettings', sections=['Parameters']) @docstr.dedent class GeometrySettings: prefix = 'geo' @docstr.get_sections(base='GenericGeometry', sections=['Parameters', 'Examples']) @docstr.dedent class GenericGeometry(ParamMixin, Subdomain, ModelsMixin): def __init__(self, network, pores=[], throats=[], settings=None, **kwargs): self.settings = SettingsAttr(GeometrySettings, settings) super().__init__(network=network, settings=self.settings, **kwargs) network[f'pore.{self.name}'] = False network[f'throat.{self.name}'] = False try: self.set_locations(pores=pores, throats=throats, mode='add') except Exception as e: logger.error(f'{e}, instantiation cancelled') network.project.purge_object(self) raise e def _get_phys(self): return self.project.find_physics(geometry=self) physics = property(fget=_get_phys)
true
true
f73c0ac78aaa312b971829759014cfbdc0a448ff
1,652
py
Python
xos/synchronizer/migration/0012_auto_20200227_1323.py
etrirepo/olt_synchronizer
493d20a2e404fe7aeca73eaa6d4bb18275faf4ec
[ "Apache-2.0" ]
null
null
null
xos/synchronizer/migration/0012_auto_20200227_1323.py
etrirepo/olt_synchronizer
493d20a2e404fe7aeca73eaa6d4bb18275faf4ec
[ "Apache-2.0" ]
null
null
null
xos/synchronizer/migration/0012_auto_20200227_1323.py
etrirepo/olt_synchronizer
493d20a2e404fe7aeca73eaa6d4bb18275faf4ec
[ "Apache-2.0" ]
null
null
null
# Copyright 2017-present Open Networking Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # -*- coding: utf-8 -*- # Generated by Django 1.11.28 on 2020-02-27 18:23 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('volt', '0011_auto_20190626_2027'), ] operations = [ migrations.RemoveField( model_name='voltservice_decl', name='voltha_pass', ), migrations.RemoveField( model_name='voltservice_decl', name='voltha_user', ), migrations.AlterField( model_name='voltservice_decl', name='voltha_port', field=models.IntegerField(default=55555, help_text=b'The Voltha API port. By default 55555'), ), migrations.AlterField( model_name='voltservice_decl', name='voltha_url', field=models.CharField(default=b'voltha-api.voltha.svc.cluster.local', help_text=b'The Voltha API address. By default voltha-api.default.svc.cluster.local', max_length=256), ), ]
34.416667
185
0.676755
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('volt', '0011_auto_20190626_2027'), ] operations = [ migrations.RemoveField( model_name='voltservice_decl', name='voltha_pass', ), migrations.RemoveField( model_name='voltservice_decl', name='voltha_user', ), migrations.AlterField( model_name='voltservice_decl', name='voltha_port', field=models.IntegerField(default=55555, help_text=b'The Voltha API port. By default 55555'), ), migrations.AlterField( model_name='voltservice_decl', name='voltha_url', field=models.CharField(default=b'voltha-api.voltha.svc.cluster.local', help_text=b'The Voltha API address. By default voltha-api.default.svc.cluster.local', max_length=256), ), ]
true
true
f73c0bae41c8dfc2d32ba13db9a1bf05b21cd322
2,793
py
Python
qiskit/extensions/standard/u2.py
qinvador/qiskit-terra
4e104de3c113c01688a0ed06b2f2cb1a958fce44
[ "Apache-2.0" ]
2
2019-06-28T19:58:42.000Z
2019-07-26T05:04:02.000Z
qiskit/extensions/standard/u2.py
qinvador/qiskit-terra
4e104de3c113c01688a0ed06b2f2cb1a958fce44
[ "Apache-2.0" ]
3
2018-11-13T17:33:37.000Z
2018-12-03T09:35:00.000Z
qiskit/extensions/standard/u2.py
qinvador/qiskit-terra
4e104de3c113c01688a0ed06b2f2cb1a958fce44
[ "Apache-2.0" ]
2
2017-12-03T15:48:14.000Z
2018-03-11T13:08:03.000Z
# -*- coding: utf-8 -*- # This code is part of Qiskit. # # (C) Copyright IBM 2017. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ One-pulse single-qubit gate. """ import numpy from qiskit.circuit import Gate from qiskit.circuit import QuantumCircuit from qiskit.circuit import QuantumRegister from qiskit.qasm import pi from qiskit.util import deprecate_arguments class U2Gate(Gate): """One-pulse single-qubit gate.""" def __init__(self, phi, lam, label=None): """Create new one-pulse single-qubit gate.""" super().__init__('u2', 1, [phi, lam], label=label) def _define(self): from qiskit.extensions.standard.u3 import U3Gate definition = [] q = QuantumRegister(1, 'q') rule = [(U3Gate(pi / 2, self.params[0], self.params[1]), [q[0]], [])] for inst in rule: definition.append(inst) self.definition = definition def inverse(self): """Invert this gate. u2(phi,lamb)^dagger = u2(-lamb-pi,-phi+pi) """ return U2Gate(-self.params[1] - pi, -self.params[0] + pi) def to_matrix(self): """Return a Numpy.array for the U2 gate.""" isqrt2 = 1 / numpy.sqrt(2) phi, lam = self.params phi, lam = float(phi), float(lam) return numpy.array([ [ isqrt2, -numpy.exp(1j * lam) * isqrt2 ], [ numpy.exp(1j * phi) * isqrt2, numpy.exp(1j * (phi + lam)) * isqrt2 ] ], dtype=complex) @deprecate_arguments({'q': 'qubit'}) def u2(self, phi, lam, qubit, *, q=None): # pylint: disable=invalid-name,unused-argument """Apply U2 gate with angle phi and lam to a specified qubit (qubit). u2(φ,λ) := U(π/2,φ,λ) = Rz(φ + π/2)Rx(π/2)Rz(λ − π/2) Examples: Circuit Representation: .. 
jupyter-execute:: from qiskit.circuit import QuantumCircuit, Parameter phi = Parameter('φ') lam = Parameter('λ') circuit = QuantumCircuit(1) circuit.u2(phi,lam,0) circuit.draw() Matrix Representation: .. jupyter-execute:: import numpy from qiskit.extensions.standard.u2 import U2Gate U2Gate(numpy.pi/2,numpy.pi/2).to_matrix() """ return self.append(U2Gate(phi, lam), [qubit], []) QuantumCircuit.u2 = u2
28.793814
89
0.595059
import numpy from qiskit.circuit import Gate from qiskit.circuit import QuantumCircuit from qiskit.circuit import QuantumRegister from qiskit.qasm import pi from qiskit.util import deprecate_arguments class U2Gate(Gate): def __init__(self, phi, lam, label=None): super().__init__('u2', 1, [phi, lam], label=label) def _define(self): from qiskit.extensions.standard.u3 import U3Gate definition = [] q = QuantumRegister(1, 'q') rule = [(U3Gate(pi / 2, self.params[0], self.params[1]), [q[0]], [])] for inst in rule: definition.append(inst) self.definition = definition def inverse(self): return U2Gate(-self.params[1] - pi, -self.params[0] + pi) def to_matrix(self): isqrt2 = 1 / numpy.sqrt(2) phi, lam = self.params phi, lam = float(phi), float(lam) return numpy.array([ [ isqrt2, -numpy.exp(1j * lam) * isqrt2 ], [ numpy.exp(1j * phi) * isqrt2, numpy.exp(1j * (phi + lam)) * isqrt2 ] ], dtype=complex) @deprecate_arguments({'q': 'qubit'}) def u2(self, phi, lam, qubit, *, q=None): return self.append(U2Gate(phi, lam), [qubit], []) QuantumCircuit.u2 = u2
true
true
f73c0c5783bcd381970f18749f58281f9fcb878f
5,107
py
Python
export_weekly_ad_contacts.py
AfricasVoices/Project-WUSC-LEAP-II
76fa04a92c352483abd2e6b696bd0af26eea517a
[ "MIT" ]
null
null
null
export_weekly_ad_contacts.py
AfricasVoices/Project-WUSC-LEAP-II
76fa04a92c352483abd2e6b696bd0af26eea517a
[ "MIT" ]
null
null
null
export_weekly_ad_contacts.py
AfricasVoices/Project-WUSC-LEAP-II
76fa04a92c352483abd2e6b696bd0af26eea517a
[ "MIT" ]
null
null
null
import argparse import csv import importlib from core_data_modules.cleaners import Codes from core_data_modules.logging import Logger from core_data_modules.traced_data.io import TracedDataJsonIO from src.engagement_db_to_analysis.membership_group import (get_membership_groups_data) log = Logger(__name__) if __name__ == "__main__": parser = argparse.ArgumentParser(description="Exports weekly ad contacts from analysis Traced Data") parser.add_argument("google_cloud_credentials_file_path", metavar="google-cloud-credentials-file-path", help="Path to a Google Cloud service account credentials file to use to access the " "credentials bucket"), parser.add_argument("configuration_module", help="Configuration module to import e.g. 'configurations.test_config'. " "This module must contain a PIPELINE_CONFIGURATION property") parser.add_argument("traced_data_paths", metavar="traced-data-paths", nargs="+", help="Paths to the traced data files (either messages or individuals) to extract phone " "numbers from") parser.add_argument("membership_group_dir_path", metavar="membership-group-dir-path", help="Path to directory containing de-identified membership groups CSVs containing membership" "groups data stored as `avf-participant-uuid` column.") parser.add_argument("csv_output_file_path", metavar="csv-output-file-path", help="Path to a CSV file to write the contacts from the locations of interest to. 
" "Exported file is in a format suitable for direct upload to Rapid Pro") args = parser.parse_args() google_cloud_credentials_file_path = args.google_cloud_credentials_file_path pipeline_config = importlib.import_module(args.configuration_module).PIPELINE_CONFIGURATION traced_data_paths = args.traced_data_paths membership_group_dir_path = args.membership_group_dir_path csv_output_file_path = args.csv_output_file_path pipeline = pipeline_config.pipeline_name uuid_table = pipeline_config.uuid_table.init_uuid_table_client(google_cloud_credentials_file_path) uuids = set() opt_out_uuids = set() for path in traced_data_paths: log.info(f"Loading previous traced data from file '{path}'...") with open(path) as f: data = TracedDataJsonIO.import_jsonl_to_traced_data_iterable(f) log.info(f"Loaded {len(data)} traced data objects") for td in data: if td["consent_withdrawn"] == Codes.TRUE: opt_out_uuids.add(td["participant_uuid"]) uuids.add(td["participant_uuid"]) # If available, add consented membership group uids to advert uuids if pipeline_config.analysis.membership_group_configuration is not None: log.info(f"Adding consented membership group uids to advert uuids ") membership_group_csv_urls = \ pipeline_config.analysis.membership_group_configuration.membership_group_csv_urls.items() membership_groups_data = get_membership_groups_data(google_cloud_credentials_file_path, membership_group_csv_urls, membership_group_dir_path) consented_membership_groups_uuids = 0 opt_out_membership_groups_uuids = 0 for membership_group in membership_groups_data.values(): for uuid in membership_group: if uuid in opt_out_uuids: opt_out_membership_groups_uuids += 1 continue consented_membership_groups_uuids += 1 uuids.add(uuid) log.info(f"Found {opt_out_membership_groups_uuids} membership_groups_uuids who have opted out") log.info(f"Added {consented_membership_groups_uuids} membership_groups_uuids to advert uuids") log.info(f"Loaded {len(uuids)} uuids (of which {len(opt_out_uuids)} uuids withdrew 
consent)") advert_uuids = uuids - opt_out_uuids log.info(f"Proceeding with {len(advert_uuids)} opt-in uuids") log.info(f"Converting {len(advert_uuids)} uuids to urns...") urn_lut = uuid_table.uuid_to_data_batch(advert_uuids) urns = {urn_lut[uuid] for uuid in advert_uuids} log.info(f"Converted {len(advert_uuids)} uuids to {len(urns)} urns") # Export contacts CSV log.warning(f"Exporting {len(urns)} urns to {csv_output_file_path}...") with open(csv_output_file_path, "w") as f: urn_namespaces = {urn.split(":")[0] for urn in urns} headers = [f"URN:{namespace}" for namespace in urn_namespaces] writer = csv.DictWriter(f, fieldnames=headers, lineterminator="\n") writer.writeheader() for urn in urns: namespace = urn.split(":")[0] value = urn.split(":")[1] writer.writerow({ f"URN:{namespace}": value }) log.info(f"Wrote {len(urns)} urns to {csv_output_file_path}")
48.179245
118
0.682005
import argparse import csv import importlib from core_data_modules.cleaners import Codes from core_data_modules.logging import Logger from core_data_modules.traced_data.io import TracedDataJsonIO from src.engagement_db_to_analysis.membership_group import (get_membership_groups_data) log = Logger(__name__) if __name__ == "__main__": parser = argparse.ArgumentParser(description="Exports weekly ad contacts from analysis Traced Data") parser.add_argument("google_cloud_credentials_file_path", metavar="google-cloud-credentials-file-path", help="Path to a Google Cloud service account credentials file to use to access the " "credentials bucket"), parser.add_argument("configuration_module", help="Configuration module to import e.g. 'configurations.test_config'. " "This module must contain a PIPELINE_CONFIGURATION property") parser.add_argument("traced_data_paths", metavar="traced-data-paths", nargs="+", help="Paths to the traced data files (either messages or individuals) to extract phone " "numbers from") parser.add_argument("membership_group_dir_path", metavar="membership-group-dir-path", help="Path to directory containing de-identified membership groups CSVs containing membership" "groups data stored as `avf-participant-uuid` column.") parser.add_argument("csv_output_file_path", metavar="csv-output-file-path", help="Path to a CSV file to write the contacts from the locations of interest to. 
" "Exported file is in a format suitable for direct upload to Rapid Pro") args = parser.parse_args() google_cloud_credentials_file_path = args.google_cloud_credentials_file_path pipeline_config = importlib.import_module(args.configuration_module).PIPELINE_CONFIGURATION traced_data_paths = args.traced_data_paths membership_group_dir_path = args.membership_group_dir_path csv_output_file_path = args.csv_output_file_path pipeline = pipeline_config.pipeline_name uuid_table = pipeline_config.uuid_table.init_uuid_table_client(google_cloud_credentials_file_path) uuids = set() opt_out_uuids = set() for path in traced_data_paths: log.info(f"Loading previous traced data from file '{path}'...") with open(path) as f: data = TracedDataJsonIO.import_jsonl_to_traced_data_iterable(f) log.info(f"Loaded {len(data)} traced data objects") for td in data: if td["consent_withdrawn"] == Codes.TRUE: opt_out_uuids.add(td["participant_uuid"]) uuids.add(td["participant_uuid"]) if pipeline_config.analysis.membership_group_configuration is not None: log.info(f"Adding consented membership group uids to advert uuids ") membership_group_csv_urls = \ pipeline_config.analysis.membership_group_configuration.membership_group_csv_urls.items() membership_groups_data = get_membership_groups_data(google_cloud_credentials_file_path, membership_group_csv_urls, membership_group_dir_path) consented_membership_groups_uuids = 0 opt_out_membership_groups_uuids = 0 for membership_group in membership_groups_data.values(): for uuid in membership_group: if uuid in opt_out_uuids: opt_out_membership_groups_uuids += 1 continue consented_membership_groups_uuids += 1 uuids.add(uuid) log.info(f"Found {opt_out_membership_groups_uuids} membership_groups_uuids who have opted out") log.info(f"Added {consented_membership_groups_uuids} membership_groups_uuids to advert uuids") log.info(f"Loaded {len(uuids)} uuids (of which {len(opt_out_uuids)} uuids withdrew consent)") advert_uuids = uuids - opt_out_uuids log.info(f"Proceeding 
with {len(advert_uuids)} opt-in uuids") log.info(f"Converting {len(advert_uuids)} uuids to urns...") urn_lut = uuid_table.uuid_to_data_batch(advert_uuids) urns = {urn_lut[uuid] for uuid in advert_uuids} log.info(f"Converted {len(advert_uuids)} uuids to {len(urns)} urns") log.warning(f"Exporting {len(urns)} urns to {csv_output_file_path}...") with open(csv_output_file_path, "w") as f: urn_namespaces = {urn.split(":")[0] for urn in urns} headers = [f"URN:{namespace}" for namespace in urn_namespaces] writer = csv.DictWriter(f, fieldnames=headers, lineterminator="\n") writer.writeheader() for urn in urns: namespace = urn.split(":")[0] value = urn.split(":")[1] writer.writerow({ f"URN:{namespace}": value }) log.info(f"Wrote {len(urns)} urns to {csv_output_file_path}")
true
true
f73c0c9cfa0ee96cff187a0a7a14716c2d825e1e
521
py
Python
WeatherApp/weatherapp.py
ragreenburg/Amazing-Python-Scripts
d91d0ddd312eb82ac307b58d6d09e6ca19384ea8
[ "MIT" ]
930
2020-09-05T22:07:28.000Z
2022-03-30T07:56:18.000Z
WeatherApp/weatherapp.py
ragreenburg/Amazing-Python-Scripts
d91d0ddd312eb82ac307b58d6d09e6ca19384ea8
[ "MIT" ]
893
2020-09-04T07:57:24.000Z
2022-02-08T02:12:26.000Z
WeatherApp/weatherapp.py
ragreenburg/Amazing-Python-Scripts
d91d0ddd312eb82ac307b58d6d09e6ca19384ea8
[ "MIT" ]
497
2020-09-05T08:16:24.000Z
2022-03-31T00:55:57.000Z
import requests from bs4 import BeautifulSoup # Taking City You Want to Check Temperaature city = input("Enter City : ") # Storing City You Want to Check Temperaature search = "weather in" + city # Searching it on google url = f"https://www.google.com/search?&q={search}" # Sending and Receiving Requests r = requests.get(url) # Scraping Details s = BeautifulSoup(r.text, "html.parser") # Storing Details update = s.find("div", class_="BNeawe").text # Printing Details print("Temperature in " + city + " is: " + update)
28.944444
50
0.725528
import requests from bs4 import BeautifulSoup city = input("Enter City : ") search = "weather in" + city url = f"https://www.google.com/search?&q={search}" r = requests.get(url) s = BeautifulSoup(r.text, "html.parser") update = s.find("div", class_="BNeawe").text print("Temperature in " + city + " is: " + update)
true
true
f73c0d558d0f1dbc184863759c8cef0fcbc8b587
8,519
py
Python
sympy/physics/units.py
SpeedyiiiTH/sympy
37b9b84d4c997ccc9307baf29dd83d12ec31298f
[ "BSD-3-Clause" ]
null
null
null
sympy/physics/units.py
SpeedyiiiTH/sympy
37b9b84d4c997ccc9307baf29dd83d12ec31298f
[ "BSD-3-Clause" ]
7
2017-05-01T14:15:32.000Z
2017-09-06T20:44:24.000Z
sympy/physics/units.py
SpeedyiiiTH/sympy
37b9b84d4c997ccc9307baf29dd83d12ec31298f
[ "BSD-3-Clause" ]
null
null
null
""" Physical units and dimensions. The base class is Unit, where all here defined units (~200) inherit from. The find_unit function can help you find units for a given quantity: >>> import sympy.physics.units as u >>> u.find_unit('coul') ['coulomb', 'coulombs'] >>> u.find_unit(u.charge) ['C', 'charge', 'coulomb', 'coulombs'] >>> u.coulomb A*s Units are always given in terms of base units that have a name and an abbreviation: >>> u.A.name 'ampere' >>> u.ampere.abbrev 'A' The generic name for a unit (like 'length', 'mass', etc...) can help you find units: >>> u.find_unit('magnet') ['magnetic_flux', 'magnetic_constant', 'magnetic_flux_density'] >>> u.find_unit(u.magnetic_flux) ['Wb', 'wb', 'weber', 'webers', 'magnetic_flux'] If, for a given session, you wish to add a unit you may do so: >>> u.find_unit('gal') [] >>> u.gal = 4*u.quart >>> u.gal/u.inch**3 231 To see a given quantity in terms of some other unit, divide by the desired unit: >>> mph = u.miles/u.hours >>> (u.m/u.s/mph).n(2) 2.2 The units are defined in terms of base units, so when you divide similar units you will obtain a pure number. This means, for example, that if you divide a real-world mass (like grams) by the atomic mass unit (amu) you will obtain Avogadro's number. To obtain the answer in moles you should divide by the unit ``avogadro``: >>> u.grams/u.amu 602214179000000000000000 >>> _/u.avogadro mol For chemical calculations the unit ``mmu`` (molar mass unit) has been defined so this conversion is handled automatically. For example, the number of moles in 1 kg of water might be calculated as: >>> u.kg/(18*u.mmu).n(3) 55.5*mol If you need the number of atoms in a mol as a pure number you can use ``avogadro_number`` but if you need it as a dimensional quantity you should use ``avogadro_constant``. (``avogadro`` is a shorthand for the dimensional quantity.) 
>>> u.avogadro_number 602214179000000000000000 >>> u.avogadro_constant 602214179000000000000000/mol """ from __future__ import print_function, division from sympy import Rational, pi from sympy.core import AtomicExpr class Unit(AtomicExpr): """ Base class for base unit of physical units. >>> from sympy.physics.units import Unit >>> Unit("meter", "m") m Other units are derived from base units: >>> import sympy.physics.units as u >>> cm = u.m/100 >>> 100*u.cm m """ is_positive = True # make sqrt(m**2) --> m is_commutative = True __slots__ = ["name", "abbrev"] def __new__(cls, name, abbrev, **assumptions): obj = AtomicExpr.__new__(cls, **assumptions) assert isinstance(name, str), repr(type(name)) assert isinstance(abbrev, str), repr(type(abbrev)) obj.name = name obj.abbrev = abbrev return obj def __getnewargs__(self): return (self.name, self.abbrev) def __eq__(self, other): return isinstance(other, Unit) and self.name == other.name def __hash__(self): return super(Unit, self).__hash__() def _hashable_content(self): return (self.name, self.abbrev) @property def free_symbols(self): return set() # Dimensionless percent = percents = Rational(1, 100) permille = permille = Rational(1, 1000) ten = Rational(10) yotta = ten**24 zetta = ten**21 exa = ten**18 peta = ten**15 tera = ten**12 giga = ten**9 mega = ten**6 kilo = ten**3 deca = ten**1 deci = ten**-1 centi = ten**-2 milli = ten**-3 micro = ten**-6 nano = ten**-9 pico = ten**-12 femto = ten**-15 atto = ten**-18 zepto = ten**-21 yocto = ten**-24 rad = radian = radians = 1 deg = degree = degrees = pi/180 sr = steradian = steradians = 1 # Base units length = m = meter = meters = Unit('meter', 'm') mass = kg = kilogram = kilograms = Unit('kilogram', 'kg') time = s = second = seconds = Unit('second', 's') current = A = ampere = amperes = Unit('ampere', 'A') temperature = K = kelvin = kelvins = Unit('kelvin', 'K') amount = mol = mole = moles = Unit('mole', 'mol') luminosity = cd = candela = candelas = Unit('candela', 'cd') # 
Derived units volume = meter**3 frequency = Hz = hz = hertz = 1/s force = N = newton = newtons = m*kg/s**2 energy = J = joule = joules = N*m power = W = watt = watts = J/s pressure = Pa = pa = pascal = pascals = N/m**2 charge = C = coulomb = coulombs = s*A voltage = v = V = volt = volts = W/A resistance = ohm = ohms = V/A conductance = S = siemens = mho = mhos = A/V capacitance = F = farad = farads = C/V magnetic_flux = Wb = wb = weber = webers = J/A magnetic_flux_density = T = tesla = teslas = V*s/m**2 inductance = H = henry = henrys = V*s/A speed = m/s acceleration = m/s**2 density = kg/m**3 optical_power = dioptre = D = 1/m illuminance = lux = lx = sr*cd/m**2 # Common length units km = kilometer = kilometers = kilo*m dm = decimeter = decimeters = deci*m cm = centimeter = centimeters = centi*m mm = millimeter = millimeters = milli*m um = micrometer = micrometers = micron = microns = micro*m nm = nanometer = nanometers = nano*m pm = picometer = picometers = pico*m ft = foot = feet = Rational('0.3048')*m inch = inches = Rational('25.4')*mm yd = yard = yards = 3*ft mi = mile = miles = 5280*ft # Common volume and area units l = liter = liters = m**3 / 1000 dl = deciliter = deciliters = deci*l cl = centiliter = centiliters = centi*l ml = milliliter = milliliters = milli*l # Common time units ms = millisecond = milliseconds = milli*s us = microsecond = microseconds = micro*s ns = nanosecond = nanoseconds = nano*s ps = picosecond = picoseconds = pico*s minute = minutes = 60*s h = hour = hours = 60*minute day = days = 24*hour anomalistic_year = anomalistic_years = Rational('365.259636')*day sidereal_year = sidereal_years = Rational('31558149.540')*s tropical_year = tropical_years = Rational('365.24219')*day common_year = common_years = Rational('365')*day julian_year = julian_years = Rational('365.25')*day draconic_year = draconic_years = Rational('346.62')*day gaussian_year = gaussian_years = Rational('365.2568983')*day full_moon_cycle = full_moon_cycles = 
Rational('411.78443029')*day year = years = tropical_year # Common mass units g = gram = grams = kilogram / kilo mg = milligram = milligrams = milli * g ug = microgram = micrograms = micro * g #---------------------------------------------------------------------------- # Physical constants # c = speed_of_light = 299792458 * m/s G = gravitational_constant = Rational('6.67428') * ten**-11 * m**3 / kg / s**2 u0 = magnetic_constant = 4*pi * ten**-7 * N/A**2 e0 = electric_constant = 1/(u0 * c**2) Z0 = vacuum_impedance = u0 * c planck = Rational('6.62606896') * ten**-34 * J*s hbar = planck / (2*pi) avogadro_number = Rational('6.02214179') * 10**23 avogadro = avogadro_constant = avogadro_number / mol boltzmann = Rational('1.3806505') * ten**-23 * J / K gee = gees = Rational('9.80665') * m/s**2 atmosphere = atmospheres = atm = 101325 * pascal kPa = kilo*Pa bar = bars = 100*kPa pound = pounds = 0.45359237 * kg * gee # exact psi = pound / inch ** 2 dHg0 = 13.5951 # approx value at 0 C mmHg = dHg0 * 9.80665 * Pa amu = amus = gram / avogadro / mol mmu = mmus = gram / mol quart = quarts = Rational(231, 4) * inch**3 eV = 1.602176487e-19 * J # Other convenient units and magnitudes ly = lightyear = lightyears = c*julian_year au = astronomical_unit = astronomical_units = 149597870691*m def find_unit(quantity): """ Return a list of matching units names. if quantity is a string -- units containing the string `quantity` if quantity is a unit -- units having matching base units Examples ======== >>> from sympy.physics import units as u >>> u.find_unit('charge') ['charge'] >>> u.find_unit(u.charge) ['C', 'charge', 'coulomb', 'coulombs'] >>> u.find_unit('volt') ['volt', 'volts', 'voltage'] >>> u.find_unit(u.inch**3)[:5] ['l', 'cl', 'dl', 'ml', 'liter'] """ import sympy.physics.units as u rv = [] if isinstance(quantity, str): rv = [i for i in dir(u) if quantity in i] else: units = quantity.as_coeff_Mul()[1] for i in dir(u): try: if units == eval('u.' 
+ i).as_coeff_Mul()[1]: rv.append(str(i)) except Exception: pass return sorted(rv, key=len) # Delete this so it doesn't pollute the namespace del Rational, pi
27.044444
79
0.638925
from __future__ import print_function, division from sympy import Rational, pi from sympy.core import AtomicExpr class Unit(AtomicExpr): is_positive = True is_commutative = True __slots__ = ["name", "abbrev"] def __new__(cls, name, abbrev, **assumptions): obj = AtomicExpr.__new__(cls, **assumptions) assert isinstance(name, str), repr(type(name)) assert isinstance(abbrev, str), repr(type(abbrev)) obj.name = name obj.abbrev = abbrev return obj def __getnewargs__(self): return (self.name, self.abbrev) def __eq__(self, other): return isinstance(other, Unit) and self.name == other.name def __hash__(self): return super(Unit, self).__hash__() def _hashable_content(self): return (self.name, self.abbrev) @property def free_symbols(self): return set() percent = percents = Rational(1, 100) permille = permille = Rational(1, 1000) ten = Rational(10) yotta = ten**24 zetta = ten**21 exa = ten**18 peta = ten**15 tera = ten**12 giga = ten**9 mega = ten**6 kilo = ten**3 deca = ten**1 deci = ten**-1 centi = ten**-2 milli = ten**-3 micro = ten**-6 nano = ten**-9 pico = ten**-12 femto = ten**-15 atto = ten**-18 zepto = ten**-21 yocto = ten**-24 rad = radian = radians = 1 deg = degree = degrees = pi/180 sr = steradian = steradians = 1 length = m = meter = meters = Unit('meter', 'm') mass = kg = kilogram = kilograms = Unit('kilogram', 'kg') time = s = second = seconds = Unit('second', 's') current = A = ampere = amperes = Unit('ampere', 'A') temperature = K = kelvin = kelvins = Unit('kelvin', 'K') amount = mol = mole = moles = Unit('mole', 'mol') luminosity = cd = candela = candelas = Unit('candela', 'cd') volume = meter**3 frequency = Hz = hz = hertz = 1/s force = N = newton = newtons = m*kg/s**2 energy = J = joule = joules = N*m power = W = watt = watts = J/s pressure = Pa = pa = pascal = pascals = N/m**2 charge = C = coulomb = coulombs = s*A voltage = v = V = volt = volts = W/A resistance = ohm = ohms = V/A conductance = S = siemens = mho = mhos = A/V capacitance = F = farad = farads 
= C/V magnetic_flux = Wb = wb = weber = webers = J/A magnetic_flux_density = T = tesla = teslas = V*s/m**2 inductance = H = henry = henrys = V*s/A speed = m/s acceleration = m/s**2 density = kg/m**3 optical_power = dioptre = D = 1/m illuminance = lux = lx = sr*cd/m**2 km = kilometer = kilometers = kilo*m dm = decimeter = decimeters = deci*m cm = centimeter = centimeters = centi*m mm = millimeter = millimeters = milli*m um = micrometer = micrometers = micron = microns = micro*m nm = nanometer = nanometers = nano*m pm = picometer = picometers = pico*m ft = foot = feet = Rational('0.3048')*m inch = inches = Rational('25.4')*mm yd = yard = yards = 3*ft mi = mile = miles = 5280*ft l = liter = liters = m**3 / 1000 dl = deciliter = deciliters = deci*l cl = centiliter = centiliters = centi*l ml = milliliter = milliliters = milli*l ms = millisecond = milliseconds = milli*s us = microsecond = microseconds = micro*s ns = nanosecond = nanoseconds = nano*s ps = picosecond = picoseconds = pico*s minute = minutes = 60*s h = hour = hours = 60*minute day = days = 24*hour anomalistic_year = anomalistic_years = Rational('365.259636')*day sidereal_year = sidereal_years = Rational('31558149.540')*s tropical_year = tropical_years = Rational('365.24219')*day common_year = common_years = Rational('365')*day julian_year = julian_years = Rational('365.25')*day draconic_year = draconic_years = Rational('346.62')*day gaussian_year = gaussian_years = Rational('365.2568983')*day full_moon_cycle = full_moon_cycles = Rational('411.78443029')*day year = years = tropical_year g = gram = grams = kilogram / kilo mg = milligram = milligrams = milli * g ug = microgram = micrograms = micro * g c = speed_of_light = 299792458 * m/s G = gravitational_constant = Rational('6.67428') * ten**-11 * m**3 / kg / s**2 u0 = magnetic_constant = 4*pi * ten**-7 * N/A**2 e0 = electric_constant = 1/(u0 * c**2) Z0 = vacuum_impedance = u0 * c planck = Rational('6.62606896') * ten**-34 * J*s hbar = planck / (2*pi) 
avogadro_number = Rational('6.02214179') * 10**23 avogadro = avogadro_constant = avogadro_number / mol boltzmann = Rational('1.3806505') * ten**-23 * J / K gee = gees = Rational('9.80665') * m/s**2 atmosphere = atmospheres = atm = 101325 * pascal kPa = kilo*Pa bar = bars = 100*kPa pound = pounds = 0.45359237 * kg * gee psi = pound / inch ** 2 dHg0 = 13.5951 mmHg = dHg0 * 9.80665 * Pa amu = amus = gram / avogadro / mol mmu = mmus = gram / mol quart = quarts = Rational(231, 4) * inch**3 eV = 1.602176487e-19 * J ly = lightyear = lightyears = c*julian_year au = astronomical_unit = astronomical_units = 149597870691*m def find_unit(quantity): import sympy.physics.units as u rv = [] if isinstance(quantity, str): rv = [i for i in dir(u) if quantity in i] else: units = quantity.as_coeff_Mul()[1] for i in dir(u): try: if units == eval('u.' + i).as_coeff_Mul()[1]: rv.append(str(i)) except Exception: pass return sorted(rv, key=len) del Rational, pi
true
true
f73c0dd0bc664d16bb63c14b2c4d78cf60a252b4
1,705
py
Python
tests/settings.py
OlgaBorisova/django-pgfields
fc21efc116d017f1bf75c88fa3502cc23f8923b3
[ "BSD-3-Clause" ]
null
null
null
tests/settings.py
OlgaBorisova/django-pgfields
fc21efc116d017f1bf75c88fa3502cc23f8923b3
[ "BSD-3-Clause" ]
null
null
null
tests/settings.py
OlgaBorisova/django-pgfields
fc21efc116d017f1bf75c88fa3502cc23f8923b3
[ "BSD-3-Clause" ]
1
2018-10-23T21:31:29.000Z
2018-10-23T21:31:29.000Z
from __future__ import absolute_import, unicode_literals from django_pg.utils.south import south_installed import os import sys DEBUG = True TEMPLATE_DEBUG = DEBUG # Database settings. This assumes that the default user and empty # password will work. DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'django_pg', 'USER': '', 'PASSWORD': '', 'HOST': 'localhost', 'PORT': 5432, } } # Boilerplate settings. TIME_ZONE = 'America/Chicago' USE_TZ = True LANGUAGE_CODE = 'en-us' ADMIN_MEDIA_PREFIX = '/static/admin/' STATICFILES_DIRS = () SECRET_KEY = 'vu8[/=:+pjz8o:9c6g7spzq_c14ke9zymjq(:m]5_+-gc3l8)]' # Installed apps. Be smart about this: search for things under the # `tests/` directory, but add them as applications as they have their # own models that we need in order to test this stuff. INSTALLED_APPS = [ 'django_pg', ] for module in os.listdir(os.path.dirname(__file__)): full_dir = os.path.dirname(__file__) + '/' + module if os.path.isdir(full_dir) and os.path.isfile(full_dir + '/__init__.py'): INSTALLED_APPS.append('tests.' + module) # If South is installed, then add it to installed apps. if south_installed: INSTALLED_APPS.append('south') # We don't really care what middleware is installed for our purposes, # but we really want Django 1.7's check/warning system to be quiet about # this not being explicitly set. MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', ) # Easiest default test settings for django-pgfields itself. DJANGOPG_IMPROVED_REPR = True DJANGOPG_REPR_TEMPLATE = 'single_line'
29.396552
77
0.718475
from __future__ import absolute_import, unicode_literals from django_pg.utils.south import south_installed import os import sys DEBUG = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'django_pg', 'USER': '', 'PASSWORD': '', 'HOST': 'localhost', 'PORT': 5432, } } TIME_ZONE = 'America/Chicago' USE_TZ = True LANGUAGE_CODE = 'en-us' ADMIN_MEDIA_PREFIX = '/static/admin/' STATICFILES_DIRS = () SECRET_KEY = 'vu8[/=:+pjz8o:9c6g7spzq_c14ke9zymjq(:m]5_+-gc3l8)]' INSTALLED_APPS = [ 'django_pg', ] for module in os.listdir(os.path.dirname(__file__)): full_dir = os.path.dirname(__file__) + '/' + module if os.path.isdir(full_dir) and os.path.isfile(full_dir + '/__init__.py'): INSTALLED_APPS.append('tests.' + module) if south_installed: INSTALLED_APPS.append('south') # but we really want Django 1.7's check/warning system to be quiet about MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', ) DJANGOPG_IMPROVED_REPR = True DJANGOPG_REPR_TEMPLATE = 'single_line'
true
true
f73c0e51df590bc8a015c8e97adf0eb20ac088c3
1,383
py
Python
website/canvas/url_util.py
bopopescu/canvas
2dfd6009eaecd8dac64ccc6125084e65305fb5d0
[ "BSD-3-Clause" ]
61
2015-11-10T17:13:46.000Z
2021-08-06T17:58:30.000Z
website/canvas/url_util.py
bopopescu/canvas
2dfd6009eaecd8dac64ccc6125084e65305fb5d0
[ "BSD-3-Clause" ]
13
2015-11-11T07:49:41.000Z
2021-06-09T03:45:31.000Z
website/canvas/url_util.py
bopopescu/canvas
2dfd6009eaecd8dac64ccc6125084e65305fb5d0
[ "BSD-3-Clause" ]
18
2015-11-11T04:50:04.000Z
2021-08-20T00:57:11.000Z
import urlparse from django.conf.urls.defaults import url from canvas.exceptions import ServiceError re_slug = lambda name: "(?P<%s>[a-zA-Z0-9_.,-]+)" % name re_group_slug = lambda name: "(?P<%s>[-a-zA-Z0-9_]+)" % name re_int = lambda name: "(?P<%s>\d+)" % name re_year = re_int('year') re_month = re_year + "/" + re_int("month") re_day = re_month + "/" + re_int('day') def url_decorator(urls): def decorator(url_regexp, *args, **kwargs): def wrapper(fun): urls.append(url(url_regexp, fun, *args, **kwargs)) return fun return wrapper return decorator def dynamic_urls(): """ Usage Example: urls, api = dynamic_urls() """ urls = [] return urls, url_decorator(urls) def maybe(regexp): return '(%s|)' % regexp def verify_first_party_url(url): """ Also allows iTunes store URLs. """ if not url or not url.startswith('/'): parsed_url = urlparse.urlparse(url) try: protocol = parsed_url[0] domain = parsed_url[1] except IndexError: raise ServiceError("Invalid share url.") if protocol not in ['http', 'https'] or domain not in ['itunes.apple.com', 'example.com']: # Only 1st party redirects, to avoid security holes that 3rd party redirects imply raise ServiceError("Invalid share url.")
27.117647
99
0.606652
import urlparse from django.conf.urls.defaults import url from canvas.exceptions import ServiceError re_slug = lambda name: "(?P<%s>[a-zA-Z0-9_.,-]+)" % name re_group_slug = lambda name: "(?P<%s>[-a-zA-Z0-9_]+)" % name re_int = lambda name: "(?P<%s>\d+)" % name re_year = re_int('year') re_month = re_year + "/" + re_int("month") re_day = re_month + "/" + re_int('day') def url_decorator(urls): def decorator(url_regexp, *args, **kwargs): def wrapper(fun): urls.append(url(url_regexp, fun, *args, **kwargs)) return fun return wrapper return decorator def dynamic_urls(): urls = [] return urls, url_decorator(urls) def maybe(regexp): return '(%s|)' % regexp def verify_first_party_url(url): if not url or not url.startswith('/'): parsed_url = urlparse.urlparse(url) try: protocol = parsed_url[0] domain = parsed_url[1] except IndexError: raise ServiceError("Invalid share url.") if protocol not in ['http', 'https'] or domain not in ['itunes.apple.com', 'example.com']: raise ServiceError("Invalid share url.")
true
true
f73c0f2c39b11fc9eed97e1360702015396621e1
9,602
py
Python
oase-root/libs/backyardlibs/oase_action_common_libs.py
wreathvine/oase
e560b124de6239ec94b662ebff3c310c3f2fa631
[ "Apache-2.0" ]
null
null
null
oase-root/libs/backyardlibs/oase_action_common_libs.py
wreathvine/oase
e560b124de6239ec94b662ebff3c310c3f2fa631
[ "Apache-2.0" ]
null
null
null
oase-root/libs/backyardlibs/oase_action_common_libs.py
wreathvine/oase
e560b124de6239ec94b662ebff3c310c3f2fa631
[ "Apache-2.0" ]
null
null
null
# Copyright 2019 NEC Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ [概要] """ import json import os import sys import re import pytz import datetime from socket import gethostname import traceback # import検索パス追加 my_path = os.path.dirname(os.path.abspath(__file__)) tmp_path = my_path.split('oase-root') root_dir_path = tmp_path[0] + 'oase-root' sys.path.append(root_dir_path) # oaseログモジュールのimport from web_app.models.models import ActionLog from libs.commonlibs.oase_logger import OaseLogger logger = OaseLogger.get_instance() # ロガー初期化 class ConstantModules: """ [クラス概要] アクションドライバ コンスタント定義クラス """ ## OASE DB USER DB_OASE_USER = -2140000001 ## module result code # deprecate 以下3つのステータスはITA_DBを使っていたもので廃止予定 RET_REST_ERROR = 100 # REST Access Error RET_DATA_ERROR = 101 # DBの登録データが異常 RET_REST_MULTI_UPDATE = 102 # 追越し更新 ## テーブルカラム位置 ## 共通 COL_FUNCTION_NAME = 0 COL_DISUSE_FLAG = 1 ## C_MOVEMENT_CLASS_MNG CMCM_MOVEMENT_CLASS_NO = 2 CMCM_ORCHESTRATOR_ID = 3 CMCM_PATTERN_ID = 4 CMCM_MOVEMENT_SEQ = 5 CMCM_NEXT_PENDING_FLAG = 6 CMCM_DESCRIPTION = 7 CMCM_SYMPHONY_CLASS_NO = 8 CMCM_NOTE = 9 CMCM_LAST_UPDATE_TIMESTAMP = 10 CMCM_UPDATE_LAST_UPDATE_TIMESTAMP = 11 CMCM_LAST_UPDATE_USER = 12 ## C_SYMPHONY_CLASS_MNG CSCM_SYMPHONY_CLASS_ID = 2 CSCM_SYMPHONY_NAME = 3 ## C_CONDUCTOR_EDIT_CLASS_MNG CCCM_CONDUCTOR_CLASS_ID = 2 CCCM_CONDUCTOR_NAME = 3 ## C_NODE_CLASS_MNG CNCM_NODE_CLASS_NO = 2 CNCM_NODE_NAME = 3 CNCM_NODE_TYPE_ID = 4 CNCM_ORCHESTRATOR_ID = 5 CNCM_PATTERN_ID = 6 
CNCM_CONDUCTOR_CLASS_NO = 7 CNCM_CONDUCTOR_CALL_CLASS_NO = 8 CNCM_OPERATION_NO_IDBH = 9 CNCM_SKIP_FLAG = 10 ## C_STM_LIST CSL_SYSTEM_ID = 2 CSL_HARDAWRE_TYPE_ID = 3 CSL_HOSTNAME = 4 CSL_IP_ADDRESS = 5 ## C_OPERATION_LIST COL_OPERATION_NO_UAPK = 2 COL_OPERATION_NO_IDBH = 3 COL_OPERATION_NAME = 4 COL_OPERATION_DATE = 5 COL_NOTE = 6 COL_LAST_UPDATE_TIMESTAMP = 7 COL_UPDATE_LAST_UPDATE_TIMESTAMP = 8 COL_LAST_UPDATE_USER = 9 TBL_COL_MAX = COL_LAST_UPDATE_USER + 1 ## C_PARAMETER_SHEET COL_PARAMETER_NO = 2 COL_HOSTNAME = 3 COL_OPERATION_ID = 4 COL_OPERATION_NAME_PARAM = 5 COL_SCHEDULE_TIMESTAMP_ID_NAME = 9 COL_PARAMETER = 10 TBL_CPS_MAX = COL_PARAMETER + 1 ## C_PARAMETER_SHEET_VERTICAL COL_PARAMETER_NO_VERTICAL = 2 COL_HOSTNAME_VERTICAL = 3 COL_OPERATION_ID_VERTICAL = 4 COL_OPERATION_NAME_PARAM_VERTICAL = 5 COL_SCHEDULE_TIMESTAMP_ID_NAME_VERTICAL = 9 COL_SUBSTITUTION_ORDER_VERTICAL = 10 COL_PARAMETER_VERTICAL = 11 ## C_PATTERN_PER_ORCH CPPO_PATTERN_ID = 2 CPPO_PATTERN_NAME = 3 CPPO_ITA_EXT_STM_ID = 4 ## B_ANSIBLE_xxx_PHO_LINK BAPL_PHO_LINK_ID = 2 BAPL_OPERATION_NO_UAPK = 3 BAPL_PATTERN_ID = 4 BAPL_SYSTEM_ID = 5 BAPL_NOTE = 6 BAPL_LAST_UPDATE_TIMESTAMP = 7 BAPL_UPDATE_LAST_UPDATE_TIMESTAMP = 8 BAPL_LAST_UPDATE_USER = 9 TBL_BAPL_MAX = BAPL_LAST_UPDATE_USER + 1 ## E_ANSIBLE_xxx_PATTERN EAP_PATTERN_ID = 2 EAP_PATTERN_NAME = 3 EAP_LEGACY_VAR_COUNT = 10 EAP_PIONEER_VAR_COUNT = 8 EAP_LEGACYROLE_VAR_COUNT = 10 ## F_CREATE_MENU_INFO FCMI_MENU_ID = 2 FCMI_MENU_NAME = 3 FCMI_TARGET = 4 FCMI_USE = 6 FCMI_MENUGROUP_FOR_VERTICAL_2 = 7 FCMI_MENUGROUP_FOR_HOSTGROUP = 8 FCMI_MENUGROUP_FOR_HOST = 9 FCMI_MENUGROUP_FOR_VERTICAL = 11 ## A_MENU_GROUP_LIST AMGL_MENU_GROUP_ID = 2 AMGL_MENU_GROUP_NAME = 3 ## A_MENU_LIST AML_MENU_ID = 2 AML_MENU_GROUP_ID = 3 AML_MENU_GROUP_NAME = 4 AML_MENU_NAME = 6 ## D_ANS_LNS_VAL_ASSIGN DALVA_ID = 2 DALVA_MENU_NAME = 3 DALVA_ITEM_NAME = 4 DALVA_ITEM_ORDER = 5 DALVA_COLUMN_GROUP = 8 #OASE_T_RHDM_RESPONSE OTRR_RESPONSE_ID = 2 OTRR_REQUEST_TYPE_ID = 5 OTRR_STATUS 
= 6 OTRR_UPDATE_STATUS_ID = 7 #OASE_T_RHDM_RESPONSE_ACTION OTRRA_RESPONSE_ID = 3 OTRRA_EXECUTION_ORDER = 5 #OASE_T_ACTION_HISTORY OTAH_RESPONSE_ID = 3 OTAH_RULE_NAME = 5 OTAH_EXECUTION_ORDER = 6 OTAH_ACTION_START_TIME = 7 OTAH_ACTION_TYPE_ID = 8 OTAH_STATUS = 10 #OASE_T_ITA_DRIVER OTID_ITA_DISP_NAME = 3 #OASE_T_MAIL_DRIVER OTMD_MAIL_DISP_NAME = 3 #OASE_T_MAIL_TEMPLATE OTMT_MAIL_TEMPLATE_NAME = 3 #トレースIDがない場合の設定値 UnsetTraceID = '----------------------------------------' class ActionDriverCommonModules: """ [クラス概要] アクションドライバ共通処理クラス """ def getStringNowDate(self): """ [概要] 現在日付を文字列で取得するメソット """ now = datetime.datetime.now(pytz.timezone('UTC')) return now.strftime("%Y/%m/%d") @classmethod def getStringNowDateTime(cls, convert_flg=False): """ [概要] 現在日時を文字列で取得するメソット """ now = datetime.datetime.now(pytz.timezone('UTC')) if convert_flg: return now.strftime("%Y-%m-%d %H:%M:%S.%f") return now def KeyValueStringFind(self,pattern,string): """ [概要] Key Value型文字列からValueを抜き出すメソット """ string_tmp = string.split('=') if len(string_tmp) < 2: return None key = string_tmp[0] if key != pattern: return None val = '='.join(string_tmp[1:]) return val @classmethod def back_trace(self): """ [概要] Key Value型文字列からValueを抜き出すメソット """ exc_type, exc_value, exc_traceback = sys.exc_info() stack_trace = traceback.format_exception(exc_type, exc_value, exc_traceback) edit_trace = '' for line in stack_trace: edit_trace = edit_trace + line return edit_trace @staticmethod def SaveActionLog(resp_id, exe_order, trace_id, msg_id, **kwargs): """ [概要] アクション履歴ログを保存する """ now = datetime.datetime.now(pytz.timezone('UTC')) msg_params = None if kwargs: msg_params = '%s' % (kwargs) try: ActionLog( response_id = resp_id, execution_order = exe_order, trace_id = trace_id, message_id = msg_id, message_params = msg_params, last_update_timestamp = now ).save(force_insert=True) except Exception as ex: logger.system_log('LOSM01007', resp_id, exe_order, trace_id, msg_id, msg_params) # libs/webcommonlibs/common.py から引用 class 
TimeConversion: @classmethod def get_time_conversion(cls, naive, tz): """ [概要] 時刻変換処理を行う [戻り値] 変換した時刻 """ return naive.astimezone(pytz.timezone(tz)).strftime('%Y-%m-%d %H:%M:%S') @classmethod def get_time_conversion_utc(cls, naive, tz): """ [概要] 時刻変換処理を行う [戻り値] utc_dt : 変換した時刻(UTC) """ tz_ex = pytz.timezone(tz) naive = naive.replace('/', '-') user_dt = datetime.datetime.strptime(naive, '%Y-%m-%d %H:%M:%S') cou_dt = tz_ex.localize(user_dt, is_dst=None) utc_dt = cou_dt.astimezone(pytz.utc) return utc_dt
30.194969
92
0.532493
import json import os import sys import re import pytz import datetime from socket import gethostname import traceback my_path = os.path.dirname(os.path.abspath(__file__)) tmp_path = my_path.split('oase-root') root_dir_path = tmp_path[0] + 'oase-root' sys.path.append(root_dir_path) from web_app.models.models import ActionLog from libs.commonlibs.oase_logger import OaseLogger logger = OaseLogger.get_instance() class ConstantModules: SER = -2140000001 RROR = 100 RET_DATA_ERROR = 101 RET_REST_MULTI_UPDATE = 102 UNCTION_NAME = 0 COL_DISUSE_FLAG = 1 SS_NO = 2 CMCM_ORCHESTRATOR_ID = 3 CMCM_PATTERN_ID = 4 CMCM_MOVEMENT_SEQ = 5 CMCM_NEXT_PENDING_FLAG = 6 CMCM_DESCRIPTION = 7 CMCM_SYMPHONY_CLASS_NO = 8 CMCM_NOTE = 9 CMCM_LAST_UPDATE_TIMESTAMP = 10 CMCM_UPDATE_LAST_UPDATE_TIMESTAMP = 11 CMCM_LAST_UPDATE_USER = 12 SS_ID = 2 CSCM_SYMPHONY_NAME = 3 = 2 CCCM_CONDUCTOR_NAME = 3 SS_NO = 2 CNCM_NODE_NAME = 3 CNCM_NODE_TYPE_ID = 4 CNCM_ORCHESTRATOR_ID = 5 CNCM_PATTERN_ID = 6 CNCM_CONDUCTOR_CLASS_NO = 7 CNCM_CONDUCTOR_CALL_CLASS_NO = 8 CNCM_OPERATION_NO_IDBH = 9 CNCM_SKIP_FLAG = 10 TEM_ID = 2 CSL_HARDAWRE_TYPE_ID = 3 CSL_HOSTNAME = 4 CSL_IP_ADDRESS = 5 _NO_UAPK = 2 COL_OPERATION_NO_IDBH = 3 COL_OPERATION_NAME = 4 COL_OPERATION_DATE = 5 COL_NOTE = 6 COL_LAST_UPDATE_TIMESTAMP = 7 COL_UPDATE_LAST_UPDATE_TIMESTAMP = 8 COL_LAST_UPDATE_USER = 9 TBL_COL_MAX = COL_LAST_UPDATE_USER + 1 NO = 2 COL_HOSTNAME = 3 COL_OPERATION_ID = 4 COL_OPERATION_NAME_PARAM = 5 COL_SCHEDULE_TIMESTAMP_ID_NAME = 9 COL_PARAMETER = 10 TBL_CPS_MAX = COL_PARAMETER + 1 AL = 2 COL_HOSTNAME_VERTICAL = 3 COL_OPERATION_ID_VERTICAL = 4 COL_OPERATION_NAME_PARAM_VERTICAL = 5 COL_SCHEDULE_TIMESTAMP_ID_NAME_VERTICAL = 9 COL_SUBSTITUTION_ORDER_VERTICAL = 10 COL_PARAMETER_VERTICAL = 11 = 2 CPPO_PATTERN_NAME = 3 CPPO_ITA_EXT_STM_ID = 4 = 2 BAPL_OPERATION_NO_UAPK = 3 BAPL_PATTERN_ID = 4 BAPL_SYSTEM_ID = 5 BAPL_NOTE = 6 BAPL_LAST_UPDATE_TIMESTAMP = 7 BAPL_UPDATE_LAST_UPDATE_TIMESTAMP = 8 BAPL_LAST_UPDATE_USER = 9 TBL_BAPL_MAX = 
BAPL_LAST_UPDATE_USER + 1 = 2 EAP_PATTERN_NAME = 3 EAP_LEGACY_VAR_COUNT = 10 EAP_PIONEER_VAR_COUNT = 8 EAP_LEGACYROLE_VAR_COUNT = 10 = 2 FCMI_MENU_NAME = 3 FCMI_TARGET = 4 FCMI_USE = 6 FCMI_MENUGROUP_FOR_VERTICAL_2 = 7 FCMI_MENUGROUP_FOR_HOSTGROUP = 8 FCMI_MENUGROUP_FOR_HOST = 9 FCMI_MENUGROUP_FOR_VERTICAL = 11 P_ID = 2 AMGL_MENU_GROUP_NAME = 3 _ID = 2 AML_MENU_GROUP_ID = 3 AML_MENU_GROUP_NAME = 4 AML_MENU_NAME = 6 = 2 DALVA_MENU_NAME = 3 DALVA_ITEM_NAME = 4 DALVA_ITEM_ORDER = 5 DALVA_COLUMN_GROUP = 8 OTRR_RESPONSE_ID = 2 OTRR_REQUEST_TYPE_ID = 5 OTRR_STATUS = 6 OTRR_UPDATE_STATUS_ID = 7 OTRRA_RESPONSE_ID = 3 OTRRA_EXECUTION_ORDER = 5 OTAH_RESPONSE_ID = 3 OTAH_RULE_NAME = 5 OTAH_EXECUTION_ORDER = 6 OTAH_ACTION_START_TIME = 7 OTAH_ACTION_TYPE_ID = 8 OTAH_STATUS = 10 OTID_ITA_DISP_NAME = 3 OTMD_MAIL_DISP_NAME = 3 OTMT_MAIL_TEMPLATE_NAME = 3 UnsetTraceID = '----------------------------------------' class ActionDriverCommonModules: def getStringNowDate(self): now = datetime.datetime.now(pytz.timezone('UTC')) return now.strftime("%Y/%m/%d") @classmethod def getStringNowDateTime(cls, convert_flg=False): now = datetime.datetime.now(pytz.timezone('UTC')) if convert_flg: return now.strftime("%Y-%m-%d %H:%M:%S.%f") return now def KeyValueStringFind(self,pattern,string): string_tmp = string.split('=') if len(string_tmp) < 2: return None key = string_tmp[0] if key != pattern: return None val = '='.join(string_tmp[1:]) return val @classmethod def back_trace(self): exc_type, exc_value, exc_traceback = sys.exc_info() stack_trace = traceback.format_exception(exc_type, exc_value, exc_traceback) edit_trace = '' for line in stack_trace: edit_trace = edit_trace + line return edit_trace @staticmethod def SaveActionLog(resp_id, exe_order, trace_id, msg_id, **kwargs): now = datetime.datetime.now(pytz.timezone('UTC')) msg_params = None if kwargs: msg_params = '%s' % (kwargs) try: ActionLog( response_id = resp_id, execution_order = exe_order, trace_id = trace_id, message_id = msg_id, 
message_params = msg_params, last_update_timestamp = now ).save(force_insert=True) except Exception as ex: logger.system_log('LOSM01007', resp_id, exe_order, trace_id, msg_id, msg_params) class TimeConversion: @classmethod def get_time_conversion(cls, naive, tz): return naive.astimezone(pytz.timezone(tz)).strftime('%Y-%m-%d %H:%M:%S') @classmethod def get_time_conversion_utc(cls, naive, tz): tz_ex = pytz.timezone(tz) naive = naive.replace('/', '-') user_dt = datetime.datetime.strptime(naive, '%Y-%m-%d %H:%M:%S') cou_dt = tz_ex.localize(user_dt, is_dst=None) utc_dt = cou_dt.astimezone(pytz.utc) return utc_dt
true
true
f73c0fe703b389cfc864ec8c3e6ef753f1d11bd1
38,347
py
Python
tests/workflow_builder_test.py
isi-vista/vista-pegasus-wrapper
bbc220bb9aa6adaaad1df785b51526b6d4b0fafc
[ "MIT" ]
2
2021-03-11T13:33:52.000Z
2021-06-18T17:49:41.000Z
tests/workflow_builder_test.py
isi-vista/vista-pegasus-wrapper
bbc220bb9aa6adaaad1df785b51526b6d4b0fafc
[ "MIT" ]
108
2020-02-25T21:42:30.000Z
2022-03-25T21:59:30.000Z
tests/workflow_builder_test.py
isi-vista/vista-pegasus-wrapper
bbc220bb9aa6adaaad1df785b51526b6d4b0fafc
[ "MIT" ]
1
2020-08-05T15:33:52.000Z
2020-08-05T15:33:52.000Z
import subprocess from pathlib import Path from random import Random from immutablecollections import immutableset from vistautils.parameters import Parameters from pegasus_wrapper import PegasusProfile from pegasus_wrapper.artifact import ValueArtifact from pegasus_wrapper.locator import Locator, _parse_parts from pegasus_wrapper.pegasus_utils import build_submit_script from pegasus_wrapper.resource_request import SlurmResourceRequest from pegasus_wrapper.scripts.add_y import main as add_main from pegasus_wrapper.scripts.multiply_by_x import main as multiply_by_x_main from pegasus_wrapper.scripts.sort_nums_in_file import main as sort_nums_main from pegasus_wrapper.workflow import WorkflowBuilder import pytest from yaml import SafeLoader, load def test_simple_dax(tmp_path): params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(params) assert workflow_builder.name == "Test" assert workflow_builder.created_by == "Testing" assert ( workflow_builder._workflow_directory # pylint:disable=protected-access == tmp_path / "working" ) assert workflow_builder._namespace == "test" # pylint:disable=protected-access assert workflow_builder._default_site == "saga" # pylint:disable=protected-access assert workflow_builder.default_resource_request # pylint:disable=protected-access assert workflow_builder._job_graph is not None # pylint:disable=protected-access assert workflow_builder._experiment_name == "fred" # pylint:disable=protected-access def test_locator(): job = Locator(_parse_parts("job")) example = Locator(_parse_parts("example/path")) combined = example / job combined_from_string = example / "job" assert combined.__repr__() == combined_from_string.__repr__() with 
pytest.raises(RuntimeError): _ = combined / 90 # Confirm we can't create a locator with an equals sign in the name with pytest.raises(ValueError): _ = Locator(_parse_parts("x=20")) def test_dax_with_job_on_saga(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" add_output_file = tmp_path / "add_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) add_args = f"{sorted_output_file} {add_output_file} --y 10" resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ), locator=Locator("multiply"), ) multiple_dir = workflow_builder.directory_for(multiply_job_name) assert (multiple_dir / "___run.sh").exists() assert (multiple_dir / "____params.params").exists() sort_job_name = Locator(_parse_parts("jobs/sort")) sort_dir = 
workflow_builder.directory_for(sort_job_name) sort_artifact = workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, category="add", ) assert (sort_dir / "___run.sh").exists() assert (sort_dir / "____params.params").exists() add_job_name = Locator(_parse_parts("jobs/add")) add_dir = workflow_builder.directory_for(add_job_name) workflow_builder.run_python_on_args( add_job_name, add_main, add_args, depends_on=[sort_artifact] ) assert (add_dir / "___run.sh").exists() dax_file_one = workflow_builder.write_dax_to_dir(tmp_path) dax_file_two = workflow_builder.write_dax_to_dir() assert dax_file_one.exists() assert dax_file_two.exists() submit_script_one = tmp_path / "submit_script_one.sh" submit_script_two = tmp_path / "submit_script_two.sh" build_submit_script( submit_script_one, str(dax_file_one), workflow_builder._workflow_directory, # pylint:disable=protected-access ) build_submit_script( submit_script_two, str(dax_file_two), workflow_builder._workflow_directory, # pylint:disable=protected-access ) assert submit_script_one.exists() assert submit_script_two.exists() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() submit_script_process = subprocess.Popen( ["sh", str(submit_script_one)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", ) stdout, stderr = submit_script_process.communicate() print(stdout) print(stderr) def test_dax_with_checkpointed_jobs_on_saga(tmp_path): workflow_params = 
Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiple_dir = workflow_builder.directory_for(multiply_job_name) # Create checkpointed file so that when trying to create the job again, # Pegasus just adds the file to the Replica Catalog checkpointed_multiply_file = multiple_dir / "___ckpt" checkpointed_multiply_file.touch() multiply_output_file.touch() assert checkpointed_multiply_file.exists() assert multiply_output_file.exists() multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ), locator=Locator("multiply"), ) sort_job_name = Locator(_parse_parts("jobs/sort")) sorted_output_file = tmp_path / "sorted_nums.txt" sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, ) workflow_builder.write_dax_to_dir() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / 
"replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() # Make sure the Replica Catalog is not empty assert replica_catalog.stat().st_size > 0 def test_clearing_ckpts(monkeypatch, tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "scavenge", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiple_dir = workflow_builder.directory_for(multiply_job_name) checkpointed_multiply_file = multiple_dir / "___ckpt" checkpointed_multiply_file.touch() multiply_output_file.touch() workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ) monkeypatch.setattr("builtins.input", lambda _: "y") workflow_builder.write_dax_to_dir() assert not checkpointed_multiply_file.exists() def test_not_clearing_ckpts(monkeypatch, tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "scavenge", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = 
Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiple_dir = workflow_builder.directory_for(multiply_job_name) checkpointed_multiply_file = multiple_dir / "___ckpt" checkpointed_multiply_file.touch() multiply_output_file.touch() workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ) monkeypatch.setattr("builtins.input", lambda _: "n") workflow_builder.write_dax_to_dir() assert checkpointed_multiply_file.exists() def _job_in_dax_has_category(dax_file, target_job_locator, category): """ Return whether the given DAX file has a job corresponding to `target_job_locator` which has the category `category`. """ target_job_name = str(target_job_locator).replace("/", "_") with dax_file.open("r") as f: data = load(f, Loader=SafeLoader) for item in data["jobs"]: if item["name"] == target_job_name: if "dagman" in item["profiles"].keys(): if "CATEGORY" in item["profiles"]["dagman"].keys(): if item["profiles"]["dagman"]["CATEGORY"] == category: return True return False def test_dax_with_categories(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiply_job_category = "arithmetic" 
workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], category=multiply_job_category, ) # Check that the multiply job has the appropriate category set in the DAX file dax_file = workflow_builder.write_dax_to_dir() assert dax_file.exists() assert _job_in_dax_has_category(dax_file, multiply_job_name, multiply_job_category) assert not _job_in_dax_has_category( dax_file, multiply_job_name, "an-arbitrary-category" ) def test_dax_with_saga_categories(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) multiply_partition = "gaia" multiply_slurm_params = Parameters.from_mapping( {"partition": multiply_partition, "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_resources = SlurmResourceRequest.from_parameters(multiply_slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], resource_request=multiply_resources, ), locator=Locator("multiply"), ) sort_partition = "lestat" sort_slurm_params = Parameters.from_mapping( {"partition": sort_partition, "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) sort_resources = SlurmResourceRequest.from_parameters(sort_slurm_params) sort_job_name = Locator(_parse_parts("jobs/sort")) sorted_output_file = tmp_path / "sorted_nums.txt" sort_params = 
Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=sort_resources, ) dax_file = workflow_builder.write_dax_to_dir() assert dax_file.exists() # Check that the multiply and sort jobs have the appropriate partition-defined categories set in # the DAX file assert _job_in_dax_has_category(dax_file, multiply_job_name, multiply_partition) assert not _job_in_dax_has_category(dax_file, multiply_job_name, sort_partition) assert _job_in_dax_has_category(dax_file, sort_job_name, sort_partition) assert not _job_in_dax_has_category(dax_file, sort_job_name, multiply_partition) def test_category_max_jobs(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) multiply_slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_resources = SlurmResourceRequest.from_parameters(multiply_slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], resource_request=multiply_resources, ), locator=Locator("multiply"), ) sort_slurm_params = Parameters.from_mapping( { "partition": "ephemeral", "num_cpus": 1, "num_gpus": 0, "memory": 
"4G", "job_time_in_minutes": 120, } ) sort_resources = SlurmResourceRequest.from_parameters(sort_slurm_params) sort_job_name = Locator(_parse_parts("jobs/sort")) sorted_output_file = tmp_path / "sorted_nums.txt" sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=sort_resources, ) workflow_builder.limit_jobs_for_category("gaia", 1) workflow_builder.write_dax_to_dir() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() # Make sure the config contains the appropriate maxjobs lines and no inappropriate maxjobs lines with properties_file.open("r") as f: lines = f.readlines() for line in lines: print(line) assert any("dagman.gaia.maxjobs = 1" in line for line in lines) assert all("dagman.ephemeral.maxjobs =" not in line for line in lines) def test_dax_test_exclude_nodes_on_saga(tmp_path): sample_exclude = "saga01,saga03,saga21,saga05" sample_include = "saga06" params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "exclude_list": sample_exclude, "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / 
"raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(0, 25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) job_profile = PegasusProfile( namespace="pegasus", key="transfer.bypass.input.staging", value="True" ) resources = SlurmResourceRequest.from_parameters( slurm_params.unify({"run_on_single_node": sample_include}) ) workflow_builder = WorkflowBuilder.from_parameters(params) multiply_job_locator = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_locator, multiply_by_x_main, multiply_params, depends_on=[], job_profiles=[job_profile], ), locator=Locator("multiply"), ) sort_job_locator = Locator(_parse_parts("jobs/sort")) workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, ) dax_file = workflow_builder.write_dax_to_dir(tmp_path) with dax_file.open("r") as dax: dax_yaml = load(dax, Loader=SafeLoader) root = dax_yaml["jobs"] for item in root: if item["type"] == "job": if "pegasus" in item["profiles"]: if item["name"] == "jobs_multiply": assert ( f"--exclude={sample_exclude}" in item["profiles"]["pegasus"]["glite.arguments"] ) elif item["name"] == "jobs_sort": assert "--exclude=" in item["profiles"]["pegasus"]["glite.arguments"] assert ( f"--nodelist={sample_include}" in item["profiles"]["pegasus"]["glite.arguments"] ) else: assert False def test_dax_with_job_in_container(tmp_path): workflow_params = 
Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) # Add Container example_docker = workflow_builder.add_container( "example_container", "docker", tmp_path / "docker.img" ) with pytest.raises(ValueError): _ = workflow_builder.add_container( "fake_container", "invalid", tmp_path / "invalid_docker.img" ) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], container=example_docker, ), locator=Locator("multiply"), ) multiple_dir = workflow_builder.directory_for(multiply_job_name) assert (multiple_dir / "___run.sh").exists() assert (multiple_dir / "____params.params").exists() sort_job_name = Locator(_parse_parts("jobs/sort")) sort_dir = workflow_builder.directory_for(sort_job_name) workflow_builder.run_python_on_parameters( 
sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, container=example_docker, ) assert (sort_dir / "___run.sh").exists() assert (sort_dir / "____params.params").exists() dax_file_one = workflow_builder.write_dax_to_dir() assert dax_file_one.exists() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() def test_dax_with_job_on_saga_with_dict_as_params(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" add_output_file = tmp_path / "add_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = { "input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4, } sort_params = {"input_file": multiply_output_file, "output_file": sorted_output_file} add_args = f"{sorted_output_file} {add_output_file} --y 10" resources = 
SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ), locator=Locator("multiply"), ) multiple_dir = workflow_builder.directory_for(multiply_job_name) assert (multiple_dir / "___run.sh").exists() assert (multiple_dir / "____params.params").exists() sort_job_name = Locator(_parse_parts("jobs/sort")) sort_dir = workflow_builder.directory_for(sort_job_name) sort_artifact = workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, category="add", ) assert (sort_dir / "___run.sh").exists() assert (sort_dir / "____params.params").exists() add_job_name = Locator(_parse_parts("jobs/add")) add_dir = workflow_builder.directory_for(add_job_name) workflow_builder.run_python_on_args( add_job_name, "add_job_main.py", add_args, depends_on=[sort_artifact] ) assert (add_dir / "___run.sh").exists() dax_file_one = workflow_builder.write_dax_to_dir(tmp_path) dax_file_two = workflow_builder.write_dax_to_dir() assert dax_file_one.exists() assert dax_file_two.exists() submit_script_one = tmp_path / "submit_script_one.sh" submit_script_two = tmp_path / "submit_script_two.sh" build_submit_script( submit_script_one, str(dax_file_one), workflow_builder._workflow_directory, # pylint:disable=protected-access ) build_submit_script( submit_script_two, str(dax_file_two), workflow_builder._workflow_directory, # pylint:disable=protected-access ) assert submit_script_one.exists() assert submit_script_two.exists() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / 
"replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() def test_dax_with_python_into_container_jobs(tmp_path): docker_tar = Path(f"{tmp_path}/docker/tar.tar") docker_build_dir = tmp_path docker_image_name = "pegasus_wrapper_container_demo" docker_image_tag = "0.2" # Generating parameters for initializing a workflow # We recommend making workflow directory, site, and partition parameters # in an research workflow workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "home_dir": str(tmp_path), "partition": "scavenge", } ) saga31_request = SlurmResourceRequest.from_parameters( Parameters.from_mapping({"run_on_single_node": "saga31", "partition": "gaia"}) ) # Our source input for the sample jobs input_file = tmp_path / "raw_nums.txt" add_y_output_file_nas = tmp_path / "nums_y.txt" sorted_output_file_nas = tmp_path / "sorted.txt" random = Random() random.seed(0) nums = [int(random.random() * 100) for _ in range(0, 25)] # Base Job Locator job_locator = Locator(("jobs",)) docker_python_root = Path("/home/app/") # Write a list of numbers out to be able to run the workflow with input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) build_container_locator = job_locator / "build_docker" build_container = workflow_builder.run_bash( build_container_locator, command=[ "mkdir -p /scratch/dockermount/pegasus_wrapper_tmp", f"cd {docker_build_dir}", f"docker build . 
-t {docker_image_name}:{docker_image_tag}", f"docker save -o /scratch/dockermount/pegasus_wrapper_tmp/{docker_tar.name} {docker_image_name}:{docker_image_tag}", f"cp /scratch/dockermount/pegasus_wrapper_tmp/{docker_tar.name} {docker_tar.absolute()}", f"chmod go+r {docker_tar.absolute()}", ], depends_on=[], resource_request=saga31_request, ) build_container_dir = workflow_builder.directory_for(build_container_locator) assert (build_container_dir / "script.sh").exists() python36 = workflow_builder.add_container( f"{docker_image_name}:{docker_image_tag}", "docker", str(docker_tar.absolute()), image_site="saga", bypass_staging=True, ) job_profile = PegasusProfile( namespace="pegasus", key="transfer.bypass.input.staging", value="True" ) mongo4_4 = workflow_builder.add_container( "mongo:4.4", "docker", "path/to/tar.tar", image_site="saga", bypass_staging=True ) with pytest.raises(RuntimeError): _ = workflow_builder.stop_docker_as_service( mongo4_4, depends_on=[], resource_request=saga31_request ) start_mongo = workflow_builder.start_docker_as_service( mongo4_4, depends_on=[build_container], docker_args=f"-v /scratch/mongo/data/db:/data/db", resource_request=saga31_request, ) mongo4_4_dir = workflow_builder.directory_for(Locator(("containers", mongo4_4.name))) assert (mongo4_4_dir / "start.sh").exists() assert (mongo4_4_dir / "stop.sh").exists() add_y_locator = job_locator / "add" add_y_job = workflow_builder.run_python_on_args( add_y_locator, docker_python_root / "add_y.py", set_args=f"{input_file} {add_y_output_file_nas} --y 10", depends_on=[build_container], job_profiles=[job_profile], resource_request=saga31_request, container=python36, input_file_paths=[input_file], output_file_paths=[add_y_output_file_nas], ) add_y_dir = workflow_builder.directory_for(add_y_locator) assert (add_y_dir / "___run.sh").exists() with pytest.raises(RuntimeError): _ = workflow_builder.run_python_on_args( add_y_locator, docker_python_root / "add_y.py", set_args=f"{input_file} 
{add_y_output_file_nas} --y 10", depends_on=[build_container], job_profiles=[job_profile], resource_request=saga31_request, container=python36, input_file_paths=[input_file, input_file], output_file_paths=[add_y_output_file_nas], ) sort_job_locator = job_locator / "sort" sort_job = workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=sorted_output_file_nas, ) assert sort_job == workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=sorted_output_file_nas, ) sort_job_dir = workflow_builder.directory_for(sort_job_locator) assert (sort_job_dir / "___run.sh").exists() assert (sort_job_dir / "____params.params").exists() with pytest.raises(RuntimeError): _ = workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=[sorted_output_file_nas, sorted_output_file_nas], ) celebration_bash_locator = job_locator / "celebrate" celebration_bash = workflow_builder.run_bash( celebration_bash_locator, 'echo "Jobs Runs Successfully"', depends_on=[sort_job], job_profiles=[job_profile], ) assert celebration_bash == workflow_builder.run_bash( celebration_bash_locator, 'echo "Jobs Runs Successfully"', depends_on=[sort_job], job_profiles=[job_profile], ) celebration_bash_dir = 
workflow_builder.directory_for(celebration_bash_locator) assert (celebration_bash_dir / "script.sh").exists() _ = workflow_builder.stop_docker_as_service( mongo4_4, depends_on=[start_mongo, sort_job], resource_request=saga31_request ) dax_file_one = workflow_builder.write_dax_to_dir(tmp_path) assert dax_file_one.exists() submit_script_one = tmp_path / "submit_script_one.sh" build_submit_script( submit_script_one, str(dax_file_one), workflow_builder._workflow_directory, # pylint:disable=protected-access ) assert submit_script_one.exists()
36.108286
128
0.673534
import subprocess from pathlib import Path from random import Random from immutablecollections import immutableset from vistautils.parameters import Parameters from pegasus_wrapper import PegasusProfile from pegasus_wrapper.artifact import ValueArtifact from pegasus_wrapper.locator import Locator, _parse_parts from pegasus_wrapper.pegasus_utils import build_submit_script from pegasus_wrapper.resource_request import SlurmResourceRequest from pegasus_wrapper.scripts.add_y import main as add_main from pegasus_wrapper.scripts.multiply_by_x import main as multiply_by_x_main from pegasus_wrapper.scripts.sort_nums_in_file import main as sort_nums_main from pegasus_wrapper.workflow import WorkflowBuilder import pytest from yaml import SafeLoader, load def test_simple_dax(tmp_path): params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(params) assert workflow_builder.name == "Test" assert workflow_builder.created_by == "Testing" assert ( workflow_builder._workflow_directory == tmp_path / "working" ) assert workflow_builder._namespace == "test" assert workflow_builder._default_site == "saga" assert workflow_builder.default_resource_request assert workflow_builder._job_graph is not None assert workflow_builder._experiment_name == "fred" def test_locator(): job = Locator(_parse_parts("job")) example = Locator(_parse_parts("example/path")) combined = example / job combined_from_string = example / "job" assert combined.__repr__() == combined_from_string.__repr__() with pytest.raises(RuntimeError): _ = combined / 90 with pytest.raises(ValueError): _ = Locator(_parse_parts("x=20")) def test_dax_with_job_on_saga(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": 
"Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" add_output_file = tmp_path / "add_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) add_args = f"{sorted_output_file} {add_output_file} --y 10" resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ), locator=Locator("multiply"), ) multiple_dir = workflow_builder.directory_for(multiply_job_name) assert (multiple_dir / "___run.sh").exists() assert (multiple_dir / "____params.params").exists() sort_job_name = Locator(_parse_parts("jobs/sort")) sort_dir = workflow_builder.directory_for(sort_job_name) sort_artifact = workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, category="add", ) assert (sort_dir / "___run.sh").exists() assert (sort_dir / "____params.params").exists() 
add_job_name = Locator(_parse_parts("jobs/add")) add_dir = workflow_builder.directory_for(add_job_name) workflow_builder.run_python_on_args( add_job_name, add_main, add_args, depends_on=[sort_artifact] ) assert (add_dir / "___run.sh").exists() dax_file_one = workflow_builder.write_dax_to_dir(tmp_path) dax_file_two = workflow_builder.write_dax_to_dir() assert dax_file_one.exists() assert dax_file_two.exists() submit_script_one = tmp_path / "submit_script_one.sh" submit_script_two = tmp_path / "submit_script_two.sh" build_submit_script( submit_script_one, str(dax_file_one), workflow_builder._workflow_directory, # pylint:disable=protected-access ) build_submit_script( submit_script_two, str(dax_file_two), workflow_builder._workflow_directory, # pylint:disable=protected-access ) assert submit_script_one.exists() assert submit_script_two.exists() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() submit_script_process = subprocess.Popen( ["sh", str(submit_script_one)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", ) stdout, stderr = submit_script_process.communicate() print(stdout) print(stderr) def test_dax_with_checkpointed_jobs_on_saga(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": 
"gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiple_dir = workflow_builder.directory_for(multiply_job_name) # Create checkpointed file so that when trying to create the job again, # Pegasus just adds the file to the Replica Catalog checkpointed_multiply_file = multiple_dir / "___ckpt" checkpointed_multiply_file.touch() multiply_output_file.touch() assert checkpointed_multiply_file.exists() assert multiply_output_file.exists() multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ), locator=Locator("multiply"), ) sort_job_name = Locator(_parse_parts("jobs/sort")) sorted_output_file = tmp_path / "sorted_nums.txt" sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, ) workflow_builder.write_dax_to_dir() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert 
properties_file.exists() # Make sure the Replica Catalog is not empty assert replica_catalog.stat().st_size > 0 def test_clearing_ckpts(monkeypatch, tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "scavenge", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiple_dir = workflow_builder.directory_for(multiply_job_name) checkpointed_multiply_file = multiple_dir / "___ckpt" checkpointed_multiply_file.touch() multiply_output_file.touch() workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ) monkeypatch.setattr("builtins.input", lambda _: "y") workflow_builder.write_dax_to_dir() assert not checkpointed_multiply_file.exists() def test_not_clearing_ckpts(monkeypatch, tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "scavenge", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiple_dir = workflow_builder.directory_for(multiply_job_name) 
checkpointed_multiply_file = multiple_dir / "___ckpt" checkpointed_multiply_file.touch() multiply_output_file.touch() workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ) monkeypatch.setattr("builtins.input", lambda _: "n") workflow_builder.write_dax_to_dir() assert checkpointed_multiply_file.exists() def _job_in_dax_has_category(dax_file, target_job_locator, category): target_job_name = str(target_job_locator).replace("/", "_") with dax_file.open("r") as f: data = load(f, Loader=SafeLoader) for item in data["jobs"]: if item["name"] == target_job_name: if "dagman" in item["profiles"].keys(): if "CATEGORY" in item["profiles"]["dagman"].keys(): if item["profiles"]["dagman"]["CATEGORY"] == category: return True return False def test_dax_with_categories(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiply_job_category = "arithmetic" workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], category=multiply_job_category, ) # Check that the multiply job has the appropriate category set in the DAX file dax_file = workflow_builder.write_dax_to_dir() assert dax_file.exists() assert _job_in_dax_has_category(dax_file, multiply_job_name, multiply_job_category) assert not _job_in_dax_has_category( dax_file, multiply_job_name, "an-arbitrary-category" ) def 
test_dax_with_saga_categories(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) multiply_partition = "gaia" multiply_slurm_params = Parameters.from_mapping( {"partition": multiply_partition, "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_resources = SlurmResourceRequest.from_parameters(multiply_slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], resource_request=multiply_resources, ), locator=Locator("multiply"), ) sort_partition = "lestat" sort_slurm_params = Parameters.from_mapping( {"partition": sort_partition, "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) sort_resources = SlurmResourceRequest.from_parameters(sort_slurm_params) sort_job_name = Locator(_parse_parts("jobs/sort")) sorted_output_file = tmp_path / "sorted_nums.txt" sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=sort_resources, ) dax_file = workflow_builder.write_dax_to_dir() assert dax_file.exists() # Check that the multiply and sort jobs have the appropriate partition-defined categories set in # the DAX file assert _job_in_dax_has_category(dax_file, 
multiply_job_name, multiply_partition) assert not _job_in_dax_has_category(dax_file, multiply_job_name, sort_partition) assert _job_in_dax_has_category(dax_file, sort_job_name, sort_partition) assert not _job_in_dax_has_category(dax_file, sort_job_name, multiply_partition) def test_category_max_jobs(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "home_dir": str(tmp_path), } ) multiply_slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_resources = SlurmResourceRequest.from_parameters(multiply_slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_output_file = tmp_path / "multiplied_nums.txt" multiply_input_file = tmp_path / "raw_nums.txt" multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], resource_request=multiply_resources, ), locator=Locator("multiply"), ) sort_slurm_params = Parameters.from_mapping( { "partition": "ephemeral", "num_cpus": 1, "num_gpus": 0, "memory": "4G", "job_time_in_minutes": 120, } ) sort_resources = SlurmResourceRequest.from_parameters(sort_slurm_params) sort_job_name = Locator(_parse_parts("jobs/sort")) sorted_output_file = tmp_path / "sorted_nums.txt" sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=sort_resources, ) 
workflow_builder.limit_jobs_for_category("gaia", 1) workflow_builder.write_dax_to_dir() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() # Make sure the config contains the appropriate maxjobs lines and no inappropriate maxjobs lines with properties_file.open("r") as f: lines = f.readlines() for line in lines: print(line) assert any("dagman.gaia.maxjobs = 1" in line for line in lines) assert all("dagman.ephemeral.maxjobs =" not in line for line in lines) def test_dax_test_exclude_nodes_on_saga(tmp_path): sample_exclude = "saga01,saga03,saga21,saga05" sample_include = "saga06" params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "exclude_list": sample_exclude, "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(0, 25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) sort_params = Parameters.from_mapping( 
{"input_file": multiply_output_file, "output_file": sorted_output_file} ) job_profile = PegasusProfile( namespace="pegasus", key="transfer.bypass.input.staging", value="True" ) resources = SlurmResourceRequest.from_parameters( slurm_params.unify({"run_on_single_node": sample_include}) ) workflow_builder = WorkflowBuilder.from_parameters(params) multiply_job_locator = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_locator, multiply_by_x_main, multiply_params, depends_on=[], job_profiles=[job_profile], ), locator=Locator("multiply"), ) sort_job_locator = Locator(_parse_parts("jobs/sort")) workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, ) dax_file = workflow_builder.write_dax_to_dir(tmp_path) with dax_file.open("r") as dax: dax_yaml = load(dax, Loader=SafeLoader) root = dax_yaml["jobs"] for item in root: if item["type"] == "job": if "pegasus" in item["profiles"]: if item["name"] == "jobs_multiply": assert ( f"--exclude={sample_exclude}" in item["profiles"]["pegasus"]["glite.arguments"] ) elif item["name"] == "jobs_sort": assert "--exclude=" in item["profiles"]["pegasus"]["glite.arguments"] assert ( f"--nodelist={sample_include}" in item["profiles"]["pegasus"]["glite.arguments"] ) else: assert False def test_dax_with_job_in_container(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = 
immutableset(int(random.random() * 100) for _ in range(25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = Parameters.from_mapping( {"input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4} ) sort_params = Parameters.from_mapping( {"input_file": multiply_output_file, "output_file": sorted_output_file} ) resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) # Add Container example_docker = workflow_builder.add_container( "example_container", "docker", tmp_path / "docker.img" ) with pytest.raises(ValueError): _ = workflow_builder.add_container( "fake_container", "invalid", tmp_path / "invalid_docker.img" ) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[], container=example_docker, ), locator=Locator("multiply"), ) multiple_dir = workflow_builder.directory_for(multiply_job_name) assert (multiple_dir / "___run.sh").exists() assert (multiple_dir / "____params.params").exists() sort_job_name = Locator(_parse_parts("jobs/sort")) sort_dir = workflow_builder.directory_for(sort_job_name) workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, container=example_docker, ) assert (sort_dir / "___run.sh").exists() assert (sort_dir / "____params.params").exists() dax_file_one = workflow_builder.write_dax_to_dir() assert dax_file_one.exists() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / 
"replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() def test_dax_with_job_on_saga_with_dict_as_params(tmp_path): workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "partition": "gaia", "experiment_name": "fred", "home_dir": str(tmp_path), } ) slurm_params = Parameters.from_mapping( {"partition": "gaia", "num_cpus": 1, "num_gpus": 0, "memory": "4G"} ) multiply_input_file = tmp_path / "raw_nums.txt" random = Random() random.seed(0) nums = immutableset(int(random.random() * 100) for _ in range(25)) multiply_output_file = tmp_path / "multiplied_nums.txt" sorted_output_file = tmp_path / "sorted_nums.txt" add_output_file = tmp_path / "add_nums.txt" with multiply_input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) multiply_params = { "input_file": multiply_input_file, "output_file": multiply_output_file, "x": 4, } sort_params = {"input_file": multiply_output_file, "output_file": sorted_output_file} add_args = f"{sorted_output_file} {add_output_file} --y 10" resources = SlurmResourceRequest.from_parameters(slurm_params) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) multiply_job_name = Locator(_parse_parts("jobs/multiply")) multiply_artifact = ValueArtifact( multiply_output_file, depends_on=workflow_builder.run_python_on_parameters( multiply_job_name, multiply_by_x_main, multiply_params, depends_on=[] ), locator=Locator("multiply"), ) multiple_dir = workflow_builder.directory_for(multiply_job_name) assert (multiple_dir / "___run.sh").exists() assert (multiple_dir / 
"____params.params").exists() sort_job_name = Locator(_parse_parts("jobs/sort")) sort_dir = workflow_builder.directory_for(sort_job_name) sort_artifact = workflow_builder.run_python_on_parameters( sort_job_name, sort_nums_main, sort_params, depends_on=[multiply_artifact], resource_request=resources, category="add", ) assert (sort_dir / "___run.sh").exists() assert (sort_dir / "____params.params").exists() add_job_name = Locator(_parse_parts("jobs/add")) add_dir = workflow_builder.directory_for(add_job_name) workflow_builder.run_python_on_args( add_job_name, "add_job_main.py", add_args, depends_on=[sort_artifact] ) assert (add_dir / "___run.sh").exists() dax_file_one = workflow_builder.write_dax_to_dir(tmp_path) dax_file_two = workflow_builder.write_dax_to_dir() assert dax_file_one.exists() assert dax_file_two.exists() submit_script_one = tmp_path / "submit_script_one.sh" submit_script_two = tmp_path / "submit_script_two.sh" build_submit_script( submit_script_one, str(dax_file_one), workflow_builder._workflow_directory, # pylint:disable=protected-access ) build_submit_script( submit_script_two, str(dax_file_two), workflow_builder._workflow_directory, # pylint:disable=protected-access ) assert submit_script_one.exists() assert submit_script_two.exists() site_catalog = workflow_params.existing_directory("workflow_directory") / "sites.yml" assert site_catalog.exists() replica_catalog = ( workflow_params.existing_directory("workflow_directory") / "replicas.yml" ) assert replica_catalog.exists() transformations_catalog = ( workflow_params.existing_directory("workflow_directory") / "transformations.yml" ) assert transformations_catalog.exists() properties_file = ( workflow_params.existing_directory("workflow_directory") / "pegasus.properties" ) assert properties_file.exists() def test_dax_with_python_into_container_jobs(tmp_path): docker_tar = Path(f"{tmp_path}/docker/tar.tar") docker_build_dir = tmp_path docker_image_name = "pegasus_wrapper_container_demo" 
docker_image_tag = "0.2" # Generating parameters for initializing a workflow # We recommend making workflow directory, site, and partition parameters # in an research workflow workflow_params = Parameters.from_mapping( { "workflow_name": "Test", "workflow_created": "Testing", "workflow_log_dir": str(tmp_path / "log"), "workflow_directory": str(tmp_path / "working"), "site": "saga", "namespace": "test", "home_dir": str(tmp_path), "partition": "scavenge", } ) saga31_request = SlurmResourceRequest.from_parameters( Parameters.from_mapping({"run_on_single_node": "saga31", "partition": "gaia"}) ) # Our source input for the sample jobs input_file = tmp_path / "raw_nums.txt" add_y_output_file_nas = tmp_path / "nums_y.txt" sorted_output_file_nas = tmp_path / "sorted.txt" random = Random() random.seed(0) nums = [int(random.random() * 100) for _ in range(0, 25)] # Base Job Locator job_locator = Locator(("jobs",)) docker_python_root = Path("/home/app/") # Write a list of numbers out to be able to run the workflow with input_file.open("w") as mult_file: mult_file.writelines(f"{num}\n" for num in nums) workflow_builder = WorkflowBuilder.from_parameters(workflow_params) build_container_locator = job_locator / "build_docker" build_container = workflow_builder.run_bash( build_container_locator, command=[ "mkdir -p /scratch/dockermount/pegasus_wrapper_tmp", f"cd {docker_build_dir}", f"docker build . 
-t {docker_image_name}:{docker_image_tag}", f"docker save -o /scratch/dockermount/pegasus_wrapper_tmp/{docker_tar.name} {docker_image_name}:{docker_image_tag}", f"cp /scratch/dockermount/pegasus_wrapper_tmp/{docker_tar.name} {docker_tar.absolute()}", f"chmod go+r {docker_tar.absolute()}", ], depends_on=[], resource_request=saga31_request, ) build_container_dir = workflow_builder.directory_for(build_container_locator) assert (build_container_dir / "script.sh").exists() python36 = workflow_builder.add_container( f"{docker_image_name}:{docker_image_tag}", "docker", str(docker_tar.absolute()), image_site="saga", bypass_staging=True, ) job_profile = PegasusProfile( namespace="pegasus", key="transfer.bypass.input.staging", value="True" ) mongo4_4 = workflow_builder.add_container( "mongo:4.4", "docker", "path/to/tar.tar", image_site="saga", bypass_staging=True ) with pytest.raises(RuntimeError): _ = workflow_builder.stop_docker_as_service( mongo4_4, depends_on=[], resource_request=saga31_request ) start_mongo = workflow_builder.start_docker_as_service( mongo4_4, depends_on=[build_container], docker_args=f"-v /scratch/mongo/data/db:/data/db", resource_request=saga31_request, ) mongo4_4_dir = workflow_builder.directory_for(Locator(("containers", mongo4_4.name))) assert (mongo4_4_dir / "start.sh").exists() assert (mongo4_4_dir / "stop.sh").exists() add_y_locator = job_locator / "add" add_y_job = workflow_builder.run_python_on_args( add_y_locator, docker_python_root / "add_y.py", set_args=f"{input_file} {add_y_output_file_nas} --y 10", depends_on=[build_container], job_profiles=[job_profile], resource_request=saga31_request, container=python36, input_file_paths=[input_file], output_file_paths=[add_y_output_file_nas], ) add_y_dir = workflow_builder.directory_for(add_y_locator) assert (add_y_dir / "___run.sh").exists() with pytest.raises(RuntimeError): _ = workflow_builder.run_python_on_args( add_y_locator, docker_python_root / "add_y.py", set_args=f"{input_file} 
{add_y_output_file_nas} --y 10", depends_on=[build_container], job_profiles=[job_profile], resource_request=saga31_request, container=python36, input_file_paths=[input_file, input_file], output_file_paths=[add_y_output_file_nas], ) sort_job_locator = job_locator / "sort" sort_job = workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=sorted_output_file_nas, ) assert sort_job == workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=sorted_output_file_nas, ) sort_job_dir = workflow_builder.directory_for(sort_job_locator) assert (sort_job_dir / "___run.sh").exists() assert (sort_job_dir / "____params.params").exists() with pytest.raises(RuntimeError): _ = workflow_builder.run_python_on_parameters( sort_job_locator, sort_nums_main, {"input_file": add_y_output_file_nas, "output_file": sorted_output_file_nas}, depends_on=[add_y_job], container=python36, job_profiles=[job_profile], resource_request=saga31_request, input_file_paths=add_y_output_file_nas, output_file_paths=[sorted_output_file_nas, sorted_output_file_nas], ) celebration_bash_locator = job_locator / "celebrate" celebration_bash = workflow_builder.run_bash( celebration_bash_locator, 'echo "Jobs Runs Successfully"', depends_on=[sort_job], job_profiles=[job_profile], ) assert celebration_bash == workflow_builder.run_bash( celebration_bash_locator, 'echo "Jobs Runs Successfully"', depends_on=[sort_job], job_profiles=[job_profile], ) celebration_bash_dir = 
workflow_builder.directory_for(celebration_bash_locator) assert (celebration_bash_dir / "script.sh").exists() _ = workflow_builder.stop_docker_as_service( mongo4_4, depends_on=[start_mongo, sort_job], resource_request=saga31_request ) dax_file_one = workflow_builder.write_dax_to_dir(tmp_path) assert dax_file_one.exists() submit_script_one = tmp_path / "submit_script_one.sh" build_submit_script( submit_script_one, str(dax_file_one), workflow_builder._workflow_directory, # pylint:disable=protected-access ) assert submit_script_one.exists()
true
true
f73c10ee3e5c24d52e1500d097c6c6a3310f74e0
10,284
py
Python
plugin/radar/depth_net.py
a1600012888/mmdetection3d
2e01549c56dabf1965abc975a7301a8d746973ad
[ "Apache-2.0" ]
null
null
null
plugin/radar/depth_net.py
a1600012888/mmdetection3d
2e01549c56dabf1965abc975a7301a8d746973ad
[ "Apache-2.0" ]
null
null
null
plugin/radar/depth_net.py
a1600012888/mmdetection3d
2e01549c56dabf1965abc975a7301a8d746973ad
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Toyota Research Institute. All rights reserved. import torch import torch.nn as nn from .layers01 import \ PackLayerConv3d, UnpackLayerConv3d, Conv2D, ResidualBlock, InvDepth from .utils import DepthPredictHead2Up, get_depth_metrics from mmdet.models import DETECTORS import torch.nn.functional as F @DETECTORS.register_module() class PackNetSlim01(nn.Module): """ PackNet network with 3d convolutions (version 01, from the CVPR paper). Slimmer version, with fewer feature channels https://arxiv.org/abs/1905.02693 Parameters ---------- dropout : float Dropout value to use version : str Has a XY format, where: X controls upsampling variations (not used at the moment). Y controls feature stacking (A for concatenation and B for addition) kwargs : dict Extra parameters """ def __init__(self, dropout=None, version=None, min_depth=0.5, **kwargs): super().__init__() self.version = version[1:] # Input/output channels in_channels = 3 out_channels = 1 # Hyper-parameters ni, no = 32, out_channels n1, n2, n3, n4, n5 = 16, 32, 64, 128, 256 #n1, n2, n3, n4, n5 = 32, 64, 128, 256, 512 num_blocks = [2, 2, 3, 3] pack_kernel = [5, 3, 3, 3, 3] unpack_kernel = [3, 3, 3, 3, 3] iconv_kernel = [3, 3, 3, 3, 3] num_3d_feat = 4 # Initial convolutional layer #self.down_sample_conv = Conv2D(in_channels, 16, 5, 2) self.pre_calc = Conv2D(in_channels, ni, 5, 1) # Support for different versions if self.version == 'A': # Channel concatenation n1o, n1i = n1, n1 + ni + no n2o, n2i = n2, n2 + n1 + no n3o, n3i = n3, n3 + n2 + no n4o, n4i = n4, n4 + n3 n5o, n5i = n5, n5 + n4 elif self.version == 'B': # Channel addition n1o, n1i = n1, n1 + no n2o, n2i = n2, n2 + no n3o, n3i = n3//2, n3//2 + no n4o, n4i = n4//2, n4//2 n5o, n5i = n5//2, n5//2 else: raise ValueError('Unknown PackNet version {}'.format(version)) # Encoder self.pack1 = PackLayerConv3d(n1, pack_kernel[0], d=num_3d_feat) self.pack2 = PackLayerConv3d(n2, pack_kernel[1], d=num_3d_feat) self.pack3 = PackLayerConv3d(n3, pack_kernel[2], 
d=num_3d_feat) self.pack4 = PackLayerConv3d(n4, pack_kernel[3], d=num_3d_feat) self.pack5 = PackLayerConv3d(n5, pack_kernel[4], d=num_3d_feat) self.conv1 = Conv2D(ni, n1, 7, 1) self.conv2 = ResidualBlock(n1, n2, num_blocks[0], 1, dropout=dropout) self.conv3 = ResidualBlock(n2, n3, num_blocks[1], 1, dropout=dropout) self.conv4 = ResidualBlock(n3, n4, num_blocks[2], 1, dropout=dropout) self.conv5 = ResidualBlock(n4, n5, num_blocks[3], 1, dropout=dropout) # Decoder self.unpack5 = UnpackLayerConv3d(n5, n5o, unpack_kernel[0], d=num_3d_feat) self.unpack4 = UnpackLayerConv3d(n5, n4o, unpack_kernel[1], d=num_3d_feat) self.unpack3 = UnpackLayerConv3d(n4, n3o, unpack_kernel[2], d=num_3d_feat) self.unpack2 = UnpackLayerConv3d(n3, n2o, unpack_kernel[3], d=num_3d_feat) self.unpack1 = UnpackLayerConv3d(n2, n1o, unpack_kernel[4], d=num_3d_feat) self.iconv5 = Conv2D(n5i, n5, iconv_kernel[0], 1) self.iconv4 = Conv2D(n4i, n4, iconv_kernel[1], 1) self.iconv3 = Conv2D(n3i, n3, iconv_kernel[2], 1) self.iconv2 = Conv2D(n2i, n2, iconv_kernel[3], 1) self.iconv1 = Conv2D(n1i, n1, iconv_kernel[4], 1) # Depth Layers self.unpack_disps = nn.PixelShuffle(2) self.unpack_disp4 = nn.Upsample(scale_factor=2, mode='nearest', align_corners=None) self.unpack_disp3 = nn.Upsample(scale_factor=2, mode='nearest', align_corners=None) self.unpack_disp2 = nn.Upsample(scale_factor=2, mode='nearest', align_corners=None) self.disp4_layer = InvDepth(n4, out_channels=out_channels, min_depth=min_depth) self.disp3_layer = InvDepth(n3, out_channels=out_channels, min_depth=min_depth) self.disp2_layer = InvDepth(n2, out_channels=out_channels, min_depth=min_depth) self.disp1_layer = InvDepth(n1, out_channels=out_channels, min_depth=min_depth) self.init_weights() def init_weights(self): """Initializes network weights.""" for m in self.modules(): if isinstance(m, (nn.Conv2d, nn.Conv3d)): nn.init.xavier_uniform_(m.weight) if m.bias is not None: m.bias.data.zero_() def get_pred(self, data, **kwargs): """ Runs the network 
and returns inverse depth maps (4 scales if training and 1 if not). """ # x = data['img'] x = data #x = self.down_sample_conv(x) x = self.pre_calc(x) # Encoder x1 = self.conv1(x) x1p = self.pack1(x1) x2 = self.conv2(x1p) x2p = self.pack2(x2) x3 = self.conv3(x2p) x3p = self.pack3(x3) x4 = self.conv4(x3p) x4p = self.pack4(x4) x5 = self.conv5(x4p) x5p = self.pack5(x5) # Skips skip1 = x skip2 = x1p skip3 = x2p skip4 = x3p skip5 = x4p # Decoder unpack5 = self.unpack5(x5p) if self.version == 'A': concat5 = torch.cat((unpack5, skip5), 1) else: concat5 = unpack5 + skip5 iconv5 = self.iconv5(concat5) unpack4 = self.unpack4(iconv5) if self.version == 'A': concat4 = torch.cat((unpack4, skip4), 1) else: concat4 = unpack4 + skip4 iconv4 = self.iconv4(concat4) inv_depth4 = self.disp4_layer(iconv4) up_inv_depth4 = self.unpack_disp4(inv_depth4) unpack3 = self.unpack3(iconv4) if self.version == 'A': concat3 = torch.cat((unpack3, skip3, up_inv_depth4), 1) else: concat3 = torch.cat((unpack3 + skip3, up_inv_depth4), 1) iconv3 = self.iconv3(concat3) inv_depth3 = self.disp3_layer(iconv3) up_inv_depth3 = self.unpack_disp3(inv_depth3) unpack2 = self.unpack2(iconv3) if self.version == 'A': concat2 = torch.cat((unpack2, skip2, up_inv_depth3), 1) else: concat2 = torch.cat((unpack2 + skip2, up_inv_depth3), 1) iconv2 = self.iconv2(concat2) inv_depth2 = self.disp2_layer(iconv2) up_inv_depth2 = self.unpack_disp2(inv_depth2) unpack1 = self.unpack1(iconv2) if self.version == 'A': concat1 = torch.cat((unpack1, skip1, up_inv_depth2), 1) else: concat1 = torch.cat((unpack1 + skip1, up_inv_depth2), 1) iconv1 = self.iconv1(concat1) inv_depth1 = self.disp1_layer(iconv1) if self.training: inv_depths = [inv_depth1, inv_depth2, inv_depth3, inv_depth4] #inv_depths = [inv_depth1] else: inv_depths = [inv_depth1] #inv_depths = [F.interpolate(t_inv_depth, scale_factor=2, mode="bilinear", align_corners=False) for t_inv_depth in inv_depths] # ret depth pred return inv_depths def forward(self, return_loss=True, 
rescale=False, **kwargs): if not return_loss: # in evalhook! x = kwargs['img'] label = kwargs['depth_map'] data = {'img':x, 'depth_map':label} depth_pred = self.get_pred(data)[0] label = data['depth_map'].unsqueeze(dim=1) mask = (label > 0) #print(depth_pred.shape, label.shape, mask.shape, 'data shape') loss = torch.abs((label - depth_pred)) * mask loss = torch.sum(loss) / torch.sum(mask) with torch.no_grad(): metrics = get_depth_metrics(depth_pred, label, mask) # abs_diff, abs_rel, sq_rel, rmse, rmse_log metrics = [m.item() for m in metrics] # hack the hook # outputs[0]=None. see https://github.com/open-mmlab/mmdetection/blob/master/mmdet/apis/test.py#L99 #outputs = {'loss': loss, 'log_vars':log_vars, 'num_samples':depth_pred.size(0), 0:None} #print('val', loss) metrics.append(loss.item()) return [metrics] raise NotImplementedError def train_step(self, data, optimzier): depth_pred = self.get_pred(data)[0] label = data['depth_map'].unsqueeze(dim=1) mask = (label > 0) #print(depth_pred.shape, label.shape, mask.shape, 'data shape') #from IPython import embed #embed() loss = torch.abs((label - depth_pred)) * mask loss = torch.sum(loss) / torch.sum(mask) log_var = {} with torch.no_grad(): metrics = get_depth_metrics(depth_pred, label, mask) # abs_diff, abs_rel, sq_rel, rmse, rmse_log metrics = [m.item() for m in metrics] abs_diff, abs_rel, sq_rel, rmse, rmse_log = metrics sparsity = torch.sum(mask) * 1.0 / torch.numel(mask) std = torch.tensor([58.395, 57.12, 57.375]).cuda().view(1, -1, 1, 1) mean = torch.tensor([123.675, 116.28, 103.53]).cuda().view(1, -1, 1, 1) img = data['img'] * std + mean img = img / 255.0 depth_at_gt = depth_pred * mask log_vars = {'loss': loss.item(), 'sparsity': sparsity.item(), 'abs_diff': abs_diff, 'abs_rel': abs_rel, 'sq_rel': sq_rel, 'rmse': rmse, 'rmse_log': rmse_log } # 'pred', 'data', 'label', 'depth_at_gt' is used for visualization only! 
outputs = {'pred': torch.clamp(1.0 / (depth_pred+1e-4), 0, 1), 'data': img, 'label': torch.clamp(1.0 / (label+1e-4), 0, 1), 'depth_at_gt': torch.clamp(1.0 / (depth_at_gt+1e-4), 0., 1), 'loss':loss, 'log_vars':log_vars, 'num_samples':depth_pred.size(0)} return outputs def val_step(self, data, optimizer): return self.train_step(self, data, optimizer)
37.808824
134
0.583333
import torch import torch.nn as nn from .layers01 import \ PackLayerConv3d, UnpackLayerConv3d, Conv2D, ResidualBlock, InvDepth from .utils import DepthPredictHead2Up, get_depth_metrics from mmdet.models import DETECTORS import torch.nn.functional as F @DETECTORS.register_module() class PackNetSlim01(nn.Module): def __init__(self, dropout=None, version=None, min_depth=0.5, **kwargs): super().__init__() self.version = version[1:] in_channels = 3 out_channels = 1 ni, no = 32, out_channels n1, n2, n3, n4, n5 = 16, 32, 64, 128, 256 num_blocks = [2, 2, 3, 3] pack_kernel = [5, 3, 3, 3, 3] unpack_kernel = [3, 3, 3, 3, 3] iconv_kernel = [3, 3, 3, 3, 3] num_3d_feat = 4 self.pre_calc = Conv2D(in_channels, ni, 5, 1) if self.version == 'A': n1o, n1i = n1, n1 + ni + no n2o, n2i = n2, n2 + n1 + no n3o, n3i = n3, n3 + n2 + no n4o, n4i = n4, n4 + n3 n5o, n5i = n5, n5 + n4 elif self.version == 'B': n1o, n1i = n1, n1 + no n2o, n2i = n2, n2 + no n3o, n3i = n3//2, n3//2 + no n4o, n4i = n4//2, n4//2 n5o, n5i = n5//2, n5//2 else: raise ValueError('Unknown PackNet version {}'.format(version)) self.pack1 = PackLayerConv3d(n1, pack_kernel[0], d=num_3d_feat) self.pack2 = PackLayerConv3d(n2, pack_kernel[1], d=num_3d_feat) self.pack3 = PackLayerConv3d(n3, pack_kernel[2], d=num_3d_feat) self.pack4 = PackLayerConv3d(n4, pack_kernel[3], d=num_3d_feat) self.pack5 = PackLayerConv3d(n5, pack_kernel[4], d=num_3d_feat) self.conv1 = Conv2D(ni, n1, 7, 1) self.conv2 = ResidualBlock(n1, n2, num_blocks[0], 1, dropout=dropout) self.conv3 = ResidualBlock(n2, n3, num_blocks[1], 1, dropout=dropout) self.conv4 = ResidualBlock(n3, n4, num_blocks[2], 1, dropout=dropout) self.conv5 = ResidualBlock(n4, n5, num_blocks[3], 1, dropout=dropout) self.unpack5 = UnpackLayerConv3d(n5, n5o, unpack_kernel[0], d=num_3d_feat) self.unpack4 = UnpackLayerConv3d(n5, n4o, unpack_kernel[1], d=num_3d_feat) self.unpack3 = UnpackLayerConv3d(n4, n3o, unpack_kernel[2], d=num_3d_feat) self.unpack2 = UnpackLayerConv3d(n3, n2o, 
unpack_kernel[3], d=num_3d_feat) self.unpack1 = UnpackLayerConv3d(n2, n1o, unpack_kernel[4], d=num_3d_feat) self.iconv5 = Conv2D(n5i, n5, iconv_kernel[0], 1) self.iconv4 = Conv2D(n4i, n4, iconv_kernel[1], 1) self.iconv3 = Conv2D(n3i, n3, iconv_kernel[2], 1) self.iconv2 = Conv2D(n2i, n2, iconv_kernel[3], 1) self.iconv1 = Conv2D(n1i, n1, iconv_kernel[4], 1) self.unpack_disps = nn.PixelShuffle(2) self.unpack_disp4 = nn.Upsample(scale_factor=2, mode='nearest', align_corners=None) self.unpack_disp3 = nn.Upsample(scale_factor=2, mode='nearest', align_corners=None) self.unpack_disp2 = nn.Upsample(scale_factor=2, mode='nearest', align_corners=None) self.disp4_layer = InvDepth(n4, out_channels=out_channels, min_depth=min_depth) self.disp3_layer = InvDepth(n3, out_channels=out_channels, min_depth=min_depth) self.disp2_layer = InvDepth(n2, out_channels=out_channels, min_depth=min_depth) self.disp1_layer = InvDepth(n1, out_channels=out_channels, min_depth=min_depth) self.init_weights() def init_weights(self): for m in self.modules(): if isinstance(m, (nn.Conv2d, nn.Conv3d)): nn.init.xavier_uniform_(m.weight) if m.bias is not None: m.bias.data.zero_() def get_pred(self, data, **kwargs): x = data x = self.pre_calc(x) x1 = self.conv1(x) x1p = self.pack1(x1) x2 = self.conv2(x1p) x2p = self.pack2(x2) x3 = self.conv3(x2p) x3p = self.pack3(x3) x4 = self.conv4(x3p) x4p = self.pack4(x4) x5 = self.conv5(x4p) x5p = self.pack5(x5) skip1 = x skip2 = x1p skip3 = x2p skip4 = x3p skip5 = x4p unpack5 = self.unpack5(x5p) if self.version == 'A': concat5 = torch.cat((unpack5, skip5), 1) else: concat5 = unpack5 + skip5 iconv5 = self.iconv5(concat5) unpack4 = self.unpack4(iconv5) if self.version == 'A': concat4 = torch.cat((unpack4, skip4), 1) else: concat4 = unpack4 + skip4 iconv4 = self.iconv4(concat4) inv_depth4 = self.disp4_layer(iconv4) up_inv_depth4 = self.unpack_disp4(inv_depth4) unpack3 = self.unpack3(iconv4) if self.version == 'A': concat3 = torch.cat((unpack3, skip3, up_inv_depth4), 1) 
else: concat3 = torch.cat((unpack3 + skip3, up_inv_depth4), 1) iconv3 = self.iconv3(concat3) inv_depth3 = self.disp3_layer(iconv3) up_inv_depth3 = self.unpack_disp3(inv_depth3) unpack2 = self.unpack2(iconv3) if self.version == 'A': concat2 = torch.cat((unpack2, skip2, up_inv_depth3), 1) else: concat2 = torch.cat((unpack2 + skip2, up_inv_depth3), 1) iconv2 = self.iconv2(concat2) inv_depth2 = self.disp2_layer(iconv2) up_inv_depth2 = self.unpack_disp2(inv_depth2) unpack1 = self.unpack1(iconv2) if self.version == 'A': concat1 = torch.cat((unpack1, skip1, up_inv_depth2), 1) else: concat1 = torch.cat((unpack1 + skip1, up_inv_depth2), 1) iconv1 = self.iconv1(concat1) inv_depth1 = self.disp1_layer(iconv1) if self.training: inv_depths = [inv_depth1, inv_depth2, inv_depth3, inv_depth4] else: inv_depths = [inv_depth1] return inv_depths def forward(self, return_loss=True, rescale=False, **kwargs): if not return_loss: x = kwargs['img'] label = kwargs['depth_map'] data = {'img':x, 'depth_map':label} depth_pred = self.get_pred(data)[0] label = data['depth_map'].unsqueeze(dim=1) mask = (label > 0) loss = torch.abs((label - depth_pred)) * mask loss = torch.sum(loss) / torch.sum(mask) with torch.no_grad(): metrics = get_depth_metrics(depth_pred, label, mask) metrics = [m.item() for m in metrics] metrics.append(loss.item()) return [metrics] raise NotImplementedError def train_step(self, data, optimzier): depth_pred = self.get_pred(data)[0] label = data['depth_map'].unsqueeze(dim=1) mask = (label > 0) loss = torch.abs((label - depth_pred)) * mask loss = torch.sum(loss) / torch.sum(mask) log_var = {} with torch.no_grad(): metrics = get_depth_metrics(depth_pred, label, mask) metrics = [m.item() for m in metrics] abs_diff, abs_rel, sq_rel, rmse, rmse_log = metrics sparsity = torch.sum(mask) * 1.0 / torch.numel(mask) std = torch.tensor([58.395, 57.12, 57.375]).cuda().view(1, -1, 1, 1) mean = torch.tensor([123.675, 116.28, 103.53]).cuda().view(1, -1, 1, 1) img = data['img'] * std + mean 
img = img / 255.0 depth_at_gt = depth_pred * mask log_vars = {'loss': loss.item(), 'sparsity': sparsity.item(), 'abs_diff': abs_diff, 'abs_rel': abs_rel, 'sq_rel': sq_rel, 'rmse': rmse, 'rmse_log': rmse_log } outputs = {'pred': torch.clamp(1.0 / (depth_pred+1e-4), 0, 1), 'data': img, 'label': torch.clamp(1.0 / (label+1e-4), 0, 1), 'depth_at_gt': torch.clamp(1.0 / (depth_at_gt+1e-4), 0., 1), 'loss':loss, 'log_vars':log_vars, 'num_samples':depth_pred.size(0)} return outputs def val_step(self, data, optimizer): return self.train_step(self, data, optimizer)
true
true
f73c139ec5017afdf9ac4cced3afcaf2a5531129
5,997
py
Python
pybind/slxos/v17s_1_02/brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/nodes/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
null
null
null
pybind/slxos/v17s_1_02/brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/nodes/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
null
null
null
pybind/slxos/v17s_1_02/brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/nodes/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
1
2021-11-05T22:15:42.000Z
2021-11-05T22:15:42.000Z
from operator import attrgetter import pyangbind.lib.xpathhelper as xpathhelper from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType from pyangbind.lib.base import PybindBase from decimal import Decimal from bitarray import bitarray import __builtin__ class nodes(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module brocade-tunnels-ext - based on the path /brocade_tunnels_ext_rpc/get-tunnel-info/output/tunnel/nodes. Each member element of the container is represented as a class variable - with a specific YANG type. YANG Description: Indicates the nodes from which this tunnel data are retrieved. """ __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__node_id',) _yang_name = 'nodes' _rest_name = 'nodes' _pybind_generated_by = 'container' def __init__(self, *args, **kwargs): path_helper_ = kwargs.pop("path_helper", None) if path_helper_ is False: self._path_helper = False elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper): self._path_helper = path_helper_ elif hasattr(self, "_parent"): path_helper_ = getattr(self._parent, "_path_helper", False) self._path_helper = path_helper_ else: self._path_helper = False extmethods = kwargs.pop("extmethods", None) if extmethods is False: self._extmethods = False elif extmethods is not None and isinstance(extmethods, dict): self._extmethods = extmethods elif hasattr(self, "_parent"): extmethods = getattr(self._parent, "_extmethods", None) self._extmethods = extmethods else: self._extmethods = False self.__node_id = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, 
extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path()+[self._yang_name] else: return [u'brocade_tunnels_ext_rpc', u'get-tunnel-info', u'output', u'tunnel', u'nodes'] def _rest_path(self): if hasattr(self, "_parent"): if self._rest_name: return self._parent._rest_path()+[self._rest_name] else: return self._parent._rest_path() else: return [u'get-tunnel-info', u'output', u'tunnel', u'nodes'] def _get_node_id(self): """ Getter method for node_id, mapped from YANG variable /brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/nodes/node_id (uint32) YANG Description: Node id """ return self.__node_id def _set_node_id(self, v, load=False): """ Setter method for node_id, mapped from YANG variable /brocade_tunnels_ext_rpc/get_tunnel_info/output/tunnel/nodes/node_id (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_node_id is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_node_id() directly. 
YANG Description: Node id """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """node_id must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True)""", }) self.__node_id = t if hasattr(self, '_set'): self._set() def _unset_node_id(self): self.__node_id = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True) node_id = __builtin__.property(_get_node_id, _set_node_id) _pyangbind_elements = {'node_id': node_id, }
46.130769
458
0.717859
from operator import attrgetter import pyangbind.lib.xpathhelper as xpathhelper from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType from pyangbind.lib.base import PybindBase from decimal import Decimal from bitarray import bitarray import __builtin__ class nodes(PybindBase): __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__node_id',) _yang_name = 'nodes' _rest_name = 'nodes' _pybind_generated_by = 'container' def __init__(self, *args, **kwargs): path_helper_ = kwargs.pop("path_helper", None) if path_helper_ is False: self._path_helper = False elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper): self._path_helper = path_helper_ elif hasattr(self, "_parent"): path_helper_ = getattr(self._parent, "_path_helper", False) self._path_helper = path_helper_ else: self._path_helper = False extmethods = kwargs.pop("extmethods", None) if extmethods is False: self._extmethods = False elif extmethods is not None and isinstance(extmethods, dict): self._extmethods = extmethods elif hasattr(self, "_parent"): extmethods = getattr(self._parent, "_extmethods", None) self._extmethods = extmethods else: self._extmethods = False self.__node_id = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): 
all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path()+[self._yang_name] else: return [u'brocade_tunnels_ext_rpc', u'get-tunnel-info', u'output', u'tunnel', u'nodes'] def _rest_path(self): if hasattr(self, "_parent"): if self._rest_name: return self._parent._rest_path()+[self._rest_name] else: return self._parent._rest_path() else: return [u'get-tunnel-info', u'output', u'tunnel', u'nodes'] def _get_node_id(self): return self.__node_id def _set_node_id(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """node_id must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True)""", }) self.__node_id = t if hasattr(self, '_set'): self._set() def 
_unset_node_id(self): self.__node_id = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32)), is_leaf=False, yang_name="node-id", rest_name="node-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-tunnels-ext', defining_module='brocade-tunnels-ext', yang_type='uint32', is_config=True) node_id = __builtin__.property(_get_node_id, _set_node_id) _pyangbind_elements = {'node_id': node_id, }
true
true
f73c13bb34372d043ea74f3ff8fb5fc7580b5518
35,618
py
Python
utils/RigidPatching.py
thomaskuestner/CNNArt
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
[ "Apache-2.0" ]
22
2018-04-27T21:28:46.000Z
2021-12-24T06:44:55.000Z
utils/RigidPatching.py
thomaskuestner/CNNArt
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
[ "Apache-2.0" ]
81
2017-11-09T17:23:15.000Z
2020-01-28T22:54:13.000Z
utils/RigidPatching.py
thomaskuestner/CNNArt
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
[ "Apache-2.0" ]
18
2017-11-13T16:12:17.000Z
2020-08-27T10:17:34.000Z
''' Copyright: 2016-2019 Thomas Kuestner (thomas.kuestner@med.uni-tuebingen.de) under Apache2 license @author: Thomas Kuestner ''' import numpy as np import tensorflow as tf import math from utils.Label import Label ######################################################################################################################################### #Function: fRigidPatching # #The function fRigidPatching is responsible for splitting the dicom numpy array in patches depending on the patchSize and the # #patchOverlap. Besides the function creates an 1D array with the corresponding labels. # # # #Input: dicom_numpy_array ---> 3D dicom array (height, width, number of slices) # # patchSize ---> size of patches, example: [40, 40], patchSize[0] = height, patchSize[1] = weight, height and weight can differ # # patchOverlap ---> the ratio for overlapping, example: 0.25 # # mask_numpy_array ---> 3D mask array contains information about the areas of artefacts. movement-artefact = 1, shim-artefact = 2 # # noise-artefact = 3 # # ratio_labeling ---> set the ratio of the number of 'Pixel-Artefacts' to the whole number of pixels of one patch # #Output: dPatches ---> 3D-Numpy-Array, which contain all Patches. 
# # dLabels ---> 1D-Numpy-Array with all corresponding labels # ######################################################################################################################################### def fRigidPatching(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling): move_artefact = False shim_artefact = False noise_artefact = False #dLabels = [] dOverlap = np.multiply(patchSize, patchOverlap) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array(([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[ 0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]])) zero_pad = np.array(([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') Mask_zero_pad = np.lib.pad(mask_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') nbPatches = int(((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*dicom_numpy_array.shape[2]) dPatches = np.zeros((patchSize[0], patchSize[1], nbPatches), dtype=float) #dtype=np.float32 dLabels = np.zeros((nbPatches), dtype = float) #dtype = float idxPatch = 0 for iZ in range(0, dicom_numpy_array.shape[2], 1): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] dPatches[:,:,idxPatch] = dPatch dPatch_mask = 
Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] patch_number_value = patchSize[0] * patchSize[1] if np.count_nonzero((dPatch_mask==1).astype(np.int)) > int(ratio_labeling*patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask==2).astype(np.int)) > int(ratio_labeling*patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask==3).astype(np.int)) > int(ratio_labeling*patch_number_value): noise_artefact = True label = 0 if move_artefact == True and shim_artefact != True and noise_artefact != True: label = 1 elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = 2 elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = 3 elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = 4 elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = 5 elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = 6 elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = 7 print(label) dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False print("Rigid done!") print(dLabels) return dPatches, dLabels, nbPatches ######################################################################################################################################### #Function: fRigidPatching3D # #The function fRigidPatching3D is responsible for splitting the dicom numpy array in patches depending on the patchSize and the # #patchOverlap. Besides the function creates an 1D array with the corresponding labels. 
# # # #Input: dicom_numpy_array ---> 3D dicom array (height, width, number of slices) # # patchSize ---> size of patches, example: [40, 40], patchSize[0] = height, patchSize[1] = weight, height and weight can differ # # patchOverlap ---> the ratio for overlapping, example: 0.25 # # mask_numpy_array ---> 3D mask array contains information about the areas of artefacts. movement-artefact = 1, shim-artefact = 2 # # noise-artefact = 3 # # ratio_labeling ---> set the ratio of the number of 'Pixel-Artefacts' to the whole number of pixels of one patch # #Output: dPatches ---> 3D-Numpy-Array, which contain all Patches. # # dLabels ---> 1D-Numpy-Array with all corresponding labels # ######################################################################################################################################### def fRigidPatching3D(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling): move_artefact = False shim_artefact = False noise_artefact = False #dLabels = [] print(patchSize) dOverlap = np.round(np.multiply(patchSize, patchOverlap)) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) print(dOverlap, dNotOverlap) size_zero_pad = np.array(([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[ 0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((dicom_numpy_array.shape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) print(size_zero_pad.shape) zero_pad = np.array(([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1], int(size_zero_pad[2]) - dicom_numpy_array.shape[2]])) print(zero_pad.shape) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) print(zero_pad_part.shape) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), 
(zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') print(Img_zero_pad.shape) Mask_zero_pad = np.lib.pad(mask_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') print(Mask_zero_pad.shape) print(size_zero_pad[2]) print(np.round((1-patchOverlap)*patchSize[2])) print(((size_zero_pad[2]-patchSize[2])/(np.round((1-patchOverlap)*patchSize[2]))+1)) nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/(np.round((1-patchOverlap)*patchSize[2]))+1) print(nbPatches) dPatches = np.zeros((patchSize[0], patchSize[1], patchSize[2], int(nbPatches)), dtype=float) dLabels = np.zeros((int(nbPatches)), dtype = int) #float idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] print(dPatch.shape) print(dPatches[:,:,:,idxPatch].shape) dPatches[:,:,:,idxPatch] = dPatch dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] patch_number_value = patchSize[0] * patchSize[1]*patchSize[2] if np.count_nonzero((dPatch_mask==1).astype(np.int)) > int(ratio_labeling*patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask==2).astype(np.int)) > int(ratio_labeling*patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask==3).astype(np.int)) > int(ratio_labeling*patch_number_value): noise_artefact = True label = 0 if move_artefact == True and shim_artefact != True and noise_artefact != True: label = 1 elif move_artefact != 
True and shim_artefact == True and noise_artefact != True: label = 2 elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = 3 elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = 4 elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = 5 elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = 6 elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = 7 print(label) dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False print("Rigid done!") print(dLabels.dtype) return dPatches, dLabels, nbPatches def fRigidPatching3DN(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling): move_artefact = False shim_artefact = False noise_artefact = False #dLabels = [] dOverlap = np.multiply(patchSize, patchOverlap) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) print(dOverlap,dNotOverlap) size_zero_pad = np.array(([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[ 0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((dicom_numpy_array.shape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) zero_pad = np.array(([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1], int(size_zero_pad[2]) - dicom_numpy_array.shape[2]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') print(Img_zero_pad.shape) Mask_zero_pad = np.lib.pad(mask_numpy_array, ( 
(zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/((1-patchOverlap)*patchSize[2])+1) print(((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)) print(((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)) print(((size_zero_pad[2]-patchSize[2])/((1-patchOverlap)*patchSize[2])+1)) print(int(patchSize[0]), int(patchSize[1]), int(patchSize[2]), int(nbPatches)) dPatches = np.zeros((int(patchSize[0]), int(patchSize[1]), int(patchSize[2]), int(nbPatches)), dtype=float) dLabels = np.zeros((int(nbPatches)), dtype = float) idxPatch = 0 for iZ in range(0, dicom_numpy_array.shape[2], int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): print(iX, iY, iZ) dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] print(idxPatch) print(dPatch.shape) dPatches[:,:,:,idxPatch] = dPatch dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] patch_number_value = patchSize[0] * patchSize[1]*patchSize[2] if np.count_nonzero((dPatch_mask==1).astype(np.int)) > int(ratio_labeling*patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask==2).astype(np.int)) > int(ratio_labeling*patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask==3).astype(np.int)) > int(ratio_labeling*patch_number_value): noise_artefact = True label = 0 if move_artefact == True and shim_artefact != True and noise_artefact != True: label = 1 elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = 2 elif move_artefact != True and 
shim_artefact != True and noise_artefact == True: label = 3 elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = 4 elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = 5 elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = 6 elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = 7 dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False print("Rigid patching done!") #print("Rigid done!") #print(dLabels) return dPatches, dLabels, nbPatches def fRigidPatching_maskLabeling(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling, dataset): dPatches = None move_artefact = False shim_artefact = False noise_artefact = False #body region bodyRegion, bodyRegionLabel = dataset.getBodyRegion() # MRT weighting label (T1, T2) weighting, weightingLabel = dataset.getMRTWeighting() #dOverlap = np.multiply(patchSize, patchOverlap) dOverlap = np.round(np.multiply(patchSize, patchOverlap)) #dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) dNotOverlap = [patchSize[0]-dOverlap[0], patchSize[1]-dOverlap[1]] size_zero_pad = np.array( ([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]])) zero_pad = np.array( ([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') Mask_zero_pad = np.lib.pad(mask_numpy_array, ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], 
zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') nbPatches = int(((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*dicom_numpy_array.shape[2]) nbPatches_in_Y = int((size_zero_pad[0]-dOverlap[0])/dNotOverlap[0]) nbPatches_in_X = int((size_zero_pad[1]-dOverlap[1])/dNotOverlap[1]) nbPatches_in_Z = dicom_numpy_array.shape[2] nbPatches = nbPatches_in_X*nbPatches_in_Y*nbPatches_in_Z dPatches = np.zeros((patchSize[0], patchSize[1], nbPatches), dtype=float) # dtype=np.float32 #dLabels = np.zeros((nbPatches), dtype=float) # dtype = float dLabels = np.zeros((nbPatches), dtype=np.dtype('i4')) idxPatch = 0 for iZ in range(0, dicom_numpy_array.shape[2], 1): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] dPatches[:, :, idxPatch] = dPatch #if idxPatch == 7678: # print() dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] patch_number_value = patchSize[0] * patchSize[1] if np.count_nonzero((dPatch_mask == 1).astype(np.int)) > int(ratio_labeling * patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask == 2).astype(np.int)) > int(ratio_labeling * patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask == 3).astype(np.int)) > int(ratio_labeling * patch_number_value): noise_artefact = True label = Label.REFERENCE if move_artefact == True and shim_artefact != True and noise_artefact != True: label = Label.MOTION elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = Label.SHIM elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = Label.NOISE elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = Label.MOTION_AND_SHIM elif move_artefact == True and 
shim_artefact != True and noise_artefact == True: label = Label.MOTION_AND_NOISE elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = Label.SHIM_AND_NOISE elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = Label.MOTION_AND_SHIM_AND_NOISE # calculate final label label = label + bodyRegionLabel + weightingLabel #print(label) dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False print("Rigid patching done for %s!" % dataset.getPathdata()) #print(dLabels) #return dPatches, dLabels, nbPatches return dPatches, dLabels def fRigidPatching_patchLabeling(dicom_numpy_array, patchSize, patchOverlap, ratio_labeling): dPatches = None move_artefact = False shim_artefact = False noise_artefact = False dLabels = [] dOverlap = np.multiply(patchSize, patchOverlap) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array( ([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[ 0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]])) zero_pad = np.array( ([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ( (zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (0, 0)), mode='constant') for iZ in range(0, dicom_numpy_array.shape[2], 1): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ] dPatch = dPatch[:, :, np.newaxis] if dPatches is None: dPatches = dPatch else: dPatches = np.concatenate((dPatches, dPatch), 
axis=2) dLabels = np.ones((dPatches.shape[2]), dtype=np.dtype('i4')) return dPatches, dLabels ######################################################################################################################################### #Function: fRigidPatching3D # #The function fRigidPatching3D is responsible for splitting the dicom numpy array in patches depending on the patchSize and the # #patchOverlap. Besides the function creates an 1D array with the corresponding labels. # # # #Input: dicom_numpy_array ---> 3D dicom array (height, width, number of slices) # # patchSize ---> size of patches, example: [40, 40], patchSize[0] = height, patchSize[1] = weight, height and weight can differ # # patchOverlap ---> the ratio for overlapping, example: 0.25 # # mask_numpy_array ---> 3D mask array contains information about the areas of artefacts. movement-artefact = 1, shim-artefact = 2 # # noise-artefact = 3 # # ratio_labeling ---> set the ratio of the number of 'Pixel-Artefacts' to the whole number of pixels of one patch # #Output: dPatches ---> 3D-Numpy-Array, which contain all Patches. # # dLabels ---> 1D-Numpy-Array with all corresponding labels # ######################################################################################################################################### def fRigidPatching3D_maskLabeling(dicom_numpy_array, patchSize, patchOverlap, mask_numpy_array, ratio_labeling, dataset=None, dopatching=True): #ToDo odd patch size not supported! 
move_artefact = False shim_artefact = False noise_artefact = False if isinstance(dataset, int): # already pre-processed label bodyRegionLabel + weightingLabel bodyRegionweightingLabel = dataset else: # body region bodyRegion, bodyRegionLabel = dataset.getBodyRegion() # MRT weighting label (T1, T2) weighting, weightingLabel = dataset.getMRTWeighting() dOverlap = np.round(np.multiply(patchSize, patchOverlap)) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array(([math.ceil((dicom_numpy_array.shape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((dicom_numpy_array.shape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((dicom_numpy_array.shape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) zero_pad = np.array(([int(size_zero_pad[0]) - dicom_numpy_array.shape[0], int(size_zero_pad[1]) - dicom_numpy_array.shape[1], int(size_zero_pad[2]) - dicom_numpy_array.shape[2]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) Img_zero_pad = np.lib.pad(dicom_numpy_array, ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') Mask_zero_pad = np.lib.pad(mask_numpy_array, ((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/(np.round((1-patchOverlap)*patchSize[2]))+1) nbPatches_in_Y = int((size_zero_pad[0] - dOverlap[0]) / dNotOverlap[0]) nbPatches_in_X = int((size_zero_pad[1] - dOverlap[1]) / dNotOverlap[1]) nbPatches_in_Z = int((size_zero_pad[2] - dOverlap[2]) / 
dNotOverlap[2]) nbPatches = nbPatches_in_X * nbPatches_in_Y * nbPatches_in_Z dPatches = np.zeros((patchSize[0], patchSize[1], patchSize[2], int(nbPatches)), dtype=float) dLabels = np.zeros((int(nbPatches)), dtype = int) #float idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): dPatch = Img_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] dPatches[:,:,:,idxPatch] = dPatch dPatch_mask = Mask_zero_pad[iY:iY + patchSize[0], iX:iX + patchSize[1], iZ:iZ + patchSize[2]] patch_number_value = patchSize[0] * patchSize[1]*patchSize[2] if np.count_nonzero((dPatch_mask==1).astype(np.int)) > int(ratio_labeling*patch_number_value): move_artefact = True if np.count_nonzero((dPatch_mask==2).astype(np.int)) > int(ratio_labeling*patch_number_value): shim_artefact = True if np.count_nonzero((dPatch_mask==3).astype(np.int)) > int(ratio_labeling*patch_number_value): noise_artefact = True label = Label.REFERENCE if move_artefact == True and shim_artefact != True and noise_artefact != True: label = Label.MOTION elif move_artefact != True and shim_artefact == True and noise_artefact != True: label = Label.SHIM elif move_artefact != True and shim_artefact != True and noise_artefact == True: label = Label.NOISE elif move_artefact == True and shim_artefact == True and noise_artefact != True: label = Label.MOTION_AND_SHIM elif move_artefact == True and shim_artefact != True and noise_artefact == True: label = Label.MOTION_AND_NOISE elif move_artefact != True and shim_artefact == True and noise_artefact == True: label = Label.SHIM_AND_NOISE elif move_artefact == True and shim_artefact == True and noise_artefact == True: label = Label.MOTION_AND_SHIM_AND_NOISE if isinstance(dataset, int): label = bodyRegionweightingLabel + label else: label = weightingLabel + bodyRegionLabel + 
label dLabels[idxPatch] = label idxPatch += 1 move_artefact = False shim_artefact = False noise_artefact = False if isinstance(dataset, int): return dPatches else: print("Rigid patching done for %s " % dataset.getPathdata()) #print(dLabels) return dPatches, dLabels#, nbPatches def fRigidPatching3D_maskLabeling_tf(dicom_tensor, patchSize, patchOverlap, mask_numpy_array, ratio_labeling, dataset=None, dopatching=True): #ToDo odd patch size not supported! dOverlap = tf.math.round(tf.math.multiply(patchSize, patchOverlap)) dNotOverlap = tf.math.round(tf.math.multiply(patchSize, (1 - patchOverlap))) imgShape = dicom_tensor.shape.as_list() size_zero_pad = np.array(([math.ceil((imgShape[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((imgShape[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((imgShape[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) zero_pad = np.array(([int(size_zero_pad[0]) - imgShape[0], int(size_zero_pad[1]) - imgShape[1], int(size_zero_pad[2]) - imgShape[2]])) zero_pad_part = np.array(([int(math.ceil(zero_pad[0] / 2)), int(math.ceil(zero_pad[1] / 2)), int(math.ceil(zero_pad[2] / 2))])) Img_zero_pad = tf.pad(dicom_tensor, tf.Variable((zero_pad_part[0], zero_pad[0] - zero_pad_part[0]), (zero_pad_part[1], zero_pad[1] - zero_pad_part[1]), (zero_pad_part[2], zero_pad[2] - zero_pad_part[2])), mode='constant') #nbPatches = ((size_zero_pad[0]-patchSize[0])/((1-patchOverlap)*patchSize[0])+1)*((size_zero_pad[1]-patchSize[1])/((1-patchOverlap)*patchSize[1])+1)*((size_zero_pad[2]-patchSize[2])/(tf.math.round((1-patchOverlap)*patchSize[2]))+1) #nbPatches_in_Y = int((size_zero_pad[0] - dOverlap[0]) / dNotOverlap[0]) #nbPatches_in_X = int((size_zero_pad[1] - dOverlap[1]) / dNotOverlap[1]) #nbPatches_in_Z = int((size_zero_pad[2] - dOverlap[2]) / dNotOverlap[2]) #nbPatches = nbPatches_in_X * nbPatches_in_Y * nbPatches_in_Z #dPatches = tf.zeros((patchSize[0], patchSize[1], 
patchSize[2], int(nbPatches)), dtype=float) patch = [None for _ in range(fcalculatepatches(imgShape, patchSize, patchOverlap))] idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): patch[idxPatch] = tf.slice(Img_zero_pad, begin=[iY, iX, iZ], size=[patchSize[0], patchSize[1], patchSize[2]]) idxPatch += 1 dPatches = tf.stack(patch, axis=3) return dPatches def fcalculatepatches(imageSize, patchSize, patchOverlap): dOverlap = np.round(np.multiply(patchSize, patchOverlap)) dNotOverlap = np.round(np.multiply(patchSize, (1 - patchOverlap))) size_zero_pad = np.array( ([math.ceil((imageSize[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0], math.ceil((imageSize[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1], math.ceil((imageSize[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2]])) idxPatch = 0 for iZ in range(0, int(size_zero_pad[2] - dOverlap[2]), int(dNotOverlap[2])): for iY in range(0, int(size_zero_pad[0] - dOverlap[0]), int(dNotOverlap[0])): for iX in range(0, int(size_zero_pad[1] - dOverlap[1]), int(dNotOverlap[1])): idxPatch += 1 return idxPatch
61.199313
236
0.556264
import numpy as np import tensorflow as tf import math from utils.Label import Label
true
true
f73c14d25861ed70f0c0e1a0b50b9d6c72f556b2
24,829
py
Python
cirq-core/cirq/protocols/apply_unitary_protocol.py
Zshan0/Cirq
93bbaa853305faa65117bcbdc2063f741cb2977c
[ "Apache-2.0" ]
1
2022-02-02T07:13:54.000Z
2022-02-02T07:13:54.000Z
cirq-core/cirq/protocols/apply_unitary_protocol.py
bradyb/Cirq
610b0d4ea3a7862169610797266734c844ddcc1f
[ "Apache-2.0" ]
null
null
null
cirq-core/cirq/protocols/apply_unitary_protocol.py
bradyb/Cirq
610b0d4ea3a7862169610797266734c844ddcc1f
[ "Apache-2.0" ]
null
null
null
# Copyright 2018 The Cirq Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A protocol for implementing high performance unitary left-multiplies.""" from typing import ( Any, cast, Iterable, Optional, Sequence, Tuple, TYPE_CHECKING, TypeVar, Union, ) import numpy as np from typing_extensions import Protocol from cirq import linalg, qis from cirq._doc import doc_private from cirq.protocols import qid_shape_protocol from cirq.protocols.decompose_protocol import ( _try_decompose_into_operations_and_qubits, ) from cirq.type_workarounds import NotImplementedType if TYPE_CHECKING: import cirq # This is a special indicator value used by the apply_unitary method # to determine whether or not the caller provided a 'default' argument. It must # be of type np.ndarray to ensure the method has the correct type signature in # that case. It is checked for using `is`, so it won't have a false positive if # the user provides a different np.array([]) value. RaiseTypeErrorIfNotProvided: np.ndarray = np.array([]) TDefault = TypeVar('TDefault') class ApplyUnitaryArgs: """Arguments for performing an efficient left-multiplication by a unitary. The receiving object is expected to mutate `target_tensor` so that it contains the state after multiplication, and then return `target_tensor`. Alternatively, if workspace is required, the receiving object can overwrite `available_buffer` with the results and return `available_buffer`. 
Or, if the receiving object is attempting to be simple instead of fast, it can create an entirely new array and return that. Attributes: target_tensor: The input tensor that needs to be left-multiplied by the unitary effect of the receiving object. The tensor will have the shape (2, 2, 2, ..., 2). It usually corresponds to a multi-qubit superposition, but it could also be a multi-qubit unitary transformation or some other concept. available_buffer: Pre-allocated workspace with the same shape and dtype as the target tensor. axes: Which axes the unitary effect is being applied to (e.g. the qubits that the gate is operating on). """ def __init__( self, target_tensor: np.ndarray, available_buffer: np.ndarray, axes: Iterable[int] ): """Inits ApplyUnitaryArgs. Args: target_tensor: The input tensor that needs to be left-multiplied by the unitary effect of the receiving object. The tensor will have the shape (2, 2, 2, ..., 2). It usually corresponds to a multi-qubit superposition, but it could also be a multi-qubit unitary transformation or some other concept. available_buffer: Pre-allocated workspace with the same shape and dtype as the target tensor. axes: Which axes the unitary effect is being applied to (e.g. the qubits that the gate is operating on). """ self.target_tensor = target_tensor self.available_buffer = available_buffer self.axes = tuple(axes) @staticmethod def default( num_qubits: Optional[int] = None, *, qid_shape: Optional[Tuple[int, ...]] = None ) -> 'ApplyUnitaryArgs': """A default instance starting in state |0⟩. Specify exactly one argument. Args: num_qubits: The number of qubits to make space for in the state. qid_shape: The shape of the state, specifying the dimension of each qid. Raises: TypeError: If exactly neither `num_qubits` or `qid_shape` is provided or both are provided. 
""" if (num_qubits is None) == (qid_shape is None): raise TypeError('Specify exactly one of num_qubits or qid_shape.') if num_qubits is not None: qid_shape = (2,) * num_qubits qid_shape = cast(Tuple[int, ...], qid_shape) # Satisfy mypy num_qubits = len(qid_shape) state = qis.one_hot(index=(0,) * num_qubits, shape=qid_shape, dtype=np.complex128) return ApplyUnitaryArgs(state, np.empty_like(state), range(num_qubits)) def with_axes_transposed_to_start(self) -> 'ApplyUnitaryArgs': """Returns a transposed view of the same arguments. Returns: A view over the same target tensor and available workspace, but with the numpy arrays transposed such that the axes field is guaranteed to equal `range(len(result.axes))`. This allows one to say e.g. `result.target_tensor[0, 1, 0, ...]` instead of `result.target_tensor[result.subspace_index(0b010)]`. """ axis_set = set(self.axes) other_axes = [axis for axis in range(len(self.target_tensor.shape)) if axis not in axis_set] perm = (*self.axes, *other_axes) target_tensor = self.target_tensor.transpose(*perm) available_buffer = self.available_buffer.transpose(*perm) return ApplyUnitaryArgs(target_tensor, available_buffer, range(len(self.axes))) def _for_operation_with_qid_shape( self, indices: Iterable[int], qid_shape: Tuple[int, ...] ) -> 'ApplyUnitaryArgs': """Creates a sliced and transposed view of `self` appropriate for an operation with shape `qid_shape` on qubits with the given indices. Example: sub_args = args._for_operation_with_qid_shape(indices, (2, 2, 2)) # Slice where the first qubit is |1>. sub_args.target_tensor[..., 1, :, :] Args: indices: Integer indices into `self.axes` specifying which qubits the operation applies to. qid_shape: The qid shape of the operation, the expected number of quantum levels in each qubit the operation applies to. 
Returns: A new `ApplyUnitaryArgs` where `sub_args.target_tensor` and `sub_args.available_buffer` are sliced and transposed views of `self.target_tensor` and `self.available_buffer` respectively. """ slices = [slice(0, size) for size in qid_shape] sub_axes = [self.axes[i] for i in indices] axis_set = set(sub_axes) other_axes = [axis for axis in range(len(self.target_tensor.shape)) if axis not in axis_set] ordered_axes = (*other_axes, *sub_axes) # Transpose sub_axes to the end of the shape and slice them target_tensor = self.target_tensor.transpose(*ordered_axes)[(..., *slices)] available_buffer = self.available_buffer.transpose(*ordered_axes)[(..., *slices)] new_axes = range(len(other_axes), len(ordered_axes)) return ApplyUnitaryArgs(target_tensor, available_buffer, new_axes) def subspace_index( self, little_endian_bits_int: int = 0, *, big_endian_bits_int: int = 0 ) -> Tuple[Union[slice, int, 'ellipsis'], ...]: """An index for the subspace where the target axes equal a value. Args: little_endian_bits_int: The desired value of the qubits at the targeted `axes`, packed into an integer. The least significant bit of the integer is the desired bit for the first axis, and so forth in increasing order. Can't be specified at the same time as `big_endian_bits_int`. big_endian_bits_int: The desired value of the qubits at the targeted `axes`, packed into an integer. The most significant bit of the integer is the desired bit for the first axis, and so forth in decreasing order. Can't be specified at the same time as `little_endian_bits_int`. Returns: A value that can be used to index into `target_tensor` and `available_buffer`, and manipulate only the part of Hilbert space corresponding to a given bit assignment. 
Example: If `target_tensor` is a 4 qubit tensor and `axes` is `[1, 3]` and then this method will return the following when given `little_endian_bits=0b01`: `(slice(None), 0, slice(None), 1, Ellipsis)` Therefore the following two lines would be equivalent: args.target_tensor[args.subspace_index(0b01)] += 1 args.target_tensor[:, 0, :, 1] += 1 """ return linalg.slice_for_qubits_equal_to( self.axes, little_endian_qureg_value=little_endian_bits_int, big_endian_qureg_value=big_endian_bits_int, qid_shape=self.target_tensor.shape, ) class SupportsConsistentApplyUnitary(Protocol): """An object that can be efficiently left-multiplied into tensors.""" @doc_private def _apply_unitary_( self, args: ApplyUnitaryArgs ) -> Union[np.ndarray, None, NotImplementedType]: """Left-multiplies a unitary effect onto a tensor with good performance. This method is given both the target tensor and workspace of the same shape and dtype. The method then either performs inline modifications of the target tensor and returns it, or writes its output into the workspace tensor and returns that. This signature makes it possible to write specialized simulation methods that run without performing large allocations, significantly increasing simulation performance. The target may represent a wave function, a unitary matrix, or some other tensor. Implementations will work in all of these cases as long as they correctly focus on only operating on the given axes. Args: args: A `cirq.ApplyUnitaryArgs` object with the `args.target_tensor` to operate on, an `args.available_workspace` buffer to use as temporary workspace, and the `args.axes` of the tensor to target with the unitary operation. Note that this method is permitted (and in fact expected) to mutate `args.target_tensor` and `args.available_workspace`. Returns: If the receiving object is not able to apply its unitary effect, None or NotImplemented should be returned. 
If the receiving object is able to work inline, it should directly mutate `args.target_tensor` and then return `args.target_tensor`. The caller will understand this to mean that the result is in `args.target_tensor`. If the receiving object is unable to work inline, it can write its output over `args.available_buffer` and then return `args.available_buffer`. The caller will understand this to mean that the result is in `args.available_buffer` (and so what was `args.available_buffer` will become `args.target_tensor` in the next call, and vice versa). The receiving object is also permitted to allocate a new numpy.ndarray and return that as its result. """ def apply_unitary( unitary_value: Any, args: ApplyUnitaryArgs, default: TDefault = RaiseTypeErrorIfNotProvided, *, allow_decompose: bool = True, ) -> Union[np.ndarray, TDefault]: """High performance left-multiplication of a unitary effect onto a tensor. Applies the unitary effect of `unitary_value` to the tensor specified in `args` by using the following strategies: A. Try to use `unitary_value._apply_unitary_(args)`. Case a) Method not present or returns `NotImplemented`. Continue to next strategy. Case b) Method returns `None`. Conclude `unitary_value` has no unitary effect. Case c) Method returns a numpy array. Forward the successful result to the caller. B. Try to use `unitary_value._unitary_()`. Case a) Method not present or returns `NotImplemented`. Continue to next strategy. Case b) Method returns `None`. Conclude `unitary_value` has no unitary effect. Case c) Method returns a numpy array. Multiply the matrix onto the target tensor and return to the caller. C. Try to use `unitary_value._decompose_()` (if `allow_decompose`). Case a) Method not present or returns `NotImplemented` or `None`. Continue to next strategy. Case b) Method returns an OP_TREE. Delegate to `cirq.apply_unitaries`. D. Conclude that `unitary_value` has no unitary effect. 
The order that the strategies are tried depends on the number of qubits being operated on. For small numbers of qubits (4 or less) the order is ABCD. For larger numbers of qubits the order is ACBD (because it is expected that decomposing will outperform generating the raw matrix). Args: unitary_value: The value with a unitary effect to apply to the target. args: A mutable `cirq.ApplyUnitaryArgs` object describing the target tensor, available workspace, and axes to operate on. The attributes of this object will be mutated as part of computing the result. default: What should be returned if `unitary_value` doesn't have a unitary effect. If not specified, a TypeError is raised instead of returning a default value. allow_decompose: Defaults to True. If set to False, and applying the unitary effect requires decomposing the object, the method will pretend the object has no unitary effect. Returns: If the receiving object does not have a unitary effect, then the specified default value is returned (or a TypeError is raised). If this occurs, then `target_tensor` should not have been mutated. Otherwise the result is the `np.ndarray` instance storing the result. This may be `args.target_tensor`, `args.available_workspace`, or some other numpy array. It is the caller's responsibility to correctly handle all three of these cases. In all cases `args.target_tensor` and `args.available_buffer` may have been mutated. Raises: TypeError: `unitary_value` doesn't have a unitary effect and `default` wasn't specified. """ # Decide on order to attempt application strategies. if len(args.axes) <= 4: strats = [ _strat_apply_unitary_from_apply_unitary, _strat_apply_unitary_from_unitary, _strat_apply_unitary_from_decompose, ] else: strats = [ _strat_apply_unitary_from_apply_unitary, _strat_apply_unitary_from_decompose, _strat_apply_unitary_from_unitary, ] if not allow_decompose: strats.remove(_strat_apply_unitary_from_decompose) # Try each strategy, stopping if one works. 
for strat in strats: result = strat(unitary_value, args) if result is None: break if result is not NotImplemented: return result # Don't know how to apply. Fallback to specified default behavior. if default is not RaiseTypeErrorIfNotProvided: return default raise TypeError( "cirq.apply_unitary failed. " "Value doesn't have a (non-parameterized) unitary effect.\n" "\n" "type: {}\n" "value: {!r}\n" "\n" "The value failed to satisfy any of the following criteria:\n" "- An `_apply_unitary_(self, args) method that returned a value " "besides None or NotImplemented.\n" "- A `_unitary_(self)` method that returned a value " "besides None or NotImplemented.\n" "- A `_decompose_(self)` method that returned a " "list of unitary operations.\n" "".format(type(unitary_value), unitary_value) ) def _strat_apply_unitary_from_apply_unitary( unitary_value: Any, args: ApplyUnitaryArgs ) -> Optional[np.ndarray]: # Check for magic method. func = getattr(unitary_value, '_apply_unitary_', None) if func is None: return NotImplemented op_qid_shape = qid_shape_protocol.qid_shape(unitary_value, (2,) * len(args.axes)) sub_args = args._for_operation_with_qid_shape(range(len(op_qid_shape)), op_qid_shape) sub_result = func(sub_args) if sub_result is NotImplemented or sub_result is None: return sub_result return _incorporate_result_into_target(args, sub_args, sub_result) def _strat_apply_unitary_from_unitary( unitary_value: Any, args: ApplyUnitaryArgs ) -> Optional[np.ndarray]: # Check for magic method. method = getattr(unitary_value, '_unitary_', None) if method is None: return NotImplemented # Attempt to get the unitary matrix. 
matrix = method() if matrix is NotImplemented or matrix is None: return matrix val_qid_shape = qid_shape_protocol.qid_shape(unitary_value, default=(2,) * len(args.axes)) sub_args = args._for_operation_with_qid_shape(range(len(val_qid_shape)), val_qid_shape) matrix = matrix.astype(sub_args.target_tensor.dtype) if len(val_qid_shape) == 1 and val_qid_shape[0] <= 2: # Special case for single-qubit, 2x2 or 1x1 operations. # np.einsum is faster for larger cases. subspaces = [(..., level) for level in range(val_qid_shape[0])] sub_result = linalg.apply_matrix_to_slices( sub_args.target_tensor, matrix, subspaces, out=sub_args.available_buffer ) else: # General case via np.einsum. sub_result = linalg.targeted_left_multiply( matrix.reshape(val_qid_shape * 2), sub_args.target_tensor, sub_args.axes, out=sub_args.available_buffer, ) return _incorporate_result_into_target(args, sub_args, sub_result) def _strat_apply_unitary_from_decompose(val: Any, args: ApplyUnitaryArgs) -> Optional[np.ndarray]: operations, qubits, _ = _try_decompose_into_operations_and_qubits(val) if operations is None: return NotImplemented return apply_unitaries(operations, qubits, args, None) def apply_unitaries( unitary_values: Iterable[Any], qubits: Sequence['cirq.Qid'], args: Optional[ApplyUnitaryArgs] = None, default: Any = RaiseTypeErrorIfNotProvided, ) -> Optional[np.ndarray]: """Apply a series of unitaries onto a state tensor. Uses `cirq.apply_unitary` on each of the unitary values, to apply them to the state tensor from the `args` argument. CAUTION: if one of the given unitary values does not have a unitary effect, forcing the method to terminate, the method will not rollback changes from previous unitary values. Args: unitary_values: The values with unitary effects to apply to the target. qubits: The qubits that will be targeted by the unitary values. These qubits match up, index by index, with the `indices` property of the `args` argument. 
args: A mutable `cirq.ApplyUnitaryArgs` object describing the target tensor, available workspace, and axes to operate on. The attributes of this object will be mutated as part of computing the result. If not specified, this defaults to the zero state of the given qubits with an axis ordering matching the given qubit ordering. default: What should be returned if any of the unitary values actually don't have a unitary effect. If not specified, a TypeError is raised instead of returning a default value. Returns: If any of the unitary values do not have a unitary effect, the specified default value is returned (or a TypeError is raised). CAUTION: If this occurs, the contents of `args.target_tensor` and `args.available_buffer` may have been mutated. If all of the unitary values had a unitary effect that was successfully applied, this method returns the `np.ndarray` storing the final result. This `np.ndarray` may be `args.target_tensor`, `args.available_buffer`, or some other instance. The caller is responsible for dealing with this potential aliasing of the inputs and the result. Raises: TypeError: An item from `unitary_values` doesn't have a unitary effect and `default` wasn't specified. ValueError: If the number of qubits does not match the number of axes provided in the `args`. """ if args is None: qid_shape = qid_shape_protocol.qid_shape(qubits) args = ApplyUnitaryArgs.default(qid_shape=qid_shape) if len(qubits) != len(args.axes): raise ValueError('len(qubits) != len(args.axes)') qubit_map = {q.with_dimension(1): args.axes[i] for i, q in enumerate(qubits)} state = args.target_tensor buffer = args.available_buffer for op in unitary_values: indices = [qubit_map[q.with_dimension(1)] for q in op.qubits] result = apply_unitary( unitary_value=op, args=ApplyUnitaryArgs(state, buffer, indices), default=None ) # Handle failure. if result is None: if default is RaiseTypeErrorIfNotProvided: raise TypeError( "cirq.apply_unitaries failed. 
" "There was a non-unitary value in the `unitary_values` " "list.\n" "\n" "non-unitary value type: {}\n" "non-unitary value: {!r}".format(type(op), op) ) return default # Handle aliasing of results. if result is buffer: buffer = state state = result return state def _incorporate_result_into_target( args: 'ApplyUnitaryArgs', sub_args: 'ApplyUnitaryArgs', sub_result: np.ndarray ): """Takes the result of calling `_apply_unitary_` on `sub_args` and copies it back into `args.target_tensor` or `args.available_buffer` as necessary to return the result of applying the unitary to the full args. Also swaps the buffers so the result is always in `args.target_tensor`. Args: args: The original args. sub_args: A version of `args` with transposed and sliced views of it's tensors. sub_result: The result of calling an object's `_apply_unitary_` method on `sub_args`. A transposed subspace of the desired result. Returns: The full result tensor after applying the unitary. Always `args.target_tensor`. Raises: ValueError: If `sub_args` tensors are not views of `args` tensors. """ if not ( np.may_share_memory(args.target_tensor, sub_args.target_tensor) and np.may_share_memory(args.available_buffer, sub_args.available_buffer) ): raise ValueError( 'sub_args.target_tensor and subargs.available_buffer must be views of ' 'args.target_tensor and args.available_buffer respectively.' ) is_subspace = sub_args.target_tensor.size < args.target_tensor.size if sub_result is sub_args.target_tensor: return args.target_tensor if sub_result is sub_args.available_buffer: if is_subspace: # The subspace that was modified is likely much smaller than # the whole tensor so copy sub_result back into target_tensor. sub_args.target_tensor[...] = sub_result return args.target_tensor return args.available_buffer # The subspace that was modified is likely much smaller than # the whole tensor so copy sub_result back into target_tensor. # It's an uncommon case where sub_result is a new array. 
if np.may_share_memory(sub_args.target_tensor, sub_result): # Someone did something clever. E.g. implementing SWAP with a # reshape. # Copy to available_buffer instead. if is_subspace: args.available_buffer[...] = args.target_tensor sub_args.available_buffer[...] = sub_result return args.available_buffer sub_args.target_tensor[...] = sub_result return args.target_tensor
43.407343
100
0.668452
from typing import ( Any, cast, Iterable, Optional, Sequence, Tuple, TYPE_CHECKING, TypeVar, Union, ) import numpy as np from typing_extensions import Protocol from cirq import linalg, qis from cirq._doc import doc_private from cirq.protocols import qid_shape_protocol from cirq.protocols.decompose_protocol import ( _try_decompose_into_operations_and_qubits, ) from cirq.type_workarounds import NotImplementedType if TYPE_CHECKING: import cirq # the user provides a different np.array([]) value. RaiseTypeErrorIfNotProvided: np.ndarray = np.array([]) TDefault = TypeVar('TDefault') class ApplyUnitaryArgs: def __init__( self, target_tensor: np.ndarray, available_buffer: np.ndarray, axes: Iterable[int] ): self.target_tensor = target_tensor self.available_buffer = available_buffer self.axes = tuple(axes) @staticmethod def default( num_qubits: Optional[int] = None, *, qid_shape: Optional[Tuple[int, ...]] = None ) -> 'ApplyUnitaryArgs': if (num_qubits is None) == (qid_shape is None): raise TypeError('Specify exactly one of num_qubits or qid_shape.') if num_qubits is not None: qid_shape = (2,) * num_qubits qid_shape = cast(Tuple[int, ...], qid_shape) # Satisfy mypy num_qubits = len(qid_shape) state = qis.one_hot(index=(0,) * num_qubits, shape=qid_shape, dtype=np.complex128) return ApplyUnitaryArgs(state, np.empty_like(state), range(num_qubits)) def with_axes_transposed_to_start(self) -> 'ApplyUnitaryArgs': axis_set = set(self.axes) other_axes = [axis for axis in range(len(self.target_tensor.shape)) if axis not in axis_set] perm = (*self.axes, *other_axes) target_tensor = self.target_tensor.transpose(*perm) available_buffer = self.available_buffer.transpose(*perm) return ApplyUnitaryArgs(target_tensor, available_buffer, range(len(self.axes))) def _for_operation_with_qid_shape( self, indices: Iterable[int], qid_shape: Tuple[int, ...] 
) -> 'ApplyUnitaryArgs': slices = [slice(0, size) for size in qid_shape] sub_axes = [self.axes[i] for i in indices] axis_set = set(sub_axes) other_axes = [axis for axis in range(len(self.target_tensor.shape)) if axis not in axis_set] ordered_axes = (*other_axes, *sub_axes) # Transpose sub_axes to the end of the shape and slice them target_tensor = self.target_tensor.transpose(*ordered_axes)[(..., *slices)] available_buffer = self.available_buffer.transpose(*ordered_axes)[(..., *slices)] new_axes = range(len(other_axes), len(ordered_axes)) return ApplyUnitaryArgs(target_tensor, available_buffer, new_axes) def subspace_index( self, little_endian_bits_int: int = 0, *, big_endian_bits_int: int = 0 ) -> Tuple[Union[slice, int, 'ellipsis'], ...]: return linalg.slice_for_qubits_equal_to( self.axes, little_endian_qureg_value=little_endian_bits_int, big_endian_qureg_value=big_endian_bits_int, qid_shape=self.target_tensor.shape, ) class SupportsConsistentApplyUnitary(Protocol): @doc_private def _apply_unitary_( self, args: ApplyUnitaryArgs ) -> Union[np.ndarray, None, NotImplementedType]: def apply_unitary( unitary_value: Any, args: ApplyUnitaryArgs, default: TDefault = RaiseTypeErrorIfNotProvided, *, allow_decompose: bool = True, ) -> Union[np.ndarray, TDefault]: # Decide on order to attempt application strategies. if len(args.axes) <= 4: strats = [ _strat_apply_unitary_from_apply_unitary, _strat_apply_unitary_from_unitary, _strat_apply_unitary_from_decompose, ] else: strats = [ _strat_apply_unitary_from_apply_unitary, _strat_apply_unitary_from_decompose, _strat_apply_unitary_from_unitary, ] if not allow_decompose: strats.remove(_strat_apply_unitary_from_decompose) # Try each strategy, stopping if one works. for strat in strats: result = strat(unitary_value, args) if result is None: break if result is not NotImplemented: return result # Don't know how to apply. Fallback to specified default behavior. 
if default is not RaiseTypeErrorIfNotProvided: return default raise TypeError( "cirq.apply_unitary failed. " "Value doesn't have a (non-parameterized) unitary effect.\n" "\n" "type: {}\n" "value: {!r}\n" "\n" "The value failed to satisfy any of the following criteria:\n" "- An `_apply_unitary_(self, args) method that returned a value " "besides None or NotImplemented.\n" "- A `_unitary_(self)` method that returned a value " "besides None or NotImplemented.\n" "- A `_decompose_(self)` method that returned a " "list of unitary operations.\n" "".format(type(unitary_value), unitary_value) ) def _strat_apply_unitary_from_apply_unitary( unitary_value: Any, args: ApplyUnitaryArgs ) -> Optional[np.ndarray]: # Check for magic method. func = getattr(unitary_value, '_apply_unitary_', None) if func is None: return NotImplemented op_qid_shape = qid_shape_protocol.qid_shape(unitary_value, (2,) * len(args.axes)) sub_args = args._for_operation_with_qid_shape(range(len(op_qid_shape)), op_qid_shape) sub_result = func(sub_args) if sub_result is NotImplemented or sub_result is None: return sub_result return _incorporate_result_into_target(args, sub_args, sub_result) def _strat_apply_unitary_from_unitary( unitary_value: Any, args: ApplyUnitaryArgs ) -> Optional[np.ndarray]: # Check for magic method. method = getattr(unitary_value, '_unitary_', None) if method is None: return NotImplemented # Attempt to get the unitary matrix. matrix = method() if matrix is NotImplemented or matrix is None: return matrix val_qid_shape = qid_shape_protocol.qid_shape(unitary_value, default=(2,) * len(args.axes)) sub_args = args._for_operation_with_qid_shape(range(len(val_qid_shape)), val_qid_shape) matrix = matrix.astype(sub_args.target_tensor.dtype) if len(val_qid_shape) == 1 and val_qid_shape[0] <= 2: # Special case for single-qubit, 2x2 or 1x1 operations. # np.einsum is faster for larger cases. 
subspaces = [(..., level) for level in range(val_qid_shape[0])] sub_result = linalg.apply_matrix_to_slices( sub_args.target_tensor, matrix, subspaces, out=sub_args.available_buffer ) else: # General case via np.einsum. sub_result = linalg.targeted_left_multiply( matrix.reshape(val_qid_shape * 2), sub_args.target_tensor, sub_args.axes, out=sub_args.available_buffer, ) return _incorporate_result_into_target(args, sub_args, sub_result) def _strat_apply_unitary_from_decompose(val: Any, args: ApplyUnitaryArgs) -> Optional[np.ndarray]: operations, qubits, _ = _try_decompose_into_operations_and_qubits(val) if operations is None: return NotImplemented return apply_unitaries(operations, qubits, args, None) def apply_unitaries( unitary_values: Iterable[Any], qubits: Sequence['cirq.Qid'], args: Optional[ApplyUnitaryArgs] = None, default: Any = RaiseTypeErrorIfNotProvided, ) -> Optional[np.ndarray]: if args is None: qid_shape = qid_shape_protocol.qid_shape(qubits) args = ApplyUnitaryArgs.default(qid_shape=qid_shape) if len(qubits) != len(args.axes): raise ValueError('len(qubits) != len(args.axes)') qubit_map = {q.with_dimension(1): args.axes[i] for i, q in enumerate(qubits)} state = args.target_tensor buffer = args.available_buffer for op in unitary_values: indices = [qubit_map[q.with_dimension(1)] for q in op.qubits] result = apply_unitary( unitary_value=op, args=ApplyUnitaryArgs(state, buffer, indices), default=None ) # Handle failure. if result is None: if default is RaiseTypeErrorIfNotProvided: raise TypeError( "cirq.apply_unitaries failed. " "There was a non-unitary value in the `unitary_values` " "list.\n" "\n" "non-unitary value type: {}\n" "non-unitary value: {!r}".format(type(op), op) ) return default # Handle aliasing of results. 
if result is buffer: buffer = state state = result return state def _incorporate_result_into_target( args: 'ApplyUnitaryArgs', sub_args: 'ApplyUnitaryArgs', sub_result: np.ndarray ): if not ( np.may_share_memory(args.target_tensor, sub_args.target_tensor) and np.may_share_memory(args.available_buffer, sub_args.available_buffer) ): raise ValueError( 'sub_args.target_tensor and subargs.available_buffer must be views of ' 'args.target_tensor and args.available_buffer respectively.' ) is_subspace = sub_args.target_tensor.size < args.target_tensor.size if sub_result is sub_args.target_tensor: return args.target_tensor if sub_result is sub_args.available_buffer: if is_subspace: # The subspace that was modified is likely much smaller than # the whole tensor so copy sub_result back into target_tensor. sub_args.target_tensor[...] = sub_result return args.target_tensor return args.available_buffer # The subspace that was modified is likely much smaller than # the whole tensor so copy sub_result back into target_tensor. # It's an uncommon case where sub_result is a new array. if np.may_share_memory(sub_args.target_tensor, sub_result): if is_subspace: args.available_buffer[...] = args.target_tensor sub_args.available_buffer[...] = sub_result return args.available_buffer sub_args.target_tensor[...] = sub_result return args.target_tensor
true
true
f73c15662ec170505e5b105b68fa5669388e46eb
2,384
py
Python
test/unit/utils/test_expiring_dict.py
dolphinridercrypto/bxcommon
8f70557c1dbff785a5dd3fcdf91176066e085c3a
[ "MIT" ]
12
2019-11-06T17:39:10.000Z
2022-03-01T11:26:19.000Z
test/unit/utils/test_expiring_dict.py
dolphinridercrypto/bxcommon
8f70557c1dbff785a5dd3fcdf91176066e085c3a
[ "MIT" ]
8
2019-11-06T21:31:11.000Z
2021-06-02T00:46:50.000Z
test/unit/utils/test_expiring_dict.py
dolphinridercrypto/bxcommon
8f70557c1dbff785a5dd3fcdf91176066e085c3a
[ "MIT" ]
5
2019-11-14T18:08:11.000Z
2022-02-08T09:36:22.000Z
import time import unittest from mock import MagicMock from bxcommon.utils.expiring_dict import ExpiringDict from bxcommon.utils.alarm_queue import AlarmQueue class ExpiringDictTests(unittest.TestCase): EXPIRATION_TIME_S = 1 def setUp(self): self.ALARM_QUEUE = AlarmQueue() self.e_dict = ExpiringDict(self.ALARM_QUEUE, self.EXPIRATION_TIME_S, "testdict") def test_cleanup(self): kv1 = (1, 2) kv2 = (3, 4) kv3 = (5, 6) kv4 = (7, 8) kv5 = ("str1", 1) kv6 = ("str2", 2) # adding first 2 items to the dict self.e_dict.add(kv1[0], kv1[1]) self.e_dict.add(kv2[0], kv2[1]) time.time = MagicMock(return_value=time.time() + self.EXPIRATION_TIME_S+1) self.assertEqual(len(self.e_dict.contents), 2) self.assertTrue(kv1[0] in self.e_dict.contents) self.assertTrue(kv2[0] in self.e_dict.contents) self.assertEqual(self.e_dict.contents[kv1[0]], kv1[1]) self.assertEqual(self.e_dict.contents[kv2[0]], kv2[1]) # adding last 2 items to the dict self.e_dict.add(kv3[0], kv3[1]) self.e_dict.add(kv4[0], kv4[1]) self.e_dict.add(kv5[0], kv5[1]) self.e_dict.add(kv6[0], kv6[1]) self.ALARM_QUEUE.fire_alarms() # first 2 items are expired, last two have not self.assertFalse(kv1[0] in self.e_dict.contents) self.assertFalse(kv2[0] in self.e_dict.contents) self.assertTrue(kv3[0] in self.e_dict.contents) self.assertTrue(kv4[0] in self.e_dict.contents) self.assertTrue(kv5[0] in self.e_dict.contents) self.assertTrue(kv6[0] in self.e_dict.contents) def test_remove_item(self): kv1 = (1, 2) self.e_dict.add(kv1[0], kv1[1]) self.assertTrue(kv1[0] in self.e_dict.contents) self.e_dict.remove_item(kv1[0]) self.assertFalse(kv1[0] in self.e_dict.contents) def test_cleanup__not_existing_item(self): kv1 = (1, 2) self.e_dict.add(kv1[0], kv1[1]) self.assertTrue(kv1[0] in self.e_dict.contents) self.e_dict.remove_item(kv1[0]) self.assertFalse(kv1[0] in self.e_dict.contents) time.time = MagicMock(return_value=time.time() + self.EXPIRATION_TIME_S + 1) self.ALARM_QUEUE.fire_alarms() self.assertFalse(kv1[0] in self.e_dict.contents)
31.786667
88
0.635906
import time import unittest from mock import MagicMock from bxcommon.utils.expiring_dict import ExpiringDict from bxcommon.utils.alarm_queue import AlarmQueue class ExpiringDictTests(unittest.TestCase): EXPIRATION_TIME_S = 1 def setUp(self): self.ALARM_QUEUE = AlarmQueue() self.e_dict = ExpiringDict(self.ALARM_QUEUE, self.EXPIRATION_TIME_S, "testdict") def test_cleanup(self): kv1 = (1, 2) kv2 = (3, 4) kv3 = (5, 6) kv4 = (7, 8) kv5 = ("str1", 1) kv6 = ("str2", 2) self.e_dict.add(kv1[0], kv1[1]) self.e_dict.add(kv2[0], kv2[1]) time.time = MagicMock(return_value=time.time() + self.EXPIRATION_TIME_S+1) self.assertEqual(len(self.e_dict.contents), 2) self.assertTrue(kv1[0] in self.e_dict.contents) self.assertTrue(kv2[0] in self.e_dict.contents) self.assertEqual(self.e_dict.contents[kv1[0]], kv1[1]) self.assertEqual(self.e_dict.contents[kv2[0]], kv2[1]) self.e_dict.add(kv3[0], kv3[1]) self.e_dict.add(kv4[0], kv4[1]) self.e_dict.add(kv5[0], kv5[1]) self.e_dict.add(kv6[0], kv6[1]) self.ALARM_QUEUE.fire_alarms() self.assertFalse(kv1[0] in self.e_dict.contents) self.assertFalse(kv2[0] in self.e_dict.contents) self.assertTrue(kv3[0] in self.e_dict.contents) self.assertTrue(kv4[0] in self.e_dict.contents) self.assertTrue(kv5[0] in self.e_dict.contents) self.assertTrue(kv6[0] in self.e_dict.contents) def test_remove_item(self): kv1 = (1, 2) self.e_dict.add(kv1[0], kv1[1]) self.assertTrue(kv1[0] in self.e_dict.contents) self.e_dict.remove_item(kv1[0]) self.assertFalse(kv1[0] in self.e_dict.contents) def test_cleanup__not_existing_item(self): kv1 = (1, 2) self.e_dict.add(kv1[0], kv1[1]) self.assertTrue(kv1[0] in self.e_dict.contents) self.e_dict.remove_item(kv1[0]) self.assertFalse(kv1[0] in self.e_dict.contents) time.time = MagicMock(return_value=time.time() + self.EXPIRATION_TIME_S + 1) self.ALARM_QUEUE.fire_alarms() self.assertFalse(kv1[0] in self.e_dict.contents)
true
true