Dataset schema (29 columns):

| column | type | range / notes |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 1 – 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 – 239 |
| max_stars_repo_name | string | length 5 – 130 |
| max_stars_repo_head_hexsha | string | length 40 – 78 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 – 239 |
| max_issues_repo_name | string | length 5 – 130 |
| max_issues_repo_head_hexsha | string | length 40 – 78 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 – 239 |
| max_forks_repo_name | string | length 5 – 130 |
| max_forks_repo_head_hexsha | string | length 40 – 78 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 1 – 1.03M |
| avg_line_length | float64 | 1 – 958k |
| max_line_length | int64 | 1 – 1.03M |
| alphanum_fraction | float64 | 0 – 1 |
=== Record 1 ===
hexsha: 4a17224f6572387f2bf5e2725f83d913460c2b76 | size: 14,313 | ext: py | lang: Python
path: BagOfWordsClassification.py
repo: JustusSchwan/MasterThesis @ a9b928ed1c121a72ad1bfec28d941d31e4b232e8
licenses: ["MIT"] | stars: null | issues: null | forks: null
content:
from __future__ import print_function
import cPickle
import itertools
import sqlite3
from os import path
import matplotlib.pyplot as plt
import numpy as np
import pylab
from scipy import stats
from sklearn.decomposition import PCA
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import LeaveOneOut
from sklearn.model_selection import cross_val_predict
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import MinMaxScaler
from sklearn.svm import SVC
from sklearn.svm import SVR
import BagOfWordsModel
from dataflow import connectables
from dataflow import logging
from util.np_buffers import GrowingNumpyArray
project_folder = 'D:/Master Thesis/'
logfiles_folder = 'D:/Master Thesis/logfiles/'
source_folder = 'C:/Master Thesis/Dataset/'
cache_folder = 'D:/Master Thesis/BOWTransformed'
# Gets features (expressivity, geometric) and boredom, engagement, frustration annotations for the given user
def getFeaturesAndAnnotations(user):
db = sqlite3.connect(path.join(source_folder, 'Users.sqlite'))
c = db.cursor()
c.execute(
'SELECT sessionId, Boredom_Annotation, Engagement_Annotation, Frustration_Annotation FROM StudentsSession'
' WHERE userId=?', (user,))
annotations = np.array(list(session for session in c), dtype=np.int32)
sessions = annotations[:, 0]
if len(sessions) == 0:
return None
paths = list(path.join(logfiles_folder, '{}_{}.features'.format(user, session)) for session in sessions)
for vid_path in paths:
assert path.exists(vid_path), vid_path + ' does not exist'
db.close()
def get_features(filename):
log_reader = logging.Logger(logging.LoggingMode.REPLAY)
store = GrowingNumpyArray()
source_features = connectables.StateToEvent(log_reader.source_state('features'))
source_features.source >> store.append
log_reader.open(filename)
while True:
if not log_reader.read():
break
source_features.get_and_fire()
log_reader.write()
return store()
session_features = list(get_features(vid_path) for vid_path in paths)
not_none_sessions = list(x is not None for x in session_features)
session_features = list(x for x, b in zip(session_features, not_none_sessions) if b)
labels = annotations[np.array(not_none_sessions), 1:]
return session_features, labels
# Returns features and continuous labels for all sessions of the given user
# session_features is a set of videos. There are several frames in each video, each of which has a feature vector
# Each session has one label for each of boredom, engagement and frustration
# A session is considered valid when features exist for at least 100 frames
def getValidSessions(user):
session_features, annotations = getFeaturesAndAnnotations(user)
valid_sessions = list(x.shape[0] > 100 for x in session_features)
session_features = list(x for x, b in zip(session_features, valid_sessions) if b)
labels = annotations[np.array(valid_sessions), :]
assert (len(session_features) == len(labels))
return session_features, labels
# Returns features and class labels for all sessions of the given user
# session_features is a set of videos. There are several frames in each video, each of which has a feature vector
# Each session is labeled with either 'bored', 'engaged' or 'frustrated'
# A session is considered valid when features exist for at least 100 frames and the max continuous label is unique
# e.g. a session with boredom 2, engagement 4 and frustration 4 is not valid and therefore excluded
def getValidDiscretizedSessions(user):
session_features, annotations = getFeaturesAndAnnotations(user)
valid_sessions = np.logical_and(
np.array(list(x is not None and x.shape[0] > 100 for x in session_features), dtype=np.bool_),
np.sum(annotations.T == annotations.max(axis=1), axis=0) == 1)
session_features = list(x for x, b in zip(session_features, valid_sessions) if b)
labels = np.argmax(annotations[valid_sessions], axis=1)
assert (len(session_features) == len(labels))
return session_features, labels
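# A minimal sketch (not part of the original script) of the unique-max rule
# applied above: a session whose highest rating is shared by two affects is
# dropped, while a session with a unique maximum keeps its argmax as the class.
def _demoUniqueMaxRule():
    toy = np.array([[2, 4, 4],   # max 4 appears twice -> invalid
                    [1, 4, 2]])  # max 4 is unique -> valid, class 1 (engagement)
    is_unique = np.sum(toy.T == toy.max(axis=1), axis=0) == 1
    return is_unique  # array([False,  True])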
# Constructs a bag of words model which uses agglomerative clustering with pseudo cosine distance
# if pca_components is >0, a PCA transformer with the respective number of components will be included in the pipeline
# Data comes from all users
def constructBowModel(n_clusters, pca_components):
user_list = list(range(1, 34))
    # NOTE: the original passed '/bow_model_...'; the leading slash makes
    # os.path.join discard cache_folder, so it is dropped here.
    temp_file = path.join(cache_folder, 'bow_model_cl_{}_train_{}{}.pkl'.
format(n_clusters,
str(user_list).replace(',', '_')[1:-1],
'_with_pca_' + str(pca_components) if pca_components > 0 else ''))
if path.exists(temp_file):
with open(temp_file, 'rb') as f:
return cPickle.load(f)
else:
user_sessions = []
for u in user_list:
sessions = getFeaturesAndAnnotations(u)
if sessions is not None and len(sessions[0]) > 0:
user_sessions.append(np.vstack(sessions[0]))
normalizer = [('Scaler', MinMaxScaler())]
if pca_components > 0:
normalizer.append(('PCA', PCA(n_components=pca_components)))
model = BagOfWordsModel.BagOfWordsModel(n_clusters=n_clusters, k_neighbors=5, transforms=normalizer)
model.fit(np.vstack(user_sessions))
with open(temp_file, 'wb') as f:
cPickle.dump(model, file=f, protocol=-1)
return model
# Utility function, returns features for given user transformed with the BoW model as described in constructBowModel
# session_getter is a function pointer to either getValidSessions or getValidDiscretizedSessions
# model_cache is used to cache the BoW model to prevent frequent expensive fitting
def bowTransformedFeatures(user, n_clusters, pca_components, session_getter, model_cache):
temp_file = path.join(cache_folder, 'usr_{}_cl_{}_{}{}.pkl'.
format(user, n_clusters, session_getter.__name__,
'_with_pca_' + str(pca_components) if pca_components > 0 else ''))
if path.exists(temp_file):
with open(temp_file, 'rb') as f:
return cPickle.load(f)
else:
if model_cache is None:
model_cache = constructBowModel(n_clusters, pca_components)
sessions = session_getter(user)
if sessions is None or len(sessions[1]) == 0:
with open(temp_file, 'wb') as f:
cPickle.dump(None, file=f, protocol=-1)
return None
sessions_features, labels = sessions
transformed_features = []
for features in sessions_features:
prediction = model_cache.predict(np.vstack(features))
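            # bins=np.arange(n_clusters + 1) yields one histogram bin per cluster id;
            # note that newer NumPy versions replace the `normed` kwarg with `density`.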
result = np.histogram(prediction, bins=np.arange(n_clusters + 1), normed=True)[0]
transformed_features.append(result)
assert (len(transformed_features) == len(labels))
with open(temp_file, 'wb') as f:
cPickle.dump([np.array(transformed_features), labels], file=f, protocol=-1)
return [np.array(transformed_features), labels]
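# A minimal sketch (made-up values, not from the original code) of the
# transformation above: per-frame cluster assignments become one normalized
# histogram per session, so every session maps to a fixed-length feature vector.
def _demoClusterHistogram(n_clusters=4):
    frame_clusters = np.array([0, 0, 1, 3, 3, 3])  # hypothetical predictions for 6 frames
    hist = np.histogram(frame_clusters, bins=np.arange(n_clusters + 1), normed=True)[0]
    return hist  # array([0.33, 0.17, 0., 0.5]) -- bin fractions summing to 1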
# Computes cross validation for regressing the affect labels
def CrossValRegression(features, labels, **params):
loo = LeaveOneOut()
reg = SVR(epsilon=0.1, **params)
result = []
for i in range(3):
result.append(
-1 * cross_val_score(reg, X=features, y=labels[:, i], cv=loo, scoring='neg_mean_absolute_error'))
return np.array(result).T
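# Hypothetical usage sketch for CrossValRegression (synthetic data, not from
# the thesis dataset): with N sessions, LeaveOneOut yields N folds, so the
# result has shape (N, 3) -- one absolute error per session and affect label.
def _demoCrossValRegression():
    rng = np.random.RandomState(0)
    X = rng.rand(12, 5)                  # 12 sessions, 5 BoW features (made up)
    y = rng.randint(1, 6, size=(12, 3))  # boredom/engagement/frustration labels
    return CrossValRegression(X, y, C=1).shape  # (12, 3)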
# computes predictions of a weak regressor averaging the affect labels per candidate
def getMeanRegression(user):
temp_file = path.join(cache_folder, 'mean_regression_usr_{}.pkl'.format(user))
if path.exists(temp_file):
with open(temp_file, 'rb') as f:
return cPickle.load(f)
session_data = getValidSessions(user)
if session_data is None:
return None
session_features, labels = session_data
if len(labels) == 0:
return None
err = []
for i in range(labels.shape[0]):
others = np.delete(labels, [i], axis=0)
assert others.shape[1] == 3
err.append(np.abs(labels[i, :] - np.mean(others, axis=0)))
ret = np.array(err)
with open(temp_file, 'wb') as f:
cPickle.dump(ret, f, -1)
return ret
# Dumps a matrix to console in csv format
def print_csv(mat):
for l1 in mat:
for l2 in l1:
print(l2, ',', sep='', end='')
print()
# Predicts affect labels using a Support Vector Regressor and compares them to the weak predictor
def EvaluateRegression(users, n_clusters, pca_components):
comp_data = []
model_cache = None
features = []
labels = []
for user in users:
session_data = bowTransformedFeatures(user, n_clusters, pca_components, getValidSessions, model_cache)
if session_data is not None and len(session_data[0]) > 1:
features.append(session_data[0])
labels.append(session_data[1])
comp_data.append(getMeanRegression(user))
user_data = CrossValRegression(np.vstack(features), np.vstack(labels), C=1)
comp_data = np.vstack(comp_data)
csv_data = np.zeros((3, 3))
for i in range(3):
p = stats.ttest_rel(user_data[:, i], comp_data[:, i]).pvalue
csv_data[i, :] = [np.mean(user_data[:, i]), np.mean(comp_data[:, i] - user_data[:, i]), p]
print_csv(csv_data)
# Predicts affect labels per candidate using a Support Vector Regressor and compares them to the weak predictor
def EvaluateRegressionPerCandidate(users, n_clusters, pca_components):
comp_data_all = []
model_cache = None
features = []
labels = []
chosen_ones = []
for user in users:
session_data = bowTransformedFeatures(user, n_clusters, pca_components, getValidSessions, model_cache)
if session_data is not None and len(session_data[0]) > 1:
features.append(session_data[0])
labels.append(session_data[1])
comp_data_all.append(getMeanRegression(user))
chosen_ones.append(user)
csv_data = np.zeros((3, 3))
label = ['Boredom', 'Engagement', 'Frustration']
num_executed = 0
    for f, l, comp_data, chosen_one in zip(features, labels, comp_data_all, chosen_ones):
        # require at least 10 sessions for this candidate (the original tested
        # len(labels), the outer per-user list, which never varies per candidate)
        if len(l) < 10 or not all(np.mean(comp_data, axis=0).tolist()):
continue
num_executed += 1
user_data = CrossValRegression(f, l, C=1)
for i in range(3):
p = stats.ttest_rel(user_data[:, i], comp_data[:, i]).pvalue
csv_data[i, :] = [np.mean(user_data[:, i]), np.mean(comp_data[:, i] - user_data[:, i]), p]
for i in range(3):
print((chosen_one if i == 0 else ''), ',', sep='', end='')
print(label[i], *(csv_data[i, :].tolist()), sep=',')
# Returns BoW transformed features and affect class labels
def GetDiscretizedFeaturesAndLabels(users, n_clusters, pca_components):
model = None
features = []
labels = []
for user in users:
data = bowTransformedFeatures(user, n_clusters, pca_components, getValidDiscretizedSessions, model)
if data is not None and len(data[0]) > 0:
features.append(data[0])
labels.append(data[1])
labels = np.hstack(labels)
print(labels)
print(len(labels))
features = np.vstack(features)
return features, labels
# plots a confusion matrix to the current pyplot axes object
# largely taken from http://scikit-learn.org/stable/auto_examples/model_selection/plot_confusion_matrix.html
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
print(cm)
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.gca().set_title(title)
# plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
# Predicts affect labels and estimates performance using LOO cross validation
# Takes a list of n_clusters and a list of C to make a matrix of confusion matrices
def MakeConfusionMatrix(users, n_clusters_list, pca_components, c_list):
plt_size = 2.5
fig, axes = plt.subplots(nrows=len(n_clusters_list), ncols=len(c_list),
figsize=(plt_size * len(c_list), plt_size * len(n_clusters_list)), dpi=300)
np.set_printoptions(precision=2)
for i, n_clusters in enumerate(n_clusters_list):
for j, c in enumerate(c_list):
features, labels = GetDiscretizedFeaturesAndLabels(users, n_clusters, pca_components)
loo = LeaveOneOut()
classifier = SVC(C=c, class_weight='balanced')
predicted = cross_val_predict(classifier, X=features, y=labels, cv=loo)
mat = confusion_matrix(labels, predicted)
# mat = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
plt.axes(axes[i, j])
plot_confusion_matrix(mat, classes=['B', 'E', 'F'], normalize=True,
title='C={}, Clusters={}'.format(c, n_clusters))
plt.tight_layout()
pylab.savefig(path.join(project_folder, 'confusion_matrix.png'), dpi=fig.dpi)
# plt.show()
if __name__ == '__main__':
users = range(1, 34)
MakeConfusionMatrix(users, [4, 6, 8, 10], 0, [0.1, 1, 10])
# for clusters in [3, 4, 5, 6, 7, 8]:
# EvaluateRegression(users, clusters, 0)
# EvaluateRegressionPerCandidate(users, clusters, 0)
avg_line_length: 37.566929 | max_line_length: 118 | alphanum_fraction: 0.667854

=== Record 2 ===
hexsha: 4a1722bc0cc582a48bfff34394bf3211b751f442 | size: 4,818 | ext: py | lang: Python
path: src/offline/news/model-update-embedding/src/train.py
repo: shenshaoyong/recommender-system-dev-workshop-code @ ce422627181472ad513f473b65bf42410c46304a
licenses: ["Apache-2.0"] | stars: 1 (2021-07-14T09:15:40.000Z) | issues: null | forks: null
content:
from __future__ import print_function
import os
import sys
import math
import pickle
import boto3
import numpy as np
import kg
import pandas as pd
# from tqdm import tqdm
import time
import argparse
import json
import logging
import re
import dglke
# tqdm.pandas()
# pandarallel.initialize(progress_bar=True)
# bucket = os.environ.get("BUCKET_NAME", " ")
# raw_data_folder = os.environ.get("RAW_DATA", " ")
# logger = logging.getLogger()
# logger.setLevel(logging.INFO)
# tqdm_notebook().pandas()
print("dglke version:", dglke.__version__)
########################################
# Sync data from S3
########################################
def sync_s3(file_name_list, s3_folder, local_folder):
for f in file_name_list:
print("file preparation: download src key {} to dst key {}".format(os.path.join(
s3_folder, f), os.path.join(local_folder, f)))
s3client.download_file(bucket, os.path.join(
s3_folder, f), os.path.join(local_folder, f))
def write_to_s3(filename, bucket, key):
print("upload s3://{}/{}".format(bucket, key))
with open(filename, 'rb') as f: # Read in binary mode
# return s3client.upload_fileobj(f, bucket, key)
return s3client.put_object(
ACL='bucket-owner-full-control',
Bucket=bucket,
Key=key,
Body=f
)
def write_str_to_s3(content, bucket, key):
print("write s3://{}/{}, content={}".format(bucket, key, content))
s3client.put_object(Body=str(content).encode(
"utf8"), Bucket=bucket, Key=key, ACL='bucket-owner-full-control')
region = None
param_path = os.path.join('/opt/ml/', 'input/config/hyperparameters.json')
if os.path.exists(param_path):
print("load param from {}".format(param_path))
with open(param_path) as f:
hp = json.load(f)
bucket = hp['bucket']
prefix = hp['prefix']
region = hp.get("region")
else:
parser = argparse.ArgumentParser()
parser.add_argument('--bucket', type=str)
parser.add_argument('--prefix', type=str)
parser.add_argument("--region", type=str, help="aws region")
args, _ = parser.parse_known_args()
bucket = args.bucket
prefix = args.prefix
if args.region:
region = args.region
if region:
print("region:", region)
boto3.setup_default_session(region_name=region)
if prefix.endswith("/"):
prefix = prefix[:-1]
print("bucket={}".format(bucket))
print("prefix='{}'".format(prefix))
s3client = boto3.client('s3')
out_s3_path = "s3://{}/{}/feature/content/inverted-list".format(bucket, prefix)
local_folder = 'info'
if not os.path.exists(local_folder):
os.makedirs(local_folder)
# prepare model for batch process
meta_file_prefix = "{}/model/meta_files".format(prefix)
os.environ['GRAPH_BUCKET'] = bucket
os.environ['KG_DBPEDIA_KEY'] = '{}/kg_dbpedia.txt'.format(meta_file_prefix)
os.environ['KG_ENTITY_KEY'] = '{}/entities_dbpedia.dict'.format(
meta_file_prefix)
os.environ['KG_RELATION_KEY'] = '{}/relations_dbpedia.dict'.format(
meta_file_prefix)
os.environ['KG_DBPEDIA_TRAIN_KEY'] = '{}/kg_dbpedia_train.txt'.format(
meta_file_prefix)
os.environ['KG_ENTITY_TRAIN_KEY'] = '{}/entities_dbpedia_train.dict'.format(
meta_file_prefix)
os.environ['KG_RELATION_TRAIN_KEY'] = '{}/relations_dbpedia_train.dict'.format(
meta_file_prefix)
os.environ['KG_ENTITY_INDUSTRY_KEY'] = '{}/entity_industry.txt'.format(
meta_file_prefix)
os.environ['KG_VOCAB_KEY'] = '{}/vocab.json'.format(meta_file_prefix)
os.environ['DATA_INPUT_KEY'] = ''
os.environ['TRAIN_OUTPUT_KEY'] = '{}/model/rank/content/dkn_embedding_latest/'.format(
prefix)
kg_path = os.environ['GRAPH_BUCKET']
dbpedia_key = os.environ['KG_DBPEDIA_KEY']
entity_key = os.environ['KG_ENTITY_KEY']
relation_key = os.environ['KG_RELATION_KEY']
dbpedia_train_key = os.environ['KG_DBPEDIA_TRAIN_KEY']
entity_train_key = os.environ['KG_ENTITY_TRAIN_KEY']
relation_train_key = os.environ['KG_RELATION_TRAIN_KEY']
entity_industry_key = os.environ['KG_ENTITY_INDUSTRY_KEY']
vocab_key = os.environ['KG_VOCAB_KEY']
data_input_key = os.environ['DATA_INPUT_KEY']
train_output_key = os.environ['TRAIN_OUTPUT_KEY']
env = {
'GRAPH_BUCKET': kg_path,
'KG_DBPEDIA_KEY': dbpedia_key,
'KG_ENTITY_KEY': entity_key,
'KG_RELATION_KEY': relation_key,
'KG_DBPEDIA_TRAIN_KEY': dbpedia_train_key,
'KG_ENTITY_TRAIN_KEY': entity_train_key,
'KG_RELATION_TRAIN_KEY': relation_train_key,
'KG_ENTITY_INDUSTRY_KEY': entity_industry_key,
'KG_VOCAB_KEY': vocab_key,
'DATA_INPUT_KEY': data_input_key,
'TRAIN_OUTPUT_KEY': train_output_key
}
print("Kg env: {}".format(env))
graph = kg.Kg(env, region=region) # Where we keep the model when it's loaded
# model = encoding.encoding(graph, env)
graph.train()
# graph.train(max_step=2000)
avg_line_length: 31.907285 | max_line_length: 88 | alphanum_fraction: 0.697177

=== Record 3 ===
hexsha: 4a17230546b098643b2dfed1560dce43de4cbac9 | size: 17,538 | ext: py | lang: Python
path: Lib/test/test_gettext.py
repo: deadsnakes/python2.3 @ 0b4a6871ca57123c10aa48cc2a5d2b7c0ee3c849
licenses: ["PSF-2.0"] | stars: 1 (2020-11-26T18:53:46.000Z) | issues: null | forks: 1 (2019-04-11T11:27:01.000Z)
content:
import os
import base64
import shutil
import gettext
import unittest
from test.test_support import run_suite
# TODO:
# - Add new tests, for example for "dgettext"
# - Remove dummy tests: for example, testing single and double quotes
#   makes no sense here; it would if we were testing a parser (i.e. pygettext)
# - Tests should have only one assert.
GNU_MO_DATA = '''\
3hIElQAAAAAGAAAAHAAAAEwAAAALAAAAfAAAAAAAAACoAAAAFQAAAKkAAAAjAAAAvwAAAKEAAADj
AAAABwAAAIUBAAALAAAAjQEAAEUBAACZAQAAFgAAAN8CAAAeAAAA9gIAAKEAAAAVAwAABQAAALcD
AAAJAAAAvQMAAAEAAAADAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAABQAAAAYAAAACAAAAAFJh
eW1vbmQgTHV4dXJ5IFlhY2gtdABUaGVyZSBpcyAlcyBmaWxlAFRoZXJlIGFyZSAlcyBmaWxlcwBU
aGlzIG1vZHVsZSBwcm92aWRlcyBpbnRlcm5hdGlvbmFsaXphdGlvbiBhbmQgbG9jYWxpemF0aW9u
CnN1cHBvcnQgZm9yIHlvdXIgUHl0aG9uIHByb2dyYW1zIGJ5IHByb3ZpZGluZyBhbiBpbnRlcmZh
Y2UgdG8gdGhlIEdOVQpnZXR0ZXh0IG1lc3NhZ2UgY2F0YWxvZyBsaWJyYXJ5LgBtdWxsdXNrAG51
ZGdlIG51ZGdlAFByb2plY3QtSWQtVmVyc2lvbjogMi4wClBPLVJldmlzaW9uLURhdGU6IDIwMDAt
MDgtMjkgMTI6MTktMDQ6MDAKTGFzdC1UcmFuc2xhdG9yOiBKLiBEYXZpZCBJYsOhw7FleiA8ai1k
YXZpZEBub29zLmZyPgpMYW5ndWFnZS1UZWFtOiBYWCA8cHl0aG9uLWRldkBweXRob24ub3JnPgpN
SU1FLVZlcnNpb246IDEuMApDb250ZW50LVR5cGU6IHRleHQvcGxhaW47IGNoYXJzZXQ9aXNvLTg4
NTktMQpDb250ZW50LVRyYW5zZmVyLUVuY29kaW5nOiBub25lCkdlbmVyYXRlZC1CeTogcHlnZXR0
ZXh0LnB5IDEuMQpQbHVyYWwtRm9ybXM6IG5wbHVyYWxzPTI7IHBsdXJhbD1uIT0xOwoAVGhyb2F0
d29iYmxlciBNYW5ncm92ZQBIYXkgJXMgZmljaGVybwBIYXkgJXMgZmljaGVyb3MAR3V2ZiB6YnFo
eXIgY2ViaXZxcmYgdmFncmVhbmd2YmFueXZtbmd2YmEgbmFxIHlicG55dm1uZ3ZiYQpmaGNjYmVn
IHNiZSBsYmhlIENsZ3ViYSBjZWJ0ZW56ZiBvbCBjZWJpdnF2YXQgbmEgdmFncmVzbnByIGdiIGd1
ciBUQUgKdHJnZ3JrZyB6cmZmbnRyIHBuZ255YnQgeXZvZW5lbC4AYmFjb24Ad2luayB3aW5rAA==
'''
UMO_DATA = '''\
3hIElQAAAAACAAAAHAAAACwAAAAFAAAAPAAAAAAAAABQAAAABAAAAFEAAAAPAQAAVgAAAAQAAABm
AQAAAQAAAAIAAAAAAAAAAAAAAAAAAAAAYWLDngBQcm9qZWN0LUlkLVZlcnNpb246IDIuMApQTy1S
ZXZpc2lvbi1EYXRlOiAyMDAzLTA0LTExIDEyOjQyLTA0MDAKTGFzdC1UcmFuc2xhdG9yOiBCYXJy
eSBBLiBXQXJzYXcgPGJhcnJ5QHB5dGhvbi5vcmc+Ckxhbmd1YWdlLVRlYW06IFhYIDxweXRob24t
ZGV2QHB5dGhvbi5vcmc+Ck1JTUUtVmVyc2lvbjogMS4wCkNvbnRlbnQtVHlwZTogdGV4dC9wbGFp
bjsgY2hhcnNldD11dGYtOApDb250ZW50LVRyYW5zZmVyLUVuY29kaW5nOiA3Yml0CkdlbmVyYXRl
ZC1CeTogbWFudWFsbHkKAMKkeXoA
'''
MMO_DATA = '''\
3hIElQAAAAABAAAAHAAAACQAAAADAAAALAAAAAAAAAA4AAAAeAEAADkAAAABAAAAAAAAAAAAAAAA
UHJvamVjdC1JZC1WZXJzaW9uOiBObyBQcm9qZWN0IDAuMApQT1QtQ3JlYXRpb24tRGF0ZTogV2Vk
IERlYyAxMSAwNzo0NDoxNSAyMDAyClBPLVJldmlzaW9uLURhdGU6IDIwMDItMDgtMTQgMDE6MTg6
NTgrMDA6MDAKTGFzdC1UcmFuc2xhdG9yOiBKb2huIERvZSA8amRvZUBleGFtcGxlLmNvbT4KSmFu
ZSBGb29iYXIgPGpmb29iYXJAZXhhbXBsZS5jb20+Ckxhbmd1YWdlLVRlYW06IHh4IDx4eEBleGFt
cGxlLmNvbT4KTUlNRS1WZXJzaW9uOiAxLjAKQ29udGVudC1UeXBlOiB0ZXh0L3BsYWluOyBjaGFy
c2V0PWlzby04ODU5LTE1CkNvbnRlbnQtVHJhbnNmZXItRW5jb2Rpbmc6IHF1b3RlZC1wcmludGFi
bGUKR2VuZXJhdGVkLUJ5OiBweWdldHRleHQucHkgMS4zCgA=
'''
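# A sketch (not part of the original test) of how blobs like those above can be
# regenerated: compile the .po with msgfmt into a .mo file, then base64-encode it.
def _encode_mo(mo_path):
    fp = open(mo_path, 'rb')
    try:
        return base64.encodestring(fp.read())
    finally:
        fp.close()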
LOCALEDIR = os.path.join('xx', 'LC_MESSAGES')
MOFILE = os.path.join(LOCALEDIR, 'gettext.mo')
UMOFILE = os.path.join(LOCALEDIR, 'ugettext.mo')
MMOFILE = os.path.join(LOCALEDIR, 'metadata.mo')
try:
LANG = os.environ['LANGUAGE']
except:
LANG = 'en'
class GettextBaseTest(unittest.TestCase):
def setUp(self):
os.makedirs(LOCALEDIR)
fp = open(MOFILE, 'wb')
fp.write(base64.decodestring(GNU_MO_DATA))
fp.close()
fp = open(UMOFILE, 'wb')
fp.write(base64.decodestring(UMO_DATA))
fp.close()
fp = open(MMOFILE, 'wb')
fp.write(base64.decodestring(MMO_DATA))
fp.close()
os.environ['LANGUAGE'] = 'xx'
def tearDown(self):
os.environ['LANGUAGE'] = LANG
shutil.rmtree(os.path.split(LOCALEDIR)[0])
class GettextTestCase1(GettextBaseTest):
def setUp(self):
GettextBaseTest.setUp(self)
self.localedir = os.curdir
self.mofile = MOFILE
gettext.install('gettext', self.localedir)
def test_some_translations(self):
eq = self.assertEqual
# test some translations
eq(_('albatross'), 'albatross')
eq(_(u'mullusk'), 'bacon')
eq(_(r'Raymond Luxury Yach-t'), 'Throatwobbler Mangrove')
eq(_(ur'nudge nudge'), 'wink wink')
def test_double_quotes(self):
eq = self.assertEqual
# double quotes
eq(_("albatross"), 'albatross')
eq(_(u"mullusk"), 'bacon')
eq(_(r"Raymond Luxury Yach-t"), 'Throatwobbler Mangrove')
eq(_(ur"nudge nudge"), 'wink wink')
def test_triple_single_quotes(self):
eq = self.assertEqual
# triple single quotes
eq(_('''albatross'''), 'albatross')
eq(_(u'''mullusk'''), 'bacon')
eq(_(r'''Raymond Luxury Yach-t'''), 'Throatwobbler Mangrove')
eq(_(ur'''nudge nudge'''), 'wink wink')
def test_triple_double_quotes(self):
eq = self.assertEqual
# triple double quotes
eq(_("""albatross"""), 'albatross')
eq(_(u"""mullusk"""), 'bacon')
eq(_(r"""Raymond Luxury Yach-t"""), 'Throatwobbler Mangrove')
eq(_(ur"""nudge nudge"""), 'wink wink')
def test_multiline_strings(self):
eq = self.assertEqual
# multiline strings
eq(_('''This module provides internationalization and localization
support for your Python programs by providing an interface to the GNU
gettext message catalog library.'''),
'''Guvf zbqhyr cebivqrf vagreangvbanyvmngvba naq ybpnyvmngvba
fhccbeg sbe lbhe Clguba cebtenzf ol cebivqvat na vagresnpr gb gur TAH
trggrkg zrffntr pngnybt yvoenel.''')
def test_the_alternative_interface(self):
eq = self.assertEqual
# test the alternative interface
fp = open(self.mofile, 'rb')
t = gettext.GNUTranslations(fp)
fp.close()
# Install the translation object
t.install()
eq(_('nudge nudge'), 'wink wink')
# Try unicode return type
t.install(unicode=True)
eq(_('mullusk'), 'bacon')
class GettextTestCase2(GettextBaseTest):
def setUp(self):
GettextBaseTest.setUp(self)
self.localedir = os.curdir
# Set up the bindings
gettext.bindtextdomain('gettext', self.localedir)
gettext.textdomain('gettext')
# For convenience
self._ = gettext.gettext
def test_bindtextdomain(self):
self.assertEqual(gettext.bindtextdomain('gettext'), self.localedir)
def test_textdomain(self):
self.assertEqual(gettext.textdomain(), 'gettext')
def test_some_translations(self):
eq = self.assertEqual
# test some translations
eq(self._('albatross'), 'albatross')
eq(self._(u'mullusk'), 'bacon')
eq(self._(r'Raymond Luxury Yach-t'), 'Throatwobbler Mangrove')
eq(self._(ur'nudge nudge'), 'wink wink')
def test_double_quotes(self):
eq = self.assertEqual
# double quotes
eq(self._("albatross"), 'albatross')
eq(self._(u"mullusk"), 'bacon')
eq(self._(r"Raymond Luxury Yach-t"), 'Throatwobbler Mangrove')
eq(self._(ur"nudge nudge"), 'wink wink')
def test_triple_single_quotes(self):
eq = self.assertEqual
# triple single quotes
eq(self._('''albatross'''), 'albatross')
eq(self._(u'''mullusk'''), 'bacon')
eq(self._(r'''Raymond Luxury Yach-t'''), 'Throatwobbler Mangrove')
eq(self._(ur'''nudge nudge'''), 'wink wink')
def test_triple_double_quotes(self):
eq = self.assertEqual
# triple double quotes
eq(self._("""albatross"""), 'albatross')
eq(self._(u"""mullusk"""), 'bacon')
eq(self._(r"""Raymond Luxury Yach-t"""), 'Throatwobbler Mangrove')
eq(self._(ur"""nudge nudge"""), 'wink wink')
def test_multiline_strings(self):
eq = self.assertEqual
# multiline strings
eq(self._('''This module provides internationalization and localization
support for your Python programs by providing an interface to the GNU
gettext message catalog library.'''),
'''Guvf zbqhyr cebivqrf vagreangvbanyvmngvba naq ybpnyvmngvba
fhccbeg sbe lbhe Clguba cebtenzf ol cebivqvat na vagresnpr gb gur TAH
trggrkg zrffntr pngnybt yvoenel.''')
class PluralFormsTestCase(GettextBaseTest):
def setUp(self):
GettextBaseTest.setUp(self)
self.mofile = MOFILE
def test_plural_forms1(self):
eq = self.assertEqual
x = gettext.ngettext('There is %s file', 'There are %s files', 1)
eq(x, 'Hay %s fichero')
x = gettext.ngettext('There is %s file', 'There are %s files', 2)
eq(x, 'Hay %s ficheros')
def test_plural_forms2(self):
eq = self.assertEqual
fp = open(self.mofile, 'rb')
t = gettext.GNUTranslations(fp)
fp.close()
x = t.ngettext('There is %s file', 'There are %s files', 1)
eq(x, 'Hay %s fichero')
x = t.ngettext('There is %s file', 'There are %s files', 2)
eq(x, 'Hay %s ficheros')
def test_hu(self):
eq = self.assertEqual
f = gettext.c2py('0')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")
def test_de(self):
eq = self.assertEqual
f = gettext.c2py('n != 1')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "10111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111")
def test_fr(self):
eq = self.assertEqual
f = gettext.c2py('n>1')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "00111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111")
def test_gd(self):
eq = self.assertEqual
f = gettext.c2py('n==1 ? 0 : n==2 ? 1 : 2')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "20122222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222")
def test_gd2(self):
eq = self.assertEqual
# Tests the combination of parentheses and "?:"
f = gettext.c2py('n==1 ? 0 : (n==2 ? 1 : 2)')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "20122222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222")
def test_lt(self):
eq = self.assertEqual
f = gettext.c2py('n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "20111111112222222222201111111120111111112011111111201111111120111111112011111111201111111120111111112011111111222222222220111111112011111111201111111120111111112011111111201111111120111111112011111111")
def test_ru(self):
eq = self.assertEqual
f = gettext.c2py('n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "20111222222222222222201112222220111222222011122222201112222220111222222011122222201112222220111222222011122222222222222220111222222011122222201112222220111222222011122222201112222220111222222011122222")
def test_pl(self):
eq = self.assertEqual
f = gettext.c2py('n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "20111222222222222222221112222222111222222211122222221112222222111222222211122222221112222222111222222211122222222222222222111222222211122222221112222222111222222211122222221112222222111222222211122222")
def test_sl(self):
eq = self.assertEqual
f = gettext.c2py('n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3')
s = ''.join([ str(f(x)) for x in range(200) ])
eq(s, "30122333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333012233333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333")
def test_security(self):
raises = self.assertRaises
# Test for a dangerous expression
raises(ValueError, gettext.c2py, "os.chmod('/etc/passwd',0777)")
class UnicodeTranslationsTest(GettextBaseTest):
def setUp(self):
GettextBaseTest.setUp(self)
fp = open(UMOFILE, 'rb')
try:
self.t = gettext.GNUTranslations(fp)
finally:
fp.close()
self._ = self.t.ugettext
def test_unicode_msgid(self):
unless = self.failUnless
unless(isinstance(self._(''), unicode))
unless(isinstance(self._(u''), unicode))
def test_unicode_msgstr(self):
eq = self.assertEqual
eq(self._(u'ab\xde'), u'\xa4yz')
class WeirdMetadataTest(GettextBaseTest):
def setUp(self):
GettextBaseTest.setUp(self)
fp = open(MMOFILE, 'rb')
try:
try:
self.t = gettext.GNUTranslations(fp)
except:
self.tearDown()
raise
finally:
fp.close()
def test_weird_metadata(self):
info = self.t.info()
self.assertEqual(info['last-translator'],
'John Doe <jdoe@example.com>\nJane Foobar <jfoobar@example.com>')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(GettextTestCase1))
suite.addTest(unittest.makeSuite(GettextTestCase2))
suite.addTest(unittest.makeSuite(PluralFormsTestCase))
suite.addTest(unittest.makeSuite(UnicodeTranslationsTest))
suite.addTest(unittest.makeSuite(WeirdMetadataTest))
return suite
def test_main():
run_suite(suite())
if __name__ == '__main__':
test_main()
# For reference, here's the .po file used to create the GNU_MO_DATA above.
#
# The original version was automatically generated from the sources with
# pygettext. Later it was manually modified to add plural forms support.
'''
# Dummy translation for the Python test_gettext.py module.
# Copyright (C) 2001 Python Software Foundation
# Barry Warsaw <barry@python.org>, 2000.
#
msgid ""
msgstr ""
"Project-Id-Version: 2.0\n"
"PO-Revision-Date: 2003-04-11 14:32-0400\n"
"Last-Translator: J. David Ibanez <j-david@noos.fr>\n"
"Language-Team: XX <python-dev@python.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=iso-8859-1\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: pygettext.py 1.1\n"
"Plural-Forms: nplurals=2; plural=n!=1;\n"
#: test_gettext.py:19 test_gettext.py:25 test_gettext.py:31 test_gettext.py:37
#: test_gettext.py:51 test_gettext.py:80 test_gettext.py:86 test_gettext.py:92
#: test_gettext.py:98
msgid "nudge nudge"
msgstr "wink wink"
#: test_gettext.py:16 test_gettext.py:22 test_gettext.py:28 test_gettext.py:34
#: test_gettext.py:77 test_gettext.py:83 test_gettext.py:89 test_gettext.py:95
msgid "albatross"
msgstr ""
#: test_gettext.py:18 test_gettext.py:24 test_gettext.py:30 test_gettext.py:36
#: test_gettext.py:79 test_gettext.py:85 test_gettext.py:91 test_gettext.py:97
msgid "Raymond Luxury Yach-t"
msgstr "Throatwobbler Mangrove"
#: test_gettext.py:17 test_gettext.py:23 test_gettext.py:29 test_gettext.py:35
#: test_gettext.py:56 test_gettext.py:78 test_gettext.py:84 test_gettext.py:90
#: test_gettext.py:96
msgid "mullusk"
msgstr "bacon"
#: test_gettext.py:40 test_gettext.py:101
msgid ""
"This module provides internationalization and localization\n"
"support for your Python programs by providing an interface to the GNU\n"
"gettext message catalog library."
msgstr ""
"Guvf zbqhyr cebivqrf vagreangvbanyvmngvba naq ybpnyvmngvba\n"
"fhccbeg sbe lbhe Clguba cebtenzf ol cebivqvat na vagresnpr gb gur TAH\n"
"trggrkg zrffntr pngnybt yvoenel."
# Manually added, as neither pygettext nor xgettext support plural forms
# in Python.
msgid "There is %s file"
msgid_plural "There are %s files"
msgstr[0] "Hay %s fichero"
msgstr[1] "Hay %s ficheros"
'''
# Here's the second example .po file, used to generate the UMO_DATA
# containing UTF-8 encoded Unicode strings
'''
# Dummy translation for the Python test_gettext.py module.
# Copyright (C) 2001 Python Software Foundation
# Barry Warsaw <barry@python.org>, 2000.
#
msgid ""
msgstr ""
"Project-Id-Version: 2.0\n"
"PO-Revision-Date: 2003-04-11 12:42-0400\n"
"Last-Translator: Barry A. WArsaw <barry@python.org>\n"
"Language-Team: XX <python-dev@python.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 7bit\n"
"Generated-By: manually\n"
#: nofile:0
msgid "ab\xc3\x9e"
msgstr "\xc2\xa4yz"
'''
# Here's the third example po file, used to generate MMO_DATA
'''
msgid ""
msgstr ""
"Project-Id-Version: No Project 0.0\n"
"POT-Creation-Date: Wed Dec 11 07:44:15 2002\n"
"PO-Revision-Date: 2002-08-14 01:18:58+00:00\n"
"Last-Translator: John Doe <jdoe@example.com>\n"
"Jane Foobar <jfoobar@example.com>\n"
"Language-Team: xx <xx@example.com>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=iso-8859-15\n"
"Content-Transfer-Encoding: quoted-printable\n"
"Generated-By: pygettext.py 1.3\n"
'''
avg_line_length: 38.973333 | max_line_length: 217 | alphanum_fraction: 0.717243

=== Record 4 ===
hexsha: 4a17230e44b6828d642f6f6e5280fb7a11660f5d | size: 5,351 | ext: py | lang: Python
path: tests/generator/test_rom.py
repo: 13thProgression/peas-blockchain @ 8e058cbfe0c1ab73f7c1ec41bedb39071c63141c
licenses: ["Apache-2.0"] | stars: 2 (2021-08-16T17:45:07.000Z – 2021-09-18T19:00:58.000Z) | issues: 4 (2021-09-26T15:50:20.000Z – 2021-10-06T06:18:51.000Z) | forks: 3 (2021-09-29T19:08:41.000Z – 2022-03-15T08:47:28.000Z)
content:
from clvm_tools import binutils
from clvm_tools.clvmc import compile_clvm_text
from peas.full_node.generator import run_generator
from peas.full_node.mempool_check_conditions import get_name_puzzle_conditions
from peas.types.blockchain_format.program import Program, SerializedProgram
from peas.types.blockchain_format.sized_bytes import bytes32
from peas.types.condition_with_args import ConditionWithArgs
from peas.types.name_puzzle_condition import NPC
from peas.types.generator_types import BlockGenerator, GeneratorArg
from peas.util.clvm import int_to_bytes
from peas.util.condition_tools import ConditionOpcode
from peas.util.ints import uint32
from peas.wallet.puzzles.load_clvm import load_clvm
MAX_COST = int(1e15)
COST_PER_BYTE = int(12000)
DESERIALIZE_MOD = load_clvm("peaslisp_deserialisation.clvm", package_or_requirement="peas.wallet.puzzles")
GENERATOR_CODE = """
(mod (deserialize-mod historical-generators)
(defun first-block (deserialize-mod historical-generators)
(a deserialize-mod (list (f historical-generators))))
(defun second-block (deserialize-mod historical-generators)
(a deserialize-mod (r historical-generators)))
(defun go (deserialize-mod historical-generators)
(c (first-block deserialize-mod historical-generators)
(second-block deserialize-mod historical-generators)
))
(go deserialize-mod historical-generators)
)
"""
COMPILED_GENERATOR_CODE = bytes.fromhex(
"ff02ffff01ff04ffff02ff04ffff04ff02ffff04ff05ffff04ff0bff8080808080ffff02"
"ff06ffff04ff02ffff04ff05ffff04ff0bff808080808080ffff04ffff01ffff02ff05ff"
"1380ff02ff05ff2b80ff018080"
)
COMPILED_GENERATOR_CODE = bytes(Program.to(compile_clvm_text(GENERATOR_CODE, [])))
FIRST_GENERATOR = Program.to(
binutils.assemble('((parent_id (c 1 (q "puzzle blob")) 50000 "solution is here" extra data for coin))')
).as_bin()
SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin()
FIRST_GENERATOR = Program.to(
binutils.assemble(
"""
((0x0000000000000000000000000000000000000000000000000000000000000000 1 50000
((51 0x0000000000000000000000000000000000000000000000000000000000000001 500)) "extra" "data" "for" "coin" ))"""
)
).as_bin()
SECOND_GENERATOR = Program.to(binutils.assemble("(extra data for block)")).as_bin()
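# NOTE: COMPILED_GENERATOR_CODE, FIRST_GENERATOR and SECOND_GENERATOR are each
# bound twice above; the later assignments are the ones the tests actually use.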
def to_sp(sexp) -> SerializedProgram:
return SerializedProgram.from_bytes(bytes(sexp))
def block_generator() -> BlockGenerator:
generator_args = [GeneratorArg(uint32(0), to_sp(FIRST_GENERATOR)), GeneratorArg(uint32(1), to_sp(SECOND_GENERATOR))]
return BlockGenerator(to_sp(COMPILED_GENERATOR_CODE), generator_args)
EXPECTED_ABBREVIATED_COST = 108379
EXPECTED_COST = 113415
EXPECTED_OUTPUT = (
"ffffffa00000000000000000000000000000000000000000000000000000000000000000"
"ff01ff8300c350ffffff33ffa00000000000000000000000000000000000000000000000"
"000000000000000001ff8201f48080ff856578747261ff8464617461ff83666f72ff8463"
"6f696e8080ff856578747261ff8464617461ff83666f72ff85626c6f636b80"
)
class TestROM:
def test_rom_inputs(self):
# this test checks that the generator just works
# It's useful for debugging the generator prior to having the ROM invoke it.
args = Program.to([DESERIALIZE_MOD, [FIRST_GENERATOR, SECOND_GENERATOR]])
sp = to_sp(COMPILED_GENERATOR_CODE)
cost, r = sp.run_with_cost(MAX_COST, args)
assert cost == EXPECTED_ABBREVIATED_COST
assert r.as_bin().hex() == EXPECTED_OUTPUT
def test_get_name_puzzle_conditions(self):
# this tests that extra block or coin data doesn't confuse `get_name_puzzle_conditions`
gen = block_generator()
cost, r = run_generator(gen, max_cost=MAX_COST)
print(r)
npc_result = get_name_puzzle_conditions(gen, max_cost=MAX_COST, cost_per_byte=COST_PER_BYTE, safe_mode=False)
assert npc_result.error is None
assert npc_result.clvm_cost == EXPECTED_COST
cond_1 = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bytes([0] * 31 + [1]), int_to_bytes(500)])
CONDITIONS = [
(ConditionOpcode.CREATE_COIN, [cond_1]),
]
npc = NPC(
coin_name=bytes32.fromhex("e8538c2d14f2a7defae65c5c97f5d4fae7ee64acef7fec9d28ad847a0880fd03"),
puzzle_hash=bytes32.fromhex("9dcf97a184f32623d11a73124ceb99a5709b083721e878a16d78f596718ba7b2"),
conditions=CONDITIONS,
)
assert npc_result.npc_list == [npc]
def test_coin_extras(self):
# the ROM supports extra data after a coin. This test checks that it actually gets passed through
gen = block_generator()
cost, r = run_generator(gen, max_cost=MAX_COST)
coin_spends = r.first()
for coin_spend in coin_spends.as_iter():
extra_data = coin_spend.rest().rest().rest().rest()
assert extra_data.as_atom_list() == b"extra data for coin".split()
def test_block_extras(self):
# the ROM supports extra data after the coin spend list. This test checks that it actually gets passed through
gen = block_generator()
cost, r = run_generator(gen, max_cost=MAX_COST)
extra_block_data = r.rest()
assert extra_block_data.as_atom_list() == b"extra data for block".split()
avg_line_length: 39.345588 | max_line_length: 120 | alphanum_fraction: 0.744721

=== Record 5 ===
hexsha: 4a1724911663165b0241f657d18fff72cfa61cb6 | size: 4,907 | ext: py | lang: Python
path: test/test_unit_connection.py
repo: richiverse/snowflake-connector-python @ 1dd45059ba06bdfeb840914982df51f1b6b913a7
licenses: ["Apache-2.0"] | stars: null | issues: null | forks: null
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved.
#
import os
import snowflake.connector
from snowflake.connector.auth import (
delete_temporary_credential_file,
)
from snowflake.connector.compat import PY2
if PY2:
from mock import patch
else:
from unittest.mock import patch
@patch(
'snowflake.connector.auth_webbrowser.AuthByWebBrowser.authenticate')
@patch(
'snowflake.connector.network.SnowflakeRestful._post_request'
)
def test_connect_externalbrowser(
mockSnowflakeRestfulPostRequest,
mockAuthByBrowserAuthenticate):
"""
    Connect with authenticator=externalbrowser mock.
"""
os.environ['SF_TEMPORARY_CREDENTIAL_CACHE_DIR'] = os.getenv(
"WORKSPACE", os.path.expanduser("~"))
def mock_post_request(url, headers, json_body, **kwargs):
global mock_cnt
ret = None
if mock_cnt == 0:
# return from /v1/login-request
ret = {
u'success': True,
u'message': None,
u'data': {
u'token': u'TOKEN',
u'masterToken': u'MASTER_TOKEN',
u'idToken': u'ID_TOKEN',
}}
elif mock_cnt == 1:
# return from /token-request
ret = {
u'success': True,
u'message': None,
u'data': {
u'sessionToken': u'NEW_TOKEN',
}}
elif mock_cnt == 2:
# return from USE WAREHOUSE TESTWH_NEW
ret = {
u'success': True,
u'message': None,
u'data': {
u'finalDatabase': 'TESTDB',
u'finalWarehouse': 'TESTWH_NEW',
}}
elif mock_cnt == 3:
# return from USE DATABASE TESTDB_NEW
ret = {
u'success': True,
u'message': None,
u'data': {
u'finalDatabase': 'TESTDB_NEW',
u'finalWarehouse': 'TESTWH_NEW',
}}
elif mock_cnt == 4:
# return from SELECT 1
ret = {
u'success': True,
u'message': None,
u'data': {
u'finalDatabase': 'TESTDB_NEW',
u'finalWarehouse': 'TESTWH_NEW',
}}
mock_cnt += 1
return ret
global mock_cnt
mock_cnt = 0
# pre-authentication doesn't matter
mockAuthByBrowserAuthenticate.return_value = None
# POST requests mock
mockSnowflakeRestfulPostRequest.side_effect = mock_post_request
delete_temporary_credential_file()
mock_cnt = 0
account = 'testaccount'
user = 'testuser'
authenticator = 'externalbrowser'
# first connection
con = snowflake.connector.connect(
account=account,
user=user,
authenticator=authenticator,
database='TESTDB',
warehouse='TESTWH',
)
assert con._rest.token == u'TOKEN'
assert con._rest.master_token == u'MASTER_TOKEN'
assert con._rest.id_token == u'ID_TOKEN'
# second connection that uses the id token to get the session token
con = snowflake.connector.connect(
account=account,
user=user,
authenticator=authenticator,
database='TESTDB_NEW', # override the database
warehouse='TESTWH_NEW', # override the warehouse
)
assert con._rest.token == u'NEW_TOKEN'
assert con._rest.master_token is None
assert con._rest.id_token == 'ID_TOKEN'
assert con.database == 'TESTDB_NEW'
assert con.warehouse == 'TESTWH_NEW'
@patch(
'snowflake.connector.network.SnowflakeRestful._post_request'
)
def test_connect_with_service_name(mockSnowflakeRestfulPostRequest):
def mock_post_request(url, headers, json_body, **kwargs):
global mock_cnt
ret = None
if mock_cnt == 0:
# return from /v1/login-request
ret = {
u'success': True,
u'message': None,
u'data': {
u'token': u'TOKEN',
u'masterToken': u'MASTER_TOKEN',
u'idToken': u'ID_TOKEN',
u'parameters': [
{'name': 'SERVICE_NAME', 'value': "FAKE_SERVICE_NAME"}
],
}}
return ret
# POST requests mock
mockSnowflakeRestfulPostRequest.side_effect = mock_post_request
global mock_cnt
mock_cnt = 0
account = 'testaccount'
user = 'testuser'
# connection
con = snowflake.connector.connect(
account=account,
user=user,
password='testpassword',
database='TESTDB',
warehouse='TESTWH',
)
assert con.service_name == 'FAKE_SERVICE_NAME'
avg_line_length: 28.864706 | max_line_length: 78 | alphanum_fraction: 0.557367

=== Record 6 ===
hexsha: 4a1724b5d3b3fe1888267a8eadcc1b16fa815be8 | size: 113 | ext: py | lang: Python
path: code/pyFoamCompressCaseFiles.py
repo: sosohungry/pyfoam @ b19e40a0ef1f41268930122226660414722178e6
licenses: ["MIT"] | stars: null | issues: null | forks: null
content:
#! /usr/bin/env python
from PyFoam.Applications.CompressCaseFiles import CompressCaseFiles
CompressCaseFiles()
avg_line_length: 18.833333 | max_line_length: 67 | alphanum_fraction: 0.831858

=== Record 7 ===
hexsha: 4a1725f6924bd4375834af5f4d5a0ce6e62579a5 | size: 22,178 | ext: py | lang: Python
path: tslearn/svm/svm.py
repo: hoangph3/tslearn @ c589de380398379f2587f8cc812571d2a6d75938
licenses: ["BSD-2-Clause"] | stars: null | issues: null | forks: null
content:
from sklearn.svm import SVC, SVR
from sklearn.base import ClassifierMixin, RegressorMixin
from sklearn.utils import deprecated
from sklearn.utils import check_array, check_X_y
from sklearn.utils.validation import check_is_fitted
import numpy
from ..metrics import cdist_gak, gamma_soft_dtw, VARIABLE_LENGTH_METRICS
from ..utils import to_time_series_dataset, check_dims, to_sklearn_dataset
from ..bases import TimeSeriesBaseEstimator
import warnings
__author__ = 'Romain Tavenard romain.tavenard[at]univ-rennes2.fr'
class TimeSeriesSVMMixin:
def _preprocess_sklearn(self, X, y=None, fit_time=False):
force_all_finite = self.kernel not in VARIABLE_LENGTH_METRICS
if y is None:
X = check_array(X, allow_nd=True,
force_all_finite=force_all_finite)
else:
X, y = check_X_y(X, y, allow_nd=True,
force_all_finite=force_all_finite)
X = to_time_series_dataset(X)
if fit_time:
self._X_fit = X
if self.gamma == "auto":
self.gamma_ = gamma_soft_dtw(X)
else:
self.gamma_ = self.gamma
self.classes_ = numpy.unique(y)
else:
check_is_fitted(self, ['svm_estimator_', '_X_fit'])
X = check_dims(
X,
X_fit_dims=self._X_fit.shape,
extend=True,
check_n_features_only=(self.kernel in VARIABLE_LENGTH_METRICS)
)
if self.kernel in VARIABLE_LENGTH_METRICS:
assert self.kernel == "gak"
self.estimator_kernel_ = "precomputed"
if fit_time:
sklearn_X = cdist_gak(X,
sigma=numpy.sqrt(self.gamma_ / 2.),
n_jobs=self.n_jobs,
verbose=self.verbose)
else:
sklearn_X = cdist_gak(X,
self._X_fit,
sigma=numpy.sqrt(self.gamma_ / 2.),
n_jobs=self.n_jobs,
verbose=self.verbose)
else:
self.estimator_kernel_ = self.kernel
sklearn_X = to_sklearn_dataset(X)
if y is None:
return sklearn_X
else:
return sklearn_X, y
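# A minimal sketch (synthetic data, not from the tslearn test suite) of the
# kernel handling above: at fit time the GAK matrix is square (train vs. train),
# while at predict time it is rectangular (test vs. train).
def _demo_gak_kernel_shapes():
    rng = numpy.random.RandomState(0)
    X_train = rng.rand(10, 32, 1)  # 10 series of length 32, 1 dimension
    X_test = rng.rand(4, 32, 1)
    K_fit = cdist_gak(X_train, sigma=1.)           # shape (10, 10)
    K_pred = cdist_gak(X_test, X_train, sigma=1.)  # shape (4, 10)
    return K_fit.shape, K_pred.shape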
class TimeSeriesSVC(TimeSeriesSVMMixin, ClassifierMixin,
TimeSeriesBaseEstimator):
"""Time-series specific Support Vector Classifier.
Parameters
----------
C : float, optional (default=1.0)
Penalty parameter C of the error term.
kernel : string, optional (default='gak')
Specifies the kernel type to be used in the algorithm.
It must be one of 'gak' or a kernel accepted by ``sklearn.svm.SVC``.
If none is given, 'gak' will be used. If a callable is given it is
used to pre-compute the kernel matrix from data matrices; that matrix
should be an array of shape ``(n_samples, n_samples)``.
degree : int, optional (default=3)
Degree of the polynomial kernel function ('poly').
Ignored by all other kernels.
gamma : float, optional (default='auto')
Kernel coefficient for 'gak', 'rbf', 'poly' and 'sigmoid'.
If gamma is 'auto' then:
- for 'gak' kernel, it is computed based on a sampling of the training
set (cf :ref:`tslearn.metrics.gamma_soft_dtw <fun-tslearn.metrics.gamma_soft_dtw>`)
- for other kernels (eg. 'rbf'), 1/n_features will be used.
coef0 : float, optional (default=0.0)
Independent term in kernel function.
It is only significant in 'poly' and 'sigmoid'.
shrinking : boolean, optional (default=True)
Whether to use the shrinking heuristic.
probability : boolean, optional (default=False)
Whether to enable probability estimates. This must be enabled prior
to calling `fit`, and will slow down that method.
Also, probability estimates are not guaranteed to match predict output.
See our :ref:`dedicated user guide section <kernels-ml>`
for more details.
tol : float, optional (default=1e-3)
Tolerance for stopping criterion.
cache_size : float, optional (default=200.0)
Specify the size of the kernel cache (in MB).
class_weight : {dict, 'balanced'}, optional
Set the parameter C of class i to class_weight[i]*C for
SVC. If not given, all classes are supposed to have
weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
n_jobs : int or None, optional (default=None)
The number of jobs to run in parallel for GAK cross-similarity matrix
computations.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
        ``-1`` means using all processors. See scikit-learn's
`Glossary <https://scikit-learn.org/stable/glossary.html#term-n-jobs>`_
for more details.
verbose : int, default: 0
Enable verbose output. Note that this setting takes advantage of a
per-process runtime setting in libsvm that, if enabled, may not work
properly in a multithreaded context.
max_iter : int, optional (default=-1)
Hard limit on iterations within solver, or -1 for no limit.
decision_function_shape : 'ovo', 'ovr', default='ovr'
Whether to return a one-vs-rest ('ovr') decision function of shape
(n_samples, n_classes) as all other classifiers, or the original
one-vs-one ('ovo') decision function of libsvm which has shape
(n_samples, n_classes * (n_classes - 1) / 2).
random_state : int, RandomState instance or None, optional (default=None)
The seed of the pseudo random number generator to use when shuffling
the data. If int, random_state is the seed used by the random number
generator; If RandomState instance, random_state is the random number
generator; If None, the random number generator is the RandomState
instance used by `np.random`.
Attributes
----------
support_ : array-like, shape = [n_SV]
Indices of support vectors.
n_support_ : array-like, dtype=int32, shape = [n_class]
Number of support vectors for each class.
support_vectors_ : list of arrays of shape [n_SV, sz, d]
List of support vectors in tslearn dataset format, one array per class
dual_coef_ : array, shape = [n_class-1, n_SV]
Coefficients of the support vector in the decision function.
For multiclass, coefficient for all 1-vs-1 classifiers.
The layout of the coefficients in the multiclass case is somewhat
non-trivial. See the section about multi-class classification in the
SVM section of the User Guide of ``sklearn`` for details.
coef_ : array, shape = [n_class-1, n_features]
Weights assigned to the features (coefficients in the primal
problem). This is only available in the case of a linear kernel.
`coef_` is a readonly property derived from `dual_coef_` and
`support_vectors_`.
intercept_ : array, shape = [n_class * (n_class-1) / 2]
Constants in decision function.
svm_estimator_ : sklearn.svm.SVC
The underlying sklearn estimator
Examples
--------
>>> from tslearn.generators import random_walk_blobs
>>> X, y = random_walk_blobs(n_ts_per_blob=10, sz=64, d=2, n_blobs=2)
>>> clf = TimeSeriesSVC(kernel="gak", gamma="auto", probability=True)
>>> clf.fit(X, y).predict(X).shape
(20,)
>>> sv = clf.support_vectors_
>>> len(sv) # should be equal to the nr of classes in the clf problem
2
>>> sv[0].shape # doctest: +ELLIPSIS
(..., 64, 2)
>>> sv_sum = sum([sv_i.shape[0] for sv_i in sv])
>>> sv_sum == clf.svm_estimator_.n_support_.sum()
True
>>> clf.decision_function(X).shape
(20,)
>>> clf.predict_log_proba(X).shape
(20, 2)
>>> clf.predict_proba(X).shape
(20, 2)
References
----------
Fast Global Alignment Kernels.
Marco Cuturi.
ICML 2011.
"""
def __init__(self, C=1.0, kernel="gak", degree=3, gamma="auto", coef0=0.0,
shrinking=True, probability=False, tol=0.001, cache_size=200,
class_weight=None, n_jobs=None, verbose=0, max_iter=-1,
decision_function_shape="ovr", random_state=None):
self.C = C
self.kernel = kernel
self.degree = degree
self.gamma = gamma
self.coef0 = coef0
self.shrinking = shrinking
self.probability = probability
self.tol = tol
self.cache_size = cache_size
self.class_weight = class_weight
self.n_jobs = n_jobs
self.verbose = verbose
self.max_iter = max_iter
self.decision_function_shape = decision_function_shape
self.random_state = random_state
@property
def n_iter_(self):
warnings.warn('n_iter_ is always set to 1 for TimeSeriesSVC, since '
'it is non-trivial to access the underlying libsvm')
return 1
@deprecated('The use of '
'`support_vectors_time_series_` is deprecated in '
'tslearn v0.4 and will be removed in v0.6. Use '
'`support_vectors_` property instead.')
def support_vectors_time_series_(self, X=None):
warnings.warn('The use of '
'`support_vectors_time_series_` is deprecated in '
'tslearn v0.4 and will be removed in v0.6. Use '
'`support_vectors_` property instead.')
check_is_fitted(self, '_X_fit')
return self._X_fit[self.svm_estimator_.support_]
@property
def support_vectors_(self):
check_is_fitted(self, '_X_fit')
sv = []
idx_start = 0
for cl in range(len(self.svm_estimator_.n_support_)):
idx_end = idx_start + self.svm_estimator_.n_support_[cl]
indices = self.svm_estimator_.support_[idx_start:idx_end]
sv.append(self._X_fit[indices])
idx_start += self.svm_estimator_.n_support_[cl]
return sv
def fit(self, X, y, sample_weight=None):
"""Fit the SVM model according to the given training data.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
y : array-like of shape=(n_ts, )
Time series labels.
sample_weight : array-like of shape (n_samples,), default=None
Per-sample weights. Rescale C per sample. Higher weights force the
classifier to put more emphasis on these points.
"""
sklearn_X, y = self._preprocess_sklearn(X, y, fit_time=True)
self.svm_estimator_ = SVC(
C=self.C, kernel=self.estimator_kernel_, degree=self.degree,
gamma=self.gamma_, coef0=self.coef0, shrinking=self.shrinking,
probability=self.probability, tol=self.tol,
cache_size=self.cache_size, class_weight=self.class_weight,
verbose=self.verbose, max_iter=self.max_iter,
decision_function_shape=self.decision_function_shape,
random_state=self.random_state
)
self.svm_estimator_.fit(sklearn_X, y, sample_weight=sample_weight)
return self
def predict(self, X):
"""Predict class for a given set of time series.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
Returns
-------
array of shape=(n_ts, ) or (n_ts, n_classes), depending on the shape
of the label vector provided at training time.
Index of the cluster each sample belongs to or class probability
matrix, depending on what was provided at training time.
"""
sklearn_X = self._preprocess_sklearn(X, fit_time=False)
return self.svm_estimator_.predict(sklearn_X)
def decision_function(self, X):
"""Evaluates the decision function for the samples in X.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
Returns
-------
ndarray of shape (n_samples, n_classes * (n_classes-1) / 2)
Returns the decision function of the sample for each class
in the model.
If decision_function_shape='ovr', the shape is (n_samples,
n_classes)."""
sklearn_X = self._preprocess_sklearn(X, fit_time=False)
return self.svm_estimator_.decision_function(sklearn_X)
def predict_log_proba(self, X):
"""Predict class log-probabilities for a given set of time series.
Note that probability estimates are not guaranteed to match predict
output.
See our :ref:`dedicated user guide section <kernels-ml>`
for more details.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
Returns
-------
array of shape=(n_ts, n_classes),
Class probability matrix.
"""
sklearn_X = self._preprocess_sklearn(X, fit_time=False)
return self.svm_estimator_.predict_log_proba(sklearn_X)
def predict_proba(self, X):
"""Predict class probability for a given set of time series.
Note that probability estimates are not guaranteed to match predict
output.
See our :ref:`dedicated user guide section <kernels-ml>`
for more details.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
Returns
-------
array of shape=(n_ts, n_classes),
Class probability matrix.
"""
sklearn_X = self._preprocess_sklearn(X, fit_time=False)
return self.svm_estimator_.predict_proba(sklearn_X)
def _more_tags(self):
return {'non_deterministic': True, 'allow_nan': True,
'allow_variable_length': True,
"_xfail_checks": {
"check_sample_weights_invariance": (
"zero sample_weight is not equivalent to removing samples"
),
}}
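# Illustrative usage sketch for TimeSeriesSVC (mirrors the doctest style used
# for TimeSeriesSVR below; `random_walk_blobs` generates two labeled blobs of
# 10 series each, so predictions have shape (20,)):
#
#     >>> from tslearn.generators import random_walk_blobs
#     >>> X, y = random_walk_blobs(n_ts_per_blob=10, sz=64, d=2, n_blobs=2)
#     >>> clf = TimeSeriesSVC(kernel="gak", gamma="auto")
#     >>> clf.fit(X, y).predict(X).shape
#     (20,)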
class TimeSeriesSVR(TimeSeriesSVMMixin, RegressorMixin,
TimeSeriesBaseEstimator):
"""Time-series specific Support Vector Regressor.
Parameters
----------
C : float, optional (default=1.0)
Penalty parameter C of the error term.
kernel : string, optional (default='gak')
Specifies the kernel type to be used in the algorithm.
It must be one of 'gak' or a kernel accepted by ``sklearn.svm.SVC``.
If none is given, 'gak' will be used. If a callable is given it is
used to pre-compute the kernel matrix from data matrices; that matrix
should be an array of shape ``(n_samples, n_samples)``.
degree : int, optional (default=3)
Degree of the polynomial kernel function ('poly').
Ignored by all other kernels.
gamma : float, optional (default='auto')
Kernel coefficient for 'gak', 'rbf', 'poly' and 'sigmoid'.
If gamma is 'auto' then:
- for 'gak' kernel, it is computed based on a sampling of the training
set (cf :ref:`tslearn.metrics.gamma_soft_dtw <fun-tslearn.metrics.gamma_soft_dtw>`)
- for other kernels (e.g. 'rbf'), 1/n_features will be used.
coef0 : float, optional (default=0.0)
Independent term in kernel function.
It is only significant in 'poly' and 'sigmoid'.
tol : float, optional (default=1e-3)
Tolerance for stopping criterion.
epsilon : float, optional (default=0.1)
Epsilon in the epsilon-SVR model. It specifies the epsilon-tube
within which no penalty is associated in the training loss function
with points predicted within a distance epsilon from the actual
value.
shrinking : boolean, optional (default=True)
Whether to use the shrinking heuristic.
cache_size : float, optional (default=200.0)
Specify the size of the kernel cache (in MB).
n_jobs : int or None, optional (default=None)
The number of jobs to run in parallel for GAK cross-similarity matrix
computations.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See scikit-learn's
`Glossary <https://scikit-learn.org/stable/glossary.html#term-n-jobs>`_
for more details.
verbose : int, default: 0
Enable verbose output. Note that this setting takes advantage of a
per-process runtime setting in libsvm that, if enabled, may not work
properly in a multithreaded context.
max_iter : int, optional (default=-1)
Hard limit on iterations within solver, or -1 for no limit.
Attributes
----------
support_ : array-like, shape = [n_SV]
Indices of support vectors.
support_vectors_ : array of shape [n_SV, sz, d]
Support vectors in tslearn dataset format
dual_coef_ : array, shape = [1, n_SV]
Coefficients of the support vector in the decision function.
coef_ : array, shape = [1, n_features]
Weights assigned to the features (coefficients in the primal
problem). This is only available in the case of a linear kernel.
`coef_` is readonly property derived from `dual_coef_` and
`support_vectors_`.
intercept_ : array, shape = [1]
Constants in decision function.
sample_weight : array-like, shape = [n_samples]
Individual weights for each sample
svm_estimator_ : sklearn.svm.SVR
The underlying sklearn estimator
Examples
--------
>>> from tslearn.generators import random_walk_blobs
>>> X, y = random_walk_blobs(n_ts_per_blob=10, sz=64, d=2, n_blobs=2)
>>> import numpy
>>> y = y.astype(float) + numpy.random.randn(20) * .1
>>> reg = TimeSeriesSVR(kernel="gak", gamma="auto")
>>> reg.fit(X, y).predict(X).shape
(20,)
>>> sv = reg.support_vectors_
>>> sv.shape # doctest: +ELLIPSIS
(..., 64, 2)
>>> sv.shape[0] <= 20
True
References
----------
Fast Global Alignment Kernels.
Marco Cuturi.
ICML 2011.
"""
def __init__(self, C=1.0, kernel="gak", degree=3, gamma="auto",
coef0=0.0, tol=0.001, epsilon=0.1, shrinking=True,
cache_size=200, n_jobs=None, verbose=0, max_iter=-1):
self.C = C
self.kernel = kernel
self.degree = degree
self.gamma = gamma
self.coef0 = coef0
self.tol = tol
self.epsilon = epsilon
self.shrinking = shrinking
self.cache_size = cache_size
self.n_jobs = n_jobs
self.verbose = verbose
self.max_iter = max_iter
@property
def n_iter_(self):
warnings.warn('n_iter_ is always set to 1 for TimeSeriesSVR, since '
'it is non-trivial to access the underlying libsvm')
return 1
@deprecated('The use of '
'`support_vectors_time_series_` is deprecated in '
'tslearn v0.4 and will be removed in v0.6. Use '
'`support_vectors_` property instead.')
def support_vectors_time_series_(self, X=None):
warnings.warn('The use of '
'`support_vectors_time_series_` is deprecated in '
'tslearn v0.4 and will be removed in v0.6. Use '
'`support_vectors_` property instead.')
check_is_fitted(self, '_X_fit')
return self._X_fit[self.svm_estimator_.support_]
@property
def support_vectors_(self):
check_is_fitted(self, '_X_fit')
return self._X_fit[self.svm_estimator_.support_]
def fit(self, X, y, sample_weight=None):
"""Fit the SVM model according to the given training data.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
y : array-like of shape=(n_ts, )
Time series labels.
sample_weight : array-like of shape (n_samples,), default=None
Per-sample weights. Rescale C per sample. Higher weights force the
classifier to put more emphasis on these points.
"""
sklearn_X, y = self._preprocess_sklearn(X, y, fit_time=True)
self.svm_estimator_ = SVR(
C=self.C, kernel=self.estimator_kernel_, degree=self.degree,
gamma=self.gamma_, coef0=self.coef0, shrinking=self.shrinking,
tol=self.tol, cache_size=self.cache_size,
verbose=self.verbose, max_iter=self.max_iter
)
self.svm_estimator_.fit(sklearn_X, y, sample_weight=sample_weight)
return self
def predict(self, X):
"""Predict class for a given set of time series.
Parameters
----------
X : array-like of shape=(n_ts, sz, d)
Time series dataset.
Returns
-------
array of shape=(n_ts, ) or (n_ts, dim_output), depending on the shape
of the target vector provided at training time.
Predicted targets
"""
sklearn_X = self._preprocess_sklearn(X, fit_time=False)
return self.svm_estimator_.predict(sklearn_X)
def _more_tags(self):
return {'non_deterministic': True, 'allow_nan': True,
'allow_variable_length': True,
"_xfail_checks": {
"check_sample_weights_invariance": (
"zero sample_weight is not equivalent to removing samples"
),
}}
| avg_line_length: 38.237931 | max_line_length: 93 | alphanum_fraction: 0.61304 |

hexsha: 4a17260a521997d4902b4dddb17d7af15c01d4bd | size: 4,773 | ext: py | lang: Python
max_stars_repo_path: volatility/volatility/plugins/mac/ldrmodules.py | max_stars_repo_name: williamclot/MemoryVisualizer | max_stars_repo_head_hexsha: 2ff9f30f07519d6578bc36c12f8d08acc9cb4383 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 2 | stars_event: 2018-07-16T13:30:40.000Z – 2018-07-17T12:02:05.000Z
max_issues_repo_path: volatility/volatility/plugins/mac/ldrmodules.py | max_issues_repo_name: williamclot/MemoryVisualizer | max_issues_repo_head_hexsha: 2ff9f30f07519d6578bc36c12f8d08acc9cb4383 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | issues_event: null – null
max_forks_repo_path: volatility/volatility/plugins/mac/ldrmodules.py | max_forks_repo_name: williamclot/MemoryVisualizer | max_forks_repo_head_hexsha: 2ff9f30f07519d6578bc36c12f8d08acc9cb4383 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | forks_event: null – null
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General
# Public License.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.plugins.mac.common as mac_common
import volatility.plugins.mac.pslist as mac_pslist
from volatility.renderers import TreeGrid
from volatility.renderers.basic import Address
class mac_ldrmodules(mac_pslist.mac_pslist):
"""Compares the output of proc maps with the list of libraries from libdl"""
def calculate(self):
mac_common.set_plugin_members(self)
procs = mac_pslist.mac_pslist(self._config).calculate()
proc_maps = {}
dl_maps = {}
seen_starts = []
for task in procs:
proc_maps[task.obj_offset] = {}
proc_as = task.get_process_address_space()
for map in task.get_proc_maps():
sig = proc_as.read(map.start, 4)
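# Keep only mappings that start with a Mach-O header magic
# (0xfeedface = 32-bit, 0xfeedfacf = 64-bit, both read little-endian);
# non-executable rw-/r-- regions are skipped just below.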
if sig in ['\xce\xfa\xed\xfe', '\xcf\xfa\xed\xfe']:
prot = map.get_perms()
if prot in ["rw-", "r--"]:
continue
fname = map.get_path()
proc_maps[task.obj_offset][map.start.v()] = (task, proc_as, fname)
dl_maps[task.obj_offset] = {}
for so in task.get_dyld_maps():
dl_maps[task.obj_offset][so.imageLoadAddress] = (task, proc_as, str(so.imageFilePath))
for task_offset in dl_maps:
for vm_start in dl_maps[task_offset]:
seen_starts.append(vm_start)
(task, proc_as, vm_name) = dl_maps[task_offset][vm_start]
yield (task_offset, task, proc_as, vm_start, vm_name, proc_maps, dl_maps)
for task_offset in proc_maps:
for vm_start in proc_maps[task_offset]:
if vm_start in seen_starts:
continue
(task, proc_as, vm_name) = proc_maps[task_offset][vm_start]
yield (task_offset, task, proc_as, vm_start, vm_name, proc_maps, dl_maps)
def unified_output(self, data):
return TreeGrid([("Pid", int),
("Name", str),
("Start", Address),
("File Path", str),
("Kernel", str),
("Dyld", str),
], self.generator(data))
def generator(self, data):
for task_offset, task, proc_as, vm_start, map_name, proc_maps, dl_maps in data:
if vm_start in proc_maps[task_offset]:
pmaps = "True"
else:
pmaps = "False"
if vm_start in dl_maps[task_offset]:
dmaps = "True"
else:
dmaps = "False"
yield(0, [
int(task.p_pid),
str(task.p_comm),
Address(vm_start),
str(map_name),
str(pmaps),
str(dmaps),
])
def render_text(self, outfd, data):
self.table_header(outfd, [("Pid", "8"),
("Name", "16"),
("Start", "#018x"),
("File Path", "100"),
("Kernel", "6"),
("Dyld", "6"),
])
for task_offset, task, proc_as, vm_start, map_name, proc_maps, dl_maps in data:
if vm_start in proc_maps[task_offset]:
pmaps = "True"
else:
pmaps = "False"
if vm_start in dl_maps[task_offset]:
dmaps = "True"
else:
dmaps = "False"
self.table_row(outfd,
task.p_pid,
str(task.p_comm),
vm_start,
map_name,
pmaps,
dmaps)
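# Usage sketch (image path and profile are placeholders; like other
# Volatility plugins, mac_ldrmodules is normally run through the vol.py CLI):
#
#     python vol.py -f mac_memory.img --profile=<MacProfile> mac_ldrmodules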
| avg_line_length: 34.338129 | max_line_length: 102 | alphanum_fraction: 0.528808 |

hexsha: 4a172659751010b4af67318b973afa751485a574 | size: 23,508 | ext: py | lang: Python
max_stars_repo_path: GramAddict/core/device_facade.py | max_stars_repo_name: patbengr/bot | max_stars_repo_head_hexsha: 902ce2ea0cd9e9ccae5b6c58a04674939cdd7921 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | stars_event: null – null
max_issues_repo_path: GramAddict/core/device_facade.py | max_issues_repo_name: patbengr/bot | max_issues_repo_head_hexsha: 902ce2ea0cd9e9ccae5b6c58a04674939cdd7921 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | issues_event: null – null
max_forks_repo_path: GramAddict/core/device_facade.py | max_forks_repo_name: patbengr/bot | max_forks_repo_head_hexsha: 902ce2ea0cd9e9ccae5b6c58a04674939cdd7921 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | forks_event: null – null
import logging
import string
from datetime import datetime
from enum import Enum, auto
from os import getcwd, listdir
from random import randint, uniform
from re import search
from subprocess import PIPE, run
from time import sleep
import uiautomator2
from GramAddict.core.utils import random_sleep
logger = logging.getLogger(__name__)
def create_device(device_id):
try:
return DeviceFacade(device_id)
except ImportError as e:
logger.error(str(e))
return None
def get_device_info(device):
logger.debug(
f"Phone Name: {device.get_info()['productName']}, SDK Version: {device.get_info()['sdkInt']}"
)
if int(device.get_info()["sdkInt"]) < 19:
logger.warning("Only Android 4.4+ (SDK 19+) devices are supported!")
logger.debug(
f"Screen dimension: {device.get_info()['displayWidth']}x{device.get_info()['displayHeight']}"
)
logger.debug(
f"Screen resolution: {device.get_info()['displaySizeDpX']}x{device.get_info()['displaySizeDpY']}"
)
logger.debug(f"Device ID: {device.deviceV2.serial}")
class Timeout(Enum):
ZERO = auto()
SHORT = auto()
MEDIUM = auto()
LONG = auto()
class SleepTime(Enum):
ZERO = auto()
TINY = auto()
SHORT = auto()
DEFAULT = auto()
class Location(Enum):
CUSTOM = auto()
WHOLE = auto()
CENTER = auto()
BOTTOM = auto()
RIGHT = auto()
LEFT = auto()
BOTTOMRIGHT = auto()
RIGHTEDGE = auto()
TOPLEFT = auto()
class Direction(Enum):
UP = auto()
DOWN = auto()
RIGHT = auto()
LEFT = auto()
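# Minimal usage sketch (assumes a device reachable over adb; the resource id
# below is hypothetical):
#
#     device = create_device(None)  # connect to the default adb device
#     get_device_info(device)
#     btn = device.find(resourceId="com.example:id/button")
#     if btn.exists(Timeout.SHORT):
#         btn.click(Location.CENTER)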
class DeviceFacade:
deviceV2 = None # uiautomator2
def __init__(self, device_id):
self.device_id = device_id
device_ip = None
# self.deviceV2.debug = True
try:
if True:  # USB/adb connection; the Wi-Fi branch below is intentionally disabled
self.deviceV2 = (
uiautomator2.connect()
if device_id is None
else uiautomator2.connect(device_id)
)
else:
self.deviceV2 = uiautomator2.connect_adb_wifi(f"{device_ip}:5555")
except ImportError:
raise ImportError("Please install uiautomator2: pip3 install uiautomator2")
def find(
self,
index=None,
*args,
**kwargs,
):
try:
view = self.deviceV2(*args, **kwargs)
if index is not None and view.count > 1:
view = self.deviceV2(*args, **kwargs)[index]
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def back(self):
logger.debug("Press back button.")
self.deviceV2.press("back")
random_sleep()
def start_screenrecord(self, output="debug_0000.mp4", fps=20):
mp4_files = [f for f in listdir(getcwd()) if f.endswith(".mp4")]
if mp4_files != []:
last_mp4 = mp4_files[-1]
debug_number = "{0:0=4d}".format(int(last_mp4[-8:-4]) + 1)
output = f"debug_{debug_number}.mp4"
self.deviceV2.screenrecord(output, fps)
logger.warning(
f"Start screen recording: it will be saved as '{output}' in '{getcwd()}'."
)
def stop_screenrecord(self):
if self.deviceV2.screenrecord.stop():
mp4_files = [f for f in listdir(getcwd()) if f.endswith(".mp4")]
if mp4_files != []:
last_mp4 = mp4_files[-1]
logger.warning(
f"Screen recorder has been stopped succesfully! File '{last_mp4}' available in '{getcwd()}'."
)
def screenshot(self, path):
self.deviceV2.screenshot(path)
def dump_hierarchy(self, path):
xml_dump = self.deviceV2.dump_hierarchy()
with open(path, "w", encoding="utf-8") as outfile:
outfile.write(xml_dump)
def press_power(self):
self.deviceV2.press("power")
sleep(2)
def is_screen_locked(self):
data = run(
f"adb -s {self.deviceV2.serial} shell dumpsys window",
encoding="utf-8",
stdout=PIPE,
stderr=PIPE,
shell=True,
)
if data != "":
flag = search("mDreamingLockscreen=(true|false)", data.stdout)
return True if flag is not None and flag.group(1) == "true" else False
else:
logger.debug(
f"'adb -s {self.deviceV2.serial} shell dumpsys window' returns nothing!"
)
return None
def is_keyboard_show(self):
serial = self.deviceV2.serial
data = run(
f"adb -s {serial} shell dumpsys input_method",
encoding="utf-8",
stdout=PIPE,
stderr=PIPE,
shell=True,
)
if data != "":
flag = search("mInputShown=(true|false)", data.stdout)
return True if flag.group(1) == "true" else False
else:
logger.debug(
f"'adb -s {serial} shell dumpsys input_method' returns nothing!"
)
return None
def is_alive(self):
return self.deviceV2._is_alive()
def wake_up(self):
"""Make sure agent is alive or bring it back up before starting."""
if self.deviceV2 is not None:
attempts = 0
while not self.is_alive() and attempts < 5:
self.get_info()
attempts += 1
def unlock(self):
self.swipe(Direction.UP, 0.8)
sleep(2)
if self.is_screen_locked():
self.swipe(Direction.RIGHT, 0.8)
sleep(2)
def screen_off(self):
self.deviceV2.screen_off()
def get_orientation(self):
try:
return self.deviceV2._get_orientation()
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def window_size(self):
"""return (width, height)"""
try:
self.deviceV2.window_size()
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def swipe(self, direction: "Direction", scale=0.5):
"""Swipe finger in the `direction`.
Scale is the sliding distance; defaults to 50% of the screen width.
"""
swipe_dir = ""
if direction == Direction.UP:
swipe_dir = "up"
elif direction == Direction.RIGHT:
swipe_dir = "right"
elif direction == Direction.LEFT:
swipe_dir = "left"
elif direction == Direction.DOWN:
swipe_dir = "down"
logger.debug(f"Swipe {swipe_dir}, scale={scale}")
try:
self.deviceV2.swipe_ext(swipe_dir, scale=scale)
DeviceFacade.sleep_mode(SleepTime.TINY)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def swipe_points(self, sx, sy, ex, ey, random_x=True, random_y=True):
if random_x:
sx = int(sx * uniform(0.85, 1.15))
ex = int(ex * uniform(0.85, 1.15))
if random_y:
ey = int(ey * uniform(0.98, 1.02))
sy = int(sy)
try:
logger.debug(f"Swipe from: ({sx},{sy}) to ({ex},{ey}).")
self.deviceV2.swipe_points([[sx, sy], [ex, ey]], uniform(0.2, 0.5))
DeviceFacade.sleep_mode(SleepTime.TINY)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def get_info(self):
# {'currentPackageName': 'net.oneplus.launcher', 'displayHeight': 1920, 'displayRotation': 0, 'displaySizeDpX': 411,
# 'displaySizeDpY': 731, 'displayWidth': 1080, 'productName': 'OnePlus5', '
# screenOn': True, 'sdkInt': 27, 'naturalOrientation': True}
try:
return self.deviceV2.info
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
@staticmethod
def sleep_mode(mode):
mode = SleepTime.DEFAULT if mode is None else mode
if mode == SleepTime.DEFAULT:
random_sleep()
elif mode == SleepTime.TINY:
random_sleep(0, 1)
elif mode == SleepTime.SHORT:
random_sleep(1, 2)
elif mode == SleepTime.ZERO:
pass
class View:
deviceV2 = None # uiautomator2
viewV2 = None # uiautomator2
def __init__(self, view, device):
self.viewV2 = view
self.deviceV2 = device
def __iter__(self):
children = []
try:
for item in self.viewV2:
children.append(DeviceFacade.View(view=item, device=self.deviceV2))
return iter(children)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def ui_info(self):
try:
return self.viewV2.info
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def child(self, *args, **kwargs):
try:
view = self.viewV2.child(*args, **kwargs)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def sibling(self, *args, **kwargs):
try:
view = self.viewV2.sibling(*args, **kwargs)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def left(self, *args, **kwargs):
try:
view = self.viewV2.left(*args, **kwargs)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def right(self, *args, **kwargs):
try:
view = self.viewV2.right(*args, **kwargs)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def up(self, *args, **kwargs):
try:
view = self.viewV2.up(*args, **kwargs)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def down(self, *args, **kwargs):
try:
view = self.viewV2.down(*args, **kwargs)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
return DeviceFacade.View(view=view, device=self.deviceV2)
def click_gone(self, maxretry=3, interval=1.0):
try:
self.viewV2.click_gone(maxretry, interval)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def click(self, mode=None, sleep=None, coord=[], crash_report_if_fails=True):
mode = Location.WHOLE if mode is None else mode
x_abs = -1
y_abs = -1
if mode == Location.WHOLE:
x_offset = uniform(0.15, 0.85)
y_offset = uniform(0.15, 0.85)
elif mode == Location.LEFT:
x_offset = uniform(0.15, 0.4)
y_offset = uniform(0.15, 0.85)
elif mode == Location.CENTER:
x_offset = uniform(0.4, 0.6)
y_offset = uniform(0.15, 0.85)
elif mode == Location.RIGHT:
x_offset = uniform(0.6, 0.85)
y_offset = uniform(0.15, 0.85)
elif mode == Location.RIGHTEDGE:
x_offset = uniform(0.8, 0.9)
y_offset = uniform(0.30, 0.70)
elif mode == Location.BOTTOMRIGHT:
x_offset = uniform(0.8, 0.9)
y_offset = uniform(0.8, 0.9)
elif mode == Location.TOPLEFT:
x_offset = uniform(0.05, 0.15)
y_offset = uniform(0.05, 0.25)
elif mode == Location.CUSTOM:
try:
logger.debug(f"Single click ({coord[0]},{coord[1]})")
self.deviceV2.click(coord[0], coord[1])
DeviceFacade.sleep_mode(sleep)
return
except uiautomator2.JSONRPCError as e:
if crash_report_if_fails:
raise DeviceFacade.JsonRpcError(e)
else:
logger.debug("Trying to press on a obj which is gone.")
else:
x_offset = 0.5
y_offset = 0.5
try:
visible_bounds = self.get_bounds()
x_abs = int(
visible_bounds["left"]
+ (visible_bounds["right"] - visible_bounds["left"]) * x_offset
)
y_abs = int(
visible_bounds["top"]
+ (visible_bounds["bottom"] - visible_bounds["top"]) * y_offset
)
logger.debug(
f"Single click in ({x_abs},{y_abs}). Surface: ({visible_bounds['left']}-{visible_bounds['right']},{visible_bounds['top']}-{visible_bounds['bottom']})"
)
self.viewV2.click(
self.get_ui_timeout(Timeout.LONG),
offset=(x_offset, y_offset),
)
DeviceFacade.sleep_mode(sleep)
except uiautomator2.JSONRPCError as e:
if crash_report_if_fails:
raise DeviceFacade.JsonRpcError(e)
else:
logger.debug("Trying to press on a obj which is gone.")
def click_retry(self, mode=None, sleep=None, coord=[], maxretry=2):
"""return True if successfully open the element, else False"""
self.click(mode, sleep, coord)
while maxretry > 0:
# wait a little bit more before trying again
random_sleep(2, 4, modulable=False)
if not self.exists():
return True
logger.debug("UI element didn't open! Try again..")
self.click(mode, sleep, coord)
maxretry -= 1
if not self.exists():
return True
else:
logger.warning("Failed to open the UI element!")
return False
def double_click(self, padding=0.3, obj_over=0):
"""Double click randomly in the selected view using padding
padding: fraction of the view size to keep clear of the borders,
so the double click lands away from the edges.
"""
visible_bounds = self.get_bounds()
horizontal_len = visible_bounds["right"] - visible_bounds["left"]
vertical_len = visible_bounds["bottom"] - max(
visible_bounds["top"], obj_over
)
horizontal_padding = int(padding * horizontal_len)
vertical_padding = int(padding * vertical_len)
random_x = int(
uniform(
visible_bounds["left"] + horizontal_padding,
visible_bounds["right"] - horizontal_padding,
)
)
random_y = int(
uniform(
visible_bounds["top"] + vertical_padding,
visible_bounds["bottom"] - vertical_padding,
)
)
time_between_clicks = uniform(0.050, 0.140)
try:
logger.debug(
f"Double click in ({random_x},{random_y}) with t={int(time_between_clicks*1000)}ms. Surface: ({visible_bounds['left']}-{visible_bounds['right']},{visible_bounds['top']}-{visible_bounds['bottom']})."
)
self.deviceV2.double_click(
random_x, random_y, duration=time_between_clicks
)
DeviceFacade.sleep_mode(SleepTime.DEFAULT)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def scroll(self, direction):
try:
if direction == Direction.UP:
self.viewV2.scroll.toBeginning(max_swipes=1)
else:
self.viewV2.scroll.toEnd(max_swipes=1)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def fling(self, direction):
try:
if direction == Direction.UP:
self.viewV2.fling.toBeginning(max_swipes=5)
else:
self.viewV2.fling.toEnd(max_swipes=5)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def exists(self, ui_timeout=None):
try:
# Currently the methods left, right, up and down from
# uiautomator2 return None when a Selector does not exist.
# All other selectors return a UiObject with exists() == False.
# We will open a ticket to uiautomator2 to fix this inconsistency.
if self.viewV2 is None:
return False
exists = self.viewV2.exists(self.get_ui_timeout(ui_timeout))
if hasattr(self.viewV2, "count"):
if not exists and self.viewV2.count >= 1:
logger.debug(
f"BUG: exists return False, but there is/are {self.viewV2.count} element(s)!"
)
# More info about that: https://github.com/openatx/uiautomator2/issues/689"
return False
return exists
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def count_items(self):
try:
return self.viewV2.count
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def wait(self, ui_timeout=None):
try:
return self.viewV2.wait(timeout=self.get_ui_timeout(ui_timeout))
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def wait_gone(self, ui_timeout=None):
try:
return self.viewV2.wait_gone(timeout=self.get_ui_timeout(ui_timeout))
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def get_bounds(self):
try:
return self.viewV2.info["bounds"]
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def get_property(self, property):
try:
return self.viewV2.info[property]
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
@staticmethod
def get_ui_timeout(ui_timeout):
ui_timeout = Timeout.ZERO if ui_timeout is None else ui_timeout
if ui_timeout == Timeout.ZERO:
ui_timeout = 0
elif ui_timeout == Timeout.SHORT:
ui_timeout = 3
elif ui_timeout == Timeout.MEDIUM:
ui_timeout = 5
elif ui_timeout == Timeout.LONG:
ui_timeout = 8
return ui_timeout
def get_text(self, retry=True, error=True, index=None):
max_attempts = 1 if not retry else 3
attempts = 0
while attempts < max_attempts:
attempts += 1
try:
text = (
self.viewV2.info["text"]
if index is None
else self.viewV2[index].info["text"]
)
if text is None:
logger.debug(
"Could not get text. Waiting 2 seconds and trying again..."
)
sleep(2) # wait 2 seconds and retry
else:
return text
except uiautomator2.JSONRPCError as e:
if error:
raise DeviceFacade.JsonRpcError(e)
else:
return ""
logger.error(
f"Attempted to get text {attempts} times. You may have a slow network or are experiencing another problem."
)
return ""
def get_selected(self) -> bool:
try:
return self.viewV2.info["selected"]
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
def set_text(self, text):
punct_list = string.punctuation
try:
self.click(sleep=SleepTime.SHORT)
self.deviceV2.clear_text()
start = datetime.now()
random_sleep(0.3, 1, modulable=False)
word_list = text.split()
n_words = len(word_list)
i = 0
n = 1
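# Humanized typing: for each word, send the first 1-3 characters one
# at a time, then the rest of the word in a single chunk (trailing
# punctuation is sent separately), with short random pauses throughout.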
for word in word_list:
n_single_letters = randint(1, 3)
for char in word:
if i < n_single_letters:
self.deviceV2.send_keys(char, clear=False)
random_sleep(0.01, 0.1, modulable=False, logging=False)
i += 1
else:
if word[-1] in punct_list:
self.deviceV2.send_keys(word[i:-1], clear=False)
random_sleep(0.01, 0.1, modulable=False, logging=False)
self.deviceV2.send_keys(word[-1], clear=False)
random_sleep(0.01, 0.1, modulable=False, logging=False)
else:
self.deviceV2.send_keys(word[i:], clear=False)
random_sleep(0.01, 0.1, modulable=False, logging=False)
break
if n < n_words:
self.deviceV2.send_keys(" ", clear=False)
random_sleep(0.01, 0.1, modulable=False, logging=False)
i = 0
n += 1
typed_text = self.viewV2.get_text()
if (
typed_text is None
or typed_text == "Add a comment…"
or typed_text == "Message…"
or typed_text == ""
or typed_text.startswith("Comment as ")
):
logger.warning(
"Failed to write in text field, let's try in the old way.."
)
self.viewV2.set_text(text)
else:
logger.debug(
f"Text typed in: {(datetime.now()-start).total_seconds():.2f}s"
)
DeviceFacade.sleep_mode(SleepTime.SHORT)
except uiautomator2.JSONRPCError as e:
raise DeviceFacade.JsonRpcError(e)
class JsonRpcError(Exception):
pass
| avg_line_length: 36.73125 | max_line_length: 218 | alphanum_fraction: 0.526076 |

hexsha: 4a172a5612f74e6bfae1b490ef02e054a4ae7509 | size: 1,833 | ext: py | lang: Python
max_stars_repo_path: rpc_client.py | max_stars_repo_name: guimarac/dag-evaluate | max_stars_repo_head_hexsha: 7b0248536dd67ee5d6bb52b6164dfb49868366a5 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | stars_event: null – null
max_issues_repo_path: rpc_client.py | max_issues_repo_name: guimarac/dag-evaluate | max_issues_repo_head_hexsha: 7b0248536dd67ee5d6bb52b6164dfb49868366a5 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | issues_event: null – null
max_forks_repo_path: rpc_client.py | max_forks_repo_name: guimarac/dag-evaluate | max_forks_repo_head_hexsha: 7b0248536dd67ee5d6bb52b6164dfb49868366a5 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | forks_event: null – null
import json
import time
from xmlrpc.client import ServerProxy
class RPCClient(object):
def __init__(self, server_url):
self.server_url = server_url
self.server_proxy = ServerProxy(server_url)
def evaluate_pipeline(self, candidate, dataset, metrics_list, n_splits, timeout):
_dataset = dataset + '.csv'
cand_id = self._submit(candidate, _dataset, metrics_list, n_splits, timeout)
return self._get_evaluated(cand_id)
def _submit(self, candidate, dataset, metrics_list, n_splits, timeout):
return self.server_proxy.submit(
candidate, dataset, metrics_list, n_splits, timeout)
def _get_evaluated(self, candidate_id):
attempts = 0
step = 2
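# Poll until the server returns our candidate id, sleeping progressively
# longer between attempts (0s, 2s, 4s, ...).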
ev_id, results = json.loads(
self.server_proxy.get_evaluated(candidate_id))
while ev_id != candidate_id:
time.sleep(attempts)
attempts += step
ev_id, results = json.loads(
self.server_proxy.get_evaluated(candidate_id))
results['id'] = ev_id
if 'error' in results.keys():
raise ValueError(results['error'])
return results
def _get_evaluated_time(self, candidate_id):
attempts = 0
limit = 20
step = 2
ev_id, results = json.loads(
self.server_proxy.get_evaluated(candidate_id))
while ev_id != candidate_id and attempts <= limit:
time.sleep(attempts)
attempts += step
ev_id, results = json.loads(
self.server_proxy.get_evaluated(candidate_id))
results['id'] = ev_id
if 'error' in results.keys():
raise ValueError(results['error'])
return results
def get_datasets(self):
pass
def get_metrics(self):
pass
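# Usage sketch (the URL, dataset name and candidate encoding are placeholders;
# the actual formats are defined by the dag-evaluate server):
#
#     client = RPCClient("http://localhost:8080")
#     results = client.evaluate_pipeline(
#         candidate, "some_dataset", ["accuracy"], n_splits=5, timeout=120)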
| avg_line_length: 25.816901 | max_line_length: 85 | alphanum_fraction: 0.616476 |

hexsha: 4a172a6a252690d919bee020c1e2c63f3d9aa7f8 | size: 2,566 | ext: py | lang: Python
max_stars_repo_path: validations/library/ip_range.py | max_stars_repo_name: mail2nsrajesh/tripleo-validations | max_stars_repo_head_hexsha: 591a65f4dd70e4989a4340eb09a2dfc7577e8d4d | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | stars_event: null – null
max_issues_repo_path: validations/library/ip_range.py | max_issues_repo_name: mail2nsrajesh/tripleo-validations | max_issues_repo_head_hexsha: 591a65f4dd70e4989a4340eb09a2dfc7577e8d4d | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | issues_event: null – null
max_forks_repo_path: validations/library/ip_range.py | max_forks_repo_name: mail2nsrajesh/tripleo-validations | max_forks_repo_head_hexsha: 591a65f4dd70e4989a4340eb09a2dfc7577e8d4d | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | forks_event: null – null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import netaddr
from ansible.module_utils.basic import * # NOQA
def check_arguments(start, end, min_size):
'''Validate format of arguments'''
errors = []
# Check format of arguments
try:
startIP = netaddr.IPAddress(start)
except netaddr.core.AddrFormatError:
errors.append('Argument start ({}) must be an IP'.format(start))
try:
endIP = netaddr.IPAddress(end)
except netaddr.core.AddrFormatError:
errors.append('Argument end ({}) must be an IP'.format(end))
if (not errors) and (startIP.version != endIP.version):
errors.append('Arguments start, end must share the same IP version')
if min_size < 0:
errors.append('Argument min_size({}) must not be negative'
.format(min_size))
return errors
def check_IP_range(start, end, min_size):
'''Compare IP range with minimum size'''
warnings = []
iprange = netaddr.IPRange(start, end)
if len(iprange) < min_size:
warnings = [
'The IP range {} - {} contains {} addresses.'.format(
start, end, len(iprange)),
'This might not be enough for the deployment or later scaling.'
]
return warnings
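# Worked example (netaddr.IPRange is inclusive at both ends):
# check_IP_range('192.168.0.10', '192.168.0.20', min_size=20) sees a range
# of 11 addresses and therefore returns the "might not be enough" warning.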
def main():
module = AnsibleModule(argument_spec=dict(
start=dict(required=True, type='str'),
end=dict(required=True, type='str'),
min_size=dict(required=True, type='int'),
))
start = module.params.get('start')
end = module.params.get('end')
min_size = module.params.get('min_size')
# Check arguments
errors = check_arguments(start, end, min_size)
if errors:
module.fail_json(msg='\n'.join(errors))
else:
# Check IP range
warnings = check_IP_range(start, end, min_size)
if warnings:
module.exit_json(changed=True, warnings=warnings)
else:
module.exit_json(msg='success')
if __name__ == '__main__':
main()
| avg_line_length: 28.197802 | max_line_length: 76 | alphanum_fraction: 0.646142 |

hexsha: 4a172ade62b7cb0a2c752283b0114c92a7862fcf | size: 115,174 | ext: py | lang: Python
max_stars_repo_path: Statistical Methods and Data Analysis/Module 2 Assignment/venv/lib/python3.8/site-packages/matplotlib/pyplot.py | max_stars_repo_name: ZohaibZ/DataScience | max_stars_repo_head_hexsha: ba06c724293f8674375827bdf2d4f42d32788ebb | max_stars_repo_licenses: ["MIT"] | max_stars_count: 9 | stars_event: 2021-04-12T16:11:38.000Z – 2022-03-18T09:03:58.000Z
max_issues_repo_path: Statistical Methods and Data Analysis/Module 2 Assignment/venv/lib/python3.8/site-packages/matplotlib/pyplot.py | max_issues_repo_name: ZohaibZ/DataScience | max_issues_repo_head_hexsha: ba06c724293f8674375827bdf2d4f42d32788ebb | max_issues_repo_licenses: ["MIT"] | max_issues_count: 21 | issues_event: 2021-04-13T01:17:40.000Z – 2022-03-11T16:06:50.000Z
max_forks_repo_path: Statistical Methods and Data Analysis/Module 2 Assignment/venv/lib/python3.8/site-packages/matplotlib/pyplot.py | max_forks_repo_name: ZohaibZ/DataScience | max_forks_repo_head_hexsha: ba06c724293f8674375827bdf2d4f42d32788ebb | max_forks_repo_licenses: ["MIT"] | max_forks_count: 2 | forks_event: 2020-09-10T10:24:52.000Z – 2021-01-05T21:54:51.000Z
# Note: The first part of this file can be modified in place, but the latter
# part is autogenerated by the boilerplate.py script.
"""
`matplotlib.pyplot` is a state-based interface to matplotlib. It provides
a MATLAB-like way of plotting.
pyplot is mainly intended for interactive plots and simple cases of
programmatic plot generation::
import numpy as np
import matplotlib.pyplot as plt
x = np.arange(0, 5, 0.1)
y = np.sin(x)
plt.plot(x, y)
The object-oriented API is recommended for more complex plots.
"""
import functools
import importlib
import inspect
import logging
from numbers import Number
import re
import sys
import time
try:
import threading
except ImportError:
import dummy_threading as threading
from cycler import cycler
import matplotlib
import matplotlib.colorbar
import matplotlib.image
from matplotlib import rcsetup, style
from matplotlib import _pylab_helpers, interactive
from matplotlib import cbook
from matplotlib import docstring
from matplotlib.backend_bases import FigureCanvasBase, MouseButton
from matplotlib.figure import Figure, figaspect
from matplotlib.gridspec import GridSpec
from matplotlib import rcParams, rcParamsDefault, get_backend, rcParamsOrig
from matplotlib.rcsetup import interactive_bk as _interactive_bk
from matplotlib.artist import Artist
from matplotlib.axes import Axes, Subplot
from matplotlib.projections import PolarAxes
from matplotlib import mlab # for detrend_none, window_hanning
from matplotlib.scale import get_scale_names
from matplotlib import cm
from matplotlib.cm import get_cmap, register_cmap
import numpy as np
# We may not need the following imports here:
from matplotlib.colors import Normalize
from matplotlib.lines import Line2D
from matplotlib.text import Text, Annotation
from matplotlib.patches import Polygon, Rectangle, Circle, Arrow
from matplotlib.widgets import SubplotTool, Button, Slider, Widget
from .ticker import (
TickHelper, Formatter, FixedFormatter, NullFormatter, FuncFormatter,
FormatStrFormatter, ScalarFormatter, LogFormatter, LogFormatterExponent,
LogFormatterMathtext, Locator, IndexLocator, FixedLocator, NullLocator,
LinearLocator, LogLocator, AutoLocator, MultipleLocator, MaxNLocator)
_log = logging.getLogger(__name__)
_code_objs = {
cbook._rename_parameter:
cbook._rename_parameter("", "old", "new", lambda new: None).__code__,
cbook._make_keyword_only:
cbook._make_keyword_only("", "p", lambda p: None).__code__,
}
def _copy_docstring_and_deprecators(method, func=None):
if func is None:
return functools.partial(_copy_docstring_and_deprecators, method)
decorators = [docstring.copy(method)]
# Check whether the definition of *method* includes _rename_parameter or
# _make_keyword_only decorators; if so, propagate them to the pyplot
# wrapper as well.
while getattr(method, "__wrapped__", None) is not None:
for decorator_maker, code in _code_objs.items():
if method.__code__ is code:
kwargs = {
k: v.cell_contents
for k, v in zip(code.co_freevars, method.__closure__)}
assert kwargs["func"] is method.__wrapped__
kwargs.pop("func")
decorators.append(decorator_maker(**kwargs))
method = method.__wrapped__
for decorator in decorators[::-1]:
func = decorator(func)
return func
## Global ##
_IP_REGISTERED = None
_INSTALL_FIG_OBSERVER = False
def install_repl_displayhook():
"""
Install a repl display hook so that any stale figures are automatically
redrawn when control is returned to the repl.
This works both with IPython and with vanilla python shells.
"""
global _IP_REGISTERED
global _INSTALL_FIG_OBSERVER
class _NotIPython(Exception):
pass
# see if we have IPython hooks around; if so, use them
try:
if 'IPython' in sys.modules:
from IPython import get_ipython
ip = get_ipython()
if ip is None:
raise _NotIPython()
if _IP_REGISTERED:
return
def post_execute():
if matplotlib.is_interactive():
draw_all()
# IPython >= 2
try:
ip.events.register('post_execute', post_execute)
except AttributeError:
# IPython 1.x
ip.register_post_execute(post_execute)
_IP_REGISTERED = post_execute
_INSTALL_FIG_OBSERVER = False
# trigger IPython's eventloop integration, if available
from IPython.core.pylabtools import backend2gui
ipython_gui_name = backend2gui.get(get_backend())
if ipython_gui_name:
ip.enable_gui(ipython_gui_name)
else:
_INSTALL_FIG_OBSERVER = True
# import failed or ipython is not running
except (ImportError, _NotIPython):
_INSTALL_FIG_OBSERVER = True
def uninstall_repl_displayhook():
"""
Uninstall the matplotlib display hook.
.. warning::
This requires IPython >= 2; for IPython < 2 this will raise a
``NotImplementedError``.
.. warning::
If you are using vanilla python and have installed another
display hook, this will reset ``sys.displayhook`` to whatever
function was there when matplotlib installed its displayhook,
possibly discarding your changes.
"""
global _IP_REGISTERED
global _INSTALL_FIG_OBSERVER
if _IP_REGISTERED:
from IPython import get_ipython
ip = get_ipython()
try:
ip.events.unregister('post_execute', _IP_REGISTERED)
except AttributeError as err:
raise NotImplementedError("Can not unregister events "
"in IPython < 2.0") from err
_IP_REGISTERED = None
if _INSTALL_FIG_OBSERVER:
_INSTALL_FIG_OBSERVER = False
draw_all = _pylab_helpers.Gcf.draw_all
@functools.wraps(matplotlib.set_loglevel)
def set_loglevel(*args, **kwargs): # Ensure this appears in the pyplot docs.
return matplotlib.set_loglevel(*args, **kwargs)
@_copy_docstring_and_deprecators(Artist.findobj)
def findobj(o=None, match=None, include_self=True):
if o is None:
o = gcf()
return o.findobj(match, include_self=include_self)
def _get_required_interactive_framework(backend_mod):
return getattr(
backend_mod.FigureCanvas, "required_interactive_framework", None)
def switch_backend(newbackend):
"""
Close all open figures and set the Matplotlib backend.
The argument is case-insensitive. Switching to an interactive backend is
possible only if no event loop for another interactive backend has started.
Switching to and from non-interactive backends is always possible.
Parameters
----------
newbackend : str
The name of the backend to use.
"""
global _backend_mod
# make sure the init is pulled up so we can assign to it later
import matplotlib.backends
close("all")
if newbackend is rcsetup._auto_backend_sentinel:
# Don't try to fallback on the cairo-based backends as they each have
# an additional dependency (pycairo) over the agg-based backend, and
# are of worse quality.
for candidate in ["macosx", "qt5agg", "gtk3agg", "tkagg", "wxagg"]:
try:
switch_backend(candidate)
except ImportError:
continue
else:
rcParamsOrig['backend'] = candidate
return
else:
# Switching to Agg should always succeed; if it doesn't, let the
# exception propagate out.
switch_backend("agg")
rcParamsOrig["backend"] = "agg"
return
# Backends are implemented as modules, but "inherit" default method
# implementations from backend_bases._Backend. This is achieved by
# creating a "class" that inherits from backend_bases._Backend and whose
# body is filled with the module's globals.
backend_name = cbook._backend_module_name(newbackend)
class backend_mod(matplotlib.backend_bases._Backend):
locals().update(vars(importlib.import_module(backend_name)))
required_framework = _get_required_interactive_framework(backend_mod)
if required_framework is not None:
current_framework = cbook._get_running_interactive_framework()
if (current_framework and required_framework
and current_framework != required_framework):
raise ImportError(
"Cannot load backend {!r} which requires the {!r} interactive "
"framework, as {!r} is currently running".format(
newbackend, required_framework, current_framework))
_log.debug("Loaded backend %s version %s.",
newbackend, backend_mod.backend_version)
rcParams['backend'] = rcParamsDefault['backend'] = newbackend
_backend_mod = backend_mod
for func_name in ["new_figure_manager", "draw_if_interactive", "show"]:
globals()[func_name].__signature__ = inspect.signature(
getattr(backend_mod, func_name))
# Need to keep a global reference to the backend for compatibility reasons.
# See https://github.com/matplotlib/matplotlib/issues/6092
matplotlib.backends.backend = newbackend
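# E.g. ``plt.switch_backend("agg")`` selects the non-interactive Agg backend,
# which is always available and safe for headless use.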
def _warn_if_gui_out_of_main_thread():
if (_get_required_interactive_framework(_backend_mod)
and threading.current_thread() is not threading.main_thread()):
cbook._warn_external(
"Starting a Matplotlib GUI outside of the main thread will likely "
"fail.")
# This function's signature is rewritten upon backend-load by switch_backend.
def new_figure_manager(*args, **kwargs):
"""Create a new figure manager instance."""
_warn_if_gui_out_of_main_thread()
return _backend_mod.new_figure_manager(*args, **kwargs)
# This function's signature is rewritten upon backend-load by switch_backend.
def draw_if_interactive(*args, **kwargs):
return _backend_mod.draw_if_interactive(*args, **kwargs)
# This function's signature is rewritten upon backend-load by switch_backend.
def show(*args, **kwargs):
"""
Display all open figures.
In non-interactive mode, *block* defaults to True. All figures
will display and show will not return until all windows are closed.
If there are no figures, return immediately.
In interactive mode *block* defaults to False. This will ensure
that all of the figures are shown and this function immediately returns.
Parameters
----------
block : bool, optional
If `True` block and run the GUI main loop until all windows
are closed.
If `False` ensure that all windows are displayed and return
immediately. In this case, you are responsible for ensuring
that the event loop is running to have responsive figures.
See Also
--------
ion : enable interactive mode
ioff : disable interactive mode
"""
_warn_if_gui_out_of_main_thread()
return _backend_mod.show(*args, **kwargs)
def isinteractive():
"""
Return whether pyplot is in "interactive mode".
If in interactive mode then:
- newly created figures will be shown immediately
- figures will automatically redraw on change
- `.pyplot.show` will not block by default
If not in interactive mode then:
- newly created figures and changes to figures will
not be reflected until explicitly asked to be
- `.pyplot.show` will block by default
See Also
--------
ion : enable interactive mode
ioff : disable interactive mode
show : show windows (and maybe block)
pause : show windows, run GUI event loop, and block for a time
"""
return matplotlib.is_interactive()
def ioff():
"""
Turn the interactive mode off.
See Also
--------
ion : enable interactive mode
isinteractive : query current state
show : show windows (and maybe block)
pause : show windows, run GUI event loop, and block for a time
"""
matplotlib.interactive(False)
uninstall_repl_displayhook()
def ion():
"""
Turn the interactive mode on.
See Also
--------
ioff : disable interactive mode
isinteractive : query current state
show : show windows (and maybe block)
pause : show windows, run GUI event loop, and block for a time
"""
matplotlib.interactive(True)
install_repl_displayhook()
def pause(interval):
"""
Run the GUI event loop for *interval* seconds.
If there is an active figure, it will be updated and displayed before the
pause, and the GUI event loop (if any) will run during the pause.
This can be used for crude animation. For more complex animation use
:mod:`matplotlib.animation`.
If there is no active figure, sleep for *interval* seconds instead.
See Also
--------
matplotlib.animation : Complex animation
show : show figures and optional block forever
"""
manager = _pylab_helpers.Gcf.get_active()
if manager is not None:
canvas = manager.canvas
if canvas.figure.stale:
canvas.draw_idle()
show(block=False)
canvas.start_event_loop(interval)
else:
time.sleep(interval)
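# Crude-animation sketch from a user script (illustrative only), as described
# in the docstring above:
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     xs = np.linspace(0, 2 * np.pi, 200)
#     for phase in np.linspace(0, 2 * np.pi, 50):
#         plt.cla()                      # clear the current axes
#         plt.plot(xs, np.sin(xs + phase))
#         plt.pause(0.05)                # draw, run the event loop, wait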
@_copy_docstring_and_deprecators(matplotlib.rc)
def rc(group, **kwargs):
matplotlib.rc(group, **kwargs)
@_copy_docstring_and_deprecators(matplotlib.rc_context)
def rc_context(rc=None, fname=None):
return matplotlib.rc_context(rc, fname)
@_copy_docstring_and_deprecators(matplotlib.rcdefaults)
def rcdefaults():
matplotlib.rcdefaults()
if matplotlib.is_interactive():
draw_all()
# getp/get/setp are explicitly reexported so that they show up in pyplot docs.
@_copy_docstring_and_deprecators(matplotlib.artist.getp)
def getp(obj, *args, **kwargs):
return matplotlib.artist.getp(obj, *args, **kwargs)
@_copy_docstring_and_deprecators(matplotlib.artist.get)
def get(obj, *args, **kwargs):
return matplotlib.artist.get(obj, *args, **kwargs)
@_copy_docstring_and_deprecators(matplotlib.artist.setp)
def setp(obj, *args, **kwargs):
return matplotlib.artist.setp(obj, *args, **kwargs)
def xkcd(scale=1, length=100, randomness=2):
"""
Turn on `xkcd <https://xkcd.com/>`_ sketch-style drawing mode. This will
only have effect on things drawn after this function is called.
For best results, the "Humor Sans" font should be installed: it is
not included with Matplotlib.
Parameters
----------
scale : float, optional
The amplitude of the wiggle perpendicular to the source line.
length : float, optional
The length of the wiggle along the line.
randomness : float, optional
The scale factor by which the length is shrunken or expanded.
Notes
-----
This function works by setting a number of rcParams, so it will
probably override others you have set before.
If you want the effects of this function to be temporary, it can
be used as a context manager, for example::
with plt.xkcd():
# This figure will be in XKCD-style
fig1 = plt.figure()
# ...
# This figure will be in regular style
fig2 = plt.figure()
"""
return _xkcd(scale, length, randomness)
class _xkcd:
# This cannot be implemented in terms of rc_context() because this needs to
# work as a non-contextmanager too.
def __init__(self, scale, length, randomness):
self._orig = rcParams.copy()
if rcParams['text.usetex']:
raise RuntimeError(
"xkcd mode is not compatible with text.usetex = True")
from matplotlib import patheffects
rcParams.update({
'font.family': ['xkcd', 'xkcd Script', 'Humor Sans', 'Comic Neue',
'Comic Sans MS'],
'font.size': 14.0,
'path.sketch': (scale, length, randomness),
'path.effects': [
patheffects.withStroke(linewidth=4, foreground="w")],
'axes.linewidth': 1.5,
'lines.linewidth': 2.0,
'figure.facecolor': 'white',
'grid.linewidth': 0.0,
'axes.grid': False,
'axes.unicode_minus': False,
'axes.edgecolor': 'black',
'xtick.major.size': 8,
'xtick.major.width': 3,
'ytick.major.size': 8,
'ytick.major.width': 3,
})
def __enter__(self):
return self
def __exit__(self, *args):
dict.update(rcParams, self._orig)
## Figures ##
def figure(num=None, # autoincrement if None, else integer from 1-N
figsize=None, # defaults to rc figure.figsize
dpi=None, # defaults to rc figure.dpi
facecolor=None, # defaults to rc figure.facecolor
edgecolor=None, # defaults to rc figure.edgecolor
frameon=True,
FigureClass=Figure,
clear=False,
**kwargs
):
"""
Create a new figure, or activate an existing figure.
Parameters
----------
num : int or str, optional
A unique identifier for the figure.
If a figure with that identifier already exists, this figure is made
active and returned. An integer refers to the ``Figure.number``
attribute, a string refers to the figure label.
If there is no figure with the identifier or *num* is not given, a new
figure is created, made active and returned. If *num* is an int, it
will be used for the ``Figure.number`` attribute, otherwise, an
auto-generated integer value is used (starting at 1 and incremented
for each new figure). If *num* is a string, the figure label and the
window title is set to this value.
figsize : (float, float), default: :rc:`figure.figsize`
Width, height in inches.
dpi : float, default: :rc:`figure.dpi`
The resolution of the figure in dots-per-inch.
facecolor : color, default: :rc:`figure.facecolor`
The background color.
edgecolor : color, default: :rc:`figure.edgecolor`
The border color.
frameon : bool, default: True
If False, suppress drawing the figure frame.
FigureClass : subclass of `~matplotlib.figure.Figure`
Optionally use a custom `.Figure` instance.
clear : bool, default: False
If True and the figure already exists, then it is cleared.
tight_layout : bool or dict, default: :rc:`figure.autolayout`
If ``False`` use *subplotpars*. If ``True`` adjust subplot
parameters using `.tight_layout` with default padding.
When providing a dict containing the keys ``pad``, ``w_pad``,
``h_pad``, and ``rect``, the default `.tight_layout` paddings
will be overridden.
constrained_layout : bool, default: :rc:`figure.constrained_layout.use`
If ``True`` use constrained layout to adjust positioning of plot
elements. Like ``tight_layout``, but designed to be more
flexible. See
:doc:`/tutorials/intermediate/constrainedlayout_guide`
for examples. (Note: does not work with `add_subplot` or
`~.pyplot.subplot2grid`.)
**kwargs : optional
See `~.matplotlib.figure.Figure` for other possible arguments.
Returns
-------
`~matplotlib.figure.Figure`
The `.Figure` instance returned will also be passed to
new_figure_manager in the backends, which allows hooking custom
`.Figure` classes into the pyplot interface. Additional kwargs will be
passed to the `.Figure` init function.
Notes
-----
If you are creating many figures, make sure you explicitly call
`.pyplot.close` on the figures you are not using, because this will
enable pyplot to properly clean up the memory.
`~matplotlib.rcParams` defines the default values, which can be modified
in the matplotlibrc file.
"""
if figsize is None:
figsize = rcParams['figure.figsize']
if dpi is None:
dpi = rcParams['figure.dpi']
if facecolor is None:
facecolor = rcParams['figure.facecolor']
if edgecolor is None:
edgecolor = rcParams['figure.edgecolor']
allnums = get_fignums()
next_num = max(allnums) + 1 if allnums else 1
figLabel = ''
if num is None:
num = next_num
elif isinstance(num, str):
figLabel = num
allLabels = get_figlabels()
if figLabel not in allLabels:
if figLabel == 'all':
cbook._warn_external(
"close('all') closes all existing figures")
num = next_num
else:
inum = allLabels.index(figLabel)
num = allnums[inum]
else:
num = int(num) # crude validation of num argument
figManager = _pylab_helpers.Gcf.get_fig_manager(num)
if figManager is None:
max_open_warning = rcParams['figure.max_open_warning']
if len(allnums) == max_open_warning >= 1:
cbook._warn_external(
"More than %d figures have been opened. Figures "
"created through the pyplot interface "
"(`matplotlib.pyplot.figure`) are retained until "
"explicitly closed and may consume too much memory. "
"(To control this warning, see the rcParam "
"`figure.max_open_warning`)." %
max_open_warning, RuntimeWarning)
if get_backend().lower() == 'ps':
dpi = 72
figManager = new_figure_manager(num, figsize=figsize,
dpi=dpi,
facecolor=facecolor,
edgecolor=edgecolor,
frameon=frameon,
FigureClass=FigureClass,
**kwargs)
fig = figManager.canvas.figure
if figLabel:
fig.set_label(figLabel)
_pylab_helpers.Gcf._set_new_active_manager(figManager)
# Make sure backends we don't ship (e.g. the inline backend), which
# expect this to be called from plotting commands so that the figure
# shows itself, still work. There is probably a better way to do this
# in the FigureManager base class.
draw_if_interactive()
if _INSTALL_FIG_OBSERVER:
fig.stale_callback = _auto_draw_if_interactive
if clear:
figManager.canvas.figure.clear()
return figManager.canvas.figure
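# E.g. ``plt.figure("sensor data")`` creates a labeled figure on the first
# call and re-activates that same figure on later calls with the same label.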
def _auto_draw_if_interactive(fig, val):
"""
An internal helper function for making sure that auto-redrawing
works as intended in the plain python repl.
Parameters
----------
fig : Figure
A figure object which is assumed to be associated with a canvas
"""
if (val and matplotlib.is_interactive()
and not fig.canvas.is_saving()
and not fig.canvas._is_idle_drawing):
# Some artists can mark themselves as stale in the middle of drawing
# (e.g. axes position & tick labels being computed at draw time), but
# this shouldn't trigger a redraw because the current redraw will
# already take them into account.
with fig.canvas._idle_draw_cntx():
fig.canvas.draw_idle()
def gcf():
"""
Get the current figure.
If no current figure exists, a new one is created using
`~.pyplot.figure()`.
"""
figManager = _pylab_helpers.Gcf.get_active()
if figManager is not None:
return figManager.canvas.figure
else:
return figure()
def fignum_exists(num):
"""Return whether the figure with the given id exists."""
return _pylab_helpers.Gcf.has_fignum(num) or num in get_figlabels()
def get_fignums():
"""Return a list of existing figure numbers."""
return sorted(_pylab_helpers.Gcf.figs)
def get_figlabels():
"""Return a list of existing figure labels."""
figManagers = _pylab_helpers.Gcf.get_all_fig_managers()
figManagers.sort(key=lambda m: m.num)
return [m.canvas.figure.get_label() for m in figManagers]
def get_current_fig_manager():
"""
Return the figure manager of the current figure.
The figure manager is a container for the actual backend-dependent window
that displays the figure on screen.
If no current figure exists, a new one is created and its figure
manager is returned.
Returns
-------
`.FigureManagerBase` or backend-dependent subclass thereof
"""
return gcf().canvas.manager
@_copy_docstring_and_deprecators(FigureCanvasBase.mpl_connect)
def connect(s, func):
return gcf().canvas.mpl_connect(s, func)
@_copy_docstring_and_deprecators(FigureCanvasBase.mpl_disconnect)
def disconnect(cid):
return gcf().canvas.mpl_disconnect(cid)
def close(fig=None):
"""
Close a figure window.
Parameters
----------
fig : None or int or str or `.Figure`
The figure to close. There are a number of ways to specify this:
- *None*: the current figure
- `.Figure`: the given `.Figure` instance
- ``int``: a figure number
- ``str``: a figure name
- 'all': all figures
"""
if fig is None:
figManager = _pylab_helpers.Gcf.get_active()
if figManager is None:
return
else:
_pylab_helpers.Gcf.destroy(figManager)
elif fig == 'all':
_pylab_helpers.Gcf.destroy_all()
elif isinstance(fig, int):
_pylab_helpers.Gcf.destroy(fig)
elif hasattr(fig, 'int'):
# if we are dealing with a type UUID, we
# can use its integer representation
_pylab_helpers.Gcf.destroy(fig.int)
elif isinstance(fig, str):
allLabels = get_figlabels()
if fig in allLabels:
num = get_fignums()[allLabels.index(fig)]
_pylab_helpers.Gcf.destroy(num)
elif isinstance(fig, Figure):
_pylab_helpers.Gcf.destroy_fig(fig)
else:
raise TypeError("close() argument must be a Figure, an int, a string, "
"or None, not '%s'")
def clf():
"""Clear the current figure."""
gcf().clf()
def draw():
"""
Redraw the current figure.
This is used to update a figure that has been altered, but not
automatically re-drawn. If interactive mode is on (via `.ion()`), this
should be only rarely needed, but there may be ways to modify the state of
a figure without marking it as "stale". Please report these cases as bugs.
This is equivalent to calling ``fig.canvas.draw_idle()``, where ``fig`` is
the current figure.
"""
gcf().canvas.draw_idle()
@_copy_docstring_and_deprecators(Figure.savefig)
def savefig(*args, **kwargs):
fig = gcf()
res = fig.savefig(*args, **kwargs)
fig.canvas.draw_idle() # need this if 'transparent=True' to reset colors
return res
## Putting things in figures ##
def figlegend(*args, **kwargs):
return gcf().legend(*args, **kwargs)
if Figure.legend.__doc__:
figlegend.__doc__ = Figure.legend.__doc__.replace("legend(", "figlegend(")
## Axes ##
@docstring.dedent_interpd
def axes(arg=None, **kwargs):
"""
Add an axes to the current figure and make it the current axes.
Call signatures::
plt.axes()
plt.axes(rect, projection=None, polar=False, **kwargs)
plt.axes(ax)
Parameters
----------
arg : None or 4-tuple
The exact behavior of this function depends on the type:
- *None*: A new full window axes is added using
``subplot(111, **kwargs)``.
- 4-tuple of floats *rect* = ``[left, bottom, width, height]``.
A new axes is added with dimensions *rect* in normalized
(0, 1) units using `~.Figure.add_axes` on the current figure.
projection : {None, 'aitoff', 'hammer', 'lambert', 'mollweide', \
'polar', 'rectilinear', str}, optional
The projection type of the `~.axes.Axes`. *str* is the name of
a custom projection, see `~matplotlib.projections`. The default
None results in a 'rectilinear' projection.
polar : bool, default: False
If True, equivalent to projection='polar'.
sharex, sharey : `~.axes.Axes`, optional
Share the x or y `~matplotlib.axis` with sharex and/or sharey.
The axis will have the same limits, ticks, and scale as the axis
of the shared axes.
label : str
A label for the returned axes.
Returns
-------
`~.axes.Axes`, or a subclass of `~.axes.Axes`
The returned axes class depends on the projection used. It is
`~.axes.Axes` if rectilinear projection is used and
`.projections.polar.PolarAxes` if polar projection is used.
Other Parameters
----------------
**kwargs
This method also takes the keyword arguments for
the returned axes class. The keyword arguments for the
rectilinear axes class `~.axes.Axes` can be found in
the following table but there might also be other keyword
arguments if another projection is used; see the actual axes
class.
%(Axes)s
Notes
-----
If the figure already has an axes with key (*args*,
*kwargs*) then it will simply make that axes current and
return it. This behavior is deprecated. Meanwhile, if you do
not want this behavior (i.e., you want to force the creation of a
new axes), you must use a unique set of args and kwargs. The axes
*label* attribute has been exposed for this purpose: if you want
two axes that are otherwise identical to be added to the figure,
make sure you give them unique labels.
See Also
--------
.Figure.add_axes
.pyplot.subplot
.Figure.add_subplot
.Figure.subplots
.pyplot.subplots
Examples
--------
::
# Creating a new full window axes
plt.axes()
# Creating a new axes with specified dimensions and some kwargs
plt.axes((left, bottom, width, height), facecolor='w')
"""
if arg is None:
return subplot(111, **kwargs)
else:
return gcf().add_axes(arg, **kwargs)
def delaxes(ax=None):
"""
Remove an `~.axes.Axes` (defaulting to the current axes) from its figure.
"""
if ax is None:
ax = gca()
ax.remove()
def sca(ax):
"""
Set the current Axes to *ax* and the current Figure to the parent of *ax*.
"""
if not hasattr(ax.figure.canvas, "manager"):
raise ValueError("Axes parent figure is not managed by pyplot")
_pylab_helpers.Gcf.set_active(ax.figure.canvas.manager)
ax.figure.sca(ax)
## More ways of creating axes ##
@docstring.dedent_interpd
def subplot(*args, **kwargs):
"""
Add a subplot to the current figure.
Wrapper of `.Figure.add_subplot` with a difference in behavior
explained in the notes section.
Call signatures::
subplot(nrows, ncols, index, **kwargs)
subplot(pos, **kwargs)
subplot(**kwargs)
subplot(ax)
Parameters
----------
*args : int, (int, int, *index*), or `.SubplotSpec`, default: (1, 1, 1)
The position of the subplot described by one of
- Three integers (*nrows*, *ncols*, *index*). The subplot will take the
*index* position on a grid with *nrows* rows and *ncols* columns.
*index* starts at 1 in the upper left corner and increases to the
right. *index* can also be a two-tuple specifying the (*first*,
*last*) indices (1-based, and including *last*) of the subplot, e.g.,
``fig.add_subplot(3, 1, (1, 2))`` makes a subplot that spans the
upper 2/3 of the figure.
- A 3-digit integer. The digits are interpreted as if given separately
as three single-digit integers, i.e. ``fig.add_subplot(235)`` is the
same as ``fig.add_subplot(2, 3, 5)``. Note that this can only be used
if there are no more than 9 subplots.
- A `.SubplotSpec`.
projection : {None, 'aitoff', 'hammer', 'lambert', 'mollweide', \
'polar', 'rectilinear', str}, optional
The projection type of the subplot (`~.axes.Axes`). *str* is the name
of a custom projection, see `~matplotlib.projections`. The default
None results in a 'rectilinear' projection.
polar : bool, default: False
If True, equivalent to projection='polar'.
sharex, sharey : `~.axes.Axes`, optional
Share the x or y `~matplotlib.axis` with sharex and/or sharey. The
axis will have the same limits, ticks, and scale as the axis of the
shared axes.
label : str
A label for the returned axes.
Returns
-------
`.axes.SubplotBase`, or another subclass of `~.axes.Axes`
The axes of the subplot. The returned axes base class depends on
the projection used. It is `~.axes.Axes` if rectilinear projection
is used and `.projections.polar.PolarAxes` if polar projection
is used. The returned axes is then a subplot subclass of the
base class.
Other Parameters
----------------
**kwargs
This method also takes the keyword arguments for the returned axes
base class; except for the *figure* argument. The keyword arguments
for the rectilinear base class `~.axes.Axes` can be found in
the following table but there might also be other keyword
arguments if another projection is used.
%(Axes)s
Notes
-----
Creating a subplot will delete any pre-existing subplot that overlaps
with it beyond sharing a boundary::
import matplotlib.pyplot as plt
# plot a line, implicitly creating a subplot(111)
plt.plot([1, 2, 3])
# now create a subplot which represents the top plot of a grid
# with 2 rows and 1 column. Since this subplot will overlap the
# first, the plot (and its axes) previously created will be removed
plt.subplot(211)
If you do not want this behavior, use the `.Figure.add_subplot` method
or the `.pyplot.axes` function instead.
If the figure already has a subplot with key (*args*,
*kwargs*) then it will simply make that subplot current and
return it. This behavior is deprecated. Meanwhile, if you do
not want this behavior (i.e., you want to force the creation of a
new subplot), you must use a unique set of args and kwargs. The axes
*label* attribute has been exposed for this purpose: if you want
two subplots that are otherwise identical to be added to the figure,
make sure you give them unique labels.
In rare circumstances, `.add_subplot` may be called with a single
argument, a subplot axes instance already created in the
present figure but not in the figure's list of axes.
See Also
--------
.Figure.add_subplot
.pyplot.subplots
.pyplot.axes
.Figure.subplots
Examples
--------
::
plt.subplot(221)
# equivalent but more general
ax1=plt.subplot(2, 2, 1)
# add a subplot with no frame
ax2=plt.subplot(222, frameon=False)
# add a polar subplot
plt.subplot(223, projection='polar')
# add a red subplot that shares the x-axis with ax1
plt.subplot(224, sharex=ax1, facecolor='red')
# delete ax2 from the figure
plt.delaxes(ax2)
# add ax2 to the figure again
plt.subplot(ax2)
"""
# if subplot called without arguments, create subplot(1, 1, 1)
if len(args) == 0:
args = (1, 1, 1)
# This check was added because it is very easy to type
# subplot(1, 2, False) when subplots(1, 2, False) was intended
# (sharex=False, that is). In most cases, no error will
# ever occur, but mysterious behavior can result because what was
# intended to be the sharex argument is instead treated as a
# subplot index for subplot()
if len(args) >= 3 and isinstance(args[2], bool):
cbook._warn_external("The subplot index argument to subplot() appears "
"to be a boolean. Did you intend to use "
"subplots()?")
# Check for nrows and ncols, which are not valid subplot args:
if 'nrows' in kwargs or 'ncols' in kwargs:
raise TypeError("subplot() got an unexpected keyword argument 'ncols' "
"and/or 'nrows'. Did you intend to call subplots()?")
fig = gcf()
ax = fig.add_subplot(*args, **kwargs)
bbox = ax.bbox
axes_to_delete = []
for other_ax in fig.axes:
if other_ax == ax:
continue
if bbox.fully_overlaps(other_ax.bbox):
axes_to_delete.append(other_ax)
for ax_to_del in axes_to_delete:
delaxes(ax_to_del)
return ax
@cbook._make_keyword_only("3.3", "sharex")
def subplots(nrows=1, ncols=1, sharex=False, sharey=False, squeeze=True,
subplot_kw=None, gridspec_kw=None, **fig_kw):
"""
Create a figure and a set of subplots.
This utility wrapper makes it convenient to create common layouts of
subplots, including the enclosing figure object, in a single call.
Parameters
----------
nrows, ncols : int, default: 1
Number of rows/columns of the subplot grid.
sharex, sharey : bool or {'none', 'all', 'row', 'col'}, default: False
Controls sharing of properties among x (*sharex*) or y (*sharey*)
axes:
- True or 'all': x- or y-axis will be shared among all subplots.
- False or 'none': each subplot x- or y-axis will be independent.
- 'row': each subplot row will share an x- or y-axis.
- 'col': each subplot column will share an x- or y-axis.
When subplots have a shared x-axis along a column, only the x tick
labels of the bottom subplot are created. Similarly, when subplots
have a shared y-axis along a row, only the y tick labels of the first
column subplot are created. To later turn other subplots' ticklabels
on, use `~matplotlib.axes.Axes.tick_params`.
squeeze : bool, default: True
- If True, extra dimensions are squeezed out from the returned
array of `~matplotlib.axes.Axes`:
- if only one subplot is constructed (nrows=ncols=1), the
resulting single Axes object is returned as a scalar.
- for Nx1 or 1xM subplots, the returned object is a 1D numpy
object array of Axes objects.
- for NxM subplots with N>1 and M>1, the returned object is a 2D array.
- If False, no squeezing at all is done: the returned Axes object is
always a 2D array containing Axes instances, even if it ends up
being 1x1.
subplot_kw : dict, optional
Dict with keywords passed to the
`~matplotlib.figure.Figure.add_subplot` call used to create each
subplot.
gridspec_kw : dict, optional
Dict with keywords passed to the `~matplotlib.gridspec.GridSpec`
constructor used to create the grid the subplots are placed on.
**fig_kw
All additional keyword arguments are passed to the
`.pyplot.figure` call.
Returns
-------
fig : `~.figure.Figure`
ax : `.axes.Axes` or array of Axes
*ax* can be either a single `~matplotlib.axes.Axes` object or an
array of Axes objects if more than one subplot was created. The
dimensions of the resulting array can be controlled with the squeeze
keyword, see above.
Typical idioms for handling the return value are::
# using the variable ax for a single Axes
fig, ax = plt.subplots()
# using the variable axs for multiple Axes
fig, axs = plt.subplots(2, 2)
# using tuple unpacking for multiple Axes
fig, (ax1, ax2) = plt.subplots(1, 2)
fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2)
The names ``ax`` and pluralized ``axs`` are preferred over ``axes``
because for the latter it's not clear if it refers to a single
`~.axes.Axes` instance or a collection of these.
See Also
--------
.pyplot.figure
.pyplot.subplot
.pyplot.axes
.Figure.subplots
.Figure.add_subplot
Examples
--------
::
# First create some toy data:
x = np.linspace(0, 2*np.pi, 400)
y = np.sin(x**2)
# Create just a figure and only one subplot
fig, ax = plt.subplots()
ax.plot(x, y)
ax.set_title('Simple plot')
# Create two subplots and unpack the output array immediately
f, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
ax1.plot(x, y)
ax1.set_title('Sharing Y axis')
ax2.scatter(x, y)
# Create four polar axes and access them through the returned array
fig, axs = plt.subplots(2, 2, subplot_kw=dict(polar=True))
axs[0, 0].plot(x, y)
axs[1, 1].scatter(x, y)
# Share a X axis with each column of subplots
plt.subplots(2, 2, sharex='col')
# Share a Y axis with each row of subplots
plt.subplots(2, 2, sharey='row')
# Share both X and Y axes with all subplots
plt.subplots(2, 2, sharex='all', sharey='all')
# Note that this is the same as
plt.subplots(2, 2, sharex=True, sharey=True)
# Create figure number 10 with a single subplot
# and clears it if it already exists.
fig, ax = plt.subplots(num=10, clear=True)
"""
fig = figure(**fig_kw)
axs = fig.subplots(nrows=nrows, ncols=ncols, sharex=sharex, sharey=sharey,
squeeze=squeeze, subplot_kw=subplot_kw,
gridspec_kw=gridspec_kw)
return fig, axs
def subplot_mosaic(layout, *, subplot_kw=None, gridspec_kw=None,
empty_sentinel='.', **fig_kw):
"""
Build a layout of Axes based on ASCII art or nested lists.
This is a helper function to build complex GridSpec layouts visually.
.. note::
This API is provisional and may be revised in the future based on
early user feedback.
Parameters
----------
layout : list of list of {hashable or nested} or str
A visual layout of how you want your Axes to be arranged,
labeled as strings. For example ::
x = [['A panel', 'A panel', 'edge'],
['C panel', '.', 'edge']]
Produces 4 axes:
- 'A panel' which is 1 row high and spans the first two columns
- 'edge' which is 2 rows high and is on the right edge
- 'C panel' which is 1 row high and 1 column wide in the bottom left
- a blank space 1 row and 1 column wide in the bottom center
Any of the entries in the layout can be a list of lists
of the same form to create nested layouts.
If input is a str, then it must be of the form ::
'''
AAE
C.E
'''
where each character is a column and each line is a row.
This only allows single-character Axes labels and does
not allow nesting, but is very terse.
subplot_kw : dict, optional
Dictionary with keywords passed to the `.Figure.add_subplot` call
used to create each subplot.
gridspec_kw : dict, optional
Dictionary with keywords passed to the `.GridSpec` constructor used
to create the grid the subplots are placed on.
empty_sentinel : object, optional
Entry in the layout to mean "leave this space empty". Defaults
to ``'.'``. Note, if *layout* is a string, it is processed via
`inspect.cleandoc` to remove leading white space, which may
interfere with using white-space as the empty sentinel.
**fig_kw
All additional keyword arguments are passed to the
`.pyplot.figure` call.
Returns
-------
fig : `~.figure.Figure`
The new figure
dict[label, Axes]
A dictionary mapping the labels to the Axes objects.
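Examples
--------
A minimal sketch of the string form, reusing the layout shown above
(the labels are illustrative)::
fig, axd = plt.subplot_mosaic(
'''
AAE
C.E
''')
axd['A'].set_title('A panel')  # axd maps each label to its Axes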
"""
fig = figure(**fig_kw)
ax_dict = fig.subplot_mosaic(
layout,
subplot_kw=subplot_kw,
gridspec_kw=gridspec_kw,
empty_sentinel=empty_sentinel
)
return fig, ax_dict
def subplot2grid(shape, loc, rowspan=1, colspan=1, fig=None, **kwargs):
"""
Create a subplot at a specific location inside a regular grid.
Parameters
----------
shape : (int, int)
Number of rows and of columns of the grid in which to place the axis.
loc : (int, int)
Row number and column number of the axis location within the grid.
rowspan : int, default: 1
Number of rows for the axis to span downwards.
colspan : int, default: 1
Number of columns for the axis to span to the right.
fig : `.Figure`, optional
Figure to place the subplot in. Defaults to the current figure.
**kwargs
Additional keyword arguments are handed to `~.Figure.add_subplot`.
Returns
-------
`.axes.SubplotBase`, or another subclass of `~.axes.Axes`
The axes of the subplot. The returned axes base class depends on the
projection used. It is `~.axes.Axes` if rectilinear projection is used
and `.projections.polar.PolarAxes` if polar projection is used. The
returned axes is then a subplot subclass of the base class.
Notes
-----
The following call ::
ax = subplot2grid((nrows, ncols), (row, col), rowspan, colspan)
is identical to ::
fig = gcf()
gs = fig.add_gridspec(nrows, ncols)
ax = fig.add_subplot(gs[row:row+rowspan, col:col+colspan])
"""
if fig is None:
fig = gcf()
s1, s2 = shape
subplotspec = GridSpec(s1, s2).new_subplotspec(loc,
rowspan=rowspan,
colspan=colspan)
ax = fig.add_subplot(subplotspec, **kwargs)
bbox = ax.bbox
axes_to_delete = []
for other_ax in fig.axes:
if other_ax == ax:
continue
if bbox.fully_overlaps(other_ax.bbox):
axes_to_delete.append(other_ax)
for ax_to_del in axes_to_delete:
delaxes(ax_to_del)
return ax
def twinx(ax=None):
"""
Make and return a second axes that shares the *x*-axis. The new axes will
overlay *ax* (or the current axes if *ax* is *None*), and its ticks will be
on the right.
Examples
--------
:doc:`/gallery/subplots_axes_and_figures/two_scales`
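A minimal sketch (*t*, *data1* and *data2* are assumed to be
array-like)::
fig, ax1 = plt.subplots()
ax1.plot(t, data1, color='tab:blue')
ax2 = plt.twinx()  # shares the x-axis; its ticks go on the right
ax2.plot(t, data2, color='tab:red')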
"""
if ax is None:
ax = gca()
ax1 = ax.twinx()
return ax1
def twiny(ax=None):
"""
Make and return a second axes that shares the *y*-axis. The new axes will
overlay *ax* (or the current axes if *ax* is *None*), and its ticks will be
on the top.
Examples
--------
:doc:`/gallery/subplots_axes_and_figures/two_scales`
"""
if ax is None:
ax = gca()
ax1 = ax.twiny()
return ax1
def subplot_tool(targetfig=None):
"""
Launch a subplot tool window for a figure.
A :class:`matplotlib.widgets.SubplotTool` instance is returned.
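A minimal usage sketch::
fig, axs = plt.subplots(2, 2)
plt.subplot_tool(fig)  # open the adjustment window for *fig*
plt.show()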
"""
if targetfig is None:
targetfig = gcf()
with rc_context({'toolbar': 'None'}): # No nav toolbar for the toolfig.
toolfig = figure(figsize=(6, 3))
toolfig.subplots_adjust(top=0.9)
if hasattr(targetfig.canvas, "manager"): # Restore the current figure.
_pylab_helpers.Gcf.set_active(targetfig.canvas.manager)
return SubplotTool(targetfig, toolfig)
# After deprecation elapses, this can be autogenerated by boilerplate.py.
@cbook._make_keyword_only("3.3", "pad")
def tight_layout(pad=1.08, h_pad=None, w_pad=None, rect=None):
"""
Adjust the padding between and around subplots.
Parameters
----------
pad : float, default: 1.08
Padding between the figure edge and the edges of subplots,
as a fraction of the font size.
h_pad, w_pad : float, default: *pad*
Padding (height/width) between edges of adjacent subplots,
as a fraction of the font size.
rect : tuple (left, bottom, right, top), default: (0, 0, 1, 1)
A rectangle in normalized figure coordinates into which the whole
subplots area (including labels) will fit.
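Examples
--------
::
fig, axs = plt.subplots(2, 2)
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)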
"""
gcf().tight_layout(pad=pad, h_pad=h_pad, w_pad=w_pad, rect=rect)
def box(on=None):
"""
Turn the axes box on or off on the current axes.
Parameters
----------
on : bool or None
The new `~matplotlib.axes.Axes` box state. If ``None``, toggle
the state.
See Also
--------
:meth:`matplotlib.axes.Axes.set_frame_on`
:meth:`matplotlib.axes.Axes.get_frame_on`
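Examples
--------
::
plt.plot(range(5))
plt.box(False)  # hide the frame around the current axes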
"""
ax = gca()
if on is None:
on = not ax.get_frame_on()
ax.set_frame_on(on)
## Axis ##
def xlim(*args, **kwargs):
"""
Get or set the x limits of the current axes.
Call signatures::
left, right = xlim() # return the current xlim
xlim((left, right)) # set the xlim to left, right
xlim(left, right) # set the xlim to left, right
If you do not specify args, you can pass *left* or *right* as kwargs,
i.e.::
xlim(right=3) # adjust the right leaving left unchanged
xlim(left=1) # adjust the left leaving right unchanged
Setting limits turns autoscaling off for the x-axis.
Returns
-------
left, right
A tuple of the new x-axis limits.
Notes
-----
Calling this function with no arguments (e.g. ``xlim()``) is the pyplot
equivalent of calling `~.Axes.get_xlim` on the current axes.
Calling this function with arguments is the pyplot equivalent of calling
`~.Axes.set_xlim` on the current axes. All arguments are passed through.
"""
ax = gca()
if not args and not kwargs:
return ax.get_xlim()
ret = ax.set_xlim(*args, **kwargs)
return ret
def ylim(*args, **kwargs):
"""
Get or set the y-limits of the current axes.
Call signatures::
bottom, top = ylim() # return the current ylim
ylim((bottom, top)) # set the ylim to bottom, top
ylim(bottom, top) # set the ylim to bottom, top
If you do not specify args, you can alternatively pass *bottom* or
*top* as kwargs, i.e.::
ylim(top=3) # adjust the top leaving bottom unchanged
ylim(bottom=1) # adjust the bottom leaving top unchanged
Setting limits turns autoscaling off for the y-axis.
Returns
-------
bottom, top
A tuple of the new y-axis limits.
Notes
-----
Calling this function with no arguments (e.g. ``ylim()``) is the pyplot
equivalent of calling `~.Axes.get_ylim` on the current axes.
Calling this function with arguments is the pyplot equivalent of calling
`~.Axes.set_ylim` on the current axes. All arguments are passed through.
"""
ax = gca()
if not args and not kwargs:
return ax.get_ylim()
ret = ax.set_ylim(*args, **kwargs)
return ret
def xticks(ticks=None, labels=None, **kwargs):
"""
Get or set the current tick locations and labels of the x-axis.
Pass no arguments to return the current values without modifying them.
Parameters
----------
ticks : array-like, optional
The list of xtick locations. Passing an empty list removes all xticks.
labels : array-like, optional
The labels to place at the given *ticks* locations. This argument can
only be passed if *ticks* is passed as well.
**kwargs
`.Text` properties can be used to control the appearance of the labels.
Returns
-------
locs
The list of xtick locations.
labels
The list of xlabel `.Text` objects.
Notes
-----
Calling this function with no arguments (e.g. ``xticks()``) is the pyplot
equivalent of calling `~.Axes.get_xticks` and `~.Axes.get_xticklabels` on
the current axes.
Calling this function with arguments is the pyplot equivalent of calling
`~.Axes.set_xticks` and `~.Axes.set_xticklabels` on the current axes.
Examples
--------
>>> locs, labels = xticks() # Get the current locations and labels.
>>> xticks(np.arange(0, 1, step=0.2)) # Set label locations.
>>> xticks(np.arange(3), ['Tom', 'Dick', 'Sue']) # Set text labels.
>>> xticks([0, 1, 2], ['January', 'February', 'March'],
... rotation=20) # Set text labels and properties.
>>> xticks([]) # Disable xticks.
"""
ax = gca()
if ticks is None:
locs = ax.get_xticks()
if labels is not None:
raise TypeError("xticks(): Parameter 'labels' can't be set "
"without setting 'ticks'")
else:
locs = ax.set_xticks(ticks)
if labels is None:
labels = ax.get_xticklabels()
else:
labels = ax.set_xticklabels(labels, **kwargs)
for l in labels:
l.update(kwargs)
return locs, labels
def yticks(ticks=None, labels=None, **kwargs):
"""
Get or set the current tick locations and labels of the y-axis.
Pass no arguments to return the current values without modifying them.
Parameters
----------
ticks : array-like, optional
The list of ytick locations. Passing an empty list removes all yticks.
labels : array-like, optional
The labels to place at the given *ticks* locations. This argument can
only be passed if *ticks* is passed as well.
**kwargs
`.Text` properties can be used to control the appearance of the labels.
Returns
-------
locs
The list of ytick locations.
labels
The list of ylabel `.Text` objects.
Notes
-----
Calling this function with no arguments (e.g. ``yticks()``) is the pyplot
equivalent of calling `~.Axes.get_yticks` and `~.Axes.get_yticklabels` on
the current axes.
Calling this function with arguments is the pyplot equivalent of calling
`~.Axes.set_yticks` and `~.Axes.set_yticklabels` on the current axes.
Examples
--------
>>> locs, labels = yticks() # Get the current locations and labels.
>>> yticks(np.arange(0, 1, step=0.2)) # Set label locations.
>>> yticks(np.arange(3), ['Tom', 'Dick', 'Sue']) # Set text labels.
>>> yticks([0, 1, 2], ['January', 'February', 'March'],
... rotation=45) # Set text labels and properties.
>>> yticks([]) # Disable yticks.
"""
ax = gca()
if ticks is None:
locs = ax.get_yticks()
if labels is not None:
raise TypeError("yticks(): Parameter 'labels' can't be set "
"without setting 'ticks'")
else:
locs = ax.set_yticks(ticks)
if labels is None:
labels = ax.get_yticklabels()
else:
labels = ax.set_yticklabels(labels, **kwargs)
for l in labels:
l.update(kwargs)
return locs, labels
def rgrids(radii=None, labels=None, angle=None, fmt=None, **kwargs):
"""
Get or set the radial gridlines on the current polar plot.
Call signatures::
lines, labels = rgrids()
lines, labels = rgrids(radii, labels=None, angle=22.5, fmt=None, **kwargs)
When called with no arguments, `.rgrids` simply returns the tuple
(*lines*, *labels*). When called with arguments, the labels will
appear at the specified radial distances and angle.
Parameters
----------
radii : tuple with floats
The radii for the radial gridlines
labels : tuple with strings or None
The labels to use at each radial gridline. The
`matplotlib.ticker.ScalarFormatter` will be used if None.
angle : float
The angular position of the radius labels in degrees.
fmt : str or None
Format string used in `matplotlib.ticker.FormatStrFormatter`.
For example '%f'.
Returns
-------
lines : list of `.lines.Line2D`
The radial gridlines.
labels : list of `.text.Text`
The tick labels.
Other Parameters
----------------
**kwargs
*kwargs* are optional `~.Text` properties for the labels.
See Also
--------
.pyplot.thetagrids
.projections.polar.PolarAxes.set_rgrids
.Axis.get_gridlines
.Axis.get_ticklabels
Examples
--------
::
# set the locations of the radial gridlines
lines, labels = rgrids( (0.25, 0.5, 1.0) )
# set the locations and labels of the radial gridlines
lines, labels = rgrids( (0.25, 0.5, 1.0), ('Tom', 'Dick', 'Harry' ))
"""
ax = gca()
if not isinstance(ax, PolarAxes):
raise RuntimeError('rgrids only defined for polar axes')
if all(p is None for p in [radii, labels, angle, fmt]) and not kwargs:
lines = ax.yaxis.get_gridlines()
labels = ax.yaxis.get_ticklabels()
else:
lines, labels = ax.set_rgrids(
radii, labels=labels, angle=angle, fmt=fmt, **kwargs)
return lines, labels
def thetagrids(angles=None, labels=None, fmt=None, **kwargs):
"""
Get or set the theta gridlines on the current polar plot.
Call signatures::
lines, labels = thetagrids()
lines, labels = thetagrids(angles, labels=None, fmt=None, **kwargs)
When called with no arguments, `.thetagrids` simply returns the tuple
(*lines*, *labels*). When called with arguments, the labels will
appear at the specified angles.
Parameters
----------
angles : tuple with floats, degrees
The angles of the theta gridlines.
labels : tuple with strings or None
The labels to use at each theta gridline. The
`.projections.polar.ThetaFormatter` will be used if None.
fmt : str or None
Format string used in `matplotlib.ticker.FormatStrFormatter`.
For example '%f'. Note that the angle in radians will be used.
Returns
-------
lines : list of `.lines.Line2D`
The theta gridlines.
labels : list of `.text.Text`
The tick labels.
Other Parameters
----------------
**kwargs
*kwargs* are optional `~.Text` properties for the labels.
See Also
--------
.pyplot.rgrids
.projections.polar.PolarAxes.set_thetagrids
.Axis.get_gridlines
.Axis.get_ticklabels
Examples
--------
::
# set the locations of the angular gridlines
lines, labels = thetagrids(range(45, 360, 90))
# set the locations and labels of the angular gridlines
lines, labels = thetagrids(range(45, 360, 90), ('NE', 'NW', 'SW', 'SE'))
"""
ax = gca()
if not isinstance(ax, PolarAxes):
raise RuntimeError('thetagrids only defined for polar axes')
if all(param is None for param in [angles, labels, fmt]) and not kwargs:
lines = ax.xaxis.get_ticklines()
labels = ax.xaxis.get_ticklabels()
else:
lines, labels = ax.set_thetagrids(angles,
labels=labels, fmt=fmt, **kwargs)
return lines, labels
## Plotting Info ##
def plotting():
pass
def get_plot_commands():
"""
Get a sorted list of all of the plotting commands.
"""
# This works by searching for all functions in this module and removing
# a few hard-coded exclusions, as well as all of the colormap-setting
# functions, and anything marked as private with a preceding underscore.
exclude = {'colormaps', 'colors', 'connect', 'disconnect',
'get_plot_commands', 'get_current_fig_manager', 'ginput',
'plotting', 'waitforbuttonpress'}
exclude |= set(colormaps())
this_module = inspect.getmodule(get_plot_commands)
return sorted(
name for name, obj in globals().items()
if not name.startswith('_') and name not in exclude
and inspect.isfunction(obj)
and inspect.getmodule(obj) is this_module)
def colormaps():
"""
Matplotlib provides a number of colormaps, and others can be added using
:func:`~matplotlib.cm.register_cmap`. This function documents the built-in
colormaps, and will also return a list of all registered colormaps if
called.
You can set the colormap for an image, pcolor, scatter, etc.,
using a keyword argument::
imshow(X, cmap=cm.hot)
or using the :func:`set_cmap` function::
imshow(X)
pyplot.set_cmap('hot')
pyplot.set_cmap('jet')
In interactive mode, :func:`set_cmap` will update the colormap post-hoc,
allowing you to see which one works best for your data.
All built-in colormaps can be reversed by appending ``_r``: For instance,
``gray_r`` is the reverse of ``gray``.
There are several common color schemes used in visualization:
Sequential schemes
for unipolar data that progresses from low to high
Diverging schemes
for bipolar data that emphasizes positive or negative deviations from a
central value
Cyclic schemes
for plotting values that wrap around at the endpoints, such as phase
angle, wind direction, or time of day
Qualitative schemes
for nominal data that has no inherent ordering, where color is used
only to distinguish categories
Matplotlib ships with 4 perceptually uniform color maps which are
the recommended color maps for sequential data:
========= ===================================================
Colormap Description
========= ===================================================
inferno perceptually uniform shades of black-red-yellow
magma perceptually uniform shades of black-red-white
plasma perceptually uniform shades of blue-red-yellow
viridis perceptually uniform shades of blue-green-yellow
========= ===================================================
The following colormaps are based on the `ColorBrewer
<https://colorbrewer2.org>`_ color specifications and designs developed by
Cynthia Brewer:
ColorBrewer Diverging (luminance is highest at the midpoint, and
decreases towards differently-colored endpoints):
======== ===================================
Colormap Description
======== ===================================
BrBG brown, white, blue-green
PiYG pink, white, yellow-green
PRGn purple, white, green
PuOr orange, white, purple
RdBu red, white, blue
RdGy red, white, gray
RdYlBu red, yellow, blue
RdYlGn red, yellow, green
Spectral red, orange, yellow, green, blue
======== ===================================
ColorBrewer Sequential (luminance decreases monotonically):
======== ====================================
Colormap Description
======== ====================================
Blues white to dark blue
BuGn white, light blue, dark green
BuPu white, light blue, dark purple
GnBu white, light green, dark blue
Greens white to dark green
Greys white to black (not linear)
Oranges white, orange, dark brown
OrRd white, orange, dark red
PuBu white, light purple, dark blue
PuBuGn white, light purple, dark green
PuRd white, light purple, dark red
Purples white to dark purple
RdPu white, pink, dark purple
Reds white to dark red
YlGn light yellow, dark green
YlGnBu light yellow, light green, dark blue
YlOrBr light yellow, orange, dark brown
YlOrRd light yellow, orange, dark red
======== ====================================
ColorBrewer Qualitative:
(For plotting nominal data, `.ListedColormap` is used,
not `.LinearSegmentedColormap`. Different sets of colors are
recommended for different numbers of categories.)
* Accent
* Dark2
* Paired
* Pastel1
* Pastel2
* Set1
* Set2
* Set3
A set of colormaps derived from those of the same name provided
with Matlab is also included:
========= =======================================================
Colormap Description
========= =======================================================
autumn sequential linearly-increasing shades of red-orange-yellow
bone sequential increasing black-white color map with
a tinge of blue, to emulate X-ray film
cool linearly-decreasing shades of cyan-magenta
copper sequential increasing shades of black-copper
flag repetitive red-white-blue-black pattern (not cyclic at
endpoints)
gray sequential linearly-increasing black-to-white
grayscale
hot sequential black-red-yellow-white, to emulate blackbody
radiation from an object at increasing temperatures
jet a spectral map with dark endpoints, blue-cyan-yellow-red;
based on a fluid-jet simulation by NCSA [#]_
pink sequential increasing pastel black-pink-white, meant
for sepia tone colorization of photographs
prism repetitive red-yellow-green-blue-purple-...-green pattern
(not cyclic at endpoints)
spring linearly-increasing shades of magenta-yellow
summer sequential linearly-increasing shades of green-yellow
winter linearly-increasing shades of blue-green
========= =======================================================
A set of palettes from the `Yorick scientific visualisation
package <https://dhmunro.github.io/yorick-doc/>`_, an evolution of
the GIST package, both by David H. Munro, is included:
============ =======================================================
Colormap Description
============ =======================================================
gist_earth mapmaker's colors from dark blue deep ocean to green
lowlands to brown highlands to white mountains
gist_heat sequential increasing black-red-orange-white, to emulate
blackbody radiation from an iron bar as it grows hotter
gist_ncar pseudo-spectral black-blue-green-yellow-red-purple-white
colormap from National Center for Atmospheric
Research [#]_
gist_rainbow runs through the colors in spectral order from red to
violet at full saturation (like *hsv* but not cyclic)
gist_stern "Stern special" color table from Interactive Data
Language software
============ =======================================================
A set of cyclic color maps:
================ =================================================
Colormap Description
================ =================================================
hsv red-yellow-green-cyan-blue-magenta-red, formed by
changing the hue component in the HSV color space
twilight perceptually uniform shades of
white-blue-black-red-white
twilight_shifted perceptually uniform shades of
black-blue-white-red-black
================ =================================================
Other miscellaneous schemes:
============= =======================================================
Colormap Description
============= =======================================================
afmhot sequential black-orange-yellow-white blackbody
spectrum, commonly used in atomic force microscopy
brg blue-red-green
bwr diverging blue-white-red
coolwarm diverging blue-gray-red, meant to avoid issues with 3D
shading, color blindness, and ordering of colors [#]_
CMRmap "Default colormaps on color images often reproduce to
confusing grayscale images. The proposed colormap
maintains an aesthetically pleasing color image that
automatically reproduces to a monotonic grayscale with
discrete, quantifiable saturation levels." [#]_
cubehelix Unlike most other color schemes, cubehelix was designed
by D.A. Green to be monotonically increasing in terms
of perceived brightness. Also, when printed on a black
and white postscript printer, the scheme results in a
greyscale with monotonically increasing brightness.
This color scheme is named cubehelix because the (r, g, b)
values produced can be visualised as a squashed helix
around the diagonal in the (r, g, b) color cube.
gnuplot gnuplot's traditional pm3d scheme
(black-blue-red-yellow)
gnuplot2 sequential color printable as gray
(black-blue-violet-yellow-white)
ocean green-blue-white
rainbow spectral purple-blue-green-yellow-orange-red colormap
with diverging luminance
seismic diverging blue-white-red
nipy_spectral black-purple-blue-green-yellow-red-white spectrum,
originally from the Neuroimaging in Python project
terrain mapmaker's colors, blue-green-yellow-brown-white,
originally from IGOR Pro
turbo Spectral map (purple-blue-green-yellow-orange-red) with
a bright center and darker endpoints. A smoother
alternative to jet.
============= =======================================================
The following colormaps are redundant and may be removed in future
versions. It's recommended to use the names in the descriptions
instead, which produce identical output:
========= =======================================================
Colormap Description
========= =======================================================
gist_gray identical to *gray*
gist_yarg identical to *gray_r*
binary identical to *gray_r*
========= =======================================================
.. rubric:: Footnotes
.. [#] Rainbow colormaps, ``jet`` in particular, are considered a poor
choice for scientific visualization by many researchers: `Rainbow Color
Map (Still) Considered Harmful
<https://ieeexplore.ieee.org/document/4118486/?arnumber=4118486>`_
.. [#] Resembles "BkBlAqGrYeOrReViWh200" from NCAR Command
Language. See `Color Table Gallery
<https://www.ncl.ucar.edu/Document/Graphics/color_table_gallery.shtml>`_
.. [#] See `Diverging Color Maps for Scientific Visualization
<http://www.kennethmoreland.com/color-maps/>`_ by Kenneth Moreland.
.. [#] See `A Color Map for Effective Black-and-White Rendering of
Color-Scale Images
<https://www.mathworks.com/matlabcentral/fileexchange/2662-cmrmap-m>`_
by Carey Rappaport
"""
return sorted(cm._cmap_registry)
def _setup_pyplot_info_docstrings():
"""
Generate the plotting docstring.
These must be done after the entire module is imported, so it is
called from the end of this module, which is generated by
boilerplate.py.
"""
commands = get_plot_commands()
first_sentence = re.compile(r"(?:\s*).+?\.(?:\s+|$)", flags=re.DOTALL)
# Collect the first sentence of the docstring for all of the
# plotting commands.
rows = []
max_name = len("Function")
max_summary = len("Description")
for name in commands:
doc = globals()[name].__doc__
summary = ''
if doc is not None:
match = first_sentence.match(doc)
if match is not None:
summary = inspect.cleandoc(match.group(0)).replace('\n', ' ')
name = '`%s`' % name
rows.append([name, summary])
max_name = max(max_name, len(name))
max_summary = max(max_summary, len(summary))
separator = '=' * max_name + ' ' + '=' * max_summary
lines = [
separator,
'{:{}} {:{}}'.format('Function', max_name, 'Description', max_summary),
separator,
] + [
'{:{}} {:{}}'.format(name, max_name, summary, max_summary)
for name, summary in rows
] + [
separator,
]
plotting.__doc__ = '\n'.join(lines)
## Plotting part 1: manually generated functions and wrappers ##
def colorbar(mappable=None, cax=None, ax=None, **kw):
if mappable is None:
mappable = gci()
if mappable is None:
raise RuntimeError('No mappable was found to use for colorbar '
'creation. First define a mappable such as '
'an image (with imshow) or a contour set ('
'with contourf).')
if ax is None:
ax = gca()
ret = gcf().colorbar(mappable, cax=cax, ax=ax, **kw)
return ret
colorbar.__doc__ = matplotlib.colorbar.colorbar_doc
def clim(vmin=None, vmax=None):
"""
Set the color limits of the current image.
If either *vmin* or *vmax* is None, the image min/max respectively
will be used for color scaling.
If you want to set the clim of multiple images, use
`~.ScalarMappable.set_clim` on every image, for example::
for im in gca().get_images():
im.set_clim(0, 0.5)
"""
im = gci()
if im is None:
raise RuntimeError('You must first define an image, e.g., with imshow')
im.set_clim(vmin, vmax)
def set_cmap(cmap):
"""
Set the default colormap, and apply it to the current image if any.
Parameters
----------
cmap : `~matplotlib.colors.Colormap` or str
A colormap instance or the name of a registered colormap.
See Also
--------
colormaps
matplotlib.cm.register_cmap
matplotlib.cm.get_cmap
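Examples
--------
A minimal sketch (*data* is assumed to be a 2D array)::
plt.imshow(data)
plt.set_cmap('viridis')  # restyles the current image in place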
"""
cmap = cm.get_cmap(cmap)
rc('image', cmap=cmap.name)
im = gci()
if im is not None:
im.set_cmap(cmap)
@_copy_docstring_and_deprecators(matplotlib.image.imread)
def imread(fname, format=None):
return matplotlib.image.imread(fname, format)
@_copy_docstring_and_deprecators(matplotlib.image.imsave)
def imsave(fname, arr, **kwargs):
return matplotlib.image.imsave(fname, arr, **kwargs)
def matshow(A, fignum=None, **kwargs):
"""
Display an array as a matrix in a new figure window.
The origin is set at the upper left hand corner and rows (first
dimension of the array) are displayed horizontally. The aspect
ratio of the figure window is that of the array, unless this would
make an excessively short or narrow figure.
Tick labels for the xaxis are placed on top.
Parameters
----------
A : array-like(M, N)
The matrix to be displayed.
fignum : None or int or False
If *None*, create a new figure window with automatic numbering.
If a nonzero integer, draw into the figure with the given number
(create it if it does not exist).
If 0, use the current axes (or create one if it does not exist).
.. note::
Because of how `.Axes.matshow` tries to set the figure aspect
ratio to be that of the array, strange things may happen if you
reuse an existing figure.
Returns
-------
`~matplotlib.image.AxesImage`
Other Parameters
----------------
**kwargs : `~matplotlib.axes.Axes.imshow` arguments
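Examples
--------
A minimal sketch (any 2D array-like works)::
A = np.random.rand(4, 4)
plt.matshow(A)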
"""
A = np.asanyarray(A)
if fignum == 0:
ax = gca()
else:
# Extract actual aspect ratio of array and make appropriately sized
# figure.
fig = figure(fignum, figsize=figaspect(A))
ax = fig.add_axes([0.15, 0.09, 0.775, 0.775])
im = ax.matshow(A, **kwargs)
sci(im)
return im
def polar(*args, **kwargs):
"""
Make a polar plot.
Call signature::
polar(theta, r, **kwargs)
Multiple *theta*, *r* arguments are supported, with format strings, as in
`plot`.
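Examples
--------
::
theta = np.linspace(0, 2 * np.pi, 200)
plt.polar(theta, 1 + np.cos(theta))  # a cardioid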
"""
# If an axis already exists, check if it has a polar projection
if gcf().get_axes():
if not isinstance(gca(), PolarAxes):
cbook._warn_external('Trying to create polar plot on an axis '
'that does not have a polar projection.')
ax = gca(polar=True)
ret = ax.plot(*args, **kwargs)
return ret
# If rcParams['backend_fallback'] is true, and an interactive backend is
# requested, ignore rcParams['backend'] and force selection of a backend that
# is compatible with the current running interactive framework.
if (rcParams["backend_fallback"]
and dict.__getitem__(rcParams, "backend") in (
set(_interactive_bk) - {'WebAgg', 'nbAgg'})
and cbook._get_running_interactive_framework()):
dict.__setitem__(rcParams, "backend", rcsetup._auto_backend_sentinel)
# Set up the backend.
switch_backend(rcParams["backend"])
# Just to be safe. Interactive mode can be turned on without
# calling `plt.ion()` so register it again here.
# This is safe because multiple calls to `install_repl_displayhook`
# are no-ops and the registered functions respect `mpl.is_interactive()`
# to determine if they should trigger a draw.
install_repl_displayhook()
################# REMAINING CONTENT GENERATED BY boilerplate.py ##############
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.figimage)
def figimage(
X, xo=0, yo=0, alpha=None, norm=None, cmap=None, vmin=None,
vmax=None, origin=None, resize=False, **kwargs):
return gcf().figimage(
X, xo=xo, yo=yo, alpha=alpha, norm=norm, cmap=cmap, vmin=vmin,
vmax=vmax, origin=origin, resize=resize, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.text)
def figtext(x, y, s, fontdict=None, **kwargs):
return gcf().text(x, y, s, fontdict=fontdict, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.gca)
def gca(**kwargs):
return gcf().gca(**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure._gci)
def gci():
return gcf()._gci()
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.ginput)
def ginput(
n=1, timeout=30, show_clicks=True,
mouse_add=MouseButton.LEFT, mouse_pop=MouseButton.RIGHT,
mouse_stop=MouseButton.MIDDLE):
return gcf().ginput(
n=n, timeout=timeout, show_clicks=show_clicks,
mouse_add=mouse_add, mouse_pop=mouse_pop,
mouse_stop=mouse_stop)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.subplots_adjust)
def subplots_adjust(
left=None, bottom=None, right=None, top=None, wspace=None,
hspace=None):
return gcf().subplots_adjust(
left=left, bottom=bottom, right=right, top=top, wspace=wspace,
hspace=hspace)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.suptitle)
def suptitle(t, **kwargs):
return gcf().suptitle(t, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Figure.waitforbuttonpress)
def waitforbuttonpress(timeout=-1):
return gcf().waitforbuttonpress(timeout=timeout)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.acorr)
def acorr(x, *, data=None, **kwargs):
return gca().acorr(
x, **({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.angle_spectrum)
def angle_spectrum(
x, Fs=None, Fc=None, window=None, pad_to=None, sides=None, *,
data=None, **kwargs):
return gca().angle_spectrum(
x, Fs=Fs, Fc=Fc, window=window, pad_to=pad_to, sides=sides,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.annotate)
def annotate(text, xy, *args, **kwargs):
return gca().annotate(text, xy, *args, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.arrow)
def arrow(x, y, dx, dy, **kwargs):
return gca().arrow(x, y, dx, dy, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.autoscale)
def autoscale(enable=True, axis='both', tight=None):
return gca().autoscale(enable=enable, axis=axis, tight=tight)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.axhline)
def axhline(y=0, xmin=0, xmax=1, **kwargs):
return gca().axhline(y=y, xmin=xmin, xmax=xmax, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.axhspan)
def axhspan(ymin, ymax, xmin=0, xmax=1, **kwargs):
return gca().axhspan(ymin, ymax, xmin=xmin, xmax=xmax, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.axis)
def axis(*args, emit=True, **kwargs):
return gca().axis(*args, emit=emit, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.axline)
def axline(xy1, xy2=None, *, slope=None, **kwargs):
return gca().axline(xy1, xy2=xy2, slope=slope, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.axvline)
def axvline(x=0, ymin=0, ymax=1, **kwargs):
return gca().axvline(x=x, ymin=ymin, ymax=ymax, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.axvspan)
def axvspan(xmin, xmax, ymin=0, ymax=1, **kwargs):
return gca().axvspan(xmin, xmax, ymin=ymin, ymax=ymax, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.bar)
def bar(
x, height, width=0.8, bottom=None, *, align='center',
data=None, **kwargs):
return gca().bar(
x, height, width=width, bottom=bottom, align=align,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.barbs)
def barbs(*args, data=None, **kw):
return gca().barbs(
*args, **({"data": data} if data is not None else {}), **kw)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.barh)
def barh(y, width, height=0.8, left=None, *, align='center', **kwargs):
return gca().barh(
y, width, height=height, left=left, align=align, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.boxplot)
def boxplot(
x, notch=None, sym=None, vert=None, whis=None,
positions=None, widths=None, patch_artist=None,
bootstrap=None, usermedians=None, conf_intervals=None,
meanline=None, showmeans=None, showcaps=None, showbox=None,
showfliers=None, boxprops=None, labels=None, flierprops=None,
medianprops=None, meanprops=None, capprops=None,
whiskerprops=None, manage_ticks=True, autorange=False,
zorder=None, *, data=None):
return gca().boxplot(
x, notch=notch, sym=sym, vert=vert, whis=whis,
positions=positions, widths=widths, patch_artist=patch_artist,
bootstrap=bootstrap, usermedians=usermedians,
conf_intervals=conf_intervals, meanline=meanline,
showmeans=showmeans, showcaps=showcaps, showbox=showbox,
showfliers=showfliers, boxprops=boxprops, labels=labels,
flierprops=flierprops, medianprops=medianprops,
meanprops=meanprops, capprops=capprops,
whiskerprops=whiskerprops, manage_ticks=manage_ticks,
autorange=autorange, zorder=zorder,
**({"data": data} if data is not None else {}))
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.broken_barh)
def broken_barh(xranges, yrange, *, data=None, **kwargs):
return gca().broken_barh(
xranges, yrange,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.cla)
def cla():
return gca().cla()
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.clabel)
def clabel(CS, levels=None, **kwargs):
return gca().clabel(CS, levels=levels, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.cohere)
def cohere(
x, y, NFFT=256, Fs=2, Fc=0, detrend=mlab.detrend_none,
window=mlab.window_hanning, noverlap=0, pad_to=None,
sides='default', scale_by_freq=None, *, data=None, **kwargs):
return gca().cohere(
x, y, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend, window=window,
noverlap=noverlap, pad_to=pad_to, sides=sides,
scale_by_freq=scale_by_freq,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.contour)
def contour(*args, data=None, **kwargs):
__ret = gca().contour(
*args, **({"data": data} if data is not None else {}),
**kwargs)
if __ret._A is not None: sci(__ret) # noqa
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.contourf)
def contourf(*args, data=None, **kwargs):
__ret = gca().contourf(
*args, **({"data": data} if data is not None else {}),
**kwargs)
if __ret._A is not None: sci(__ret) # noqa
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.csd)
def csd(
x, y, NFFT=None, Fs=None, Fc=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None,
return_line=None, *, data=None, **kwargs):
return gca().csd(
x, y, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend, window=window,
noverlap=noverlap, pad_to=pad_to, sides=sides,
scale_by_freq=scale_by_freq, return_line=return_line,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.errorbar)
def errorbar(
x, y, yerr=None, xerr=None, fmt='', ecolor=None,
elinewidth=None, capsize=None, barsabove=False, lolims=False,
uplims=False, xlolims=False, xuplims=False, errorevery=1,
capthick=None, *, data=None, **kwargs):
return gca().errorbar(
x, y, yerr=yerr, xerr=xerr, fmt=fmt, ecolor=ecolor,
elinewidth=elinewidth, capsize=capsize, barsabove=barsabove,
lolims=lolims, uplims=uplims, xlolims=xlolims,
xuplims=xuplims, errorevery=errorevery, capthick=capthick,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.eventplot)
def eventplot(
positions, orientation='horizontal', lineoffsets=1,
linelengths=1, linewidths=None, colors=None,
linestyles='solid', *, data=None, **kwargs):
return gca().eventplot(
positions, orientation=orientation, lineoffsets=lineoffsets,
linelengths=linelengths, linewidths=linewidths, colors=colors,
linestyles=linestyles,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.fill)
def fill(*args, data=None, **kwargs):
return gca().fill(
*args, **({"data": data} if data is not None else {}),
**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.fill_between)
def fill_between(
x, y1, y2=0, where=None, interpolate=False, step=None, *,
data=None, **kwargs):
return gca().fill_between(
x, y1, y2=y2, where=where, interpolate=interpolate, step=step,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.fill_betweenx)
def fill_betweenx(
y, x1, x2=0, where=None, step=None, interpolate=False, *,
data=None, **kwargs):
return gca().fill_betweenx(
y, x1, x2=x2, where=where, step=step, interpolate=interpolate,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.grid)
def grid(b=None, which='major', axis='both', **kwargs):
return gca().grid(b=b, which=which, axis=axis, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.hexbin)
def hexbin(
x, y, C=None, gridsize=100, bins=None, xscale='linear',
yscale='linear', extent=None, cmap=None, norm=None, vmin=None,
vmax=None, alpha=None, linewidths=None, edgecolors='face',
reduce_C_function=np.mean, mincnt=None, marginals=False, *,
data=None, **kwargs):
__ret = gca().hexbin(
x, y, C=C, gridsize=gridsize, bins=bins, xscale=xscale,
yscale=yscale, extent=extent, cmap=cmap, norm=norm, vmin=vmin,
vmax=vmax, alpha=alpha, linewidths=linewidths,
edgecolors=edgecolors, reduce_C_function=reduce_C_function,
mincnt=mincnt, marginals=marginals,
**({"data": data} if data is not None else {}), **kwargs)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.hist)
def hist(
x, bins=None, range=None, density=False, weights=None,
cumulative=False, bottom=None, histtype='bar', align='mid',
orientation='vertical', rwidth=None, log=False, color=None,
label=None, stacked=False, *, data=None, **kwargs):
return gca().hist(
x, bins=bins, range=range, density=density, weights=weights,
cumulative=cumulative, bottom=bottom, histtype=histtype,
align=align, orientation=orientation, rwidth=rwidth, log=log,
color=color, label=label, stacked=stacked,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.hist2d)
def hist2d(
x, y, bins=10, range=None, density=False, weights=None,
cmin=None, cmax=None, *, data=None, **kwargs):
__ret = gca().hist2d(
x, y, bins=bins, range=range, density=density,
weights=weights, cmin=cmin, cmax=cmax,
**({"data": data} if data is not None else {}), **kwargs)
sci(__ret[-1])
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.hlines)
def hlines(
y, xmin, xmax, colors=None, linestyles='solid', label='', *,
data=None, **kwargs):
return gca().hlines(
y, xmin, xmax, colors=colors, linestyles=linestyles,
label=label, **({"data": data} if data is not None else {}),
**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.imshow)
def imshow(
X, cmap=None, norm=None, aspect=None, interpolation=None,
alpha=None, vmin=None, vmax=None, origin=None, extent=None, *,
filternorm=True, filterrad=4.0, resample=None, url=None,
data=None, **kwargs):
__ret = gca().imshow(
X, cmap=cmap, norm=norm, aspect=aspect,
interpolation=interpolation, alpha=alpha, vmin=vmin,
vmax=vmax, origin=origin, extent=extent,
filternorm=filternorm, filterrad=filterrad, resample=resample,
url=url, **({"data": data} if data is not None else {}),
**kwargs)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.legend)
def legend(*args, **kwargs):
return gca().legend(*args, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.locator_params)
def locator_params(axis='both', tight=None, **kwargs):
return gca().locator_params(axis=axis, tight=tight, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.loglog)
def loglog(*args, **kwargs):
return gca().loglog(*args, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.magnitude_spectrum)
def magnitude_spectrum(
x, Fs=None, Fc=None, window=None, pad_to=None, sides=None,
scale=None, *, data=None, **kwargs):
return gca().magnitude_spectrum(
x, Fs=Fs, Fc=Fc, window=window, pad_to=pad_to, sides=sides,
scale=scale, **({"data": data} if data is not None else {}),
**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.margins)
def margins(*margins, x=None, y=None, tight=True):
return gca().margins(*margins, x=x, y=y, tight=tight)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.minorticks_off)
def minorticks_off():
return gca().minorticks_off()
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.minorticks_on)
def minorticks_on():
return gca().minorticks_on()
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.pcolor)
def pcolor(
*args, shading=None, alpha=None, norm=None, cmap=None,
vmin=None, vmax=None, data=None, **kwargs):
__ret = gca().pcolor(
*args, shading=shading, alpha=alpha, norm=norm, cmap=cmap,
vmin=vmin, vmax=vmax,
**({"data": data} if data is not None else {}), **kwargs)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.pcolormesh)
def pcolormesh(
*args, alpha=None, norm=None, cmap=None, vmin=None,
vmax=None, shading=None, antialiased=False, data=None,
**kwargs):
__ret = gca().pcolormesh(
*args, alpha=alpha, norm=norm, cmap=cmap, vmin=vmin,
vmax=vmax, shading=shading, antialiased=antialiased,
**({"data": data} if data is not None else {}), **kwargs)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.phase_spectrum)
def phase_spectrum(
x, Fs=None, Fc=None, window=None, pad_to=None, sides=None, *,
data=None, **kwargs):
return gca().phase_spectrum(
x, Fs=Fs, Fc=Fc, window=window, pad_to=pad_to, sides=sides,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.pie)
def pie(
x, explode=None, labels=None, colors=None, autopct=None,
pctdistance=0.6, shadow=False, labeldistance=1.1,
startangle=0, radius=1, counterclock=True, wedgeprops=None,
textprops=None, center=(0, 0), frame=False,
rotatelabels=False, *, normalize=None, data=None):
return gca().pie(
x, explode=explode, labels=labels, colors=colors,
autopct=autopct, pctdistance=pctdistance, shadow=shadow,
labeldistance=labeldistance, startangle=startangle,
radius=radius, counterclock=counterclock,
wedgeprops=wedgeprops, textprops=textprops, center=center,
frame=frame, rotatelabels=rotatelabels, normalize=normalize,
**({"data": data} if data is not None else {}))
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.plot)
def plot(*args, scalex=True, scaley=True, data=None, **kwargs):
return gca().plot(
*args, scalex=scalex, scaley=scaley,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.plot_date)
def plot_date(
x, y, fmt='o', tz=None, xdate=True, ydate=False, *,
data=None, **kwargs):
return gca().plot_date(
x, y, fmt=fmt, tz=tz, xdate=xdate, ydate=ydate,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.psd)
def psd(
x, NFFT=None, Fs=None, Fc=None, detrend=None, window=None,
noverlap=None, pad_to=None, sides=None, scale_by_freq=None,
return_line=None, *, data=None, **kwargs):
return gca().psd(
x, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend, window=window,
noverlap=noverlap, pad_to=pad_to, sides=sides,
scale_by_freq=scale_by_freq, return_line=return_line,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.quiver)
def quiver(*args, data=None, **kw):
__ret = gca().quiver(
*args, **({"data": data} if data is not None else {}), **kw)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.quiverkey)
def quiverkey(Q, X, Y, U, label, **kw):
return gca().quiverkey(Q, X, Y, U, label, **kw)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.scatter)
def scatter(
x, y, s=None, c=None, marker=None, cmap=None, norm=None,
vmin=None, vmax=None, alpha=None, linewidths=None,
verts=cbook.deprecation._deprecated_parameter,
edgecolors=None, *, plotnonfinite=False, data=None, **kwargs):
__ret = gca().scatter(
x, y, s=s, c=c, marker=marker, cmap=cmap, norm=norm,
vmin=vmin, vmax=vmax, alpha=alpha, linewidths=linewidths,
verts=verts, edgecolors=edgecolors,
plotnonfinite=plotnonfinite,
**({"data": data} if data is not None else {}), **kwargs)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.semilogx)
def semilogx(*args, **kwargs):
return gca().semilogx(*args, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.semilogy)
def semilogy(*args, **kwargs):
return gca().semilogy(*args, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.specgram)
def specgram(
x, NFFT=None, Fs=None, Fc=None, detrend=None, window=None,
noverlap=None, cmap=None, xextent=None, pad_to=None,
sides=None, scale_by_freq=None, mode=None, scale=None,
vmin=None, vmax=None, *, data=None, **kwargs):
__ret = gca().specgram(
x, NFFT=NFFT, Fs=Fs, Fc=Fc, detrend=detrend, window=window,
noverlap=noverlap, cmap=cmap, xextent=xextent, pad_to=pad_to,
sides=sides, scale_by_freq=scale_by_freq, mode=mode,
scale=scale, vmin=vmin, vmax=vmax,
**({"data": data} if data is not None else {}), **kwargs)
sci(__ret[-1])
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.spy)
def spy(
Z, precision=0, marker=None, markersize=None, aspect='equal',
origin='upper', **kwargs):
__ret = gca().spy(
Z, precision=precision, marker=marker, markersize=markersize,
aspect=aspect, origin=origin, **kwargs)
if isinstance(__ret, cm.ScalarMappable): sci(__ret) # noqa
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.stackplot)
def stackplot(
x, *args, labels=(), colors=None, baseline='zero', data=None,
**kwargs):
return gca().stackplot(
x, *args, labels=labels, colors=colors, baseline=baseline,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.stem)
def stem(
*args, linefmt=None, markerfmt=None, basefmt=None, bottom=0,
label=None, use_line_collection=True, data=None):
return gca().stem(
*args, linefmt=linefmt, markerfmt=markerfmt, basefmt=basefmt,
bottom=bottom, label=label,
use_line_collection=use_line_collection,
**({"data": data} if data is not None else {}))
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.step)
def step(x, y, *args, where='pre', data=None, **kwargs):
return gca().step(
x, y, *args, where=where,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.streamplot)
def streamplot(
x, y, u, v, density=1, linewidth=None, color=None, cmap=None,
norm=None, arrowsize=1, arrowstyle='-|>', minlength=0.1,
transform=None, zorder=None, start_points=None, maxlength=4.0,
integration_direction='both', *, data=None):
__ret = gca().streamplot(
x, y, u, v, density=density, linewidth=linewidth, color=color,
cmap=cmap, norm=norm, arrowsize=arrowsize,
arrowstyle=arrowstyle, minlength=minlength,
transform=transform, zorder=zorder, start_points=start_points,
maxlength=maxlength,
integration_direction=integration_direction,
**({"data": data} if data is not None else {}))
sci(__ret.lines)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.table)
def table(
cellText=None, cellColours=None, cellLoc='right',
colWidths=None, rowLabels=None, rowColours=None,
rowLoc='left', colLabels=None, colColours=None,
colLoc='center', loc='bottom', bbox=None, edges='closed',
**kwargs):
return gca().table(
cellText=cellText, cellColours=cellColours, cellLoc=cellLoc,
colWidths=colWidths, rowLabels=rowLabels,
rowColours=rowColours, rowLoc=rowLoc, colLabels=colLabels,
colColours=colColours, colLoc=colLoc, loc=loc, bbox=bbox,
edges=edges, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.text)
def text(x, y, s, fontdict=None, **kwargs):
return gca().text(x, y, s, fontdict=fontdict, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.tick_params)
def tick_params(axis='both', **kwargs):
return gca().tick_params(axis=axis, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.ticklabel_format)
def ticklabel_format(
*, axis='both', style='', scilimits=None, useOffset=None,
useLocale=None, useMathText=None):
return gca().ticklabel_format(
axis=axis, style=style, scilimits=scilimits,
useOffset=useOffset, useLocale=useLocale,
useMathText=useMathText)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.tricontour)
def tricontour(*args, **kwargs):
__ret = gca().tricontour(*args, **kwargs)
if __ret._A is not None: sci(__ret) # noqa
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.tricontourf)
def tricontourf(*args, **kwargs):
__ret = gca().tricontourf(*args, **kwargs)
if __ret._A is not None: sci(__ret) # noqa
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.tripcolor)
def tripcolor(
*args, alpha=1.0, norm=None, cmap=None, vmin=None, vmax=None,
shading='flat', facecolors=None, **kwargs):
__ret = gca().tripcolor(
*args, alpha=alpha, norm=norm, cmap=cmap, vmin=vmin,
vmax=vmax, shading=shading, facecolors=facecolors, **kwargs)
sci(__ret)
return __ret
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.triplot)
def triplot(*args, **kwargs):
return gca().triplot(*args, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.violinplot)
def violinplot(
dataset, positions=None, vert=True, widths=0.5,
showmeans=False, showextrema=True, showmedians=False,
quantiles=None, points=100, bw_method=None, *, data=None):
return gca().violinplot(
dataset, positions=positions, vert=vert, widths=widths,
showmeans=showmeans, showextrema=showextrema,
showmedians=showmedians, quantiles=quantiles, points=points,
bw_method=bw_method,
**({"data": data} if data is not None else {}))
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.vlines)
def vlines(
x, ymin, ymax, colors=None, linestyles='solid', label='', *,
data=None, **kwargs):
return gca().vlines(
x, ymin, ymax, colors=colors, linestyles=linestyles,
label=label, **({"data": data} if data is not None else {}),
**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.xcorr)
def xcorr(
x, y, normed=True, detrend=mlab.detrend_none, usevlines=True,
maxlags=10, *, data=None, **kwargs):
return gca().xcorr(
x, y, normed=normed, detrend=detrend, usevlines=usevlines,
maxlags=maxlags,
**({"data": data} if data is not None else {}), **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes._sci)
def sci(im):
return gca()._sci(im)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.set_title)
def title(label, fontdict=None, loc=None, pad=None, *, y=None, **kwargs):
return gca().set_title(
label, fontdict=fontdict, loc=loc, pad=pad, y=y, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.set_xlabel)
def xlabel(xlabel, fontdict=None, labelpad=None, *, loc=None, **kwargs):
return gca().set_xlabel(
xlabel, fontdict=fontdict, labelpad=labelpad, loc=loc,
**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.set_ylabel)
def ylabel(ylabel, fontdict=None, labelpad=None, *, loc=None, **kwargs):
return gca().set_ylabel(
ylabel, fontdict=fontdict, labelpad=labelpad, loc=loc,
**kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.set_xscale)
def xscale(value, **kwargs):
return gca().set_xscale(value, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
@_copy_docstring_and_deprecators(Axes.set_yscale)
def yscale(value, **kwargs):
return gca().set_yscale(value, **kwargs)
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def autumn():
"""
Set the colormap to "autumn".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("autumn")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def bone():
"""
Set the colormap to "bone".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("bone")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def cool():
"""
Set the colormap to "cool".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("cool")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def copper():
"""
Set the colormap to "copper".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("copper")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def flag():
"""
Set the colormap to "flag".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("flag")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def gray():
"""
Set the colormap to "gray".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("gray")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def hot():
"""
Set the colormap to "hot".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("hot")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def hsv():
"""
Set the colormap to "hsv".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("hsv")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def jet():
"""
Set the colormap to "jet".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("jet")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def pink():
"""
Set the colormap to "pink".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("pink")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def prism():
"""
Set the colormap to "prism".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("prism")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def spring():
"""
Set the colormap to "spring".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("spring")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def summer():
"""
Set the colormap to "summer".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("summer")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def winter():
"""
Set the colormap to "winter".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("winter")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def magma():
"""
Set the colormap to "magma".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("magma")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def inferno():
"""
Set the colormap to "inferno".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("inferno")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def plasma():
"""
Set the colormap to "plasma".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("plasma")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def viridis():
"""
Set the colormap to "viridis".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("viridis")
# Autogenerated by boilerplate.py. Do not edit as changes will be lost.
def nipy_spectral():
"""
Set the colormap to "nipy_spectral".
This changes the default colormap as well as the colormap of the current
image if there is one. See ``help(colormaps)`` for more information.
"""
set_cmap("nipy_spectral")
_setup_pyplot_info_docstrings()
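# --- Example (not part of the generated module above): a minimal sketch of how
# these gca()-backed wrappers are used. Each call simply forwards to the
# current Axes, so scripts never have to handle Axes objects explicitly.
import numpy as np
import matplotlib.pyplot as plt

xs = np.linspace(0, 2 * np.pi, 200)
plt.plot(xs, np.sin(xs), label="sin")  # forwards to gca().plot(...)
plt.title("pyplot wrappers delegate to the current Axes")
plt.legend()   # forwards to gca().legend(...)
plt.viridis()  # sets the default colormap via set_cmap("viridis")
plt.show()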
| avg_line_length: 34.555656 | max_line_length: 79 | alphanum_fraction: 0.644685 |

| hexsha: 4a172af280e5aa16f3e40aac98d423bf199007e1 | size: 793 | ext: py | lang: Python |
| max_stars_repo: 2015/python/day17/chal34.py | astonshane/AdventOfCode | 25c7380e73eede3f79287de6a9dedc8314ab7965 | ["MIT"] | stars: null |
| max_issues_repo: 2015/python/day17/chal34.py | astonshane/AdventOfCode | 25c7380e73eede3f79287de6a9dedc8314ab7965 | ["MIT"] | issues: null |
| max_forks_repo: 2015/python/day17/chal34.py | astonshane/AdventOfCode | 25c7380e73eede3f79287de6a9dedc8314ab7965 | ["MIT"] | forks: null |
import sys
from itertools import combinations
def sumCombo(combo):
total = 0
for i in combo:
total += i
return total
# ######################
if len(sys.argv) != 3:
print "need an input file and storage capacity"
exit(1)
storage = int(sys.argv[2])
containers = []
f = open(sys.argv[1])
for line in f:
containers.append(int(line.strip()))
total_combos = None
combo_length = None
for i in range(0, len(containers)):
for combo in combinations(containers, i):
if sumCombo(combo) == storage:
if combo_length is None or len(combo) < combo_length:
total_combos = 1
combo_length = len(combo)
elif len(combo) == combo_length:
total_combos += 1
print combo_length, total_combos
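# --- Example (not from the original repo): a Python 3 sketch of the same
# part-two computation (fewest containers that exactly fit the target, and
# how many combinations of that size exist); the helper name is illustrative.
from itertools import combinations

def count_min_combos(containers, storage):
    best_len, count = None, 0
    # range runs to len(containers) + 1 so the full set is also considered
    for size in range(1, len(containers) + 1):
        for combo in combinations(containers, size):
            if sum(combo) == storage:
                if best_len is None or size < best_len:
                    best_len, count = size, 1
                elif size == best_len:
                    count += 1
    return best_len, count

# e.g. count_min_combos([20, 15, 10, 5, 5], 25) returns (2, 3)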
| avg_line_length: 21.432432 | max_line_length: 65 | alphanum_fraction: 0.602774 |

| hexsha: 4a172b083cc57fa4070b443c68ff2062a9a7ee11 | size: 27239 | ext: py | lang: Python |
| max_stars_repo: pypy/lib_pypy/_cffi_ssl/_stdssl/errorcodes.py | Clear-Sight/cython-vs-pypy-performance | a81df5e1dbc115468ddfd60670ddfad448a5c992 | ["MIT"] | stars: 1 (2021-06-02T23:02:09.000Z to 2021-06-02T23:02:09.000Z) |
| max_issues_repo: pypy/lib_pypy/_cffi_ssl/_stdssl/errorcodes.py | Clear-Sight/cython-vs-pypy-performance | a81df5e1dbc115468ddfd60670ddfad448a5c992 | ["MIT"] | issues: 1 (2021-03-30T18:08:41.000Z to 2021-03-30T18:08:41.000Z) |
| max_forks_repo: pypy/lib_pypy/_cffi_ssl/_stdssl/errorcodes.py | Clear-Sight/cython-vs-pypy-performance | a81df5e1dbc115468ddfd60670ddfad448a5c992 | ["MIT"] | forks: null |
# File generated by tools/make_ssl_data.py
# Generated on 2016-11-10T17:38:59.402032
from _pypy_openssl import ffi, lib
_lib_codes = []
_lib_codes.append(("PEM", lib.ERR_LIB_PEM))
_lib_codes.append(("SSL", lib.ERR_LIB_SSL))
_lib_codes.append(("X509", lib.ERR_LIB_X509))
_error_codes = []
_error_codes.append(("BAD_BASE64_DECODE", lib.ERR_LIB_PEM, 100))
_error_codes.append(("BAD_DECRYPT", lib.ERR_LIB_PEM, 101))
_error_codes.append(("BAD_END_LINE", lib.ERR_LIB_PEM, 102))
_error_codes.append(("BAD_IV_CHARS", lib.ERR_LIB_PEM, 103))
_error_codes.append(("BAD_MAGIC_NUMBER", lib.ERR_LIB_PEM, 116))
_error_codes.append(("BAD_PASSWORD_READ", lib.ERR_LIB_PEM, 104))
_error_codes.append(("BAD_VERSION_NUMBER", lib.ERR_LIB_PEM, 117))
_error_codes.append(("BIO_WRITE_FAILURE", lib.ERR_LIB_PEM, 118))
_error_codes.append(("CIPHER_IS_NULL", lib.ERR_LIB_PEM, 127))
_error_codes.append(("ERROR_CONVERTING_PRIVATE_KEY", lib.ERR_LIB_PEM, 115))
_error_codes.append(("EXPECTING_PRIVATE_KEY_BLOB", lib.ERR_LIB_PEM, 119))
_error_codes.append(("EXPECTING_PUBLIC_KEY_BLOB", lib.ERR_LIB_PEM, 120))
_error_codes.append(("INCONSISTENT_HEADER", lib.ERR_LIB_PEM, 121))
_error_codes.append(("KEYBLOB_HEADER_PARSE_ERROR", lib.ERR_LIB_PEM, 122))
_error_codes.append(("KEYBLOB_TOO_SHORT", lib.ERR_LIB_PEM, 123))
_error_codes.append(("NOT_DEK_INFO", lib.ERR_LIB_PEM, 105))
_error_codes.append(("NOT_ENCRYPTED", lib.ERR_LIB_PEM, 106))
_error_codes.append(("NOT_PROC_TYPE", lib.ERR_LIB_PEM, 107))
_error_codes.append(("NO_START_LINE", lib.ERR_LIB_PEM, 108))
_error_codes.append(("PROBLEMS_GETTING_PASSWORD", lib.ERR_LIB_PEM, 109))
_error_codes.append(("PUBLIC_KEY_NO_RSA", lib.ERR_LIB_PEM, 110))
_error_codes.append(("PVK_DATA_TOO_SHORT", lib.ERR_LIB_PEM, 124))
_error_codes.append(("PVK_TOO_SHORT", lib.ERR_LIB_PEM, 125))
_error_codes.append(("READ_KEY", lib.ERR_LIB_PEM, 111))
_error_codes.append(("SHORT_HEADER", lib.ERR_LIB_PEM, 112))
_error_codes.append(("UNSUPPORTED_ENCRYPTION", lib.ERR_LIB_PEM, 114))
_error_codes.append(("UNSUPPORTED_KEY_COMPONENTS", lib.ERR_LIB_PEM, 126))
_error_codes.append(("APP_DATA_IN_HANDSHAKE", lib.ERR_LIB_SSL, 100))
_error_codes.append(("ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", lib.ERR_LIB_SSL, 272))
_error_codes.append(("BAD_ALERT_RECORD", lib.ERR_LIB_SSL, 101))
_error_codes.append(("BAD_AUTHENTICATION_TYPE", lib.ERR_LIB_SSL, 102))
_error_codes.append(("BAD_CHANGE_CIPHER_SPEC", lib.ERR_LIB_SSL, 103))
_error_codes.append(("BAD_CHECKSUM", lib.ERR_LIB_SSL, 104))
_error_codes.append(("BAD_DATA", lib.ERR_LIB_SSL, 390))
_error_codes.append(("BAD_DATA_RETURNED_BY_CALLBACK", lib.ERR_LIB_SSL, 106))
_error_codes.append(("BAD_DECOMPRESSION", lib.ERR_LIB_SSL, 107))
_error_codes.append(("BAD_DH_G_LENGTH", lib.ERR_LIB_SSL, 108))
_error_codes.append(("BAD_DH_PUB_KEY_LENGTH", lib.ERR_LIB_SSL, 109))
_error_codes.append(("BAD_DH_P_LENGTH", lib.ERR_LIB_SSL, 110))
_error_codes.append(("BAD_DIGEST_LENGTH", lib.ERR_LIB_SSL, 111))
_error_codes.append(("BAD_DSA_SIGNATURE", lib.ERR_LIB_SSL, 112))
_error_codes.append(("BAD_ECC_CERT", lib.ERR_LIB_SSL, 304))
_error_codes.append(("BAD_ECDSA_SIGNATURE", lib.ERR_LIB_SSL, 305))
_error_codes.append(("BAD_ECPOINT", lib.ERR_LIB_SSL, 306))
_error_codes.append(("BAD_HANDSHAKE_LENGTH", lib.ERR_LIB_SSL, 332))
_error_codes.append(("BAD_HELLO_REQUEST", lib.ERR_LIB_SSL, 105))
_error_codes.append(("BAD_LENGTH", lib.ERR_LIB_SSL, 271))
_error_codes.append(("BAD_MAC_DECODE", lib.ERR_LIB_SSL, 113))
_error_codes.append(("BAD_MAC_LENGTH", lib.ERR_LIB_SSL, 333))
_error_codes.append(("BAD_MESSAGE_TYPE", lib.ERR_LIB_SSL, 114))
_error_codes.append(("BAD_PACKET_LENGTH", lib.ERR_LIB_SSL, 115))
_error_codes.append(("BAD_PROTOCOL_VERSION_NUMBER", lib.ERR_LIB_SSL, 116))
_error_codes.append(("BAD_PSK_IDENTITY_HINT_LENGTH", lib.ERR_LIB_SSL, 316))
_error_codes.append(("BAD_RESPONSE_ARGUMENT", lib.ERR_LIB_SSL, 117))
_error_codes.append(("BAD_RSA_DECRYPT", lib.ERR_LIB_SSL, 118))
_error_codes.append(("BAD_RSA_ENCRYPT", lib.ERR_LIB_SSL, 119))
_error_codes.append(("BAD_RSA_E_LENGTH", lib.ERR_LIB_SSL, 120))
_error_codes.append(("BAD_RSA_MODULUS_LENGTH", lib.ERR_LIB_SSL, 121))
_error_codes.append(("BAD_RSA_SIGNATURE", lib.ERR_LIB_SSL, 122))
_error_codes.append(("BAD_SIGNATURE", lib.ERR_LIB_SSL, 123))
_error_codes.append(("BAD_SRP_A_LENGTH", lib.ERR_LIB_SSL, 347))
_error_codes.append(("BAD_SRP_B_LENGTH", lib.ERR_LIB_SSL, 348))
_error_codes.append(("BAD_SRP_G_LENGTH", lib.ERR_LIB_SSL, 349))
_error_codes.append(("BAD_SRP_N_LENGTH", lib.ERR_LIB_SSL, 350))
_error_codes.append(("BAD_SRP_PARAMETERS", lib.ERR_LIB_SSL, 371))
_error_codes.append(("BAD_SRP_S_LENGTH", lib.ERR_LIB_SSL, 351))
_error_codes.append(("BAD_SRTP_MKI_VALUE", lib.ERR_LIB_SSL, 352))
_error_codes.append(("BAD_SRTP_PROTECTION_PROFILE_LIST", lib.ERR_LIB_SSL, 353))
_error_codes.append(("BAD_SSL_FILETYPE", lib.ERR_LIB_SSL, 124))
_error_codes.append(("BAD_SSL_SESSION_ID_LENGTH", lib.ERR_LIB_SSL, 125))
_error_codes.append(("BAD_STATE", lib.ERR_LIB_SSL, 126))
_error_codes.append(("BAD_VALUE", lib.ERR_LIB_SSL, 384))
_error_codes.append(("BAD_WRITE_RETRY", lib.ERR_LIB_SSL, 127))
_error_codes.append(("BIO_NOT_SET", lib.ERR_LIB_SSL, 128))
_error_codes.append(("BLOCK_CIPHER_PAD_IS_WRONG", lib.ERR_LIB_SSL, 129))
_error_codes.append(("BN_LIB", lib.ERR_LIB_SSL, 130))
_error_codes.append(("CA_DN_LENGTH_MISMATCH", lib.ERR_LIB_SSL, 131))
_error_codes.append(("CA_DN_TOO_LONG", lib.ERR_LIB_SSL, 132))
_error_codes.append(("CERTIFICATE_VERIFY_FAILED", lib.ERR_LIB_SSL, 134))
_error_codes.append(("CA_KEY_TOO_SMALL", lib.ERR_LIB_SSL, 397))
_error_codes.append(("CA_MD_TOO_WEAK", lib.ERR_LIB_SSL, 398))
_error_codes.append(("CCS_RECEIVED_EARLY", lib.ERR_LIB_SSL, 133))
_error_codes.append(("CERTIFICATE_VERIFY_FAILED", lib.ERR_LIB_SSL, 134))
_error_codes.append(("CERT_CB_ERROR", lib.ERR_LIB_SSL, 377))
_error_codes.append(("CERT_LENGTH_MISMATCH", lib.ERR_LIB_SSL, 135))
_error_codes.append(("CHALLENGE_IS_DIFFERENT", lib.ERR_LIB_SSL, 136))
_error_codes.append(("CIPHER_CODE_WRONG_LENGTH", lib.ERR_LIB_SSL, 137))
_error_codes.append(("CIPHER_OR_HASH_UNAVAILABLE", lib.ERR_LIB_SSL, 138))
_error_codes.append(("CIPHER_TABLE_SRC_ERROR", lib.ERR_LIB_SSL, 139))
_error_codes.append(("CLIENTHELLO_TLSEXT", lib.ERR_LIB_SSL, 226))
_error_codes.append(("COMPRESSED_LENGTH_TOO_LONG", lib.ERR_LIB_SSL, 140))
_error_codes.append(("COMPRESSION_DISABLED", lib.ERR_LIB_SSL, 343))
_error_codes.append(("COMPRESSION_FAILURE", lib.ERR_LIB_SSL, 141))
_error_codes.append(("COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", lib.ERR_LIB_SSL, 307))
_error_codes.append(("COMPRESSION_LIBRARY_ERROR", lib.ERR_LIB_SSL, 142))
_error_codes.append(("CONNECTION_ID_IS_DIFFERENT", lib.ERR_LIB_SSL, 143))
_error_codes.append(("CONNECTION_TYPE_NOT_SET", lib.ERR_LIB_SSL, 144))
_error_codes.append(("COOKIE_MISMATCH", lib.ERR_LIB_SSL, 308))
_error_codes.append(("DATA_BETWEEN_CCS_AND_FINISHED", lib.ERR_LIB_SSL, 145))
_error_codes.append(("DATA_LENGTH_TOO_LONG", lib.ERR_LIB_SSL, 146))
_error_codes.append(("DECRYPTION_FAILED", lib.ERR_LIB_SSL, 147))
_error_codes.append(("DECRYPTION_FAILED_OR_BAD_RECORD_MAC", lib.ERR_LIB_SSL, 281))
_error_codes.append(("DH_KEY_TOO_SMALL", lib.ERR_LIB_SSL, 372))
_error_codes.append(("DH_PUBLIC_VALUE_LENGTH_IS_WRONG", lib.ERR_LIB_SSL, 148))
_error_codes.append(("DIGEST_CHECK_FAILED", lib.ERR_LIB_SSL, 149))
_error_codes.append(("DTLS_MESSAGE_TOO_BIG", lib.ERR_LIB_SSL, 334))
_error_codes.append(("DUPLICATE_COMPRESSION_ID", lib.ERR_LIB_SSL, 309))
_error_codes.append(("ECC_CERT_NOT_FOR_KEY_AGREEMENT", lib.ERR_LIB_SSL, 317))
_error_codes.append(("ECC_CERT_NOT_FOR_SIGNING", lib.ERR_LIB_SSL, 318))
_error_codes.append(("ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", lib.ERR_LIB_SSL, 322))
_error_codes.append(("ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", lib.ERR_LIB_SSL, 323))
_error_codes.append(("ECDH_REQUIRED_FOR_SUITEB_MODE", lib.ERR_LIB_SSL, 374))
_error_codes.append(("ECGROUP_TOO_LARGE_FOR_CIPHER", lib.ERR_LIB_SSL, 310))
_error_codes.append(("EE_KEY_TOO_SMALL", lib.ERR_LIB_SSL, 399))
_error_codes.append(("EMPTY_SRTP_PROTECTION_PROFILE_LIST", lib.ERR_LIB_SSL, 354))
_error_codes.append(("ENCRYPTED_LENGTH_TOO_LONG", lib.ERR_LIB_SSL, 150))
_error_codes.append(("ERROR_GENERATING_TMP_RSA_KEY", lib.ERR_LIB_SSL, 282))
_error_codes.append(("ERROR_IN_RECEIVED_CIPHER_LIST", lib.ERR_LIB_SSL, 151))
_error_codes.append(("EXCESSIVE_MESSAGE_SIZE", lib.ERR_LIB_SSL, 152))
_error_codes.append(("EXTRA_DATA_IN_MESSAGE", lib.ERR_LIB_SSL, 153))
_error_codes.append(("GOT_A_FIN_BEFORE_A_CCS", lib.ERR_LIB_SSL, 154))
_error_codes.append(("GOT_NEXT_PROTO_BEFORE_A_CCS", lib.ERR_LIB_SSL, 355))
_error_codes.append(("GOT_NEXT_PROTO_WITHOUT_EXTENSION", lib.ERR_LIB_SSL, 356))
_error_codes.append(("HTTPS_PROXY_REQUEST", lib.ERR_LIB_SSL, 155))
_error_codes.append(("HTTP_REQUEST", lib.ERR_LIB_SSL, 156))
_error_codes.append(("ILLEGAL_PADDING", lib.ERR_LIB_SSL, 283))
_error_codes.append(("ILLEGAL_SUITEB_DIGEST", lib.ERR_LIB_SSL, 380))
_error_codes.append(("INAPPROPRIATE_FALLBACK", lib.ERR_LIB_SSL, 373))
_error_codes.append(("INCONSISTENT_COMPRESSION", lib.ERR_LIB_SSL, 340))
_error_codes.append(("INVALID_CHALLENGE_LENGTH", lib.ERR_LIB_SSL, 158))
_error_codes.append(("INVALID_COMMAND", lib.ERR_LIB_SSL, 280))
_error_codes.append(("INVALID_COMPRESSION_ALGORITHM", lib.ERR_LIB_SSL, 341))
_error_codes.append(("INVALID_NULL_CMD_NAME", lib.ERR_LIB_SSL, 385))
_error_codes.append(("INVALID_PURPOSE", lib.ERR_LIB_SSL, 278))
_error_codes.append(("INVALID_SERVERINFO_DATA", lib.ERR_LIB_SSL, 388))
_error_codes.append(("INVALID_SRP_USERNAME", lib.ERR_LIB_SSL, 357))
_error_codes.append(("INVALID_STATUS_RESPONSE", lib.ERR_LIB_SSL, 328))
_error_codes.append(("INVALID_TICKET_KEYS_LENGTH", lib.ERR_LIB_SSL, 325))
_error_codes.append(("KEY_ARG_TOO_LONG", lib.ERR_LIB_SSL, 284))
_error_codes.append(("KRB5", lib.ERR_LIB_SSL, 285))
_error_codes.append(("KRB5_C_CC_PRINC", lib.ERR_LIB_SSL, 286))
_error_codes.append(("KRB5_C_GET_CRED", lib.ERR_LIB_SSL, 287))
_error_codes.append(("KRB5_C_INIT", lib.ERR_LIB_SSL, 288))
_error_codes.append(("KRB5_C_MK_REQ", lib.ERR_LIB_SSL, 289))
_error_codes.append(("KRB5_S_BAD_TICKET", lib.ERR_LIB_SSL, 290))
_error_codes.append(("KRB5_S_INIT", lib.ERR_LIB_SSL, 291))
_error_codes.append(("KRB5_S_RD_REQ", lib.ERR_LIB_SSL, 292))
_error_codes.append(("KRB5_S_TKT_EXPIRED", lib.ERR_LIB_SSL, 293))
_error_codes.append(("KRB5_S_TKT_NYV", lib.ERR_LIB_SSL, 294))
_error_codes.append(("KRB5_S_TKT_SKEW", lib.ERR_LIB_SSL, 295))
_error_codes.append(("LENGTH_MISMATCH", lib.ERR_LIB_SSL, 159))
_error_codes.append(("LENGTH_TOO_SHORT", lib.ERR_LIB_SSL, 160))
_error_codes.append(("LIBRARY_BUG", lib.ERR_LIB_SSL, 274))
_error_codes.append(("LIBRARY_HAS_NO_CIPHERS", lib.ERR_LIB_SSL, 161))
_error_codes.append(("MESSAGE_TOO_LONG", lib.ERR_LIB_SSL, 296))
_error_codes.append(("MISSING_DH_DSA_CERT", lib.ERR_LIB_SSL, 162))
_error_codes.append(("MISSING_DH_KEY", lib.ERR_LIB_SSL, 163))
_error_codes.append(("MISSING_DH_RSA_CERT", lib.ERR_LIB_SSL, 164))
_error_codes.append(("MISSING_DSA_SIGNING_CERT", lib.ERR_LIB_SSL, 165))
_error_codes.append(("MISSING_ECDH_CERT", lib.ERR_LIB_SSL, 382))
_error_codes.append(("MISSING_ECDSA_SIGNING_CERT", lib.ERR_LIB_SSL, 381))
_error_codes.append(("MISSING_EXPORT_TMP_DH_KEY", lib.ERR_LIB_SSL, 166))
_error_codes.append(("MISSING_EXPORT_TMP_RSA_KEY", lib.ERR_LIB_SSL, 167))
_error_codes.append(("MISSING_RSA_CERTIFICATE", lib.ERR_LIB_SSL, 168))
_error_codes.append(("MISSING_RSA_ENCRYPTING_CERT", lib.ERR_LIB_SSL, 169))
_error_codes.append(("MISSING_RSA_SIGNING_CERT", lib.ERR_LIB_SSL, 170))
_error_codes.append(("MISSING_SRP_PARAM", lib.ERR_LIB_SSL, 358))
_error_codes.append(("MISSING_TMP_DH_KEY", lib.ERR_LIB_SSL, 171))
_error_codes.append(("MISSING_TMP_ECDH_KEY", lib.ERR_LIB_SSL, 311))
_error_codes.append(("MISSING_TMP_RSA_KEY", lib.ERR_LIB_SSL, 172))
_error_codes.append(("MISSING_TMP_RSA_PKEY", lib.ERR_LIB_SSL, 173))
_error_codes.append(("MISSING_VERIFY_MESSAGE", lib.ERR_LIB_SSL, 174))
_error_codes.append(("MULTIPLE_SGC_RESTARTS", lib.ERR_LIB_SSL, 346))
_error_codes.append(("NON_SSLV2_INITIAL_PACKET", lib.ERR_LIB_SSL, 175))
_error_codes.append(("NO_CERTIFICATES_RETURNED", lib.ERR_LIB_SSL, 176))
_error_codes.append(("NO_CERTIFICATE_ASSIGNED", lib.ERR_LIB_SSL, 177))
_error_codes.append(("NO_CERTIFICATE_RETURNED", lib.ERR_LIB_SSL, 178))
_error_codes.append(("NO_CERTIFICATE_SET", lib.ERR_LIB_SSL, 179))
_error_codes.append(("NO_CERTIFICATE_SPECIFIED", lib.ERR_LIB_SSL, 180))
_error_codes.append(("NO_CIPHERS_AVAILABLE", lib.ERR_LIB_SSL, 181))
_error_codes.append(("NO_CIPHERS_PASSED", lib.ERR_LIB_SSL, 182))
_error_codes.append(("NO_CIPHERS_SPECIFIED", lib.ERR_LIB_SSL, 183))
_error_codes.append(("NO_CIPHER_LIST", lib.ERR_LIB_SSL, 184))
_error_codes.append(("NO_CIPHER_MATCH", lib.ERR_LIB_SSL, 185))
_error_codes.append(("NO_CLIENT_CERT_METHOD", lib.ERR_LIB_SSL, 331))
_error_codes.append(("NO_CLIENT_CERT_RECEIVED", lib.ERR_LIB_SSL, 186))
_error_codes.append(("NO_COMPRESSION_SPECIFIED", lib.ERR_LIB_SSL, 187))
_error_codes.append(("NO_GOST_CERTIFICATE_SENT_BY_PEER", lib.ERR_LIB_SSL, 330))
_error_codes.append(("NO_METHOD_SPECIFIED", lib.ERR_LIB_SSL, 188))
_error_codes.append(("NO_PEM_EXTENSIONS", lib.ERR_LIB_SSL, 389))
_error_codes.append(("NO_PRIVATEKEY", lib.ERR_LIB_SSL, 189))
_error_codes.append(("NO_PRIVATE_KEY_ASSIGNED", lib.ERR_LIB_SSL, 190))
_error_codes.append(("NO_PROTOCOLS_AVAILABLE", lib.ERR_LIB_SSL, 191))
_error_codes.append(("NO_PUBLICKEY", lib.ERR_LIB_SSL, 192))
_error_codes.append(("NO_RENEGOTIATION", lib.ERR_LIB_SSL, 339))
_error_codes.append(("NO_REQUIRED_DIGEST", lib.ERR_LIB_SSL, 324))
_error_codes.append(("NO_SHARED_CIPHER", lib.ERR_LIB_SSL, 193))
_error_codes.append(("NO_SHARED_SIGATURE_ALGORITHMS", lib.ERR_LIB_SSL, 376))
_error_codes.append(("NO_SRTP_PROFILES", lib.ERR_LIB_SSL, 359))
_error_codes.append(("NO_VERIFY_CALLBACK", lib.ERR_LIB_SSL, 194))
_error_codes.append(("NULL_SSL_CTX", lib.ERR_LIB_SSL, 195))
_error_codes.append(("NULL_SSL_METHOD_PASSED", lib.ERR_LIB_SSL, 196))
_error_codes.append(("OLD_SESSION_CIPHER_NOT_RETURNED", lib.ERR_LIB_SSL, 197))
_error_codes.append(("OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", lib.ERR_LIB_SSL, 344))
_error_codes.append(("ONLY_DTLS_1_2_ALLOWED_IN_SUITEB_MODE", lib.ERR_LIB_SSL, 387))
_error_codes.append(("ONLY_TLS_1_2_ALLOWED_IN_SUITEB_MODE", lib.ERR_LIB_SSL, 379))
_error_codes.append(("ONLY_TLS_ALLOWED_IN_FIPS_MODE", lib.ERR_LIB_SSL, 297))
_error_codes.append(("OPAQUE_PRF_INPUT_TOO_LONG", lib.ERR_LIB_SSL, 327))
_error_codes.append(("PACKET_LENGTH_TOO_LONG", lib.ERR_LIB_SSL, 198))
_error_codes.append(("PARSE_TLSEXT", lib.ERR_LIB_SSL, 227))
_error_codes.append(("PATH_TOO_LONG", lib.ERR_LIB_SSL, 270))
_error_codes.append(("PEER_DID_NOT_RETURN_A_CERTIFICATE", lib.ERR_LIB_SSL, 199))
_error_codes.append(("PEER_ERROR", lib.ERR_LIB_SSL, 200))
_error_codes.append(("PEER_ERROR_CERTIFICATE", lib.ERR_LIB_SSL, 201))
_error_codes.append(("PEER_ERROR_NO_CERTIFICATE", lib.ERR_LIB_SSL, 202))
_error_codes.append(("PEER_ERROR_NO_CIPHER", lib.ERR_LIB_SSL, 203))
_error_codes.append(("PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", lib.ERR_LIB_SSL, 204))
_error_codes.append(("PEM_NAME_BAD_PREFIX", lib.ERR_LIB_SSL, 391))
_error_codes.append(("PEM_NAME_TOO_SHORT", lib.ERR_LIB_SSL, 392))
_error_codes.append(("PRE_MAC_LENGTH_TOO_LONG", lib.ERR_LIB_SSL, 205))
_error_codes.append(("PROBLEMS_MAPPING_CIPHER_FUNCTIONS", lib.ERR_LIB_SSL, 206))
_error_codes.append(("PROTOCOL_IS_SHUTDOWN", lib.ERR_LIB_SSL, 207))
_error_codes.append(("PSK_IDENTITY_NOT_FOUND", lib.ERR_LIB_SSL, 223))
_error_codes.append(("PSK_NO_CLIENT_CB", lib.ERR_LIB_SSL, 224))
_error_codes.append(("PSK_NO_SERVER_CB", lib.ERR_LIB_SSL, 225))
_error_codes.append(("PUBLIC_KEY_ENCRYPT_ERROR", lib.ERR_LIB_SSL, 208))
_error_codes.append(("PUBLIC_KEY_IS_NOT_RSA", lib.ERR_LIB_SSL, 209))
_error_codes.append(("PUBLIC_KEY_NOT_RSA", lib.ERR_LIB_SSL, 210))
_error_codes.append(("READ_BIO_NOT_SET", lib.ERR_LIB_SSL, 211))
_error_codes.append(("READ_TIMEOUT_EXPIRED", lib.ERR_LIB_SSL, 312))
_error_codes.append(("READ_WRONG_PACKET_TYPE", lib.ERR_LIB_SSL, 212))
_error_codes.append(("RECORD_LENGTH_MISMATCH", lib.ERR_LIB_SSL, 213))
_error_codes.append(("RECORD_TOO_LARGE", lib.ERR_LIB_SSL, 214))
_error_codes.append(("RECORD_TOO_SMALL", lib.ERR_LIB_SSL, 298))
_error_codes.append(("RENEGOTIATE_EXT_TOO_LONG", lib.ERR_LIB_SSL, 335))
_error_codes.append(("RENEGOTIATION_ENCODING_ERR", lib.ERR_LIB_SSL, 336))
_error_codes.append(("RENEGOTIATION_MISMATCH", lib.ERR_LIB_SSL, 337))
_error_codes.append(("REQUIRED_CIPHER_MISSING", lib.ERR_LIB_SSL, 215))
_error_codes.append(("REQUIRED_COMPRESSSION_ALGORITHM_MISSING", lib.ERR_LIB_SSL, 342))
_error_codes.append(("REUSE_CERT_LENGTH_NOT_ZERO", lib.ERR_LIB_SSL, 216))
_error_codes.append(("REUSE_CERT_TYPE_NOT_ZERO", lib.ERR_LIB_SSL, 217))
_error_codes.append(("REUSE_CIPHER_LIST_NOT_ZERO", lib.ERR_LIB_SSL, 218))
_error_codes.append(("SCSV_RECEIVED_WHEN_RENEGOTIATING", lib.ERR_LIB_SSL, 345))
_error_codes.append(("SERVERHELLO_TLSEXT", lib.ERR_LIB_SSL, 275))
_error_codes.append(("SESSION_ID_CONTEXT_UNINITIALIZED", lib.ERR_LIB_SSL, 277))
_error_codes.append(("SHORT_READ", lib.ERR_LIB_SSL, 219))
_error_codes.append(("SIGNATURE_ALGORITHMS_ERROR", lib.ERR_LIB_SSL, 360))
_error_codes.append(("SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", lib.ERR_LIB_SSL, 220))
_error_codes.append(("SRP_A_CALC", lib.ERR_LIB_SSL, 361))
_error_codes.append(("SRTP_COULD_NOT_ALLOCATE_PROFILES", lib.ERR_LIB_SSL, 362))
_error_codes.append(("SRTP_PROTECTION_PROFILE_LIST_TOO_LONG", lib.ERR_LIB_SSL, 363))
_error_codes.append(("SRTP_UNKNOWN_PROTECTION_PROFILE", lib.ERR_LIB_SSL, 364))
_error_codes.append(("SSL23_DOING_SESSION_ID_REUSE", lib.ERR_LIB_SSL, 221))
_error_codes.append(("SSL2_CONNECTION_ID_TOO_LONG", lib.ERR_LIB_SSL, 299))
_error_codes.append(("SSL3_EXT_INVALID_ECPOINTFORMAT", lib.ERR_LIB_SSL, 321))
_error_codes.append(("SSL3_EXT_INVALID_SERVERNAME", lib.ERR_LIB_SSL, 319))
_error_codes.append(("SSL3_EXT_INVALID_SERVERNAME_TYPE", lib.ERR_LIB_SSL, 320))
_error_codes.append(("SSL3_SESSION_ID_TOO_LONG", lib.ERR_LIB_SSL, 300))
_error_codes.append(("SSL3_SESSION_ID_TOO_SHORT", lib.ERR_LIB_SSL, 222))
_error_codes.append(("SSLV3_ALERT_BAD_CERTIFICATE", lib.ERR_LIB_SSL, 1042))
_error_codes.append(("SSLV3_ALERT_BAD_RECORD_MAC", lib.ERR_LIB_SSL, 1020))
_error_codes.append(("SSLV3_ALERT_CERTIFICATE_EXPIRED", lib.ERR_LIB_SSL, 1045))
_error_codes.append(("SSLV3_ALERT_CERTIFICATE_REVOKED", lib.ERR_LIB_SSL, 1044))
_error_codes.append(("SSLV3_ALERT_CERTIFICATE_UNKNOWN", lib.ERR_LIB_SSL, 1046))
_error_codes.append(("SSLV3_ALERT_DECOMPRESSION_FAILURE", lib.ERR_LIB_SSL, 1030))
_error_codes.append(("SSLV3_ALERT_HANDSHAKE_FAILURE", lib.ERR_LIB_SSL, 1040))
_error_codes.append(("SSLV3_ALERT_ILLEGAL_PARAMETER", lib.ERR_LIB_SSL, 1047))
_error_codes.append(("SSLV3_ALERT_NO_CERTIFICATE", lib.ERR_LIB_SSL, 1041))
_error_codes.append(("SSLV3_ALERT_UNEXPECTED_MESSAGE", lib.ERR_LIB_SSL, 1010))
_error_codes.append(("SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", lib.ERR_LIB_SSL, 1043))
_error_codes.append(("SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", lib.ERR_LIB_SSL, 228))
_error_codes.append(("SSL_HANDSHAKE_FAILURE", lib.ERR_LIB_SSL, 229))
_error_codes.append(("SSL_LIBRARY_HAS_NO_CIPHERS", lib.ERR_LIB_SSL, 230))
_error_codes.append(("SSL_NEGATIVE_LENGTH", lib.ERR_LIB_SSL, 372))
_error_codes.append(("SSL_SESSION_ID_CALLBACK_FAILED", lib.ERR_LIB_SSL, 301))
_error_codes.append(("SSL_SESSION_ID_CONFLICT", lib.ERR_LIB_SSL, 302))
_error_codes.append(("SSL_SESSION_ID_CONTEXT_TOO_LONG", lib.ERR_LIB_SSL, 273))
_error_codes.append(("SSL_SESSION_ID_HAS_BAD_LENGTH", lib.ERR_LIB_SSL, 303))
_error_codes.append(("SSL_SESSION_ID_IS_DIFFERENT", lib.ERR_LIB_SSL, 231))
_error_codes.append(("TLSV1_ALERT_ACCESS_DENIED", lib.ERR_LIB_SSL, 1049))
_error_codes.append(("TLSV1_ALERT_DECODE_ERROR", lib.ERR_LIB_SSL, 1050))
_error_codes.append(("TLSV1_ALERT_DECRYPTION_FAILED", lib.ERR_LIB_SSL, 1021))
_error_codes.append(("TLSV1_ALERT_DECRYPT_ERROR", lib.ERR_LIB_SSL, 1051))
_error_codes.append(("TLSV1_ALERT_EXPORT_RESTRICTION", lib.ERR_LIB_SSL, 1060))
_error_codes.append(("TLSV1_ALERT_INAPPROPRIATE_FALLBACK", lib.ERR_LIB_SSL, 1086))
_error_codes.append(("TLSV1_ALERT_INSUFFICIENT_SECURITY", lib.ERR_LIB_SSL, 1071))
_error_codes.append(("TLSV1_ALERT_INTERNAL_ERROR", lib.ERR_LIB_SSL, 1080))
_error_codes.append(("TLSV1_ALERT_NO_RENEGOTIATION", lib.ERR_LIB_SSL, 1100))
_error_codes.append(("TLSV1_ALERT_PROTOCOL_VERSION", lib.ERR_LIB_SSL, 1070))
_error_codes.append(("TLSV1_ALERT_RECORD_OVERFLOW", lib.ERR_LIB_SSL, 1022))
_error_codes.append(("TLSV1_ALERT_UNKNOWN_CA", lib.ERR_LIB_SSL, 1048))
_error_codes.append(("TLSV1_ALERT_USER_CANCELLED", lib.ERR_LIB_SSL, 1090))
_error_codes.append(("TLSV1_BAD_CERTIFICATE_HASH_VALUE", lib.ERR_LIB_SSL, 1114))
_error_codes.append(("TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", lib.ERR_LIB_SSL, 1113))
_error_codes.append(("TLSV1_CERTIFICATE_UNOBTAINABLE", lib.ERR_LIB_SSL, 1111))
_error_codes.append(("TLSV1_UNRECOGNIZED_NAME", lib.ERR_LIB_SSL, 1112))
_error_codes.append(("TLSV1_UNSUPPORTED_EXTENSION", lib.ERR_LIB_SSL, 1110))
_error_codes.append(("TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", lib.ERR_LIB_SSL, 232))
_error_codes.append(("TLS_HEARTBEAT_PEER_DOESNT_ACCEPT", lib.ERR_LIB_SSL, 365))
_error_codes.append(("TLS_HEARTBEAT_PENDING", lib.ERR_LIB_SSL, 366))
_error_codes.append(("TLS_ILLEGAL_EXPORTER_LABEL", lib.ERR_LIB_SSL, 367))
_error_codes.append(("TLS_INVALID_ECPOINTFORMAT_LIST", lib.ERR_LIB_SSL, 157))
_error_codes.append(("TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", lib.ERR_LIB_SSL, 233))
_error_codes.append(("TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", lib.ERR_LIB_SSL, 234))
_error_codes.append(("TRIED_TO_USE_UNSUPPORTED_CIPHER", lib.ERR_LIB_SSL, 235))
_error_codes.append(("UNABLE_TO_DECODE_DH_CERTS", lib.ERR_LIB_SSL, 236))
_error_codes.append(("UNABLE_TO_DECODE_ECDH_CERTS", lib.ERR_LIB_SSL, 313))
_error_codes.append(("UNABLE_TO_EXTRACT_PUBLIC_KEY", lib.ERR_LIB_SSL, 237))
_error_codes.append(("UNABLE_TO_FIND_DH_PARAMETERS", lib.ERR_LIB_SSL, 238))
_error_codes.append(("UNABLE_TO_FIND_ECDH_PARAMETERS", lib.ERR_LIB_SSL, 314))
_error_codes.append(("UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", lib.ERR_LIB_SSL, 239))
_error_codes.append(("UNABLE_TO_FIND_SSL_METHOD", lib.ERR_LIB_SSL, 240))
_error_codes.append(("UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", lib.ERR_LIB_SSL, 241))
_error_codes.append(("UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", lib.ERR_LIB_SSL, 242))
_error_codes.append(("UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", lib.ERR_LIB_SSL, 243))
_error_codes.append(("UNEXPECTED_MESSAGE", lib.ERR_LIB_SSL, 244))
_error_codes.append(("UNEXPECTED_RECORD", lib.ERR_LIB_SSL, 245))
_error_codes.append(("UNINITIALIZED", lib.ERR_LIB_SSL, 276))
_error_codes.append(("UNKNOWN_ALERT_TYPE", lib.ERR_LIB_SSL, 246))
_error_codes.append(("UNKNOWN_CERTIFICATE_TYPE", lib.ERR_LIB_SSL, 247))
_error_codes.append(("UNKNOWN_CIPHER_RETURNED", lib.ERR_LIB_SSL, 248))
_error_codes.append(("UNKNOWN_CIPHER_TYPE", lib.ERR_LIB_SSL, 249))
_error_codes.append(("UNKNOWN_CMD_NAME", lib.ERR_LIB_SSL, 386))
_error_codes.append(("UNKNOWN_DIGEST", lib.ERR_LIB_SSL, 368))
_error_codes.append(("UNKNOWN_KEY_EXCHANGE_TYPE", lib.ERR_LIB_SSL, 250))
_error_codes.append(("UNKNOWN_PKEY_TYPE", lib.ERR_LIB_SSL, 251))
_error_codes.append(("UNKNOWN_PROTOCOL", lib.ERR_LIB_SSL, 252))
_error_codes.append(("UNKNOWN_REMOTE_ERROR_TYPE", lib.ERR_LIB_SSL, 253))
_error_codes.append(("UNKNOWN_SSL_VERSION", lib.ERR_LIB_SSL, 254))
_error_codes.append(("UNKNOWN_STATE", lib.ERR_LIB_SSL, 255))
_error_codes.append(("UNSAFE_LEGACY_RENEGOTIATION_DISABLED", lib.ERR_LIB_SSL, 338))
_error_codes.append(("UNSUPPORTED_CIPHER", lib.ERR_LIB_SSL, 256))
_error_codes.append(("UNSUPPORTED_COMPRESSION_ALGORITHM", lib.ERR_LIB_SSL, 257))
_error_codes.append(("UNSUPPORTED_DIGEST_TYPE", lib.ERR_LIB_SSL, 326))
_error_codes.append(("UNSUPPORTED_ELLIPTIC_CURVE", lib.ERR_LIB_SSL, 315))
_error_codes.append(("UNSUPPORTED_PROTOCOL", lib.ERR_LIB_SSL, 258))
_error_codes.append(("UNSUPPORTED_SSL_VERSION", lib.ERR_LIB_SSL, 259))
_error_codes.append(("UNSUPPORTED_STATUS_TYPE", lib.ERR_LIB_SSL, 329))
_error_codes.append(("USE_SRTP_NOT_NEGOTIATED", lib.ERR_LIB_SSL, 369))
_error_codes.append(("VERSION_TOO_LOW", lib.ERR_LIB_SSL, 396))
_error_codes.append(("WRITE_BIO_NOT_SET", lib.ERR_LIB_SSL, 260))
_error_codes.append(("WRONG_CERTIFICATE_TYPE", lib.ERR_LIB_SSL, 383))
_error_codes.append(("WRONG_CIPHER_RETURNED", lib.ERR_LIB_SSL, 261))
_error_codes.append(("WRONG_CURVE", lib.ERR_LIB_SSL, 378))
_error_codes.append(("WRONG_MESSAGE_TYPE", lib.ERR_LIB_SSL, 262))
_error_codes.append(("WRONG_NUMBER_OF_KEY_BITS", lib.ERR_LIB_SSL, 263))
_error_codes.append(("WRONG_SIGNATURE_LENGTH", lib.ERR_LIB_SSL, 264))
_error_codes.append(("WRONG_SIGNATURE_SIZE", lib.ERR_LIB_SSL, 265))
_error_codes.append(("WRONG_SIGNATURE_TYPE", lib.ERR_LIB_SSL, 370))
_error_codes.append(("WRONG_SSL_VERSION", lib.ERR_LIB_SSL, 266))
_error_codes.append(("WRONG_VERSION_NUMBER", lib.ERR_LIB_SSL, 267))
_error_codes.append(("X509_LIB", lib.ERR_LIB_SSL, 268))
_error_codes.append(("X509_VERIFICATION_SETUP_PROBLEMS", lib.ERR_LIB_SSL, 269))
_error_codes.append(("AKID_MISMATCH", lib.ERR_LIB_X509, 110))
_error_codes.append(("BAD_X509_FILETYPE", lib.ERR_LIB_X509, 100))
_error_codes.append(("BASE64_DECODE_ERROR", lib.ERR_LIB_X509, 118))
_error_codes.append(("CANT_CHECK_DH_KEY", lib.ERR_LIB_X509, 114))
_error_codes.append(("CERT_ALREADY_IN_HASH_TABLE", lib.ERR_LIB_X509, 101))
_error_codes.append(("CRL_ALREADY_DELTA", lib.ERR_LIB_X509, 127))
_error_codes.append(("CRL_VERIFY_FAILURE", lib.ERR_LIB_X509, 131))
_error_codes.append(("ERR_ASN1_LIB", lib.ERR_LIB_X509, 102))
_error_codes.append(("IDP_MISMATCH", lib.ERR_LIB_X509, 128))
_error_codes.append(("INVALID_DIRECTORY", lib.ERR_LIB_X509, 113))
_error_codes.append(("INVALID_FIELD_NAME", lib.ERR_LIB_X509, 119))
_error_codes.append(("INVALID_TRUST", lib.ERR_LIB_X509, 123))
_error_codes.append(("ISSUER_MISMATCH", lib.ERR_LIB_X509, 129))
_error_codes.append(("KEY_TYPE_MISMATCH", lib.ERR_LIB_X509, 115))
_error_codes.append(("KEY_VALUES_MISMATCH", lib.ERR_LIB_X509, 116))
_error_codes.append(("LOADING_CERT_DIR", lib.ERR_LIB_X509, 103))
_error_codes.append(("LOADING_DEFAULTS", lib.ERR_LIB_X509, 104))
_error_codes.append(("METHOD_NOT_SUPPORTED", lib.ERR_LIB_X509, 124))
_error_codes.append(("NEWER_CRL_NOT_NEWER", lib.ERR_LIB_X509, 132))
_error_codes.append(("NO_CERT_SET_FOR_US_TO_VERIFY", lib.ERR_LIB_X509, 105))
_error_codes.append(("NO_CRL_NUMBER", lib.ERR_LIB_X509, 130))
_error_codes.append(("PUBLIC_KEY_DECODE_ERROR", lib.ERR_LIB_X509, 125))
_error_codes.append(("PUBLIC_KEY_ENCODE_ERROR", lib.ERR_LIB_X509, 126))
_error_codes.append(("SHOULD_RETRY", lib.ERR_LIB_X509, 106))
_error_codes.append(("UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", lib.ERR_LIB_X509, 107))
_error_codes.append(("UNABLE_TO_GET_CERTS_PUBLIC_KEY", lib.ERR_LIB_X509, 108))
_error_codes.append(("UNKNOWN_KEY_TYPE", lib.ERR_LIB_X509, 117))
_error_codes.append(("UNKNOWN_NID", lib.ERR_LIB_X509, 109))
_error_codes.append(("UNKNOWN_PURPOSE_ID", lib.ERR_LIB_X509, 121))
_error_codes.append(("UNKNOWN_TRUST_ID", lib.ERR_LIB_X509, 120))
_error_codes.append(("UNSUPPORTED_ALGORITHM", lib.ERR_LIB_X509, 111))
_error_codes.append(("WRONG_LOOKUP_TYPE", lib.ERR_LIB_X509, 112))
_error_codes.append(("WRONG_TYPE", lib.ERR_LIB_X509, 122))
| avg_line_length: 68.785354 | max_line_length: 93 | alphanum_fraction: 0.813172 |

| hexsha: 4a172d10cd92fb1c390b6030710a25d082926d60 | size: 1487 | ext: py | lang: Python |
| max_stars_repo: tacker/tests/etc/samples/etsi/nfv/user_data_sample_userdata_invalid_hot_param/UserData/lcm_user_data_invalid_hot_param.py | takahashi-tsc/tacker | a0ae01a13dcc51bb374060adcbb4fd484ab37156 | ["Apache-2.0"] | stars: 116 (2015-10-18T02:57:08.000Z to 2022-03-15T04:09:18.000Z) |
| max_issues_repo: tacker/tests/etc/samples/etsi/nfv/user_data_sample_userdata_invalid_hot_param/UserData/lcm_user_data_invalid_hot_param.py | takahashi-tsc/tacker | a0ae01a13dcc51bb374060adcbb4fd484ab37156 | ["Apache-2.0"] | issues: 6 (2016-11-07T22:15:54.000Z to 2021-05-09T06:13:08.000Z) |
| max_forks_repo: tacker/tests/etc/samples/etsi/nfv/user_data_sample_userdata_invalid_hot_param/UserData/lcm_user_data_invalid_hot_param.py | takahashi-tsc/tacker | a0ae01a13dcc51bb374060adcbb4fd484ab37156 | ["Apache-2.0"] | forks: 166 (2015-10-20T15:31:52.000Z to 2021-11-12T08:39:49.000Z) |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tacker.vnfm.lcm_user_data.utils as UserDataUtil
from tacker.vnfm.lcm_user_data.abstract_user_data import AbstractUserData
class SampleUserData(AbstractUserData):
@staticmethod
def instantiate(base_hot_dict=None,
vnfd_dict=None,
inst_req_info=None,
grant_info=None):
# Create HOT input parameter using util functions.
initial_param_dict = UserDataUtil.create_initial_param_dict(
base_hot_dict)
# vdu_flavor_dict = UserDataUtil.create_vdu_flavor_dict(vnfd_dict)
# vdu_image_dict = UserDataUtil.create_vdu_image_dict(grant_info)
# cpd_vl_dict = UserDataUtil.create_cpd_vl_dict(
# base_hot_dict, inst_req_info)
#
# final_param_dict = UserDataUtil.create_final_param_dict(
# initial_param_dict, vdu_flavor_dict, vdu_image_dict, cpd_vl_dict)
return initial_param_dict
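# --- Example (hypothetical, not part of the sample): how this user-data class
# would be invoked with a pre-parsed base HOT template. The file name and the
# empty dicts are stand-ins, so this is a sketch rather than a runnable flow.
# import yaml
# base_hot_dict = yaml.safe_load(open("base_hot.yaml"))
# params = SampleUserData.instantiate(base_hot_dict=base_hot_dict,
#                                     vnfd_dict={}, inst_req_info=None,
#                                     grant_info=None)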
| avg_line_length: 37.175 | max_line_length: 79 | alphanum_fraction: 0.724277 |

| hexsha: 4a172e9819f319254684647161038a03bdf91f2a | size: 4788 | ext: py | lang: Python |
| max_stars_repo: predata.py | sunsunza2009/thai-sent_tokenize | 896fdd8c83822eb9767799b481fad68f6b8c1997 | ["Apache-2.0"] | stars: 4 (2018-07-10T08:17:07.000Z to 2019-06-19T13:15:29.000Z) |
| max_issues_repo: predata.py | wannaphongcom/test-thai-sent-tokenize-NaiveBayesClassifier | be2cbd172d3434ab140ab15c5ef962a971568dc8 | ["Apache-2.0"] | issues: null |
| max_forks_repo: predata.py | wannaphongcom/test-thai-sent-tokenize-NaiveBayesClassifier | be2cbd172d3434ab140ab15c5ef962a971568dc8 | ["Apache-2.0"] | forks: 2 (2019-07-30T16:28:08.000Z to 2019-11-07T14:42:48.000Z) |
# -*- coding: utf-8 -*-
import codecs
from tokenizeword import wordcut as word_tokenize
from nltk.tokenize import RegexpTokenizer
from pythainlp.tag import pos_tag
import glob
import re
from random import shuffle
# handle duplicate sentences
data_not=[]
def Unique(p):
    text=re.sub("<[^>]*>","",p)
    text=re.sub("\[(.*?)\]","",text)
    text=re.sub("\[\/(.*?)\]","",text)
    if text not in data_not:
        data_not.append(text)
        return True
    else:
        return False
# prepare regex-based tag splitting
pattern = r'\[(.*?)\](.*?)\[\/(.*?)\]'
tokenizer = RegexpTokenizer(pattern)  # use nltk.tokenize.RegexpTokenizer to split e.g. [TIME]8.00[/TIME] into ('TIME', '8.00', 'TIME')
# wrap untagged text in a [word] tag
def toolner_to_tag(text):
    text=text.strip()
    text=re.sub("<[^>]*>","",text)
    text=re.sub("(\[\/(.*?)\])","\\1***",text)#.replace('(\[(.*?)\])','***\\1')# text.replace('>','>***') # handle spans that have no tag
    text=re.sub("(\[\w+\])","***\\1",text)
    text2=[]
    for i in text.split('***'):
        if "[" in i:
            text2.append(i)
        else:
            text2.append("[word]"+i+"[/word]")
    text="".join(text2)#re.sub("[word][/word]","","".join(text2))
    return text.replace("[word][/word]","")
# convert text to CoNLL-2002 format
def postag(text):
listtxt=[i for i in text.split('\n') if i!='']
list_word=[]
for data in listtxt:
list_word.append(data.split('\t')[0])
list_word=pos_tag(list_word,engine="perceptron", corpus="orchid_ud")
text=""
i=0
for data in listtxt:
text+=data.split('\t')[0]+'\t'+list_word[i][1]+'\t'+data.split('\t')[1]+'\n'
i+=1
return text
def text2conll2002(text,pos=True):
    """
    Convert text into CoNLL-2002 format.
    """
    text=toolner_to_tag(text)
    text=text.replace("''",'"')
    text=text.replace("’",'"').replace("‘",'"')#.replace('"',"")
    tag=tokenizer.tokenize(text)
    j=0
    conll2002=""
    for tagopen,text,tagclose in tag:
        word_cut=word_tokenize(text) # use the newmm word tokenizer
        i=0
        txt5=""
        while i<len(word_cut):
            if word_cut[i]=="''" or word_cut[i]=='"':pass
            elif i==0 and tagopen!='word':
                txt5+=word_cut[i]
                txt5+='\t'+'B-'+tagopen
            elif tagopen!='word':
                txt5+=word_cut[i]
                txt5+='\t'+'I-'+tagopen
            else:
                txt5+=word_cut[i]
                txt5+='\t'+'O'
            txt5+='\n'
            #j+=1
            i+=1
        conll2002+=txt5
    if pos==False:
        return conll2002
    return postag(conll2002)
# used to attach POS tags for NER
# print(text2conll2002(t,pos=False))
# write the CoNLL-2002 data file
def write_conll2002(file_name,data):
"""
    Write data to a file.
"""
with codecs.open(file_name, "w", "utf-8-sig") as temp:
temp.write(data)
return True
def to(text):
temp=word_tokenize(text)
i=0
j=len(temp)
while i<j:
if temp[i]=="|" and i>0:
temp[i+1]="[S]"+temp[i+1]+"[/S]"
elif i==0:
temp[i]="[S]"+temp[i]+"[/S]"
i+=1
return "".join([i for i in temp if i!="|"])
# read data from a file
def get_data(fileopen):
    """
    Read the whole file line by line into a list.
    """
    with codecs.open(fileopen, 'r',encoding='utf-8-sig') as f:
        lines = f.read().splitlines()
    return [to(a) for a in lines if Unique(a)] # keep unique sentences only
def alldata(lists):
text=""
for data in lists:
text+=text2conll2002(data)
text+='\n'
return text
def alldata_list(lists,postag):
data_all=[]
for data in lists:
data_num=[]
try:
txt=text2conll2002(data,postag).split('\n')
for d in txt:
tt=d.split('\t')
if d!="":
if len(tt)==3:
data_num.append((tt[0],tt[1],tt[2]))
else:
data_num.append((tt[0],tt[1]))
data_all.append(data_num)
except:
print(data)
return data_all
def alldata_list_str(lists):
string=""
for data in lists:
string1=""
for j in data:
string1+=j[0]+" "+j[1]+" "+j[2]+"\n"
string1+="\n"
string+=string1
return string
def get_data_tag(listd):
list_all=[]
c=[]
for i in listd:
if i !='':
c.append((i.split("\t")[0],i.split("\t")[1],i.split("\t")[2]))
else:
list_all.append(c)
c=[]
return list_all
def getall(lista):
ll=[]
for i in lista:
o=True
for j in ll:
if re.sub("\[(.*?)\]","",i)==re.sub("\[(.*?)\]","",j):
o=False
break
if o==True:
ll.append(i)
return ll
def get_conll(filename,postag=False):
    d = get_data(filename)
    print("Number of sentences: "+str(len(d)))
    shuffle(d)
    return alldata_list(getall(d),postag)
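# --- Example (hypothetical usage, not part of the original script): assumes a
# UTF-8 corpus file whose sentences are delimited by "|" as expected by to();
# the file names are illustrative.
# data = get_conll("corpus.txt", postag=True)          # list of (word, pos, label) sentences
# write_conll2002("corpus.conll2002", alldata_list_str(data))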
| avg_line_length: 27.517241 | max_line_length: 131 | alphanum_fraction: 0.531537 |

| hexsha: 4a172f8270c112d84868f656851f7b3cbe2107fb | size: 27 | ext: py | lang: Python |
| max_stars_repo: cocotb_test/__init__.py | mciepluc/cocotb-test | 0a8ad18f9396638c3abbfd4304f5fd33a2a34e5a | ["BSD-2-Clause"] | stars: 14 (2021-09-17T18:23:07.000Z to 2022-03-20T14:28:48.000Z) |
| max_issues_repo: lib/contourpy/_version.py | ianthomas23/contourpy | 10df582b7631332467b848981a0255f4739ef901 | ["BSD-3-Clause"] | issues: 9 (2021-04-22T07:56:38.000Z to 2022-03-05T14:28:36.000Z) |
| max_forks_repo: lib/contourpy/_version.py | ianthomas23/contourpy | 10df582b7631332467b848981a0255f4739ef901 | ["BSD-3-Clause"] | forks: 1 (2021-05-29T05:03:55.000Z to 2021-05-29T05:03:55.000Z) |
__version__ = "0.0.5.dev1"
| avg_line_length: 13.5 | max_line_length: 26 | alphanum_fraction: 0.666667 |

| hexsha: 4a172fae5e2225c738c7531c6b195d46fe478f04 | size: 6713 | ext: py | lang: Python |
| max_stars_repo: rqalpha/environment.py | xiecang/rqalpha | b31fd71692f0cc17b5bd72691446d3c1f576f0b6 | ["Apache-2.0"] | stars: 1 (2019-04-22T14:29:24.000Z to 2019-04-22T14:29:24.000Z) |
| max_issues_repo: rqalpha/environment.py | 1M15M3/rqalpha | eeee5859c30728a2dbc5d6a30a7ebcc6fde8b5ee | ["Apache-2.0"] | issues: null |
| max_forks_repo: rqalpha/environment.py | 1M15M3/rqalpha | eeee5859c30728a2dbc5d6a30a7ebcc6fde8b5ee | ["Apache-2.0"] | forks: 1 (2019-04-28T01:24:16.000Z to 2019-04-28T01:24:16.000Z) |
# -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rqalpha.events import EventBus
from rqalpha.utils import get_account_type
from rqalpha.utils.logger import system_log, user_log, user_detail_log
from rqalpha.utils.i18n import gettext as _
class Environment(object):
_env = None
def __init__(self, config):
Environment._env = self
self.config = config
self._universe = None
self.data_proxy = None
self.data_source = None
self.price_board = None
self.event_source = None
self.strategy_loader = None
self.global_vars = None
self.persist_provider = None
self.persist_helper = None
self.broker = None
self.profile_deco = None
self.system_log = system_log
self.user_log = user_log
self.user_detail_log = user_detail_log
self.event_bus = EventBus()
self.portfolio = None
self.booking = None
self.benchmark_portfolio = None
self.calendar_dt = None
self.trading_dt = None
self.mod_dict = None
self.plot_store = None
self.bar_dict = None
self._frontend_validators = []
self._account_model_dict = {}
self._position_model_dict = {}
self._transaction_cost_decider_dict = {}
@classmethod
def get_instance(cls):
"""
        Return the Environment object that has already been created.
"""
if Environment._env is None:
raise RuntimeError(
_(u"Environment has not been created. Please Use `Environment.get_instance()` after RQAlpha init"))
return Environment._env
def set_data_proxy(self, data_proxy):
self.data_proxy = data_proxy
def set_data_source(self, data_source):
self.data_source = data_source
def set_price_board(self, price_board):
self.price_board = price_board
def set_strategy_loader(self, strategy_loader):
self.strategy_loader = strategy_loader
def set_global_vars(self, global_vars):
self.global_vars = global_vars
def set_hold_strategy(self):
self.config.extra.is_hold = True
def cancel_hold_strategy(self):
self.config.extra.is_hold = False
def set_persist_helper(self, helper):
self.persist_helper = helper
def set_persist_provider(self, provider):
self.persist_provider = provider
def set_event_source(self, event_source):
self.event_source = event_source
def set_broker(self, broker):
self.broker = broker
def add_frontend_validator(self, validator):
self._frontend_validators.append(validator)
def set_account_model(self, account_type, account_model):
self._account_model_dict[account_type] = account_model
def get_account_model(self, account_type):
if account_type not in self._account_model_dict:
raise RuntimeError(_(u"Unknown Account Type {}").format(account_type))
return self._account_model_dict[account_type]
def set_position_model(self, account_type, position_model):
self._position_model_dict[account_type] = position_model
def get_position_model(self, account_type):
if account_type not in self._position_model_dict:
raise RuntimeError(_(u"Unknown Account Type {}").format(account_type))
return self._position_model_dict[account_type]
def can_submit_order(self, order):
if Environment.get_instance().config.extra.is_hold:
return False
account = self.get_account(order.order_book_id)
for v in self._frontend_validators:
if not v.can_submit_order(account, order):
return False
return True
def can_cancel_order(self, order):
if order.is_final():
return False
account = self.get_account(order.order_book_id)
for v in self._frontend_validators:
if not v.can_cancel_order(account, order):
return False
return True
def set_bar_dict(self, bar_dict):
self.bar_dict = bar_dict
def get_universe(self):
return self._universe.get()
def update_universe(self, universe):
self._universe.update(universe)
def get_plot_store(self):
if self.plot_store is None:
from rqalpha.utils.plot_store import PlotStore
self.plot_store = PlotStore()
return self.plot_store
def add_plot(self, series_name, value):
self.get_plot_store().add_plot(self.trading_dt.date(), series_name, value)
def get_bar(self, order_book_id):
return self.bar_dict[order_book_id]
def get_last_price(self, order_book_id):
return float(self.price_board.get_last_price(order_book_id))
def get_instrument(self, order_book_id):
return self.data_proxy.instruments(order_book_id)
def get_account_type(self, order_book_id):
        # New account types can be supported by overriding this function
return get_account_type(order_book_id)
def get_account(self, order_book_id):
account_type = get_account_type(order_book_id)
return self.portfolio.accounts[account_type]
def get_open_orders(self, order_book_id=None):
return self.broker.get_open_orders(order_book_id)
def set_transaction_cost_decider(self, account_type, decider):
self._transaction_cost_decider_dict[account_type] = decider
def _get_transaction_cost_decider(self, account_type):
try:
return self._transaction_cost_decider_dict[account_type]
except KeyError:
raise NotImplementedError(_(u"No such transaction cost decider for such account_type {}.".format(
account_type
)))
def get_trade_tax(self, account_type, trade):
return self._get_transaction_cost_decider(account_type).get_trade_tax(trade)
def get_trade_commission(self, account_type, trade):
return self._get_transaction_cost_decider(account_type).get_trade_commission(trade)
def get_order_transaction_cost(self, account_type, order):
return self._get_transaction_cost_decider(account_type).get_order_transaction_cost(order)
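A short sketch of how this singleton is usually reached from strategy or mod code, assuming the framework has already constructed Environment(config) and wired in the data providers; the order_book_id shown is illustrative.
# Hypothetical access pattern: everything outside startup fetches the
# shared instance instead of passing the environment around.
env = Environment.get_instance()
last = env.get_last_price("000001.XSHE")   # illustrative instrument id
account = env.get_account("000001.XSHE")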
| 34.25
| 115
| 0.693133
|
4a17303d42dbe2b6267842f596301dcfadcb12de
| 217
|
py
|
Python
|
Problem_sets/spiral_matrix/test_script/solution.py
|
zanderhinton/DSA_collaborative_prep
|
8427255e0084c6d69031027492d847a90b970840
|
[
"MIT"
] | 3
|
2020-02-02T14:52:16.000Z
|
2020-09-28T12:32:35.000Z
|
Problem_sets/spiral_matrix/test_script/solution.py
|
zanderhinton/DSA_collaborative_prep
|
8427255e0084c6d69031027492d847a90b970840
|
[
"MIT"
] | 14
|
2020-02-02T21:17:49.000Z
|
2020-02-10T15:48:36.000Z
|
Problem_sets/spiral_matrix/test_script/solution.py
|
zanderhinton/DSA_collaborative_prep
|
8427255e0084c6d69031027492d847a90b970840
|
[
"MIT"
] | 9
|
2020-02-02T20:00:05.000Z
|
2020-02-17T19:02:32.000Z
|
import pickle
def spiral_matrix(desired_input):
soln_idx = desired_input -1
with open("test_cases_spiral_op.pkl", "rb") as f:
desired_output = pickle.load(f)
return desired_output[soln_idx]
| 24.111111
| 53
| 0.700461
|
4a17307e0fc74d06e69f31c07d9dcf26596bd962
| 8,882
|
py
|
Python
|
Train/gan_trainer.py
|
ZM-Zhou/MDE_Platform_Pytorch
|
d86efe061bf14a6eed3352cc45e1437e46c138b1
|
[
"MIT"
] | null | null | null |
Train/gan_trainer.py
|
ZM-Zhou/MDE_Platform_Pytorch
|
d86efe061bf14a6eed3352cc45e1437e46c138b1
|
[
"MIT"
] | null | null | null |
Train/gan_trainer.py
|
ZM-Zhou/MDE_Platform_Pytorch
|
d86efe061bf14a6eed3352cc45e1437e46c138b1
|
[
"MIT"
] | null | null | null |
import numpy as np
import time
import torch
import torch.optim as optim
from torch.utils.data import DataLoader
import os
import sys
sys.path.append(os.getcwd())
from Train.logger import *
from Utils.import_choice import JsonArg, Stage, json_to_data, setup_seed
class Trainer:
def __init__(self):
json_arg = JsonArg()
json_path = json_arg.parse().json_path
self.opts, Dataset, Network, Costfunc, describles\
= json_to_data(json_path)
self.eval_best = 1e10
setup_seed(self.opts["t"].rand_seed)
self.stage = Stage()
self.logger = TrainLog(self.opts)
self.logger.device = torch.device("cpu" if self.opts["t"].no_cuda
else "cuda")
# load network
self.network = Network(self.opts["n"], self.logger)
self.network.check_info()
for k, layers in self.network.networks.items():
layers.to(self.logger.device)
self.network.networks[k] = layers
# load loss-functions
self.loss_func = Costfunc(self.opts["c"], self.logger)
# load dataset
train_dataset = Dataset(self.opts["d"], mode="train")
self.train_loader = DataLoader(
train_dataset, self.opts["t"].batch_size,
shuffle=self.opts["t"].is_shuffle,
num_workers=self.opts["t"].num_workers, pin_memory=True,
drop_last=True)
self.valid_dataset = Dataset(self.opts["d"], mode="val")
self.valid_loader = DataLoader(
self.valid_dataset, self.opts["t"].batch_size,
shuffle=True,
num_workers=self.opts["t"].num_workers, pin_memory=True,
drop_last=True)
self.valid_iter = iter(self.valid_loader)
# load optimizer
self.params_sets = self.network.get_trainable_params()
self.model_optimizer = []
self.model_lr_scheduler = []
for sets in self.params_sets:
trainable_params = sets[0]
if self.opts["t"].optim == "Adam":
self.model_optimizer.append(optim.Adam(trainable_params,
self.opts["t"].learning_rate))
elif self.opts["t"].optim == "SGD":
                self.model_optimizer.append(optim.SGD(trainable_params,
self.opts["t"].learning_rate,
momentum=0.9,
weight_decay=0.0005))
if self.opts["t"].scheduler == "Step":
self.model_lr_scheduler.append(optim.lr_scheduler.MultiStepLR(
self.model_optimizer[-1], self.opts["t"].scheduler_step_size,
self.opts["t"].scheduler_rate))
elif self.opts["t"].scheduler == "Plateau":
self.model_lr_scheduler.append(optim.lr_scheduler.ReduceLROnPlateau(
self.model_optimizer[-1], factor=self.opts["t"].scheduler_rate,
patience=self.opts["t"].scheduler_step_size, min_lr=1e-6,
verbose=True))
# load pretrain model
self.epoch = 0
if self.opts["t"].load_weights_folder is not None:
self.network.networks, self.epoch, self.eval_best\
= self.logger.load_models(self.network.get_networks(),
self.model_optimizer)
for i in range(2):
self.model_lr_scheduler[i].last_epoch = self.epoch - 1
# compute steps
num_train_samples = len(train_dataset)
self.logger.step = self.epoch * num_train_samples\
// self.opts["t"].batch_size
self.start_step = self.logger.step
self.num_total_steps = num_train_samples\
// self.opts["t"].batch_size * (self.opts["t"].num_epochs
- self.epoch)
self.visual_stop_step = self.opts["t"].visual_frequency\
* self.opts["t"].visual_stop + self.start_step
self.logger.do_log_before_train(train_dataset,
self.valid_dataset, describles)
def do_train(self):
"""Run the entire training pipeline
"""
self.start_time = time.time()
while self.epoch < self.opts["t"].num_epochs:
self.process_epoch()
self.epoch = self.epoch + 1
def process_epoch(self):
for i in range(2):
self.logger.do_log_epoch(self.model_optimizer[i],
self.params_sets[i][1])
self.network.set_train()
self.stage.phase = "train"
for batch_idx, inputs in enumerate(self.train_loader):
before_op_time = time.time()
outputs, losses = self.process_batch(inputs)
if self.logger.step % 5 == 0:
train_part = ["loss", "D_loss"]
else:
train_part = ["loss"]
for i, name in enumerate(train_part):
self.model_optimizer[i].zero_grad()
if i == 0 and len(train_part) > 1:
losses["{}".format(name)].backward(retain_graph=True)
else:
losses["{}".format(name)].backward()
                # check the grad
temp_params = self.model_optimizer[i].param_groups
self.logger.do_grad_check(temp_params, self.params_sets[i][1])
self.model_optimizer[i].step()
self.model_optimizer[i].zero_grad()
duration = time.time() - before_op_time
log_flag = batch_idx % self.opts["t"].log_frequency == 0\
and batch_idx != 0
if log_flag:
self.logger.do_log(batch_idx, duration, losses,
self.model_optimizer[0].state_dict()
['param_groups'][0]["lr"],
self.start_time, self.epoch,
(self.num_total_steps /
(self.logger.step-self.start_step) - 1.0),
is_gan=True)
self.do_valid()
self.logger.step += 1
eval_all = 0
for i in range(int(500 / self.opts["t"].batch_size)):
eval_all += self.do_valid(do_log=False)
eval_all /= int(500 / self.opts["t"].batch_size)
if eval_all < self.eval_best:
self.eval_best = eval_all
self.logger.save_models(self.network.get_networks(),
self.epoch, self.eval_best,
True)
if (self.epoch + 1) % self.opts["t"].save_frequency == 0:
self.logger.save_models(self.network.get_networks(),
self.epoch, eval_all)
else:
self.logger.do_log_validphase(eval_all)
if self.opts["t"].scheduler == "Step":
for i in range(2):
self.model_lr_scheduler[i].step()
elif self.opts["t"].scheduler == "Plateau":
for i in range(2):
self.model_lr_scheduler[i].step(eval_all)
def process_batch(self, inputs):
"""Pass a minibatch through the network and generate images and losses
"""
for key, ipt in inputs.items():
inputs[key] = ipt.to(self.logger.device, non_blocking=True)
visual_flag = self.logger.step % self.opts["t"].visual_frequency == 0\
and self.logger.step < self.visual_stop_step\
and self.network.train_phase
if visual_flag:
self.stage.is_visual = True
else:
self.stage.is_visual = False
outputs = self.network(inputs, self.stage)
losses = self.loss_func.compute_losses(inputs, outputs, self.stage)
return outputs, losses
def do_valid(self, do_log=True):
self.network.set_eval()
self.stage.phase = "val"
try:
            inputs = next(self.valid_iter)
except StopIteration:
self.valid_iter = iter(self.valid_loader)
            inputs = next(self.valid_iter)
with torch.no_grad():
outputs, losses = self.process_batch(inputs)
eval_value, losses, _ = self.valid_dataset.evaluation(inputs,
outputs,
losses)
if do_log:
self.logger.do_log_valid(losses)
del inputs, outputs, losses
self.network.set_train()
self.stage.phase = "train"
return eval_value
if __name__ == '__main__':
train = Trainer()
train.do_train()
| 39.829596
| 84
| 0.537154
|
4a1730aca576368412e5a3afc57890d7125a816b
| 280
|
py
|
Python
|
main.py
|
cconrey3/agilent8164
|
12d48321a075ffefbf71408ab95ac1c64f5ae027
|
[
"MIT"
] | 1
|
2022-03-25T00:29:21.000Z
|
2022-03-25T00:29:21.000Z
|
main.py
|
cconrey3/agilent8164
|
12d48321a075ffefbf71408ab95ac1c64f5ae027
|
[
"MIT"
] | null | null | null |
main.py
|
cconrey3/agilent8164
|
12d48321a075ffefbf71408ab95ac1c64f5ae027
|
[
"MIT"
] | null | null | null |
import pyvisa
from al8164 import AL8164
#Instrument Initialization
rm = pyvisa.ResourceManager()
rl = rm.list_resources()
resource = rl[1]
inst = rm.open_resource(resource)
my_laser = AL8164(inst)
print("SUCCESSFUL CONSTRUCTION")
my_laser.get_IDN()
print("SUCCESSFUL QUERY")
#
| 17.5
| 33
| 0.775
|
4a1731326faedddbf627b67e57b429139b836374
| 930
|
py
|
Python
|
datliser/urls.py
|
arunikayadav42/Backend
|
5364884f178e2a338b321b4a63a19fbc55212fe2
|
[
"MIT"
] | 2
|
2018-11-22T21:09:56.000Z
|
2018-11-26T07:41:14.000Z
|
datliser/urls.py
|
arunikayadav42/Backend
|
5364884f178e2a338b321b4a63a19fbc55212fe2
|
[
"MIT"
] | 8
|
2018-11-26T12:00:08.000Z
|
2019-01-19T11:11:19.000Z
|
datliser/urls.py
|
arunikayadav42/Backend
|
5364884f178e2a338b321b4a63a19fbc55212fe2
|
[
"MIT"
] | 4
|
2018-11-30T19:14:05.000Z
|
2018-12-22T07:10:16.000Z
|
"""datliser URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'', include('backend.urls')),
url(r'^admin/', admin.site.urls),
url(r'^accounts/', include('rest_auth.urls')),
url(r'^auth/', include('rest_framework_social_oauth2.urls')),
]
| 37.2
| 79
| 0.695699
|
4a17316ab038e3b5f1e67f4850623988ad86d27f
| 566
|
py
|
Python
|
examples/mpu6050_plotter_example.py
|
rhou7873/Adafruit_CircuitPython_MPU6050
|
aa4e43fe82d285e8c0e358dc6b975e5864c0f611
|
[
"MIT"
] | 22
|
2020-02-29T12:00:14.000Z
|
2022-03-21T12:14:41.000Z
|
examples/mpu6050_plotter_example.py
|
rhou7873/Adafruit_CircuitPython_MPU6050
|
aa4e43fe82d285e8c0e358dc6b975e5864c0f611
|
[
"MIT"
] | 13
|
2020-01-05T12:35:38.000Z
|
2022-03-30T02:02:57.000Z
|
examples/mpu6050_plotter_example.py
|
rhou7873/Adafruit_CircuitPython_MPU6050
|
aa4e43fe82d285e8c0e358dc6b975e5864c0f611
|
[
"MIT"
] | 21
|
2019-10-17T23:21:28.000Z
|
2022-03-29T14:48:04.000Z
|
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
import time
import board
import adafruit_mpu6050
i2c = board.I2C() # uses board.SCL and board.SDA
mpu = adafruit_mpu6050.MPU6050(i2c)
mpu.accelerometer_range = adafruit_mpu6050.Range.RANGE_2_G
mpu.gyro_range = adafruit_mpu6050.GyroRange.RANGE_250_DPS
while True:
    # print all the values as a tuple, the format Mu's plotter prefers
print("(%.2f, %.2f, %.2f " % (mpu.acceleration), end=", ")
print("%.2f, %.2f, %.2f)" % (mpu.gyro))
time.sleep(0.010)
| 31.444444
| 75
| 0.726148
|
4a17319badf4087989a653e81000e26148f3fc55
| 2,577
|
py
|
Python
|
rl/make_game.py
|
Seanny123/alphazero_singleplayer
|
906d75a46221eb4a838560c19eb04d14788af436
|
[
"MIT"
] | null | null | null |
rl/make_game.py
|
Seanny123/alphazero_singleplayer
|
906d75a46221eb4a838560c19eb04d14788af436
|
[
"MIT"
] | null | null | null |
rl/make_game.py
|
Seanny123/alphazero_singleplayer
|
906d75a46221eb4a838560c19eb04d14788af436
|
[
"MIT"
] | 1
|
2019-11-19T05:21:30.000Z
|
2019-11-19T05:21:30.000Z
|
import gym
from gym.envs.registration import register
import numpy as np
from .wrappers import (NormalizeWrapper, ReparametrizeWrapper, PILCOWrapper, ScaleRewardWrapper, ClipRewardWrapper,
ScaledObservationWrapper)
# Register deterministic FrozenLakes
register(
id='FrozenLakeNotSlippery-v0',
entry_point='gym.envs.toy_text:FrozenLakeEnv',
kwargs={'map_name': '4x4', 'is_slippery': False},
max_episode_steps=100,
reward_threshold=0.78, # optimum = .8196
)
register(
id='FrozenLakeNotSlippery-v1',
entry_point='gym.envs.toy_text:FrozenLakeEnv',
kwargs={'map_name': '8x8', 'is_slippery': False},
max_episode_steps=100,
reward_threshold=0.78, # optimum = .8196
)
def get_base_env(env):
""" removes all wrappers """
while hasattr(env, 'env'):
env = env.env
return env
def is_atari_game(env):
""" Verify whether game uses the Arcade Learning Environment """
return hasattr(get_base_env(env), 'ale')
def make_game(game):
""" Modifications to Env """
name, version = game.rsplit('-', 1)
if len(version) > 2:
modify = version[2:]
game = name + '-' + version[:2]
else:
modify = ''
print('Making game {}'.format(game))
env = gym.make(game)
# remove timelimit wrapper
if type(env) == gym.wrappers.time_limit.TimeLimit:
env = env.env
if is_atari_game(env):
return prepare_atari_env(env)
else:
return prepare_control_env(env, game, modify)
def prepare_control_env(env, game, modify):
if 'n' in modify and type(env.observation_space) == gym.spaces.Box:
print('Normalizing input space')
env = NormalizeWrapper(env)
if 'r' in modify:
print('Reparametrizing the reward function')
env = ReparametrizeWrapper(env)
if 'p' in modify:
env = PILCOWrapper(env)
if 's' in modify:
print('Rescaled the reward function')
env = ScaleRewardWrapper(env)
if 'CartPole' in game:
env.observation_space = gym.spaces.Box(np.array([-4.8, -10, -4.8, -10]),
np.array([4.8, 10, 4.8, 10]))
return env
def prepare_atari_env(Env, frame_skip=3, repeat_action_prob=0.0, reward_clip=True):
""" Initialize an Atari environment """
env = get_base_env(Env)
env.ale.setFloat('repeat_action_probability'.encode('utf-8'), repeat_action_prob)
env.frame_skip = frame_skip
Env = ScaledObservationWrapper(Env)
if reward_clip:
Env = ClipRewardWrapper(Env)
return Env
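The version-suffix scheme above packs wrapper flags into the env id: any characters after the two-character version select wrappers in prepare_control_env. A sketch, with the suffix letters taken from the code above:
# 'CartPole-v0ns' splits into game 'CartPole-v0' and modify 'ns':
# 'n' applies NormalizeWrapper, 's' applies ScaleRewardWrapper.
env = make_game('CartPole-v0ns')
obs = env.reset()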
| 29.965116
| 115
| 0.648428
|
4a17323448f676508302a03ee84a72f6f28f93b2
| 786
|
py
|
Python
|
ckan/config/middleware/__init__.py
|
gg2/ckan
|
d61a533cc330b6050f4957573f58ec912695ed0a
|
[
"BSD-3-Clause"
] | 2,805
|
2015-01-02T18:13:15.000Z
|
2022-03-31T03:35:01.000Z
|
ckan/config/middleware/__init__.py
|
gg2/ckan
|
d61a533cc330b6050f4957573f58ec912695ed0a
|
[
"BSD-3-Clause"
] | 3,801
|
2015-01-02T11:05:36.000Z
|
2022-03-31T19:24:37.000Z
|
ckan/config/middleware/__init__.py
|
gg2/ckan
|
d61a533cc330b6050f4957573f58ec912695ed0a
|
[
"BSD-3-Clause"
] | 1,689
|
2015-01-02T19:46:43.000Z
|
2022-03-28T14:59:43.000Z
|
# encoding: utf-8
"""WSGI app initialization"""
import logging
from ckan.config.environment import load_environment
from ckan.config.middleware.flask_app import make_flask_stack
log = logging.getLogger(__name__)
# This is a test Flask request context to be used internally.
# Do not use it!
_internal_test_request_context = None
def make_app(conf):
'''
Initialise the Flask app and wrap it in dispatcher middleware.
'''
load_environment(conf)
flask_app = make_flask_stack(conf)
# Set this internal test request context with the configured environment so
# it can be used when calling url_for from tests
global _internal_test_request_context
_internal_test_request_context = flask_app._wsgi_app.test_request_context()
return flask_app
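A sketch of how make_app might be wired from a WSGI entry point; a real deployment passes the full parsed CKAN .ini mapping, so the single key shown here is illustrative only.
# Hypothetical wsgi.py: conf would normally come from the parsed config file.
application = make_app({'ckan.site_url': 'http://localhost:5000'})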
| 23.818182
| 79
| 0.767176
|
4a1732a47c49d898a63aed18ae5d3b2daa2568ab
| 943
|
py
|
Python
|
test/unit/test_constants.py
|
Spendency/cw-logs-to-lambda
|
24dcd104ddbae159f2568d0672d05731b9884504
|
[
"MIT"
] | null | null | null |
test/unit/test_constants.py
|
Spendency/cw-logs-to-lambda
|
24dcd104ddbae159f2568d0672d05731b9884504
|
[
"MIT"
] | null | null | null |
test/unit/test_constants.py
|
Spendency/cw-logs-to-lambda
|
24dcd104ddbae159f2568d0672d05731b9884504
|
[
"MIT"
] | null | null | null |
"""Constants used for unit tests.
This can be used to define values for environment variables so unit tests can use these to assert on expected values.
"""
LAMBDA_NAME = 'name'
AWS_LOG_EVENT = {
"awslogs":
{
"data": "H4sIAAAAAAAAAHWPwQqCQBCGX0Xm7EFtK+smZBEUgXoLCdMhFtKV3akI8d0bLYmibvPPN3wz00CJxmQnTO41whwWQRIctmEcB6sQbFC3CjW3XW8kxpOpP+OC22d1Wml1qZkQGtoMsScxaczKN3plG8zlaHIta5KqWsozoTYw3/djzwhpLwivWFGHGpAFe7DL68JlBUk+l7KSN7tCOEJ4M3/qOI49vMHj+zCKdlFqLaU2ZHV2a4Ct/an0/ivdX8oYc1UVX860fQDQiMdxRQEAAA=="
}
}
EXTRACTED_LOG_EVENTS = [{'id': 'eventId1', 'timestamp': 1440442987000, 'message': '[ERROR] First test message'}, {'id': 'eventId2', 'timestamp': 1440442987001, 'message': '[ERROR] Second test message'}]
EXTRACTED_LOG_EVENTS_JSON = ['{"id": "eventId1", "timestamp": 1440442987000, "message": "[ERROR] First test message"}','{"id": "eventId2", "timestamp": 1440442987001, "message": "[ERROR] Second test message"}']
| 52.388889
| 298
| 0.760339
|
4a1734fe3684a21545203f67b64a6c59ee7d6229
| 12,031
|
py
|
Python
|
gui/demo_trader.py
|
MaxGosselin/TelferRIT
|
37b4f5aecc3f315b5ee6db757b2b2b622b854f6a
|
[
"Unlicense",
"MIT"
] | null | null | null |
gui/demo_trader.py
|
MaxGosselin/TelferRIT
|
37b4f5aecc3f315b5ee6db757b2b2b622b854f6a
|
[
"Unlicense",
"MIT"
] | null | null | null |
gui/demo_trader.py
|
MaxGosselin/TelferRIT
|
37b4f5aecc3f315b5ee6db757b2b2b622b854f6a
|
[
"Unlicense",
"MIT"
] | null | null | null |
"""draws the price charts for all the securities in the currently active
case"""
from multiprocessing.connection import Listener
from bokeh.driving import count
from bokeh.layouts import layout, column, gridplot, row, widgetbox
from bokeh.models import ColumnDataSource, CustomJS, Span
from bokeh.plotting import curdoc, figure
from bokeh.models.widgets import Div
import pandas as pd
def receive_data():
"""need to get something like [(ticker, tick, price)]"""
data = conn.recv()
# print(data)
return data
def depth(ticker, books, level=50):
"""Extract the book for our ticker and set up the df the way we want."""
bids = fill(books.loc[ticker, "BUY"].drop_duplicates("price", "first"), True).head(
level
)
asks = fill(books.loc[ticker, "SELL"].drop_duplicates("price", "last"), False).tail(
level
)
return bids, asks
# center(bids, asks)
def fill(book, isbid):
""" clean up the duplicates and fill up the empty spaces. """
# if it's a bid, drop the first duplicate.
# if isbid:
# clean = book.drop_duplicates('price', 'first')
# else:
# clean = book.drop_duplicates('price', 'last')
# count how many cents the book covers
# _range = round(book['price'].max() - book['price'].min(), 2)
# rungs = int(_range * 100)
# Get the price range in a list to pass to numpy.linspace to generate our new index
# pricerange = [book['price'].min(), book['price'].max()]
pmax = int(book["price"].max() * 100)
pmin = int(book["price"].min() * 100)
# print(f"MAX/MIN : {pmax}/{pmin}")
ix = []
for i in range(pmin, pmax, 1):
# print(i/100)
ix.append(i / 100)
newind = pd.Index(ix, name="priceline")
# print(newind)
# Set the new index and backfill the cvol values
filled = book.set_index("price").reindex(newind, method="pad")
# filled['price'] = newind.values
filled["price"] = newind.get_values()
# if isbid:
filled = filled[::-1]
# print(filled[["price", "cvol"]].to_string())
return filled
def center(bids, asks):
""" Modify the last data point to make the two books have symetric price ranges. """
bidrange = bids["price"].max() - bids["price"].min()
askrange = asks["price"].max() - asks["price"].min()
if bidrange > askrange:
#
distance = round(bidrange - askrange, 2)
shim_ask = asks["price"].max() + distance
asks.iloc[-1, 0] = shim_ask
elif bidrange < askrange:
# 00
distance = round(askrange - bidrange, 2)
shim_bid = bids["price"].min() - distance
bids.iloc[-1, 0] = shim_bid
return bids, asks
@count()
def update(step):
data = receive_data()
# print(data["CRZY_candle"], data["TAME_candle"])
if data["case"]["status"] == "ACTIVE":
if data["CRZY_candle"]["tick"] is not None:
color1 = (
"#fe0000"
if data["CRZY_candle"]["open"] > data["CRZY_candle"]["close"]
else "#00fd02"
)
CRZY_data = dict(
tick=[data["CRZY_candle"]["tick"]],
open=[data["CRZY_candle"]["open"]],
high=[data["CRZY_candle"]["high"]],
low=[data["CRZY_candle"]["low"]],
close=[data["CRZY_candle"]["close"]],
mid=[(data["CRZY_candle"]["open"] + data["CRZY_candle"]["close"]) / 2],
height=[
max(
0.01,
abs(data["CRZY_candle"]["open"] - data["CRZY_candle"]["close"]),
)
],
color=[color1],
)
color2 = (
"#fe0000"
if data["TAME_candle"]["open"] > data["TAME_candle"]["close"]
else "#00fd02"
)
TAME_data = dict(
tick=[data["TAME_candle"]["tick"]],
open=[data["TAME_candle"]["open"]],
high=[data["TAME_candle"]["high"]],
low=[data["TAME_candle"]["low"]],
close=[data["TAME_candle"]["close"]],
mid=[(data["TAME_candle"]["open"] + data["TAME_candle"]["close"]) / 2],
height=[
max(
0.01,
abs(data["TAME_candle"]["open"] - data["TAME_candle"]["close"]),
)
],
color=[color2],
)
# tick_num = len(CRZY.data['tick'])
# if tick_num > 0:
# print(CRZY.data, len(CRZY.data['tick'])) #, tick_num)
if (
len(CRZY.data["tick"])
and CRZY.data["tick"][-1] == data["CRZY_candle"]["tick"]
):
index = max(0, len(CRZY.data["tick"]) - 1)
rpatches = {
"open": [(index, data["CRZY_candle"]["open"])],
"high": [(index, data["CRZY_candle"]["high"])],
"low": [(index, data["CRZY_candle"]["low"])],
"close": [(index, data["CRZY_candle"]["close"])],
"color": [(index, color1)],
"mid": [
(
index,
(data["CRZY_candle"]["open"] + data["CRZY_candle"]["close"])
/ 2,
)
],
"height": [
(
index,
max(
0.01,
abs(
data["CRZY_candle"]["open"]
- data["CRZY_candle"]["close"]
),
),
)
],
}
CRZY.patch(rpatches)
cpatches = {
"open": [(index, data["TAME_candle"]["open"])],
"high": [(index, data["TAME_candle"]["high"])],
"low": [(index, data["TAME_candle"]["low"])],
"close": [(index, data["TAME_candle"]["close"])],
"color": [(index, color2)],
"mid": [
(
index,
(data["TAME_candle"]["open"] + data["TAME_candle"]["close"])
/ 2,
)
],
"height": [
(
index,
max(
0.01,
abs(
data["TAME_candle"]["open"]
- data["TAME_candle"]["close"]
),
),
)
],
}
TAME.patch(cpatches)
else:
CRZY.stream(CRZY_data, 600)
TAME.stream(TAME_data, 600)
# else:
# CRZY.stream(CRZY_data, 600)
# TAME.stream(TAME_data, 600)
CRZY_price.location = data["CRZY_candle"]["close"]
TAME_price.location = data["TAME_candle"]["close"]
CRZY_bid_depth, CRZY_ask_depth = depth("CRZY", data["orderbook"])
CRZY_bidbook.data = ColumnDataSource._data_from_df(CRZY_bid_depth)
CRZY_askbook.data = ColumnDataSource._data_from_df(CRZY_ask_depth)
# print(CRZY_bid_depth, CRZY_ask_depth, CRZY_bidbook.data)
TAME_bid_depth, TAME_ask_depth = depth("TAME", data["orderbook"])
TAME_bidbook.data = ColumnDataSource._data_from_df(TAME_bid_depth)
TAME_askbook.data = ColumnDataSource._data_from_df(TAME_ask_depth)
if data["tenders"]:
output = ""
for tender in data["tenders"]:
reserve = " " if not tender["biddable"] else " BIDDABLE "
text = f"<b>{tender['ticker']} {tender['action']}{reserve}TENDER</b>: {tender['quantity']//1000}K @ {tender['price']}<br>"
# print(text)
output += text
div.text = output
else:
div.text = f"""Trader PnL : {data['trader']['nlv']}<br>
CRZY POSITION: {data['securities'].loc['CRZY', 'position']}<br>
TAME POSITION: {data['securities'].loc['TAME', 'position']}"""
elif data["case"]["status"] == "STOPPED":
div.text = f"Round Over, final PnL : {data['trader']['nlv']}"
        # ColumnDataSource.data expects a plain dict of columns,
        # not another ColumnDataSource.
        CRZY.data = dict(
            tick=[], mid=[], height=[], open=[], high=[], low=[], close=[], color=[]
        )
        TAME.data = dict(
            tick=[], mid=[], height=[], open=[], high=[], low=[], close=[], color=[]
        )
        CRZY_bidbook.data = dict(price=[], cvol=[])
        CRZY_askbook.data = dict(price=[], cvol=[])
        TAME_bidbook.data = dict(price=[], cvol=[])
        TAME_askbook.data = dict(price=[], cvol=[])
# Data sources
CRZY = ColumnDataSource(
dict(tick=[], mid=[], height=[], open=[], high=[], low=[], close=[], color=[])
)
TAME = ColumnDataSource(
dict(tick=[], mid=[], height=[], open=[], high=[], low=[], close=[], color=[])
)
CRZY_bidbook = ColumnDataSource(dict(price=[], cvol=[]))
CRZY_askbook = ColumnDataSource(dict(price=[], cvol=[]))
TAME_bidbook = ColumnDataSource(dict(price=[], cvol=[]))
TAME_askbook = ColumnDataSource(dict(price=[], cvol=[]))
CRZY_chart = figure(
plot_height=300,
plot_width=600,
y_axis_location="left",
title="CRZY",
background_fill_color="#d3d3d3",
)
CRZY_price = Span(location=9, dimension="width", line_width=2, line_color="gold")
CRZY_chart.add_layout(CRZY_price)
CRZY_chart.segment(
x0="tick", y0="low", x1="tick", y1="high", line_width=1, color="black", source=CRZY
)
CRZY_chart.rect(
x="tick",
y="mid",
width=4,
height="height",
line_width=1,
line_color="black",
fill_color="color",
source=CRZY,
)
TAME_chart = figure(
plot_height=300,
plot_width=600,
y_axis_location="left",
title="TAME",
background_fill_color="#d3d3d3",
)
TAME_price = Span(location=25, dimension="width", line_width=2, line_color="gold")
TAME_chart.add_layout(TAME_price)
TAME_chart.segment(
x0="tick", y0="low", x1="tick", y1="high", line_width=2, color="black", source=TAME
)
TAME_chart.rect(
x="tick",
y="mid",
width=4,
height="height",
line_width=1,
line_color="black",
fill_color="color",
source=TAME,
)
CRZY_dchart = figure(
plot_height=175, plot_width=600, y_axis_location="left", title="Orderbook"
)
CRZY_dchart.vbar(x="price", top="cvol", width=0.01, color="green", source=CRZY_bidbook)
CRZY_dchart.vbar(x="price", top="cvol", width=0.01, color="red", source=CRZY_askbook)
TAME_dchart = figure(
plot_height=175, plot_width=600, y_axis_location="left", title="Orderbook"
)
TAME_dchart.vbar(x="price", top="cvol", width=0.01, color="green", source=TAME_bidbook)
TAME_dchart.vbar(x="price", top="cvol", width=0.01, color="red", source=TAME_askbook)
div = Div(
text=f"<b>MADE BY UOTTAWA</br>", width=1100, height=200, style={"font-size": "200%"}
)
curdoc().add_root(
layout(
gridplot(
[[CRZY_chart, TAME_chart], [CRZY_dchart, TAME_dchart]],
toolbar_location=None,
),
widgetbox(div),
)
)
listener = Listener(("localhost", 6000))
print("Server up and running! Just waiting for you to run the main in another process.\n\n\
Listening...")
conn = listener.accept()
# Add a periodic callback to be run every X milliseconds
curdoc().add_periodic_callback(update, 250)
| 32.961644
| 138
| 0.503865
|
4a17351ac1f8d8ce044453269c336d2b4fd57e85
| 299
|
py
|
Python
|
Practice and Revision/if.py
|
emmapatton/Programming-1-and-2
|
4104e60858429d26f1ca754899968b97d55fd897
|
[
"Apache-2.0"
] | null | null | null |
Practice and Revision/if.py
|
emmapatton/Programming-1-and-2
|
4104e60858429d26f1ca754899968b97d55fd897
|
[
"Apache-2.0"
] | null | null | null |
Practice and Revision/if.py
|
emmapatton/Programming-1-and-2
|
4104e60858429d26f1ca754899968b97d55fd897
|
[
"Apache-2.0"
] | null | null | null |
#Adapted from: https://docs.python.org/3/tutorial/controlflow.html
x = int(input("Please enter an integer: "))
if x < 0:
x = 0
print('Negative changed to zero')
elif x == 0:
print('Zero')
elif x == 1:
print('Single')
else:
print('More')
print("The final value of x is:", x)
| 21.357143
| 66
| 0.61204
|
4a17354b3cacab042caadb9dbeffa484386cb4a9
| 390
|
py
|
Python
|
shop/shop/settings/components/background.py
|
Mykytenkovladislav/book_shop_and_warehouse
|
60852e5ed3869291e73623b8b8d7901d39d66c9d
|
[
"MIT"
] | null | null | null |
shop/shop/settings/components/background.py
|
Mykytenkovladislav/book_shop_and_warehouse
|
60852e5ed3869291e73623b8b8d7901d39d66c9d
|
[
"MIT"
] | null | null | null |
shop/shop/settings/components/background.py
|
Mykytenkovladislav/book_shop_and_warehouse
|
60852e5ed3869291e73623b8b8d7901d39d66c9d
|
[
"MIT"
] | null | null | null |
from datetime import timedelta
# from celery.schedules import crontab
CELERY_TASK_RESULT_EXPIRES = 3600
CELERY_BEAT_SCHEDULE = {
"celery.backend_cleanup": {
"task": "celery.backend_cleanup",
"schedule": timedelta(seconds=300),
"args": (),
},
"periodical": {
"task": "store.tasks.book_sync",
"schedule": timedelta(seconds=10),
},
}
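If calendar-based timing is ever preferred over fixed intervals, the commented-out crontab import above supports entries like this sketch (the "nightly-sync" name and 03:00 time are illustrative):
from celery.schedules import crontab
# Run the same sync task once a day at 03:00 instead of every 10 seconds.
CELERY_BEAT_SCHEDULE["nightly-sync"] = {
    "task": "store.tasks.book_sync",
    "schedule": crontab(hour=3, minute=0),
}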
| 21.666667
| 43
| 0.628205
|
4a1735abab0b4971f1bd684a455402366f5195bc
| 157
|
py
|
Python
|
Desafio005 - Antecessor e Sucessor.py
|
kleberfsobrinho/python
|
34739d127c1a3908f5a2fd5a7ef07d4c78658802
|
[
"MIT"
] | null | null | null |
Desafio005 - Antecessor e Sucessor.py
|
kleberfsobrinho/python
|
34739d127c1a3908f5a2fd5a7ef07d4c78658802
|
[
"MIT"
] | null | null | null |
Desafio005 - Antecessor e Sucessor.py
|
kleberfsobrinho/python
|
34739d127c1a3908f5a2fd5a7ef07d4c78658802
|
[
"MIT"
] | null | null | null |
n = int(input('Enter an integer: '))
print('Analyzing the number {}, its successor is {} and its predecessor is {}!'.format(n, n+1, n-1))
print('\n')
| 26.166667
| 101
| 0.630573
|
4a173792c88779bf9f0ea174116609c1fef5bb45
| 4,234
|
py
|
Python
|
aixing_bot.py
|
mikeysan/aixingBot
|
1c95cc5bae86ae0da0b6c0afabded295fe74c1d5
|
[
"MIT"
] | 3
|
2020-10-02T09:24:01.000Z
|
2021-05-21T17:06:32.000Z
|
aixing_bot.py
|
mikeysan/aixingBot
|
1c95cc5bae86ae0da0b6c0afabded295fe74c1d5
|
[
"MIT"
] | 2
|
2020-10-03T10:42:50.000Z
|
2021-06-24T17:05:48.000Z
|
aixing_bot.py
|
mikeysan/aixingBot
|
1c95cc5bae86ae0da0b6c0afabded295fe74c1d5
|
[
"MIT"
] | 4
|
2020-10-02T09:24:04.000Z
|
2021-06-22T06:03:06.000Z
|
# aixing_bot.py
# A discord bot created by Mikey San
# This is mostly a tutorial project for use on my discord server.
import os
import logging
import datetime
from itertools import cycle
import discord
from discord.ext import commands, tasks
from discord.ext.commands import Context, CommandError
from dotenv import load_dotenv
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
GUILD = os.getenv('DISCORD_GUILD')
# Setup logging to a file called discord.log.
logger = logging.getLogger('discord')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename='discord.log', encoding='utf-8', mode='w')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
# We are using the Bot API to interact with discord.
# Assign "bot" to commands.Bot and set the commnd prefix to look for.
# We have also set our commands to be case insensitive. this means $help or
# $Help or even $helP will trigger the bot.
bot = commands.Bot(command_prefix='$',
description="A support Bot for NLB Clan",
case_insensitive=True)
# Create a cycle of status changes.
# This doesn't do much. It's just fun to have; something to play with in future
status = cycle(['hating on D2', 'Thinking...', 'bathroom break', 'dumping on Apex', '$help'])
# Create an event that takes the on_ready function
# This will do a few things once our bot goes live
@bot.event
async def on_ready():
'''
Description: Gives the status of aixingBot when it becomes ready
    and loads a footer block with additional notes and a URL to GitHub
'''
change_status.start()
print("Bot is ready.")
# Check that we are in the expected server.
for guild in bot.guilds:
if guild.name == GUILD:
break
# Print to terminal (log file) when we make a connection.
# Also confirm the server name and ID we're connected to.
print(
f'{bot.user} is connected to the following guild:\n'
f'{guild.name}(id: {guild.id})'
)
# Send a message to the channel "chat" once we are connected.
# So we can see that we are live there too.
channel = discord.utils.get(guild.channels, name="chat")
# wave = ":wave:"
# Create a discord embed instance.
# Set title, colour and timestamp. ps. don't forget to import datetime module
embed = discord.Embed(
title = f"{bot.user.name} Online!",
colour = discord.Colour.from_rgb(255,191,0),
url = "https://github.com/mikeysan/aixingBot",
timestamp = datetime.datetime.now(datetime.timezone.utc)
)
# Set a footer using the embed instance.
embed.set_footer(
text = "I am Open Source. I am Skynet."
)
# Send our embeded content to the channel.
await channel.send(embed = embed)
@bot.event
async def on_command_error(ctx: Context, exception: CommandError) -> None:
    """Fired when an exception happens while executing a command."""
logger.error(
"Exception happened while executing command",
exc_info=(type(exception), exception, exception.__traceback__)
)
# Change status task
@tasks.loop(hours=2)
async def change_status():
# Let's pretend the bot is playing the game of $help
game = discord.Game(next(status))
await bot.change_presence(status=discord.Status.idle, activity = game)
# Reload cogs
@bot.command()
@commands.is_owner()
async def reload(ctx, cog):
'''
Description: Reloads all Cog files
'''
try:
bot.unload_extension(f"cogs.{cog}")
bot.load_extension(f"cogs.{cog}")
ctx.send(f"{cog} reloaded successfully")
except Exception as e:
print(f"{cog} can not be loaded:")
raise e
# Load cogs
cogPath = "./cogs/"
for cogFile in os.listdir(cogPath):
if cogFile.endswith(".py"):
try:
cogFile = f"cogs.{cogFile.replace('.py', '')}"
bot.load_extension(cogFile)
except Exception as e:
print(f"{cogFile} can not be loaded:")
raise e
# Finally, authenticate with discord and let's get cracking.
bot.run(TOKEN)
| 32.569231
| 93
| 0.651157
|
4a1737d53e42358aad0b8bcf8cfd9ddfd89784a1
| 2,303
|
py
|
Python
|
vmm/cli/main.py
|
kmohrf/vmm
|
5e0dc8c9502d07681bfaca8634ed5b083deae77b
|
[
"BSD-3-Clause"
] | 4
|
2020-03-08T08:45:35.000Z
|
2021-10-17T11:05:17.000Z
|
vmm/cli/main.py
|
kmohrf/vmm
|
5e0dc8c9502d07681bfaca8634ed5b083deae77b
|
[
"BSD-3-Clause"
] | null | null | null |
vmm/cli/main.py
|
kmohrf/vmm
|
5e0dc8c9502d07681bfaca8634ed5b083deae77b
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: UTF-8 -*-
# Copyright (c) 2007 - 2014, Pascal Volk
# See COPYING for distribution information.
"""
vmm.cli.main
~~~~~~~~~~~~~~~~~~~~~~~~~~~
vmm's command line interface.
"""
from configparser import NoOptionError, NoSectionError
from gettext import gettext as _
from vmm import errors
from vmm.config import BadOptionError, ConfigValueError
from vmm.cli import w_err
from vmm.cli.handler import CliHandler
from vmm.constants import (
EX_MISSING_ARGS,
EX_SUCCESS,
EX_USER_INTERRUPT,
INVALID_ARGUMENT,
)
from vmm.cli.subcommands import RunContext, setup_parser
def _get_handler():
"""Try to get a CliHandler. Exit the program when an error occurs."""
try:
handler = CliHandler()
except (
errors.NotRootError,
errors.PermissionError,
errors.VMMError,
errors.ConfigError,
) as err:
w_err(err.code, _("Error: %s") % err.msg)
else:
handler.cfg_install()
return handler
def run(argv):
parser = setup_parser()
if len(argv) < 2:
parser.print_usage()
parser.exit(
status=EX_MISSING_ARGS,
message=_("You must specify a subcommand at least.") + "\n",
)
args = parser.parse_args()
handler = _get_handler()
run_ctx = RunContext(args, handler)
try:
args.func(run_ctx)
except (EOFError, KeyboardInterrupt):
# TP: We have to cry, because root has killed/interrupted vmm
# with Ctrl+C or Ctrl+D.
w_err(EX_USER_INTERRUPT, "", _("Ouch!"), "")
except errors.VMMError as err:
if handler.has_warnings():
w_err(0, _("Warnings:"), *handler.get_warnings())
w_err(err.code, _("Error: %s") % err.msg)
except (BadOptionError, ConfigValueError) as err:
w_err(INVALID_ARGUMENT, _("Error: %s") % err)
except NoSectionError as err:
w_err(INVALID_ARGUMENT, _("Error: Unknown section: '%s'") % err.section)
except NoOptionError as err:
w_err(
INVALID_ARGUMENT,
_("Error: No option '%(option)s' in section: '%(section)s'")
% {"option": err.option, "section": err.section},
)
if handler.has_warnings():
w_err(0, _("Warnings:"), *handler.get_warnings())
return EX_SUCCESS
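A sketch of the likely entry-point wiring for run(); argv is passed through so the length check above can fire before argparse takes over.
import sys
# Hypothetical console-script main: exit with run()'s status code.
sys.exit(run(sys.argv))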
| 29.909091
| 80
| 0.622666
|
4a173867e013b882c2ea524e55c921ea25624edf
| 6,561
|
py
|
Python
|
my_portal/projects/models.py
|
cgajagon/my_portal
|
cea810512528ea4ef30bbc7e14873fa25ed2f54f
|
[
"MIT"
] | null | null | null |
my_portal/projects/models.py
|
cgajagon/my_portal
|
cea810512528ea4ef30bbc7e14873fa25ed2f54f
|
[
"MIT"
] | null | null | null |
my_portal/projects/models.py
|
cgajagon/my_portal
|
cea810512528ea4ef30bbc7e14873fa25ed2f54f
|
[
"MIT"
] | null | null | null |
import datetime
from django.db import models
from django.urls import reverse_lazy
from my_portal.users.models import User
class Supplier(models.Model):
USA = 'USA'
CAN = 'CANADA'
OTH = 'OTHER'
COUNTRY = [
(USA, 'USA'),
(CAN, 'CANADA'),
(OTH, 'OTHER'),
]
vendor_code = models.IntegerField(null=False, blank=False, unique=True)
vendor_name = models.CharField(max_length=200, null=False, blank=False)
country = models.CharField(max_length=10, choices=COUNTRY, default=CAN)
account_manager = models.ForeignKey(User, on_delete=models.SET_NULL, null= True, blank=True)
class Meta:
ordering = ['vendor_name']
def __str__(self):
return self.vendor_name
class Project(models.Model):
QUEUED = 'Queued'
ACTIVE = 'Active'
INACTIVE = 'Inactive'
CANCELED = 'Canceled'
COMPLETED = 'Completed'
STATUS = [
(QUEUED, 'Queued'),
(ACTIVE, 'Active'),
(INACTIVE, 'Inactive'),
(CANCELED, 'Canceled'),
(COMPLETED, 'Completed'),
]
REGULAR = 'Regular'
COMPLEX = 'Complex'
COMPLEXITY = [
(REGULAR, 'Regular'),
(COMPLEX, 'Complex'),
]
C1 = 'Machining'
C2 = 'Composites and Fabrications'
C5 = 'Structural Castings'
C6 = 'Blades'
C8 = 'Vanes and Rings'
COMMODITY = [
(C1, 'Machining'),
(C2, 'Composites and Fabrications'),
(C5, 'Structural Castings'),
(C6, 'Blades'),
(C8, 'Vanes and Rings'),
]
finance_ID = models.IntegerField(null=False, blank=False, default=0)
title = models.CharField(max_length=200, null=False, blank=False)
customer = models.ForeignKey(Supplier, on_delete=models.CASCADE, null=False, blank=False)
part_number_affected = models.CharField(max_length=200, null=False, blank=False)
tool_serial_number_affected = models.CharField(max_length=200, null=True, blank=True)
project_description = models.TextField(max_length=400, null=False, blank=False)
project_justification = models.TextField(max_length=400, null=False, blank=False)
start_date = models.DateField(null=False, blank=False, default=datetime.date.today)
end_date = models.DateField(null=False, blank=False)
constraint_end_date = models.DateField(null=True, blank=True)
project_manager = models.ForeignKey(User, on_delete=models.SET_NULL, null= True, blank=True)
design_job = models.CharField(max_length=25, null=True, blank=True, unique=True)
commodity = models.CharField(max_length=50, choices=COMMODITY)
complexity = models.CharField(max_length=10, choices=COMPLEXITY, default=REGULAR)
status = models.CharField(max_length=10, choices=STATUS, default=QUEUED)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse_lazy('projects:project_detail', args=[self.pk])
class Meta:
permissions = (
('can_view_project', 'Can view project'),
)
class ProjectJournal(models.Model):
project_related = models.ForeignKey(Project, on_delete=models.CASCADE, null=False, blank=False)
title = models.CharField(max_length=100, null=False, blank=False)
comment = models.TextField(max_length=500, null=True, blank=True)
entry_date = models.DateField(null=False, blank=False, default=datetime.date.today)
due_date = models.DateField(null=True, blank=True)
is_completed = models.BooleanField(default=False)
class Meta:
ordering = ['-entry_date', '-due_date']
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse_lazy('projects:project_detail', args=[self.project_related.pk])
class ProjectMilestone(models.Model):
project_related = models.ForeignKey(Project, on_delete=models.CASCADE, null=False, blank=False)
milestone = models.CharField(max_length=200, null=False, blank=False)
comment = models.TextField(max_length=200, null=True, blank=True)
start_date = models.DateField(null=False, blank=False)
due_date = models.DateField(null=False, blank=False)
is_completed = models.BooleanField()
class Meta:
ordering = ['-start_date']
def duration(self):
days = (self.due_date-self.start_date).days
duration = round(days/7,0)
return duration
def __str__(self):
return self.milestone
def get_absolute_url(self):
return reverse_lazy('projects:project_detail', args=[self.project_related.pk])
class ProjectCost(models.Model):
CAD = 'CAD'
USD = 'USD'
OTHER='OTHER'
CURRENCY = [
(CAD, 'CAD'),
(USD, 'USD'),
(OTHER, 'Other')
]
CAPEX = 'CAPEX'
OPEX = 'OPEX'
EXPENSE = [
(CAPEX, 'CAPEX'),
(OPEX, 'OPEX'),
]
description = models.TextField(max_length=200, null=False, blank=False)
project_related = models.ForeignKey(Project, on_delete=models.CASCADE, null=False, blank=False)
amount = models.FloatField(max_length=20, blank=False, null=False)
currency = models.CharField(max_length=5, choices=CURRENCY, default=USD)
expense_type = models.CharField(max_length=5, choices=EXPENSE, default=CAPEX)
entry_date = models.DateField(null=False, blank=False, default=datetime.date.today)
def __str__(self):
return self.description
def get_absolute_url(self):
return reverse_lazy('projects:project_detail', args=[self.project_related.pk])
class ProjectDocument(models.Model):
FORM10024 = 'Form 10024'
FORM10141 = 'Form 10141'
FORM11212 = 'Form 11212'
FORM11248 = 'Form 11248'
FORM11615 = 'Form 11615'
FORM11674 = 'Form 11674'
FORM12165 = 'Form 12165'
INVOICE = 'Invoice'
BUSINESSCASE = 'Business Case'
QUOTE = 'Quote'
CONTRACT = 'Contract'
OTHER = 'Other'
DOC = [
(FORM10024,'Form 10024'),
(FORM10141,'Form 10141'),
(FORM11212,'Form 11212'),
        (FORM11248,'Form 11248'),
(FORM11615,'Form 11615'),
(FORM11674,'Form 11674'),
(FORM12165,'Form 12165'),
(INVOICE,'Invoice'),
(BUSINESSCASE,'Business Case'),
(QUOTE,'Quote'),
(CONTRACT,'Contract'),
(OTHER,'Other'),
]
project_related = models.ForeignKey(Project, on_delete=models.CASCADE, null=False, blank=False)
title = models.CharField(max_length=255, blank=False)
document_type = models.CharField(max_length=20, choices=DOC, default=OTHER)
document = models.FileField(upload_to='documents/')
uploaded_at = models.DateTimeField(auto_now_add=True)
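A quick illustration of the week rounding in ProjectMilestone.duration, using an unsaved instance with only the two date fields set (datetime is already imported at the top of this module; the dates are placeholders):
# 21 days -> round(21 / 7, 0) == 3.0 weeks
m = ProjectMilestone(start_date=datetime.date(2020, 1, 1),
                     due_date=datetime.date(2020, 1, 22))
assert m.duration() == 3.0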
| 34.350785
| 99
| 0.670782
|
4a17393262725c0e3abdeeb54322f1fa2053f3f6
| 627
|
py
|
Python
|
src/exif.py
|
philchand/phockup
|
b88b4fc48524df07371a06b25a302c039aa1bf9c
|
[
"MIT"
] | null | null | null |
src/exif.py
|
philchand/phockup
|
b88b4fc48524df07371a06b25a302c039aa1bf9c
|
[
"MIT"
] | null | null | null |
src/exif.py
|
philchand/phockup
|
b88b4fc48524df07371a06b25a302c039aa1bf9c
|
[
"MIT"
] | 1
|
2017-10-05T02:47:43.000Z
|
2017-10-05T02:47:43.000Z
|
from subprocess import check_output, CalledProcessError
import json
import shlex
import sys
class Exif(object):
def __init__(self, filename):
self.filename = filename
def data(self):
try:
exif_command = 'exiftool -time:all -mimetype -j %s' % shlex.quote(self.filename)
if sys.platform == 'win32':
exif_command = exif_command.replace("\'", "\"")
data = check_output(exif_command, shell=True).decode('UTF-8')
exif = json.loads(data)[0]
except (CalledProcessError, UnicodeDecodeError):
return None
return exif
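A hedged call-site sketch: exiftool must be on PATH, 'photo.jpg' is a placeholder path, and data() returns either a dict of time/MIME tags or None on failure.
exif = Exif('photo.jpg').data()
if exif is not None:
    print(exif.get('MIMEType'))   # key name as emitted by exiftool -j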
| 28.5
| 92
| 0.617225
|
4a1739b9e517724ee5f981f7beb755adb2a2d604
| 1,336
|
py
|
Python
|
src/openprocurement/tender/competitivedialogue/views/stage1/qualification_complaint.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 3
|
2020-03-13T06:44:23.000Z
|
2020-11-05T18:25:29.000Z
|
src/openprocurement/tender/competitivedialogue/views/stage1/qualification_complaint.py
|
pontostroy/api
|
5afdd3a62a8e562cf77e2d963d88f1a26613d16a
|
[
"Apache-2.0"
] | 2
|
2021-03-25T23:27:04.000Z
|
2022-03-21T22:18:15.000Z
|
src/openprocurement/tender/competitivedialogue/views/stage1/qualification_complaint.py
|
scrubele/prozorro-testing
|
42b93ea2f25d8cc40e66c596f582c7c05e2a9d76
|
[
"Apache-2.0"
] | 3
|
2020-10-16T16:25:14.000Z
|
2021-05-22T12:26:20.000Z
|
# -*- coding: utf-8 -*-
from openprocurement.tender.openeu.utils import qualifications_resource
from openprocurement.tender.openeu.views.qualification_complaint import (
TenderEUQualificationComplaintResource as BaseTenderQualificationComplaintResource,
)
from openprocurement.tender.competitivedialogue.constants import CD_EU_TYPE, CD_UA_TYPE
@qualifications_resource(
name="{}:Tender Qualification Complaints".format(CD_EU_TYPE),
collection_path="/tenders/{tender_id}/qualifications/{qualification_id}/complaints",
path="/tenders/{tender_id}/qualifications/{qualification_id}/complaints/{complaint_id}",
procurementMethodType=CD_EU_TYPE,
description="Competitive Dialogue EU qualification complaints",
)
class CompetitiveDialogueEUQualificationComplaintResource(BaseTenderQualificationComplaintResource):
pass
@qualifications_resource(
name="{}:Tender Qualification Complaints".format(CD_UA_TYPE),
collection_path="/tenders/{tender_id}/qualifications/{qualification_id}/complaints",
path="/tenders/{tender_id}/qualifications/{qualification_id}/complaints/{complaint_id}",
procurementMethodType=CD_UA_TYPE,
description="Competitive Dialogue UA qualification complaints",
)
class CompetitiveDialogueUAQualificationComplaintResource(BaseTenderQualificationComplaintResource):
pass
| 46.068966
| 100
| 0.824102
|
4a173a95b813fbab13ac2160a9f309c8607499b0
| 3,353
|
py
|
Python
|
sdk/lusid/models/transaction_query_mode.py
|
slemasne/lusid-sdk-python-preview
|
94a97951ec2052bc1672b7be21e52ad2fcf6eea0
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/transaction_query_mode.py
|
slemasne/lusid-sdk-python-preview
|
94a97951ec2052bc1672b7be21e52ad2fcf6eea0
|
[
"MIT"
] | null | null | null |
sdk/lusid/models/transaction_query_mode.py
|
slemasne/lusid-sdk-python-preview
|
94a97951ec2052bc1672b7be21e52ad2fcf6eea0
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 0.11.3725
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid.configuration import Configuration
class TransactionQueryMode(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
allowed enum values
"""
TRADEDATE = "TradeDate"
SETTLEDATE = "SettleDate"
allowable_values = [TRADEDATE, SETTLEDATE] # noqa: E501
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
}
attribute_map = {
}
required_map = {
}
def __init__(self, local_vars_configuration=None): # noqa: E501
"""TransactionQueryMode - a model defined in OpenAPI"
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self.discriminator = None
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TransactionQueryMode):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, TransactionQueryMode):
return True
return self.to_dict() != other.to_dict()
| 27.710744
| 83
| 0.573516
|
4a173c023e0c459b9637e8156ae513f4c78a6ce2
| 3,700
|
py
|
Python
|
hide_and_seek/ui/game.py
|
houtaru/hide-and-seek
|
ee5523b16e10419e8b9f58ac2b1f66bff8935418
|
[
"MIT"
] | null | null | null |
hide_and_seek/ui/game.py
|
houtaru/hide-and-seek
|
ee5523b16e10419e8b9f58ac2b1f66bff8935418
|
[
"MIT"
] | null | null | null |
hide_and_seek/ui/game.py
|
houtaru/hide-and-seek
|
ee5523b16e10419e8b9f58ac2b1f66bff8935418
|
[
"MIT"
] | null | null | null |
import os
import pygame
from pygame.locals import Rect
from ..ui.player import Player
from ..ui.table import Table
import hide_and_seek.utils.constants as Constants
from ..utils.rand import Rand
from ..controllers.level_1 import Backtrack
class Game:
def __init__(self, opt):
pygame.init()
pygame.display.set_caption("Hide and Seek")
self.window = pygame.display.set_mode(
(opt["screen_width"], opt["screen_height"])
)
self.fps = opt["FRAME_PER_SECONDS"]
self.rect = Rect(0, 0, opt["screen_width"], opt["screen_height"])
self.table = Table(
opt["map"],
{"scr_wt": opt["screen_height"], "scr_ht": opt["screen_height"]},
opt["line"]["thickness"],
)
_players = {"hider": [], "seeker": []}
temp = []
for i in range(opt["amount"]["seeker"]):
x, y = Rand().get_pos(self.table.get_table())
self.table.update_table(x, y, 3)
temp.append([0, 0, 3])
for i in range(opt["amount"]["hider"]):
x, y = Rand().get_pos(self.table.get_table())
self.table.update_table(x, y, 2)
temp.append([x, y, 2])
for i, j, typ in temp:
lhs = self.table.get_pos_on_board(i, j)
if typ == 2:
_players["hider"].append(
Player(
x=i,
y=j,
rect=Rect(
lhs[0] + 1,
lhs[1] + 1,
self.table._grid_size["x"],
self.table._grid_size["y"],
),
color="blue",
radius=self.table._grid_size["x"] / 3,
view_range=opt["view"]["hider"],
moveable=opt["moveable"]["hider"],
pushable=opt["pushable"]["hider"],
)
)
if typ == 3:
_players["seeker"].append(
Player(
x=i,
y=j,
rect=Rect(
lhs[0] + 1,
lhs[1] + 1,
self.table._grid_size["x"],
self.table._grid_size["y"],
),
color="red",
radius=self.table._grid_size["x"] / 3,
view_range=opt["view"]["seeker"],
moveable=opt["moveable"]["seeker"],
pushable=opt["pushable"]["seeker"],
)
)
self._players = _players
self._list_player = temp
self._result = Backtrack(self.table, self._list_player).run()
print(self._result)
def __del__(self):
pygame.quit()
def run(self):
running = True
clock = pygame.time.Clock()
while running:
clock.tick(self.fps)
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
# agent
self.draw()
def draw_players(self):
for type in ["hider", "seeker"]:
for player in self._players[type]:
player.draw(
self.window, self.table._n, self.table._m, self.table._grid_size
)
def draw(self):
pygame.draw.rect(self.window, Constants.colors["white"], self.rect)
self.draw_players()
self.table.draw(self.window)
pygame.display.update()
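The constructor above expects a nested opt mapping; a minimal illustrative shape (every value is a placeholder, and the "map" format is whatever Table accepts):
opt = {
    "screen_width": 800, "screen_height": 600,
    "FRAME_PER_SECONDS": 30,
    "map": "maps/map1.txt",
    "line": {"thickness": 1},
    "amount": {"hider": 2, "seeker": 1},
    "view": {"hider": 3, "seeker": 5},
    "moveable": {"hider": True, "seeker": True},
    "pushable": {"hider": False, "seeker": True},
}
Game(opt).run()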
| 33.636364
| 84
| 0.446486
|
4a173cc988c2316007e202437d742354a5cf09c4
| 1,562
|
py
|
Python
|
tools/ridet/test_hrsc2016_8p.py
|
Artcs1/RotationDetection
|
095be17345ee9984d8de8f24eb6b5a0b2d764a06
|
[
"Apache-2.0"
] | 850
|
2020-10-27T08:51:54.000Z
|
2022-03-30T15:12:06.000Z
|
tools/ridet/test_hrsc2016_8p.py
|
Artcs1/RotationDetection
|
095be17345ee9984d8de8f24eb6b5a0b2d764a06
|
[
"Apache-2.0"
] | 94
|
2020-12-01T02:18:47.000Z
|
2022-03-30T08:14:27.000Z
|
tools/ridet/test_hrsc2016_8p.py
|
Artcs1/RotationDetection
|
095be17345ee9984d8de8f24eb6b5a0b2d764a06
|
[
"Apache-2.0"
] | 149
|
2020-10-29T03:30:32.000Z
|
2022-03-29T09:53:23.000Z
|
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import os
import sys
import tensorflow as tf
import time
import cv2
import pickle
import numpy as np
import argparse
from tqdm import tqdm
sys.path.append("../../")
from libs.models.detectors.ridet import build_whole_network_8p
from tools.test_hrsc2016_base_q import TestHRSC2016
from libs.configs import cfgs
from libs.val_libs.voc_eval_r import EVAL
class TestHRSC2016RIDet(TestHRSC2016):
def eval(self):
ridet = build_whole_network_8p.DetectionNetworkRIDet(cfgs=self.cfgs,
is_training=False)
all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=ridet,
image_ext=self.args.image_ext)
# with open(cfgs.VERSION + '_detections_r.pkl', 'rb') as f2:
# all_boxes_r = pickle.load(f2)
#
# print(len(all_boxes_r))
imgs = os.listdir(self.args.img_dir)
real_test_imgname_list = [i.split(self.args.image_ext)[0] for i in imgs]
print(10 * "**")
print('rotation eval:')
evaler = EVAL(self.cfgs)
evaler.voc_evaluate_detections(all_boxes=all_boxes_r,
test_imgid_list=real_test_imgname_list,
test_annotation_path=self.args.test_annotation_path)
if __name__ == '__main__':
tester = TestHRSC2016RIDet(cfgs)
tester.eval()
| 29.471698
| 91
| 0.644686
|
4a173db01f86af5887c830e7dab5cffa1e96f122
| 18,860
|
py
|
Python
|
qa/rpc-tests/test_framework/comptool.py
|
v1nc0/macclone14.3
|
e91fb2566205b5f4e2e1b2384cd93309a24261c4
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/test_framework/comptool.py
|
v1nc0/macclone14.3
|
e91fb2566205b5f4e2e1b2384cd93309a24261c4
|
[
"MIT"
] | null | null | null |
qa/rpc-tests/test_framework/comptool.py
|
v1nc0/macclone14.3
|
e91fb2566205b5f4e2e1b2384cd93309a24261c4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Machinecoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from .mininode import *
from .blockstore import BlockStore, TxStore
from .util import p2p_port
'''
This is a tool for comparing two or more machinecoinds to each other
using a script provided.
To use, create a class that implements get_tests(), and pass it in
as the test generator to TestManager. get_tests() should be a python
generator that returns TestInstance objects. See below for definition.
'''
# TestNode behaves as follows:
# Configure with a BlockStore and TxStore
# on_inv: log the message but don't request
# on_headers: log the chain tip
# on_pong: update ping response map (for synchronization)
# on_getheaders: provide headers via BlockStore
# on_getdata: provide blocks via BlockStore
global mininode_lock
class RejectResult(object):
'''
Outcome that expects rejection of a transaction or block.
'''
def __init__(self, code, reason=b''):
self.code = code
self.reason = reason
def match(self, other):
if self.code != other.code:
return False
return other.reason.startswith(self.reason)
def __repr__(self):
return '%i:%s' % (self.code,self.reason or '*')
class TestNode(NodeConnCB):
def __init__(self, block_store, tx_store):
NodeConnCB.__init__(self)
self.conn = None
self.bestblockhash = None
self.block_store = block_store
self.block_request_map = {}
self.tx_store = tx_store
self.tx_request_map = {}
self.block_reject_map = {}
self.tx_reject_map = {}
# When the pingmap is non-empty we're waiting for
# a response
self.pingMap = {}
self.lastInv = []
self.closed = False
def on_close(self, conn):
self.closed = True
def add_connection(self, conn):
self.conn = conn
def on_headers(self, conn, message):
if len(message.headers) > 0:
best_header = message.headers[-1]
best_header.calc_sha256()
self.bestblockhash = best_header.sha256
def on_getheaders(self, conn, message):
response = self.block_store.headers_for(message.locator, message.hashstop)
if response is not None:
conn.send_message(response)
def on_getdata(self, conn, message):
[conn.send_message(r) for r in self.block_store.get_blocks(message.inv)]
[conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
for i in message.inv:
if i.type == 1:
self.tx_request_map[i.hash] = True
elif i.type == 2:
self.block_request_map[i.hash] = True
def on_inv(self, conn, message):
self.lastInv = [x.hash for x in message.inv]
def on_pong(self, conn, message):
try:
del self.pingMap[message.nonce]
except KeyError:
raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
def on_reject(self, conn, message):
if message.message == b'tx':
self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
if message.message == b'block':
self.block_reject_map[message.data] = RejectResult(message.code, message.reason)
def send_inv(self, obj):
mtype = 2 if isinstance(obj, CBlock) else 1
self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))
def send_getheaders(self):
# We ask for headers from their last tip.
m = msg_getheaders()
m.locator = self.block_store.get_locator(self.bestblockhash)
self.conn.send_message(m)
def send_header(self, header):
m = msg_headers()
m.headers.append(header)
self.conn.send_message(m)
# This assumes BIP31
def send_ping(self, nonce):
self.pingMap[nonce] = True
self.conn.send_message(msg_ping(nonce))
def received_ping_response(self, nonce):
return nonce not in self.pingMap
def send_mempool(self):
self.lastInv = []
self.conn.send_message(msg_mempool())
# TestInstance:
#
# Instances of these are generated by the test generator, and fed into the
# comptool.
#
# "blocks_and_transactions" should be an array of
# [obj, True/False/None, hash/None]:
# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
# - the second value indicates whether the object should be accepted
# into the blockchain or mempool (for tests where we expect a certain
# answer), or "None" if we don't expect a certain answer and are just
# comparing the behavior of the nodes being tested.
# - the third value is the hash to test the tip against (if None or omitted,
# use the hash of the block)
# - NOTE: if a block header, no test is performed; instead the header is
# just added to the block_store. This is to facilitate block delivery
# when communicating with headers-first clients (when withholding an
# intermediate block).
# sync_every_block: if True, then each block will be inv'ed, synced, and
# nodes will be tested based on the outcome for the block. If False,
# then inv's accumulate until all blocks are processed (or max inv size
# is reached) and then sent out in one inv message. Then the final block
# will be synced across all connections, and the outcome of the final
# block will be tested.
# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
# on the final tx is None, then contents of entire mempool are compared
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
class TestInstance(object):
def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
self.blocks_and_transactions = objects if objects else []
self.sync_every_block = sync_every_block
self.sync_every_tx = sync_every_tx
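# --- Hedged usage sketch (addition, not in the original file): the smallest
# possible test generator as consumed by TestManager below. The two builder
# callables are hypothetical stand-ins for real block/tx construction; only
# the TestInstance/RejectResult shapes come from the comment block above.
class ExampleTestGen(object):
    def __init__(self, make_block, make_double_spend):
        # inject the hypothetical builders so the class itself stays importable
        self.make_block = make_block
        self.make_double_spend = make_double_spend
    def get_tests(self):
        block = self.make_block()          # a CBlock expected to be accepted
        yield TestInstance([[block, True]])
        bad_tx = self.make_double_spend()  # a CTransaction expected to be rejected
        yield TestInstance([[bad_tx, RejectResult(16, b'bad-txns')]],
                           sync_every_tx=True)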
class TestManager(object):
def __init__(self, testgen, datadir):
self.test_generator = testgen
self.connections = []
self.test_nodes = []
self.block_store = BlockStore(datadir)
self.tx_store = TxStore(datadir)
self.ping_counter = 1
def add_all_connections(self, nodes):
for i in range(len(nodes)):
# Create a p2p connection to each node
test_node = TestNode(self.block_store, self.tx_store)
self.test_nodes.append(test_node)
self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
# Make sure the TestNode (callback class) has a reference to its
# associated NodeConn
test_node.add_connection(self.connections[-1])
def clear_all_connections(self):
self.connections = []
self.test_nodes = []
def wait_for_disconnections(self):
def disconnected():
return all(node.closed for node in self.test_nodes)
return wait_until(disconnected, timeout=10)
def wait_for_verack(self):
def veracked():
return all(node.verack_received for node in self.test_nodes)
return wait_until(veracked, timeout=10)
def wait_for_pings(self, counter):
def received_pongs():
return all(node.received_ping_response(counter) for node in self.test_nodes)
return wait_until(received_pongs)
# sync_blocks: Wait for all connections to request the blockhash given
# then send get_headers to find out the tip of each node, and synchronize
# the response by using a ping (and waiting for pong with same nonce).
def sync_blocks(self, blockhash, num_blocks):
def blocks_requested():
return all(
blockhash in node.block_request_map and node.block_request_map[blockhash]
for node in self.test_nodes
)
# --> error if not requested
if not wait_until(blocks_requested, attempts=20*num_blocks):
# print [ c.cb.block_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested block")
# Send getheaders message
[ c.cb.send_getheaders() for c in self.connections ]
# Send ping and wait for response -- synchronization hack
[ c.cb.send_ping(self.ping_counter) for c in self.connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
# Analogous to sync_block (see above)
def sync_transaction(self, txhash, num_events):
# Wait for nodes to request transaction (50ms sleep * 20 tries * num_events)
def transaction_requested():
return all(
txhash in node.tx_request_map and node.tx_request_map[txhash]
for node in self.test_nodes
)
# --> error if not requested
if not wait_until(transaction_requested, attempts=20*num_events):
# print [ c.cb.tx_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested transaction")
# Get the mempool
[ c.cb.send_mempool() for c in self.connections ]
# Send ping and wait for response -- synchronization hack
[ c.cb.send_ping(self.ping_counter) for c in self.connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
# Sort inv responses from each node
with mininode_lock:
[ c.cb.lastInv.sort() for c in self.connections ]
# Verify that the tip of each connection all agree with each other, and
# with the expected outcome (if given)
def check_results(self, blockhash, outcome):
with mininode_lock:
for c in self.connections:
if outcome is None:
if c.cb.bestblockhash != self.connections[0].cb.bestblockhash:
return False
elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code
if c.cb.bestblockhash == blockhash:
return False
if blockhash not in c.cb.block_reject_map:
print('Block not in reject map: %064x' % (blockhash))
return False
if not outcome.match(c.cb.block_reject_map[blockhash]):
print('Block rejected with %s instead of expected %s: %064x' % (c.cb.block_reject_map[blockhash], outcome, blockhash))
return False
elif ((c.cb.bestblockhash == blockhash) != outcome):
# print c.cb.bestblockhash, blockhash, outcome
return False
return True
# Either check that the mempools all agree with each other, or that
# txhash's presence in the mempool matches the outcome specified.
# This is somewhat of a strange comparison, in that we're either comparing
# a particular tx to an outcome, or the entire mempools altogether;
# perhaps it would be useful to add the ability to check explicitly that
# a particular tx's existence in the mempool is the same across all nodes.
def check_mempool(self, txhash, outcome):
with mininode_lock:
for c in self.connections:
if outcome is None:
# Make sure the mempools agree with each other
if c.cb.lastInv != self.connections[0].cb.lastInv:
# print c.rpc.getrawmempool()
return False
elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code
if txhash in c.cb.lastInv:
return False
if txhash not in c.cb.tx_reject_map:
print('Tx not in reject map: %064x' % (txhash))
return False
if not outcome.match(c.cb.tx_reject_map[txhash]):
print('Tx rejected with %s instead of expected %s: %064x' % (c.cb.tx_reject_map[txhash], outcome, txhash))
return False
elif ((txhash in c.cb.lastInv) != outcome):
# print c.rpc.getrawmempool(), c.cb.lastInv
return False
return True
def run(self):
# Wait until verack is received
self.wait_for_verack()
test_number = 1
for test_instance in self.test_generator.get_tests():
# We use these variables to keep track of the last block
# and last transaction in the tests, which are used
# if we're not syncing on every block or every tx.
[ block, block_outcome, tip ] = [ None, None, None ]
[ tx, tx_outcome ] = [ None, None ]
invqueue = []
for test_obj in test_instance.blocks_and_transactions:
b_or_t = test_obj[0]
outcome = test_obj[1]
# Determine if we're dealing with a block or tx
if isinstance(b_or_t, CBlock): # Block test runner
block = b_or_t
block_outcome = outcome
tip = block.sha256
# each test_obj can have an optional third argument
# to specify the tip we should compare with
# (default is to use the block being tested)
if len(test_obj) >= 3:
tip = test_obj[2]
# Add to shared block_store, set as current block
# If there was an open getdata request for the block
# previously, and we didn't have an entry in the
# block_store, then immediately deliver, because the
# node wouldn't send another getdata request while
# the earlier one is outstanding.
first_block_with_hash = True
if self.block_store.get(block.sha256) is not None:
first_block_with_hash = False
with mininode_lock:
self.block_store.add_block(block)
for c in self.connections:
if first_block_with_hash and block.sha256 in c.cb.block_request_map and c.cb.block_request_map[block.sha256] == True:
# There was a previous request for this block hash
# Most likely, we delivered a header for this block
# but never had the block to respond to the getdata
c.send_message(msg_block(block))
else:
c.cb.block_request_map[block.sha256] = False
# Either send inv's to each node and sync, or add
# to invqueue for later inv'ing.
if (test_instance.sync_every_block):
# if we expect success, send inv and sync every block
# if we expect failure, just push the block and see what happens.
if outcome == True:
[ c.cb.send_inv(block) for c in self.connections ]
self.sync_blocks(block.sha256, 1)
else:
[ c.send_message(msg_block(block)) for c in self.connections ]
[ c.cb.send_ping(self.ping_counter) for c in self.connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
if (not self.check_results(tip, outcome)):
raise AssertionError("Test failed at test %d" % test_number)
else:
invqueue.append(CInv(2, block.sha256))
elif isinstance(b_or_t, CBlockHeader):
block_header = b_or_t
self.block_store.add_header(block_header)
[ c.cb.send_header(block_header) for c in self.connections ]
else: # Tx test runner
assert(isinstance(b_or_t, CTransaction))
tx = b_or_t
tx_outcome = outcome
# Add to shared tx store and clear map entry
with mininode_lock:
self.tx_store.add_transaction(tx)
for c in self.connections:
c.cb.tx_request_map[tx.sha256] = False
# Again, either inv to all nodes or save for later
if (test_instance.sync_every_tx):
[ c.cb.send_inv(tx) for c in self.connections ]
self.sync_transaction(tx.sha256, 1)
if (not self.check_mempool(tx.sha256, outcome)):
raise AssertionError("Test failed at test %d" % test_number)
else:
invqueue.append(CInv(1, tx.sha256))
# Ensure we're not overflowing the inv queue
if len(invqueue) == MAX_INV_SZ:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
# Do final sync if we weren't syncing on every block or every tx.
if (not test_instance.sync_every_block and block is not None):
if len(invqueue) > 0:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
if (not self.check_results(tip, block_outcome)):
raise AssertionError("Block test failed at test %d" % test_number)
if (not test_instance.sync_every_tx and tx is not None):
if len(invqueue) > 0:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions))
if (not self.check_mempool(tx.sha256, tx_outcome)):
raise AssertionError("Mempool test failed at test %d" % test_number)
print("Test %d: PASS" % test_number, [ c.rpc.getblockcount() for c in self.connections ])
test_number += 1
[ c.disconnect_node() for c in self.connections ]
self.wait_for_disconnections()
self.block_store.close()
self.tx_store.close()
| 45.227818
| 145
| 0.599576
|
4a173e5bc9b3869b0c0a876735208d2766068131
| 7,278
|
py
|
Python
|
train.py
|
greulist137/Data-Science---Deep-Learning-Image-Classifier
|
1d7d44ebf2d5fc619c6020d002b21eaf6af6b9f2
|
[
"MIT"
] | null | null | null |
train.py
|
greulist137/Data-Science---Deep-Learning-Image-Classifier
|
1d7d44ebf2d5fc619c6020d002b21eaf6af6b9f2
|
[
"MIT"
] | null | null | null |
train.py
|
greulist137/Data-Science---Deep-Learning-Image-Classifier
|
1d7d44ebf2d5fc619c6020d002b21eaf6af6b9f2
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 12 12:17:00 2018
@author: greul
"""
# Imports here
from torch import nn
from torch import optim
import torch.nn.functional as F
#%config InlineBackend.figure_format = 'retina'
import matplotlib.pyplot as plt
from PIL import Image
from torchvision import datasets, transforms, models
from collections import OrderedDict
import numpy as np
import torch
import time
import argparse
# construct the argument parse and parse the arguments
'''
Default values used for testing
directory: root
Learning Rate: 0.0005
epochs: 3
model (VGG16 or resnet18)
CUDA
Hidden layer: 3
'''
ap = argparse.ArgumentParser()
ap.add_argument("-d", "--directory", required=True,
help="Root Directory of images")
ap.add_argument("-l", "--learning", required=True,
help="Learning Rate")
ap.add_argument("-e", "--epochs", required=True,
help="Number of epochs")
ap.add_argument("-m", "--model", required=True,
help="Type of model")
ap.add_argument("-j", "--hidden", required=True,
help="number of hidden layers")
ap.add_argument("-p", "--processor", required=True,
help="use GPU or CPU")
args = vars(ap.parse_args())
data_dir = args['directory']
train_dir = data_dir + '/train'
valid_dir = data_dir + '/valid'
test_dir = data_dir + '/test'
# Define your transforms for the training, validation, and testing sets
train_transforms = transforms.Compose([transforms.RandomRotation(30),
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])])
test_val_transforms = transforms.Compose([transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])])
# Load the datasets with ImageFolder
train_data = datasets.ImageFolder(train_dir, transform=train_transforms)
val_data = datasets.ImageFolder(valid_dir, transform=test_val_transforms)
test_data = datasets.ImageFolder(test_dir, transform=test_val_transforms)
trainloader = torch.utils.data.DataLoader(train_data, batch_size=64, shuffle=True)
validationloader = torch.utils.data.DataLoader(val_data, batch_size=32)
testloader = torch.utils.data.DataLoader(test_data, batch_size=32)
dataiter = iter(trainloader)
images, labels = next(dataiter)  # sanity-check one batch; `.next()` was removed from newer PyTorch iterators
# Build and train your network
if(args['model'] == 'vgg16'):
    model = getattr(models, 'vgg16')(pretrained=True)
    model_inputs = model.classifier[0].in_features
if(args['model'] == 'resnet18'):
    model = getattr(models, 'resnet18')(pretrained=True)
    # fix: resnet18 has no `classifier` attribute; its head is the `fc` layer
    model_inputs = model.fc.in_features
# Freeze parameters so we don't backprop through them
for param in model.parameters():
param.requires_grad = False
classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(model_inputs, 4096)),
('relu1', nn.ReLU()),
('drop1', nn.Dropout(0.5)),
('fc2', nn.Linear(4096, 1000)),
('relu2', nn.ReLU()),
('drop2', nn.Dropout(0.5)),
('fc4', nn.Linear(1000, 102)),
('output', nn.LogSoftmax(dim=1))]))
if args['model'] == 'resnet18':
    # resnet's head is `fc`; attach the new classifier there so forward() uses it,
    # and keep `model.classifier` as an alias for the optimizer call below
    model.fc = classifier
model.classifier = classifier
# Train a model with a pre-trained network
criterion = nn.NLLLoss()
learning = float(args['learning'])
optimizer = optim.Adam(model.classifier.parameters(), learning)
def do_deep_learning(model, trainloader, validationloader, epochs, print_every, criterion, optimizer, device='cpu'):
epochs = epochs
print_every = print_every
steps = 0
running_loss = 0
    # move the model to the requested device (the caller passes args['processor'])
    model.to(device)
for e in range(epochs):
if e % 2 == 0:
loader = validationloader
model.eval()
accuracy = 0
val_loss = 0
for ii, (inputs, labels) in enumerate(loader):
steps += 1
                inputs, labels = inputs.to(device), labels.to(device)
outputs = model.forward(inputs)
val_loss = criterion(outputs, labels)
ps = torch.exp(outputs).data
equality = (labels.data == ps.max(1)[1])
accuracy += equality.type_as(torch.FloatTensor()).mean()
if steps % print_every == 0:
print("Epoch: {}/{}.. ".format(e+1, epochs),
"Training Loss: {:.3f}.. ".format(running_loss/print_every),
"Validation Loss: {:.3f}.. ".format(val_loss/len(validationloader)),
"Validation Accuracy: {:.3f}".format(accuracy/len(validationloader)))
else:
model.train()
loader = trainloader
for ii, (inputs, labels) in enumerate(loader):
steps += 1
                inputs, labels = inputs.to(device), labels.to(device)
optimizer.zero_grad()
outputs = model.forward(inputs)
val_loss = criterion(outputs, labels)
val_loss.backward()
optimizer.step()
running_loss += val_loss.item()
if steps % print_every == 0:
print("Epoch: {}/{}.. ".format(e+1, epochs),
"Training Loss: {:.3f}.. ".format(running_loss/print_every),
"Validation Loss: {:.3f}.. ".format(val_loss/len(validationloader)),
"Validation Accuracy: {:.3f}".format(accuracy/len(validationloader)))
running_loss = 0
def check_accuracy_on_test(testloader):
correct = 0
total = 0
model.eval()
with torch.no_grad():
for data in testloader:
images, labels = data
images, labels = images.to(args['processor']), labels.to(args['processor'])
outputs = model(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum().item()
    print('Accuracy of the network on the %d test images: %d %%' % (total, 100 * correct / total))
epochs = int(args['epochs'])
do_deep_learning(model, trainloader, validationloader, epochs, 10, criterion, optimizer, args['processor'])
# Do validation on the test set
check_accuracy_on_test(testloader)
def save_checkpoint(model):
model.class_to_idx = train_data.class_to_idx
checkpoint = {
'state_dict': model.state_dict(),
'image_datasets' : model.class_to_idx,
'arch': model,
'epochs': epochs,
'optimizer': optimizer.state_dict(),
'learning_rate': learning,
}
torch.save(checkpoint, 'checkpoint.pth')
save_checkpoint(model)
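# --- Hedged sketch (addition): reloading the checkpoint written above. The key
# names mirror save_checkpoint(); since 'arch' stores the whole model object,
# load_state_dict is only a consistency step here.
def load_checkpoint(path='checkpoint.pth'):
    checkpoint = torch.load(path)
    model = checkpoint['arch']
    model.load_state_dict(checkpoint['state_dict'])
    model.class_to_idx = checkpoint['image_datasets']
    return model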
| 37.132653
| 116
| 0.585463
|
4a173f3c2ddb99722dfcb406e0de77c366d5989c
| 1,303
|
py
|
Python
|
xlsxwriter/test/comparison/test_image42.py
|
Rippling/XlsxWriter-1
|
be8d1cb8f8b156cf87bbe5d591f1f5475804be44
|
[
"BSD-2-Clause"
] | null | null | null |
xlsxwriter/test/comparison/test_image42.py
|
Rippling/XlsxWriter-1
|
be8d1cb8f8b156cf87bbe5d591f1f5475804be44
|
[
"BSD-2-Clause"
] | null | null | null |
xlsxwriter/test/comparison/test_image42.py
|
Rippling/XlsxWriter-1
|
be8d1cb8f8b156cf87bbe5d591f1f5475804be44
|
[
"BSD-2-Clause"
] | null | null | null |
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('image42.xlsx')
        # Despite a lot of effort and testing I can't match Excel's
        # calculations exactly for EMF files. The differences are small
        # (<1%) and in general aren't visible. The following ignores the
        # elements where these differences occur until they can be
        # resolved. This issue doesn't occur for any other image type.
self.ignore_elements = {'xl/drawings/drawing1.xml': ['<xdr:rowOff>', '<xdr:colOff>', '<a:ext cx=']}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'test-000.emf')
workbook.close()
self.assertExcelEqual()
| 31.02381
| 107
| 0.635457
|
4a173f9819cd4e04940b0d64f77c0401066eda7a
| 861
|
py
|
Python
|
lab7.py
|
uni-student234/ISAT252
|
4c0942919c432456fe26900c23f076161b4cc266
|
[
"MIT"
] | null | null | null |
lab7.py
|
uni-student234/ISAT252
|
4c0942919c432456fe26900c23f076161b4cc266
|
[
"MIT"
] | null | null | null |
lab7.py
|
uni-student234/ISAT252
|
4c0942919c432456fe26900c23f076161b4cc266
|
[
"MIT"
] | null | null | null |
"""
Week 2, day 7, lab 7
"""
#3.1
i = 0
while i <= 6:
if i == 3 or i == 6:
i = i + 1
continue
print(i)
i = i + 1
#3.2
i = 5
factorial = 1
while i > 1:
factorial = factorial*i
i = i - 1
print(factorial)
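# Worked check for 3.2 (added note): the loop multiplies 5*4*3*2 = 120, i.e. 5!;
# it stops before multiplying by 1, which changes nothing.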
#3.3
i = 1
factorial = 0
while i <= 5:
factorial = factorial + i
i = i + 1
print(factorial)
#3.4
i = 3
factorial = 1
while i <= 8:
factorial = factorial*i
i = i + 1
print(factorial)
#3.5
i = 1
dividend = 1
while i <= 8:
dividend = dividend*i
i = i + 1
print(dividend)
i = 1
divisor = 1
while i <= 3:
divisor = divisor*i
i = i + 1
print(divisor)
print(dividend/divisor)
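# Worked check for 3.5 (added note): dividend = 8! = 40320 and divisor = 3! = 6,
# so the printed quotient is 40320/6 = 6720.0.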
#3.6
num_list = [ 12, 32, 43, 35]
while num_list:
num_list.remove(num_list[0])
print(num_list)
"""
#Using other methods
print(len(num_list))
while len(num_list) != 0:
num_list.pop(0)
print(num_list)
"""
| 13.666667
| 32
| 0.563298
|
4a174068bd960160047d880bc306efd533d13656
| 2,326
|
py
|
Python
|
sklego/meta/decay_estimator.py
|
lahdjirayhan/scikit-lego
|
5dd145df796c4d254cd505727c9db01484ebc39c
|
[
"MIT"
] | 784
|
2019-03-01T21:35:53.000Z
|
2022-03-30T11:22:46.000Z
|
sklego/meta/decay_estimator.py
|
lahdjirayhan/scikit-lego
|
5dd145df796c4d254cd505727c9db01484ebc39c
|
[
"MIT"
] | 382
|
2019-02-27T10:38:53.000Z
|
2022-03-31T07:22:24.000Z
|
sklego/meta/decay_estimator.py
|
lahdjirayhan/scikit-lego
|
5dd145df796c4d254cd505727c9db01484ebc39c
|
[
"MIT"
] | 112
|
2019-03-01T19:34:37.000Z
|
2022-03-30T14:10:29.000Z
|
import numpy as np
from sklearn import clone
from sklearn.base import BaseEstimator
from sklearn.utils.validation import (
check_is_fitted,
check_X_y,
FLOAT_DTYPES,
)
class DecayEstimator(BaseEstimator):
"""
    Morphs an estimator such that the training weights can be
    adapted to ensure that points that are far away have less weight.
    Note that it is up to the user to sort the dataset appropriately.
    This meta estimator will only work for estimators that have a
    "sample_weight" argument in their `.fit()` method.
    The DecayEstimator will use exponential decay to weight the samples.
w_{t-1} = decay * w_{t}
"""
def __init__(self, model, decay: float = 0.999, decay_func="exponential"):
self.model = model
self.decay = decay
self.decay_func = decay_func
def _is_classifier(self):
return any(
["ClassifierMixin" in p.__name__ for p in type(self.model).__bases__]
)
def fit(self, X, y):
"""
        Fit the data after adapting the sample weight.
        :param X: array-like, shape=(n_samples, n_columns) training data.
        :param y: array-like, shape=(n_samples,) training data.
:return: Returns an instance of self.
"""
X, y = check_X_y(X, y, estimator=self, dtype=FLOAT_DTYPES)
self.weights_ = np.cumprod(np.ones(X.shape[0]) * self.decay)[::-1]
self.estimator_ = clone(self.model)
        try:
            self.estimator_.fit(X, y, sample_weight=self.weights_)
        except TypeError as e:
            if "sample_weight" in str(e):
                raise TypeError(
                    f"Model {type(self.model).__name__}.fit() does not have 'sample_weight'"
                ) from e
            raise  # re-raise unrelated TypeErrors instead of silently swallowing them
if self._is_classifier():
self.classes_ = self.estimator_.classes_
return self
def predict(self, X):
"""
Predict new data.
        :param X: array-like, shape=(n_samples, n_columns) data to predict.
:return: array, shape=(n_samples,) the predicted data
"""
if self._is_classifier():
check_is_fitted(self, ["classes_"])
check_is_fitted(self, ["weights_", "estimator_"])
return self.estimator_.predict(X)
def score(self, X, y):
return self.estimator_.score(X, y)
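if __name__ == "__main__":
    # --- Hedged usage sketch (addition, not part of the library): wrap a
    # regressor whose .fit() accepts sample_weight so that earlier rows count
    # exponentially less. The data is toy; with decay=0.99 a sample's weight
    # halves roughly every 69 rows (ln 2 / ln(1/0.99)).
    from sklearn.linear_model import LinearRegression
    X_demo = np.arange(100, dtype=float).reshape(-1, 1)
    y_demo = 2.0 * X_demo.ravel() + 1.0
    demo_model = DecayEstimator(LinearRegression(), decay=0.99).fit(X_demo, y_demo)
    print(demo_model.predict(X_demo[-3:]))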
| 33.228571
| 92
| 0.624248
|
4a17415fb417debb14950cca36a9c5be5eb6714e
| 1,048
|
py
|
Python
|
refinery/units/crypto/cipher/rc4mod.py
|
larsborn/refinery
|
c8b19156b17e5fa5de5c72bc668a14d646584560
|
[
"BSD-3-Clause"
] | null | null | null |
refinery/units/crypto/cipher/rc4mod.py
|
larsborn/refinery
|
c8b19156b17e5fa5de5c72bc668a14d646584560
|
[
"BSD-3-Clause"
] | null | null | null |
refinery/units/crypto/cipher/rc4mod.py
|
larsborn/refinery
|
c8b19156b17e5fa5de5c72bc668a14d646584560
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from itertools import cycle
from . import arg, StreamCipherUnit
class rc4mod(StreamCipherUnit):
"""
    Implements a modifiable version of the RC4 stream cipher where the size of
the RC4 table can be altered.
"""
def __init__(
self, key, *,
size: arg.number('-t', help='Table size, {default} by default.', bound=(1, None)) = 0x100
):
super().__init__(key=key, size=size)
def keystream(self):
size = self.args.size
tablerange = range(max(size, 0x100))
b, table = 0, bytearray(k & 0xFF for k in tablerange)
for a, keybyte in zip(tablerange, cycle(self.args.key)):
t = table[a]
b = (b + keybyte + t) % size
table[a] = table[b]
table[b] = t
b, a = 0, 0
while True:
a = (a + 1) % size
t = table[a]
b = (b + t) % size
table[a] = table[b]
table[b] = t
yield table[(table[a] + t) % size]
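# --- Hedged demo (addition, not part of the unit): a standalone restatement of
# the keystream above, applied via XOR. Stream ciphers are symmetric, so one
# function both encrypts and decrypts; `size` mirrors the unit's table option.
def _rc4mod_crypt(data: bytes, key: bytes, size: int = 0x100) -> bytes:
    tablerange = range(max(size, 0x100))
    b, table = 0, bytearray(k & 0xFF for k in tablerange)
    for a, keybyte in zip(tablerange, cycle(key)):
        t = table[a]
        b = (b + keybyte + t) % size
        table[a] = table[b]
        table[b] = t
    b = a = 0
    out = bytearray()
    for byte in data:
        a = (a + 1) % size
        t = table[a]
        b = (b + t) % size
        table[a] = table[b]
        table[b] = t
        out.append(byte ^ table[(table[a] + t) % size])
    return bytes(out)
# round trip: _rc4mod_crypt(_rc4mod_crypt(b"secret", b"key"), b"key") == b"secret"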
| 28.324324
| 97
| 0.520992
|
4a17418572502275b4ca576b230619c8583eeb71
| 3,221
|
py
|
Python
|
01_PythonTutorial/045_StringMethods.py
|
EliazBobadilla/Python-Tutorial-W3Schools
|
0f22be2eea493c7e331d15b72847a34a4b748884
|
[
"MIT"
] | 5
|
2021-05-29T23:30:57.000Z
|
2021-12-19T11:21:24.000Z
|
01_PythonTutorial/045_StringMethods.py
|
ChromeOwO/Python-Tutorial-W3Schools
|
0f22be2eea493c7e331d15b72847a34a4b748884
|
[
"MIT"
] | null | null | null |
01_PythonTutorial/045_StringMethods.py
|
ChromeOwO/Python-Tutorial-W3Schools
|
0f22be2eea493c7e331d15b72847a34a4b748884
|
[
"MIT"
] | 4
|
2021-06-04T20:23:48.000Z
|
2022-01-23T05:48:19.000Z
|
#String Methods
'''
capitalize() Converts the first character to upper case
casefold() Converts string into lower case
center() Returns a centered string
count() Returns the number of times a specified value occurs in a string
encode() Returns an encoded version of the string
endswith() Returns true if the string ends with the specified value
expandtabs() Sets the tab size of the string
find() Searches the string for a specified value and returns the position of where it was found
format() Formats specified values in a string
format_map() Formats specified values in a string
index() Searches the string for a specified value and returns the position of where it was found
isalnum() Returns True if all characters in the string are alphanumeric
isalpha() Returns True if all characters in the string are in the alphabet
isdecimal() Returns True if all characters in the string are decimals
isdigit() Returns True if all characters in the string are digits
isidentifier() Returns True if the string is an identifier
islower() Returns True if all characters in the string are lower case
isnumeric() Returns True if all characters in the string are numeric
isprintable() Returns True if all characters in the string are printable
isspace() Returns True if all characters in the string are whitespaces
istitle() Returns True if the string follows the rules of a title
isupper() Returns True if all characters in the string are upper case
join() Joins the elements of an iterable to the end of the string
ljust() Returns a left justified version of the string
lower() Converts a string into lower case
lstrip() Returns a left trim version of the string
maketrans() Returns a translation table to be used in translations
partition() Returns a tuple where the string is parted into three parts
replace() Returns a string where a specified value is replaced with a specified value
rfind() Searches the string for a specified value and returns the last position of where it was found
rindex() Searches the string for a specified value and returns the last position of where it was found
rjust() Returns a right justified version of the string
rpartition() Returns a tuple where the string is parted into three parts
rsplit() Splits the string at the specified separator, and returns a list
rstrip() Returns a right trim version of the string
split() Splits the string at the specified separator, and returns a list
splitlines() Splits the string at line breaks and returns a list
startswith() Returns true if the string starts with the specified value
strip() Returns a trimmed version of the string
swapcase() Swaps cases, lower case becomes upper case and vice versa
title() Converts the first character of each word to upper case
translate() Returns a translated string
upper() Converts a string into upper case
zfill() Fills the string with a specified number of 0 values at the beginning
'''
print("String Methods")
#Note: All string methods return new values. They do not change the original string.
'''
Terminal:
String Methods
'''
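#Example (added): a method call returns a new string and leaves the original unchanged
a = "hello"
print(a.upper()) #HELLO
print(a) #hello
'''
Terminal:
HELLO
hello
'''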
#https://www.w3schools.com/python/python_strings_modify.asp
#Learn more about String Methods with our String Methods Reference: https://www.w3schools.com/python/python_ref_string.asp
| 56.508772
| 122
| 0.800373
|
4a174193270ca4b9dbe22e7db12150535085d671
| 547
|
py
|
Python
|
protseqspark/ProtSeqIO/tsv_helper.py
|
benchiverton/Proteomics
|
006ac5877a5256ee60abdfff35ad81c4a1afa157
|
[
"MIT"
] | 2
|
2020-09-26T14:33:21.000Z
|
2021-01-19T19:22:54.000Z
|
protseqspark/ProtSeqIO/tsv_helper.py
|
benchiverton/Proteomics
|
006ac5877a5256ee60abdfff35ad81c4a1afa157
|
[
"MIT"
] | 2
|
2020-09-28T12:39:04.000Z
|
2022-02-13T15:02:38.000Z
|
protseqspark/ProtSeqIO/tsv_helper.py
|
benchiverton/Proteomics
|
006ac5877a5256ee60abdfff35ad81c4a1afa157
|
[
"MIT"
] | null | null | null |
from typing import Iterator
from ..ProtSeq import ProteinSequence
def writeSequenceToTsv(tsv_file: str, sequences: Iterator[ProteinSequence]):
file = open(tsv_file, "x")
for seq in sequences:
file.write(f'{sequenceToTsv(seq)}\n')
file.close()
def sequenceToTsv(seq: ProteinSequence) -> str:
return f'{seq.accession}\t{seq.geneName}\t{seq.specie}\t{seq.sequence}'
def sequenceFromTsv(row: str) -> ProteinSequence:
parts = row.rstrip().split("\t")
return ProteinSequence(parts[0], parts[1], parts[2], parts[3])
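if __name__ == "__main__":
    # Hedged round-trip demo (addition). The ProteinSequence constructor order
    # is inferred from sequenceFromTsv above (accession, geneName, specie,
    # sequence); the relative import means this module normally runs as part
    # of the package rather than as a script.
    demo = ProteinSequence("P12345", "GENE1", "human", "MKTAYIA")
    assert sequenceFromTsv(sequenceToTsv(demo) + "\n").sequence == demo.sequence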
| 28.789474
| 76
| 0.702011
|
4a1744152cd4f445219a81f181bed508a4299b5b
| 502
|
py
|
Python
|
data/split_fasta.py
|
jhwnkim/covid-mut-rate
|
43a011dfef2cadb9770860d1d11d8a43c0f904ab
|
[
"MIT"
] | null | null | null |
data/split_fasta.py
|
jhwnkim/covid-mut-rate
|
43a011dfef2cadb9770860d1d11d8a43c0f904ab
|
[
"MIT"
] | null | null | null |
data/split_fasta.py
|
jhwnkim/covid-mut-rate
|
43a011dfef2cadb9770860d1d11d8a43c0f904ab
|
[
"MIT"
] | null | null | null |
from Bio import SeqIO
import sys
print(sys.argv)
if len(sys.argv) > 1:
infile = sys.argv[1]
else:
infile = "./old/MA-sequences-2-toy.fasta"
if len(sys.argv)> 2:
size = int(sys.argv[2])
else:
size = 250
# excludes = []
# if len(sys.argv)> 3:
# exclude = sys.argv[]
records = list( SeqIO.parse(infile, "fasta") )
for i in range(0, len(records), size):
outfile = infile[:-6]+'-{:03d}.fasta'.format(i//size)
SeqIO.write(records[i:min(len(records), i+size)], outfile, "fasta")
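# Example invocation (added note): split input.fasta into chunks of 500 records,
# producing input-000.fasta, input-001.fasta, ...
#   python split_fasta.py input.fasta 500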
| 20.916667
| 71
| 0.621514
|
4a174424b9032c4e1f1ba8c9f93994b32d6bf3d9
| 2,045
|
py
|
Python
|
Preprocess.py
|
abhinandansharma/number-plate-recognition
|
e31a1bfb5b7d92199829cb30b281d37f2b4552bb
|
[
"MIT"
] | null | null | null |
Preprocess.py
|
abhinandansharma/number-plate-recognition
|
e31a1bfb5b7d92199829cb30b281d37f2b4552bb
|
[
"MIT"
] | null | null | null |
Preprocess.py
|
abhinandansharma/number-plate-recognition
|
e31a1bfb5b7d92199829cb30b281d37f2b4552bb
|
[
"MIT"
] | null | null | null |
# Preprocess.py
import cv2
# module level variables ##########################################################################
GAUSSIAN_SMOOTH_FILTER_SIZE = (5, 5)
ADAPTIVE_THRESH_BLOCK_SIZE = 19
ADAPTIVE_THRESH_WEIGHT = 9
###################################################################################################
def preprocess(imgOriginal):
imgGrayscale = extractValue(imgOriginal)
imgMaxContrastGrayscale = maximizeContrast(imgGrayscale)
    imgBlurred = cv2.GaussianBlur(imgMaxContrastGrayscale, GAUSSIAN_SMOOTH_FILTER_SIZE, 0)
imgThresh = cv2.adaptiveThreshold(imgBlurred, 255.0, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, ADAPTIVE_THRESH_BLOCK_SIZE, ADAPTIVE_THRESH_WEIGHT)
return imgGrayscale, imgThresh
# end function
###################################################################################################
def extractValue(imgOriginal):
    imgHSV = cv2.cvtColor(imgOriginal, cv2.COLOR_BGR2HSV)
imgHue, imgSaturation, imgValue = cv2.split(imgHSV)
return imgValue
# end function
###################################################################################################
def maximizeContrast(imgGrayscale):
structuringElement = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3))
imgTopHat = cv2.morphologyEx(imgGrayscale, cv2.MORPH_TOPHAT, structuringElement)
imgBlackHat = cv2.morphologyEx(imgGrayscale, cv2.MORPH_BLACKHAT, structuringElement)
imgGrayscalePlusTopHat = cv2.add(imgGrayscale, imgTopHat)
imgGrayscalePlusTopHatMinusBlackHat = cv2.subtract(imgGrayscalePlusTopHat, imgBlackHat)
return imgGrayscalePlusTopHatMinusBlackHat
# end function
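if __name__ == "__main__":
    # Hedged demo (addition): run the full pipeline on an image from disk.
    # "plate.jpg" is a hypothetical input; cv2.imread returns None on failure.
    imgOriginal = cv2.imread("plate.jpg")
    if imgOriginal is not None:
        imgGrayscale, imgThresh = preprocess(imgOriginal)
        cv2.imwrite("plate_thresh.png", imgThresh)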
| 34.083333
| 163
| 0.630807
|
4a17469c34caf6ee91a6f3cba43353f7c4fd839f
| 16,261
|
py
|
Python
|
tests/algorithms/test_enumeration.py
|
PermutaTriangle/Tilings
|
227d4d014bf07b4037b7af1b35a1f2139ebe7e92
|
[
"BSD-3-Clause"
] | 5
|
2020-04-30T20:19:27.000Z
|
2021-03-06T20:20:14.000Z
|
tests/algorithms/test_enumeration.py
|
PermutaTriangle/Tilings
|
227d4d014bf07b4037b7af1b35a1f2139ebe7e92
|
[
"BSD-3-Clause"
] | 129
|
2019-06-04T14:50:58.000Z
|
2022-03-29T13:47:00.000Z
|
tests/algorithms/test_enumeration.py
|
PermutaTriangle/Tilings
|
227d4d014bf07b4037b7af1b35a1f2139ebe7e92
|
[
"BSD-3-Clause"
] | 7
|
2020-06-11T14:24:06.000Z
|
2020-09-14T21:51:57.000Z
|
import abc
import pytest
import sympy
from comb_spec_searcher.utils import taylor_expand
from tilings import GriddedPerm, Tiling
from tilings.algorithms import (
DatabaseEnumeration,
LocalEnumeration,
MonotoneTreeEnumeration,
)
from tilings.exception import InvalidOperationError
class CommonTest(abc.ABC):
@abc.abstractmethod
@pytest.fixture
def enum_verified(self):
raise NotImplementedError
@abc.abstractmethod
@pytest.fixture
def enum_not_verified(self):
raise NotImplementedError
def test_verified(self, enum_verified, enum_not_verified):
assert enum_verified.verified()
assert not enum_not_verified.verified()
@abc.abstractmethod
def test_get_genf(self, enum_verified):
raise NotImplementedError
def test_get_genf_not_verified(self, enum_not_verified):
with pytest.raises(InvalidOperationError):
enum_not_verified.get_genf()
class TestLocalEnumeration(CommonTest):
@pytest.fixture
def enum_verified(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1, 2), ((0, 0),) * 3),
GriddedPerm((0, 2, 1), ((1, 0),) * 3),
GriddedPerm((0, 1, 2), ((1, 0),) * 3),
GriddedPerm((0, 1), ((1, 1),) * 2),
],
requirements=[
[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((1, 0), ((0, 0),) * 2),
],
[GriddedPerm((0,), ((1, 0),))],
],
)
return LocalEnumeration(t)
@pytest.fixture
def enum_not_verified(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1, 2), ((0, 0),) * 3),
GriddedPerm((0, 2, 1), ((1, 0),) * 3),
GriddedPerm((0, 1, 2), ((1, 0),) * 3),
GriddedPerm((0, 1), ((1, 1),) * 2),
GriddedPerm((0, 1), ((0, 0), (1, 1))),
],
requirements=[
[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((1, 0),) * 2),
]
],
)
return LocalEnumeration(t)
@pytest.fixture
def onebyone_enum(self):
return LocalEnumeration(Tiling.from_string("123"))
@pytest.fixture
def enum_no_req(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1, 2), ((0, 0),) * 3),
GriddedPerm((0, 2, 1), ((1, 0),) * 3),
GriddedPerm((0, 1, 2), ((1, 0),) * 3),
GriddedPerm((0, 1), ((1, 1),) * 2),
],
requirements=[
[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((1, 0),) * 2),
]
],
)
return LocalEnumeration(t, no_req=True)
def test_req_is_single_cell(self):
assert LocalEnumeration._req_is_single_cell([GriddedPerm((0,), ((0, 1),))])
assert LocalEnumeration._req_is_single_cell(
[GriddedPerm((0, 1), ((0, 1), (0, 1)))]
)
assert not LocalEnumeration._req_is_single_cell(
[GriddedPerm((0, 1), ((0, 0), (0, 1)))]
)
assert not LocalEnumeration._req_is_single_cell(
[GriddedPerm((0,), ((0, 1),)), GriddedPerm((0,), ((1, 0),))]
)
assert LocalEnumeration._req_is_single_cell(
[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((1, 0), ((0, 0),) * 2),
]
)
assert not LocalEnumeration._req_is_single_cell(
[
GriddedPerm((0, 1), ((1, 0),) * 2),
GriddedPerm((1, 0), ((0, 0),) * 2),
]
)
def test_verified(self, enum_verified, enum_not_verified):
assert enum_verified.verified()
assert not enum_not_verified.verified()
def test_crossing_req_list(self):
"""
        This tiling is not locally verified because the requirement list spans
        multiple cells.
"""
t = Tiling(
obstructions=[
GriddedPerm((0, 2, 1), ((0, 1),) * 3),
GriddedPerm((0, 2, 1), ((1, 0),) * 3),
],
requirements=[
[
GriddedPerm((0,), ((0, 1),)),
GriddedPerm((2, 0, 1), ((1, 0),) * 3),
],
[GriddedPerm((0,), ((1, 0),))],
],
)
assert not LocalEnumeration(t).verified()
def test_get_genf(self, enum_verified):
with pytest.raises(NotImplementedError):
enum_verified.get_genf()
def test_get_genf_not_verified(self, enum_not_verified):
with pytest.raises(InvalidOperationError):
enum_not_verified.get_genf()
def test_1x1_verified(self, onebyone_enum):
assert onebyone_enum.verified()
def test_no_req_option(self, enum_no_req):
assert not enum_no_req.verified()
class TestMonotoneTreeEnumeration(CommonTest):
@pytest.fixture
def enum_verified(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((0, 1),) * 2),
GriddedPerm((0, 1), ((0, 2),) * 2),
GriddedPerm((0, 1), ((2, 0),) * 2),
GriddedPerm((0, 1, 2), ((1, 1),) * 3),
]
)
return MonotoneTreeEnumeration(t)
@pytest.fixture
def enum_not_verified(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((0, 1),) * 2),
GriddedPerm((0, 1), ((0, 2),) * 2),
GriddedPerm((0, 1), ((2, 0),) * 2),
GriddedPerm((0, 1), ((2, 2),) * 2),
GriddedPerm((0, 1, 2), ((1, 1),) * 3),
]
)
return MonotoneTreeEnumeration(t)
@pytest.fixture
def enum_with_list_req(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0), (0, 0))),
GriddedPerm((0, 1), ((1, 0), (1, 0))),
],
requirements=[
[
GriddedPerm((0,), ((0, 0),)),
GriddedPerm((0,), ((1, 0),)),
]
],
)
return MonotoneTreeEnumeration(t)
@pytest.fixture
def onebyone_enum(self):
return MonotoneTreeEnumeration(Tiling.from_string("123"))
@pytest.fixture
def enum_with_crossing(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((0, 1),) * 2),
GriddedPerm((0, 1), ((0, 2),) * 2),
GriddedPerm((0, 1), ((2, 0),) * 2),
GriddedPerm((0, 1), ((0, 0), (0, 1))),
GriddedPerm((0, 1, 2), ((1, 1),) * 3),
]
)
return MonotoneTreeEnumeration(t)
def test_visited_cells_aligned(self, enum_verified):
visited = {(1, 1), (0, 1)}
assert sorted(enum_verified._visted_cells_aligned((0, 2), visited)) == [(0, 1)]
def test_cell_tree_traversal(self, enum_verified):
order = list(enum_verified._cell_tree_traversal((1, 1)))
assert len(order) == 4
assert (1, 1) not in order
assert order[0] == (0, 1)
assert order[3] == (2, 0)
assert set(order[1:3]) == {(0, 0), (0, 2)}
def test_not_verified(self, enum_with_list_req, onebyone_enum, enum_with_crossing):
assert not enum_with_crossing.verified()
assert not enum_with_list_req.verified()
assert not onebyone_enum.verified()
forest_tiling = Tiling(
obstructions=[
GriddedPerm((0,), ((0, 0),)),
GriddedPerm((0,), ((1, 1),)),
GriddedPerm((0,), ((2, 1),)),
GriddedPerm((0, 1), ((1, 0), (1, 0))),
GriddedPerm((0, 1), ((2, 0), (2, 0))),
GriddedPerm((0, 1, 2), ((0, 1), (0, 1), (0, 1))),
],
requirements=[[GriddedPerm((0,), ((0, 1),))]],
)
assert not MonotoneTreeEnumeration(forest_tiling).verified()
def test_get_genf(self, enum_verified):
x = sympy.Symbol("x")
expected_gf = -(
sympy.sqrt(
-(4 * x ** 3 - 14 * x ** 2 + 8 * x - 1) / (2 * x ** 2 - 4 * x + 1)
)
- 1
) / (2 * x * (x ** 2 - 3 * x + 1))
assert sympy.simplify(enum_verified.get_genf() - expected_gf) == 0
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((1, 0),) * 2),
]
)
enum_no_start = MonotoneTreeEnumeration(t)
expected_gf = -1 / ((x - 1) * (x / (x - 1) + 1))
assert sympy.simplify(enum_no_start.get_genf() - expected_gf) == 0
def test_get_genf_simple(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((1, 0), ((1, 0),) * 2),
]
)
enum = MonotoneTreeEnumeration(t)
print(t)
assert enum.verified()
assert sympy.simplify(enum.get_genf() - sympy.sympify("1/(1-2*x)")) == 0
def test_with_finite_monotone_cell(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((1, 0), ((0, 0),) * 2),
GriddedPerm((0, 1), ((1, 0),) * 2),
GriddedPerm((1, 0), ((1, 0),) * 2),
]
)
enum = MonotoneTreeEnumeration(t)
print(t)
assert enum.verified()
assert enum.get_genf().expand() == sympy.sympify("1+2*x+2*x**2")
def test_with_finite_monotone_cell2(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((1, 0), ((0, 1),) * 2),
GriddedPerm((0, 1), ((0, 1),) * 2),
GriddedPerm((1, 0), ((1, 1),) * 2),
]
)
enum = MonotoneTreeEnumeration(t)
print(t)
assert enum.verified()
assert (
sympy.sympify("x/(1-x)**4 + 1/(1-x)**2") - enum.get_genf()
).simplify() == 0
def test_interleave_fixed_length(self, enum_verified):
track_var = MonotoneTreeEnumeration._tracking_var
cell_var = enum_verified._cell_variable((1, 0))
dummy_var = enum_verified._cell_variable((0, 0))
x = sympy.var("x")
F = x ** 8 * track_var ** 3 * dummy_var ** 3
assert (
enum_verified._interleave_fixed_length(F, (1, 0), 1)
== 4 * x ** 9 * dummy_var ** 3 * cell_var ** 1
)
assert (
enum_verified._interleave_fixed_length(F, (1, 0), 3)
== 20 * x ** 11 * dummy_var ** 3 * cell_var ** 3
)
assert (
enum_verified._interleave_fixed_length(F, (1, 0), 0)
== x ** 8 * dummy_var ** 3
)
def test_interleave_fixed_lengths(self, enum_verified):
track_var = MonotoneTreeEnumeration._tracking_var
cell_var = enum_verified._cell_variable((1, 0))
dummy_var = enum_verified._cell_variable((0, 0))
x = sympy.var("x")
F = x ** 8 * track_var ** 3 * dummy_var ** 3
assert (
enum_verified._interleave_fixed_lengths(F, (1, 0), 1, 1)
== 4 * x ** 9 * dummy_var ** 3 * cell_var ** 1
)
assert (
enum_verified._interleave_fixed_lengths(F, (1, 0), 3, 3)
== 20 * x ** 11 * dummy_var ** 3 * cell_var ** 3
)
assert (
enum_verified._interleave_fixed_lengths(F, (1, 0), 0, 0)
== x ** 8 * dummy_var ** 3
)
assert (
enum_verified._interleave_fixed_lengths(F, (1, 0), 0, 2)
== x ** 8 * dummy_var ** 3
+ 4 * x ** 9 * dummy_var ** 3 * cell_var ** 1
+ 10 * x ** 10 * dummy_var ** 3 * cell_var ** 2
)
assert (
enum_verified._interleave_fixed_lengths(F, (1, 0), 1, 3)
== 4 * x ** 9 * dummy_var ** 3 * cell_var ** 1
+ 10 * x ** 10 * dummy_var ** 3 * cell_var ** 2
+ 20 * x ** 11 * dummy_var ** 3 * cell_var ** 3
)
def test_genf_with_req(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((1, 0),) * 2),
],
requirements=[
[GriddedPerm((1, 0), ((0, 0),) * 2)],
[GriddedPerm((0,), ((1, 0),))],
],
)
enum = MonotoneTreeEnumeration(t)
print(t)
assert enum.verified()
genf = enum.get_genf().expand()
terms = [0, 0, 0, 3, 10, 25, 56, 119, 246, 501, 1012]
assert taylor_expand(genf) == terms
def test_genf_with_big_finite_cell(self):
t = Tiling(
obstructions=[
GriddedPerm((0, 1), ((0, 0),) * 2),
GriddedPerm((0, 1), ((1, 0),) * 2),
GriddedPerm((3, 2, 1, 0), ((0, 0),) * 4),
GriddedPerm((3, 2, 1, 0), ((1, 0),) * 4),
]
)
enum = MonotoneTreeEnumeration(t)
print(t)
assert enum.verified()
genf = enum.get_genf().expand()
x = sympy.var("x")
assert (
genf
== 1
+ 2 * x
+ 4 * x ** 2
+ 8 * x ** 3
+ 14 * x ** 4
+ 20 * x ** 5
+ 20 * x ** 6
)
def test_with_two_reqs(self):
t = Tiling(
obstructions=(
GriddedPerm((0,), ((1, 1),)),
GriddedPerm((0, 1), ((0, 0), (0, 0))),
GriddedPerm((0, 1), ((0, 1), (0, 1))),
GriddedPerm((0, 1), ((1, 0), (1, 0))),
GriddedPerm((1, 0), ((0, 1), (0, 1))),
),
requirements=(
(GriddedPerm((0,), ((0, 0),)),),
(GriddedPerm((0,), ((0, 1),)),),
),
)
enum = MonotoneTreeEnumeration(t)
expected_enum = [0, 0, 2, 7, 19, 47, 111, 255, 575, 1279, 2815]
assert enum.verified()
assert taylor_expand(enum.get_genf()) == expected_enum
def test_corner(self):
t = Tiling(
obstructions=(
GriddedPerm((0,), ((1, 1),)),
GriddedPerm((0, 1), ((0, 0), (0, 0))),
GriddedPerm((0, 1), ((0, 1), (0, 1))),
GriddedPerm((0, 1), ((1, 0), (1, 0))),
),
requirements=((GriddedPerm((0,), ((0, 0),)),),),
)
enum = MonotoneTreeEnumeration(t)
expected_enum = [0, 1, 5, 17, 50, 138, 370, 979, 2575, 6755, 17700]
assert enum.verified()
assert taylor_expand(enum.get_genf()) == expected_enum
class TestDatabaseEnumeration(CommonTest):
@pytest.fixture
def enum_verified(self):
t = Tiling.from_string("123_132_231")
return DatabaseEnumeration(t)
@pytest.fixture
def enum_not_verified(self):
t = Tiling.from_string("1324")
return DatabaseEnumeration(t)
def test_get_genf(self, enum_verified):
assert enum_verified.get_genf() == sympy.sympify(
"(x**2 - x + 1)/(x**2 - 2*x + 1)"
)
@pytest.mark.slow
def test_load_verified_tilings(self):
DatabaseEnumeration.load_verified_tiling()
assert DatabaseEnumeration.all_verified_tilings
sample = next(iter(DatabaseEnumeration.all_verified_tilings))
Tiling.from_bytes(sample)
def test_verification_with_cache(self):
t = Tiling.from_string("123_132_231")
DatabaseEnumeration.all_verified_tilings = frozenset()
assert DatabaseEnumeration(t).verified()
DatabaseEnumeration.all_verified_tilings = frozenset([1, 2, 3, 4])
assert not DatabaseEnumeration(t).verified()
DatabaseEnumeration.all_verified_tilings = frozenset([t.to_bytes()])
assert DatabaseEnumeration(t).verified()
| 34.233684
| 87
| 0.480967
|
4a17474b6359ebc975e952c45706005671144f00
| 971
|
py
|
Python
|
tensorflow/contrib/framework/python/framework/__init__.py
|
ln0119/tensorflow-fast-rcnn
|
e937e6394818c9a320754237651d7fe083b1020d
|
[
"Apache-2.0"
] | 73
|
2017-01-05T09:06:08.000Z
|
2021-11-06T14:00:50.000Z
|
tensorflow/contrib/framework/python/framework/__init__.py
|
minhhoai2/tensorflow
|
da88903d5e29230d68d861053aa1dea1432c0696
|
[
"Apache-2.0"
] | 8
|
2017-04-10T10:36:20.000Z
|
2021-02-07T01:02:32.000Z
|
tensorflow/contrib/framework/python/framework/__init__.py
|
minhhoai2/tensorflow
|
da88903d5e29230d68d861053aa1dea1432c0696
|
[
"Apache-2.0"
] | 151
|
2016-11-10T09:01:15.000Z
|
2022-01-18T08:13:49.000Z
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A module containing TensorFlow ops whose API may change in the future."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.framework.python.framework.tensor_util import *
| 42.217391
| 80
| 0.719876
|
4a174840726b65ed3529638839d76d548d17ab40
| 4,293
|
py
|
Python
|
django_liquid/liquid.py
|
jg-rp/django-liquid
|
33363b4c52b16050508f86b8b9f6a93fa5fa6a68
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
django_liquid/liquid.py
|
jg-rp/django-liquid
|
33363b4c52b16050508f86b8b9f6a93fa5fa6a68
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
django_liquid/liquid.py
|
jg-rp/django-liquid
|
33363b4c52b16050508f86b8b9f6a93fa5fa6a68
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
"""A Liquid template engine for Django."""
from pathlib import Path
import liquid
from django.conf import settings
from django.template import TemplateDoesNotExist
from django.template import TemplateSyntaxError
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from django.template.backends.base import BaseEngine
class Liquid(BaseEngine):
app_dirname = "liquid"
def __init__(self, params):
params = params.copy()
options = params.pop("OPTIONS").copy()
super().__init__(params)
self.context_processors = options.pop("context_processors", [])
environment = options.pop("environment", "liquid.Environment")
environment_cls = import_string(environment)
if "loader" not in options:
options["loader"] = liquid.FileSystemLoader(self.template_dirs)
options.setdefault("autoescape", True)
options.setdefault(
"undefined", liquid.DebugUndefined if settings.DEBUG else liquid.Undefined
)
self.env: liquid.Environment = environment_cls(**options)
def from_string(self, template_code):
return Template(self.env.from_string(template_code), self)
def get_template(self, template_name):
try:
return Template(self.env.get_template(template_name), self)
except liquid.exceptions.TemplateNotFound as exc:
raise TemplateDoesNotExist(exc.filename, backend=self) from exc
except liquid.exceptions.LiquidSyntaxError as exc:
new = TemplateSyntaxError(exc.args)
new.template_debug = get_exception_info(exc)
raise new from exc
@cached_property
def template_context_processors(self):
return [import_string(path) for path in self.context_processors]
class Template:
def __init__(self, template, backend):
self.template = template
self.backend = backend
if template.path:
name = str(template.path)
else:
name = "<template>"
self.origin = Origin(
name=name,
template_name=template.name or None,
)
def render(self, context=None, request=None):
from django.template.backends.utils import csrf_input_lazy, csrf_token_lazy
if context is None:
context = {}
if request is not None:
context["request"] = request
context["csrf_input"] = csrf_input_lazy(request)
context["csrf_token"] = csrf_token_lazy(request)
for context_processor in self.backend.template_context_processors:
context.update(context_processor(request))
try:
return self.template.render(context)
except liquid.exceptions.LiquidSyntaxError as exc:
new = TemplateSyntaxError(exc.args)
new.template_debug = get_exception_info(exc)
raise new from exc
class Origin:
"""A container to hold debug information as described in the template API
documentation.
"""
def __init__(self, name, template_name):
self.name = name
self.template_name = template_name
def get_exception_info(exception):
"""Format exception information for display on the debug page using the
structure described in the template API documentation.
"""
context_lines = 10
lineno = exception.linenum
source = exception.source
if source is None:
exception_file = Path(exception.filename)
if exception_file.exists():
with open(exception_file, "r") as fd:
source = fd.read()
if source is not None:
lines = list(enumerate(source.strip().split("\n"), start=1))
during = lines[lineno - 1][1]
total = len(lines)
top = max(0, lineno - context_lines - 1)
bottom = min(total, lineno + context_lines)
else:
during = ""
lines = []
total = top = bottom = 0
return {
"name": exception.name,
"message": exception.message,
"source_lines": lines[top:bottom],
"line": lineno,
"before": "",
"during": during,
"after": "",
"total": total,
"top": top,
"bottom": bottom,
}
| 31.335766
| 86
| 0.641276
|
4a17492f48a79db9dbb3fd0f8d4f9fad56e57c5d
| 7,656
|
py
|
Python
|
yt/frontends/art/fields.py
|
tukss/yt
|
8bf6fce609cad3d4b291ebd94667019ab2e18377
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
yt/frontends/art/fields.py
|
tukss/yt
|
8bf6fce609cad3d4b291ebd94667019ab2e18377
|
[
"BSD-3-Clause-Clear"
] | 8
|
2020-04-02T16:51:49.000Z
|
2022-01-11T14:12:44.000Z
|
yt/frontends/art/fields.py
|
tukss/yt
|
8bf6fce609cad3d4b291ebd94667019ab2e18377
|
[
"BSD-3-Clause-Clear"
] | 2
|
2020-08-12T15:46:11.000Z
|
2021-02-09T13:09:17.000Z
|
from yt.fields.field_info_container import FieldInfoContainer
b_units = "code_magnetic"
ra_units = "code_length / code_time**2"
rho_units = "code_mass / code_length**3"
vel_units = "code_velocity"
# NOTE: ARTIO uses momentum density.
mom_units = "code_mass / (code_length**2 * code_time)"
en_units = "code_mass*code_velocity**2/code_length**3"
class ARTFieldInfo(FieldInfoContainer):
known_other_fields = (
("Density", (rho_units, ["density"], None)),
("TotalEnergy", (en_units, ["total_energy"], None)),
("XMomentumDensity", (mom_units, ["momentum_x"], None)),
("YMomentumDensity", (mom_units, ["momentum_y"], None)),
("ZMomentumDensity", (mom_units, ["momentum_z"], None)),
("Pressure", ("", ["pressure"], None)), # Unused
("Gamma", ("", ["gamma"], None)),
("GasEnergy", (en_units, ["thermal_energy"], None)),
("MetalDensitySNII", (rho_units, ["metal_ii_density"], None)),
("MetalDensitySNIa", (rho_units, ["metal_ia_density"], None)),
("PotentialNew", ("", ["potential"], None)),
("PotentialOld", ("", ["gas_potential"], None)),
)
known_particle_fields = (
("particle_position_x", ("code_length", [], None)),
("particle_position_y", ("code_length", [], None)),
("particle_position_z", ("code_length", [], None)),
("particle_velocity_x", (vel_units, [], None)),
("particle_velocity_y", (vel_units, [], None)),
("particle_velocity_z", (vel_units, [], None)),
("particle_mass", ("code_mass", [], None)),
("particle_index", ("", [], None)),
("particle_species", ("", ["particle_type"], None)),
("particle_creation_time", ("Gyr", [], None)),
("particle_mass_initial", ("code_mass", [], None)),
("particle_metallicity1", ("", [], None)),
("particle_metallicity2", ("", [], None)),
)
def setup_fluid_fields(self):
unit_system = self.ds.unit_system
def _temperature(field, data):
r0 = data.ds.parameters["boxh"] / data.ds.parameters["ng"]
tr = data.ds.quan(3.03e5 * r0 ** 2, "K/code_velocity**2")
tr *= data.ds.parameters["wmu"] * data.ds.parameters["Om0"]
tr *= data.ds.parameters["gamma"] - 1.0
tr /= data.ds.parameters["aexpn"] ** 2
return tr * data["art", "GasEnergy"] / data["art", "Density"]
self.add_field(
("gas", "temperature"),
sampling_type="cell",
function=_temperature,
units=unit_system["temperature"],
)
def _get_vel(axis):
def velocity(field, data):
return data[("gas", f"momentum_{axis}")] / data[("gas", "density")]
return velocity
for ax in "xyz":
self.add_field(
("gas", f"velocity_{ax}"),
sampling_type="cell",
function=_get_vel(ax),
units=unit_system["velocity"],
)
def _momentum_magnitude(field, data):
tr = (
data["gas", "momentum_x"] ** 2
+ data["gas", "momentum_y"] ** 2
+ data["gas", "momentum_z"] ** 2
) ** 0.5
tr *= data["index", "cell_volume"].in_units("cm**3")
return tr
self.add_field(
("gas", "momentum_magnitude"),
sampling_type="cell",
function=_momentum_magnitude,
units=unit_system["momentum"],
)
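# Note: the factor of cell_volume above converts ART's momentum *density*
# into a per-cell momentum, so the division by cell_mass in the next field
# recovers a velocity.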
def _velocity_magnitude(field, data):
tr = data["gas", "momentum_magnitude"]
tr /= data["gas", "cell_mass"]
return tr
self.add_field(
("gas", "velocity_magnitude"),
sampling_type="cell",
function=_velocity_magnitude,
units=unit_system["velocity"],
)
def _metal_density(field, data):
tr = data["gas", "metal_ia_density"]
tr += data["gas", "metal_ii_density"]
return tr
self.add_field(
("gas", "metal_density"),
sampling_type="cell",
function=_metal_density,
units=unit_system["density"],
)
def _metal_mass_fraction(field, data):
tr = data["gas", "metal_density"]
tr /= data["gas", "density"]
return tr
self.add_field(
("gas", "metal_mass_fraction"),
sampling_type="cell",
function=_metal_mass_fraction,
units="",
)
def _H_mass_fraction(field, data):
tr = 1.0 - data.ds.parameters["Y_p"] - data["gas", "metal_mass_fraction"]
return tr
self.add_field(
("gas", "H_mass_fraction"),
sampling_type="cell",
function=_H_mass_fraction,
units="",
)
def _metallicity(field, data):
tr = data["gas", "metal_mass_fraction"]
tr /= data["gas", "H_mass_fraction"]
return tr
self.add_field(
("gas", "metallicity"),
sampling_type="cell",
function=_metallicity,
units="",
)
atoms = [
"C",
"N",
"O",
"F",
"Ne",
"Na",
"Mg",
"Al",
"Si",
"P",
"S",
"Cl",
"Ar",
"K",
"Ca",
"Sc",
"Ti",
"V",
"Cr",
"Mn",
"Fe",
"Co",
"Ni",
"Cu",
"Zn",
]
def _specific_metal_density_function(atom):
def _specific_metal_density(field, data):
nucleus_densityIa = (
data["gas", "metal_ia_density"] * SNIa_abundance[atom]
)
nucleus_densityII = (
data["gas", "metal_ii_density"] * SNII_abundance[atom]
)
return nucleus_densityIa + nucleus_densityII
return _specific_metal_density
for atom in atoms:
self.add_field(
("gas", f"{atom}_nuclei_mass_density"),
sampling_type="cell",
function=_specific_metal_density_function(atom),
units=unit_system["density"],
)
# based on Iwamoto et al 1999
# mass fraction of each atom in SNIa metal
SNIa_abundance = {
"H": 0.00e00,
"He": 0.00e00,
"C": 3.52e-02,
"N": 8.47e-07,
"O": 1.04e-01,
"F": 4.14e-10,
"Ne": 3.30e-03,
"Na": 4.61e-05,
"Mg": 6.25e-03,
"Al": 7.19e-04,
"Si": 1.14e-01,
"P": 2.60e-04,
"S": 6.35e-02,
"Cl": 1.27e-04,
"Ar": 1.14e-02,
"K": 5.72e-05,
"Ca": 8.71e-03,
"Sc": 1.61e-07,
"Ti": 2.50e-04,
"V": 5.46e-05,
"Cr": 6.19e-03,
"Mn": 6.47e-03,
"Fe": 5.46e-01,
"Co": 7.59e-04,
"Ni": 9.17e-02,
"Cu": 2.19e-06,
"Zn": 2.06e-05,
}
# mass fraction of each atom in SNII metal
SNII_abundance = {
"H": 0.00e00,
"He": 0.00e00,
"C": 3.12e-02,
"N": 6.15e-04,
"O": 7.11e-01,
"F": 4.57e-10,
"Ne": 9.12e-02,
"Na": 2.56e-03,
"Mg": 4.84e-02,
"Al": 5.83e-03,
"Si": 4.81e-02,
"P": 4.77e-04,
"S": 1.62e-02,
"Cl": 4.72e-05,
"Ar": 3.15e-03,
"K": 2.65e-05,
"Ca": 2.31e-03,
"Sc": 9.02e-08,
"Ti": 5.18e-05,
"V": 3.94e-06,
"Cr": 5.18e-04,
"Mn": 1.52e-04,
"Fe": 3.58e-02,
"Co": 2.86e-05,
"Ni": 2.35e-03,
"Cu": 4.90e-07,
"Zn": 7.46e-06,
}
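# Note: the abundance tables above are locals of setup_fluid_fields defined
# after _specific_metal_density_function; the nested functions look them up
# lazily when the derived fields are first evaluated, so this works.
# Illustrative example (hypothetical densities, not part of the original
# file): for iron,
#   rho_fe = rho_ia * SNIa_abundance["Fe"] + rho_ii * SNII_abundance["Fe"]
# i.e. each per-atom nuclei density is a linear combination of the SNIa and
# SNII metal channels weighted by the mass fractions above.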
| 29.221374
| 85
| 0.4872
|
4a1749dd9305207b3ae6f3927d3894321b6623fa
| 21,917
|
py
|
Python
|
pyKinectTools/scripts/PoseInitAndTracking_PF.py
|
colincsl/pyKinectTools
|
a84bb5b7ff9dd613576415932865c2ad435520b3
|
[
"BSD-2-Clause-FreeBSD"
] | 33
|
2015-04-07T16:28:04.000Z
|
2021-11-22T00:28:43.000Z
|
pyKinectTools/scripts/PoseInitAndTracking_PF.py
|
colincsl/pyKinectTools
|
a84bb5b7ff9dd613576415932865c2ad435520b3
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
pyKinectTools/scripts/PoseInitAndTracking_PF.py
|
colincsl/pyKinectTools
|
a84bb5b7ff9dd613576415932865c2ad435520b3
|
[
"BSD-2-Clause-FreeBSD"
] | 13
|
2015-04-07T16:28:34.000Z
|
2021-04-26T08:04:36.000Z
|
"""
Main file for training multi-camera pose
"""
import sys
import time
import traceback
import itertools as it
from joblib import Parallel, delayed
import cPickle as pickle
import optparse
from copy import deepcopy
import numpy as np
import scipy.misc as sm
import scipy.ndimage as nd
import Image
import cv2
import skimage
from skimage import color
from skimage.draw import line, circle
from skimage.color import rgb2gray,gray2rgb, rgb2lab
from skimage.feature import local_binary_pattern, match_template, peak_local_max
# from RGBDActionDatasets.dataset_readers.KinectPlayer import KinectPlayer, display_help
from RGBDActionDatasets.dataset_readers.RealtimePlayer import RealtimePlayer
# from pyKinectTools.dataset_readers.KinectPlayer import KinectPlayer, display_help
# from pyKinectTools.dataset_readers.RealtimePlayer import RealtimePlayer
from RGBDActionDatasets.dataset_readers.MHADPlayer import MHADPlayer
# from pyKinectTools.dataset_readers.MHADPlayer import MHADPlayer
from pyKinectTools.utils.DepthUtils import *
from pyKinectTools.utils.SkeletonUtils import display_skeletons, transform_skels, kinect_to_msr_skel, msr_to_kinect_skel
from pyKinectTools.algs.GeodesicSkeleton import *
from pyKinectTools.algs.PoseTracking import *
from sklearn.mixture import GMM
from sklearn.cluster import KMeans
from IPython import embed
np.seterr(all='ignore')
# -------------------------MAIN------------------------------------------
def main(visualize=False, learn=False, actions=[1], subjects=[1], n_frames=220):
# search_joints=[0,2,4,5,7,10,13]
search_joints=range(14)
# interactive = True
interactive = False
save_results = False
if 0:
learn = False
# learn = learn
else:
learn = True
actions = [1]
subjects = [1]
# actions = range(1,10)
# subjects = range(1,9)
if 1:
dataset = 'MHAD'
cam = MHADPlayer(base_dir='/Users/colin/Data/BerkeleyMHAD/', kinect=1, actions=actions, subjects=subjects, reps=[1], get_depth=True, get_color=True, get_skeleton=True, fill_images=False)
elif 0:
dataset = 'JHU'
cam = KinectPlayer(base_dir='./', device=1, bg_subtraction=True, get_depth=True, get_color=True, get_skeleton=True, fill_images=False)
bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/CIRL_Background_A.tif')
# bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/Wall_Background_A.tif')
# bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/Office_Background_A.tif')
# bg = Image.open('/Users/colin/Data/WICU_May2013_C2/WICU_C2_Background.tif')
# cam = KinectPlayer(base_dir='./', device=2, bg_subtraction=True, get_depth=True, get_color=True, get_skeleton=True, fill_images=False)
# bg = Image.open('/Users/colin/Data/JHU_RGBD_Pose/CIRL_Background_B.tif')
cam.bgSubtraction.backgroundModel = np.array(bg.getdata()).reshape([240,320]).clip(0, 4500) - 000.
else:
# Realtime
dataset = 'RT'
cam = RealtimePlayer(device=0, edit=True, get_depth=True, get_color=True, get_skeleton=True)
# cam.set_bg_model('box', 2500)
tmp = cam.depthIm
tmp[tmp>4000] = 4000
cam.set_bg_model(bg_type='static', param=tmp)
# embed()
height, width = cam.depthIm.shape
skel_previous = None
face_detector = FaceDetector()
hand_detector = HandDetector(cam.depthIm.shape)
n_joints = 14
# gmm = GMM(n_components=n_joints)
kmeans = KMeans(n_clusters=n_joints, n_init=4, max_iter=100)
# Video writer
# video_writer = cv2.VideoWriter("/Users/colin/Desktop/test.avi", cv2.cv.CV_FOURCC('M','J','P','G'), 15, (640,480))
# Save Background model
# im = Image.fromarray(cam.depthIm.astype(np.int32), 'I')
# im.save("/Users/Colin/Desktop/k2.png")
# Setup pose database
append = True
# append = False
# pose_database = PoseDatabase("PoseDatabase.pkl", learn=learn, search_joints=[0,4,7,10,13], append=append)
# pose_database = PoseDatabase("PoseDatabase.pkl", learn=learn, search_joints=search_joints,
# append=append, scale=1.1, n_clusters=-1)#1000
pose_database = PoseDatabase("PoseDatabase.pkl", learn=learn, search_joints=search_joints,
append=append, scale=1.0, n_clusters=1500)
pose_prob = np.ones(len(pose_database.database), dtype=np.float)/len(pose_database.database)
# embed()
# Setup Tracking
skel_init, joint_size, constraint_links, features_joints,skel_parts, convert_to_kinect = get_14_joint_properties()
constraint_values = []
for c in constraint_links:
constraint_values += [np.linalg.norm(skel_init[c[0]]-skel_init[c[1]], 2)]
constraint_values = np.array(constraint_values)
skel_current = None#skel_init.copy()
skel_previous = None#skel_current.copy()
skel_previous_uv = None
# Evaluation
accuracy_all_db = []
accuracy_all_track = []
joint_accuracy_db = []
joint_accuracy_track = []
if not learn:
try:
results = pickle.load(open('Accuracy_Results.pkl'))
except:
results = { 'subject':[], 'action':[], 'accuracy_all':[],
'accuracy_mean':[], 'joints_all':[],
'joint_mean':[], 'joint_median':[]}
frame_count = 0
frame_rate = 10
if dataset == 'JHU':
cam.next(350)
# cam.next(700)
pass
frame_prev = 0
try:
# if 1:
while cam.next(frame_rate):# and frame_count < n_frames:
# Print every once in a while
if frame_count - frame_prev > 99:
print ""
print "Frame #{0:d}".format(frame_count)
frame_prev = frame_count
if dataset in ['MHAD', 'JHU']:
users = deepcopy(cam.users)
else:
users = deepcopy(cam.user_skels)
ground_truth = False
if dataset in ['RT','JHU']:
if len(users) > 0:
if not np.any(users[0][0] == -1):
ground_truth = True
users[0][:,1] *= -1
cam.users_uv_msr = [cam.camera_model.world2im(users[0], cam.depthIm.shape)]
else:
ground_truth = True
# Apply mask to image
mask = cam.get_person(200) == 1 # > 0
# cv2.imshow('bg',(mask*255).astype(np.uint8))
# cv2.imshow('bg',cam.colorIm)
# cv2.waitKey(1)
if isinstance(mask, bool) or not np.any(mask):
# print "No mask"
continue
# cv2.imshow('bg',cam.bgSubtraction.backgroundModel)
# cv2.imshow('bg',(mask*255).astype(np.uint8))
im_depth = cam.depthIm
# if dataset in ['RT']:
# cam.depthIm[cam.depthIm>2500] = 0
if cam.colorIm is not None:
im_color = cam.colorIm*mask[:,:,None]
cam.colorIm *= mask[:,:,None]
if ground_truth:
pose_truth = users[0]
pose_truth_uv = cam.users_uv_msr[0]
# Get bounding box around person
box = nd.find_objects(mask)[0]
d = 20
# Widen box
box = (slice(np.maximum(box[0].start-d, 0), \
np.minimum(box[0].stop+d, height-1)), \
slice(np.maximum(box[1].start-d, 0), \
np.minimum(box[1].stop+d, width-1)))
box_corner = [box[0].start,box[1].start]
mask_box = mask[box]
''' ---------- ----------------------------------- --------'''
''' ---------- ----------------------------------- --------'''
''' ---- Calculate Detectors ---- '''
# Face detection
# face_detector.run(im_color[box])
# Skin detection
# hand_markers = hand_detector.run(im_color[box], n_peaks=3)
hand_markers = []
# Calculate Geodesic Extrema
im_pos = cam.camera_model.im2PosIm(cam.depthIm*mask)[box]
# geodesic_markers = geodesic_extrema_MPI(im_pos, iterations=5, visualize=False)
if 1:
''' Find pts using kmeans or gmm '''
pts = im_pos[np.nonzero(im_pos)].reshape([-1,3])
# gmm.fit(pts)
kmeans.fit(pts)
# pts = cam.camera_model.world2im(gmm.means_)
pts = cam.camera_model.world2im(kmeans.cluster_centers_)
geodesic_markers = pts[:,:2] - box_corner
else:
''' Find pts using geodesic extrema '''
geodesic_markers = geodesic_extrema_MPI(im_pos, iterations=10, visualize=False)
if len(geodesic_markers) == 0:
print "No markers"
continue
# Concatenate markers
markers = list(geodesic_markers) + list(hand_markers) #+ list(lop_markers) + curve_markers
markers = np.array([list(x) for x in markers])
if np.any(markers==0):
print "Bad markers"
continue
''' ---- Database lookup ---- '''
time_t0 = time.time()
pts_mean = im_pos[(im_pos!=0)[:,:,2]].mean(0)
if learn and ground_truth:
# pose_uv = pose_truth_uv
if np.any(pose_truth_uv==0):
frame_count += frame_rate
if not interactive:
continue
# Markers can be just outside of bounds
markers = list(geodesic_markers) + hand_markers
markers = np.array([list(x) for x in markers])
# pose_database.update(pose_truth-pts_mean, keys=im_pos[markers[:,0],markers[:,1]]-pts_mean)
pose_database.update(pose_truth-pts_mean)
if not interactive:
continue
# else:
if 1:
# Normalize pose
pts = im_pos[markers[:,0], markers[:,1]]
pts = np.array([x for x in pts if x[0] != 0])
pts -= pts_mean
# Get closest pose
# Based on markers/raw positions
# poses_obs, pose_error = pose_database.query(pts, knn=1, return_error=True)
pose_error = pose_query(pts, np.array(pose_database.database), search_joints=search_joints)
# pose_error = query_error(pts, pose_database.trees, search_joints=search_joints)
# Based on markers/keys:
# pts = im_pos[markers[:,0], markers[:,1]] - pts_mean
# # poses, pose_error = pose_database.query_tree(pts, knn=len(pose_database.database), return_error=True)
# # poses, pose_error = pose_database.query_flann(pts, knn=len(pose_database.database), return_error=True)
# pose_error = np.sqrt(np.sum((pose_database.keys - pts.reshape([27]))**2, 1))
observation_variance = 100.
prob_observation = np.exp(-pose_error / observation_variance) / np.sum(np.exp(-pose_error / observation_variance))
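# Note: pose_error holds one distance per database pose; exp(-error/var),
# normalized over the database, is a softmax-style observation likelihood.
# Its element-wise product with the motion likelihood below gives an
# (unnormalized) posterior over candidate poses.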
# subplot(2,2,1)
# plot(prob_obervation)
# subplot(2,2,2)
# plot(prob_motion)
# subplot(2,2,3)
# plot(pose_prob_new)
# subplot(2,2,4)
# plot(pose_prob)
# show()
# inference = 'NN'
inference = 'Bayes'
# inference = 'PF'
if inference=='NN': # Nearest neighbor
poses_obs, _ = pose_database.query(pts, knn=1, return_error=True)
poses = [poses_obs[0]]
elif inference=='Bayes': # Bayes
if frame_count == 0:
poses_obs, _ = pose_database.query(pts, knn=1, return_error=True)
skel_previous = poses_obs[0].copy()
# poses_m, pose_m_error = pose_database.query(skel_previous-pts_mean, knn=1, return_error=True)
pose_m_error = pose_query(skel_previous-pts_mean, np.array(pose_database.database), search_joints=search_joints)
# poses_m, pose_m_error = pose_database.query(skel_previous-pts_mean+(np.random.random([3,14])-.5).T*30, knn=5, return_error=True)
motion_variance = 10000.
prob_motion = np.exp(-pose_m_error / motion_variance) / np.sum(np.exp(-pose_m_error/motion_variance))
pose_prob_new = prob_observation * prob_motion
if pose_prob_new.shape == pose_prob.shape:
pose_prob = (pose_prob_new+pose_prob).T/2.
else:
pose_prob = pose_prob_new.T
prob_sorted = np.argsort(pose_prob)
poses = [pose_database.database[np.argmax(pose_prob)]]
# poses = pose_database.database[prob_sorted[-1:]]
# Particle Filter
elif inference=='PF':
prob_sorted = np.argsort(pose_prob)
poses = pose_database.database[prob_sorted[-5:]]
## ICP
# im_pos -= pts_mean
# R,t = IterativeClosestPoint(pose, im_pos.reshape([-1,3])-pts_mean, max_iters=5, min_change=.001, pt_tolerance=10000)
# pose = np.dot(R.T, pose.T).T - t
# pose = np.dot(R, pose.T).T + t
# scale = 1.
# poses *= scale
poses += pts_mean
# print "DB time:", time.time() - time_t0
''' ---- Tracker ---- '''
surface_map = nd.distance_transform_edt(-nd.binary_erosion(mask_box), return_distances=False, return_indices=True)
if skel_previous_uv is None:
skel_previous = poses[0].copy()
skel_current = poses[0].copy()
pose_tmp = cam.camera_model.world2im(poses[0], cam.depthIm.shape)
skel_previous_uv = pose_tmp.copy()
skel_current_uv = pose_tmp.copy()
pose_weights = np.zeros(len(poses), dtype=np.float)
pose_updates = []
pose_updates_uv = []
time_t0 = time.time()
# 2) Sample poses
if inference in ['PF', 'Bayes']:
for pose_i, pose in enumerate(poses):
skel_current = skel_previous.copy()
skel_current_uv = skel_previous_uv.copy()
pose_uv = cam.camera_model.world2im(pose, cam.depthIm.shape)
try:
pose_uv[:,:2] = surface_map[:, pose_uv[:,0]-box_corner[0], pose_uv[:,1]-box_corner[1]].T + [box_corner[0], box_corner[1]]
except:
pass
pose = cam.camera_model.im2world(pose_uv, cam.depthIm.shape)
# ---- (Step 2) Update pose state, x ----
correspondence_displacement = skel_previous - pose
lambda_p = .0
lambda_c = 1.
skel_prev_difference = (skel_current - skel_previous)
# print skel_prev_difference
skel_current = skel_previous \
+ lambda_p * skel_prev_difference \
- lambda_c * correspondence_displacement#\
# ---- (Step 3) Add constraints ----
# A: Link lengths / geometry
# skel_current = link_length_constraints(skel_current, constraint_links, constraint_values, alpha=.5)
# skel_current = geometry_constraints(skel_current, joint_size, alpha=0.5)
# skel_current = collision_constraints(skel_current, constraint_links)
skel_current_uv = (cam.camera_model.world2im(skel_current, cam.depthIm.shape) - [box[0].start, box[1].start, 0])#/mask_interval
skel_current_uv = skel_current_uv.clip([0,0,0], [box[0].stop-box[0].start-1, box[1].stop-box[1].start-1, 9999])
# B: Ray-cast constraints
skel_current, skel_current_uv = ray_cast_constraints(skel_current, skel_current_uv, im_pos, surface_map, joint_size)
# Map back from mask to image
# try:
# skel_current_uv[:,:2] = surface_map[:, skel_current_uv[:,0], skel_current_uv[:,1]].T# + [box_corner[0], box_corner[1]]
# except:
# pass
# ---- (Step 4) Update the confidence ----
if inference=='PF':
time_t1 = time.time()
## Calc distance between each pixel and all joints
px_corr = np.zeros([im_pos.shape[0], im_pos.shape[1], 14])
for i,s in enumerate(skel_current):
px_corr[:,:,i] = np.sqrt(np.sum((im_pos - s)**2, -1))# / joint_size[i]**2
# for i,s in enumerate(pose_uv):
# for i,s in enumerate(skel_current_uv):
# ''' Problem: need to constrain pose_uv to mask '''
# _, geo_map = geodesic_extrema_MPI(im_pos, [s[0],s[1]], iterations=1, visualize=True)
# px_corr[:,:,i] = geo_map
# subplot(2,7,i+1)
# imshow(geo_map, vmin=0, vmax=2000)
# axis('off')
# px_corr[geo_map==0,i] = 9999
px_label = np.argmin(px_corr, -1)*mask_box
px_label_flat = px_label[mask_box].flatten()
# cv2.imshow('gMap', (px_corr.argmin(-1)+1)/15.*mask_box)
# cv2.waitKey(1)
# Project distance to joint's radius
px_joint_displacement = im_pos[mask_box] - skel_current[px_label_flat]
px_joint_magnitude = np.sqrt(np.sum(px_joint_displacement**2,-1))
joint_mesh_pos = skel_current[px_label_flat] + px_joint_displacement*(joint_size[px_label_flat]/px_joint_magnitude)[:,None]
px_joint_displacement = joint_mesh_pos - im_pos[mask_box]
# Ensure pts aren't too far away (these are noise!)
px_joint_displacement[np.abs(px_joint_displacement) > 500] = 0
if 0:
x = im_pos.copy()*0
x[mask_box] = joint_mesh_pos
for i in range(3):
subplot(1,4,i+1)
imshow(x[:,:,i])
axis('off')
subplot(1,4,4)
imshow((px_label+1)*mask_box)
# Calc the correspondance change in position for each joint
correspondence_displacement = np.zeros([len(skel_current), 3])
for i, _ in enumerate(skel_current):
labels = px_label_flat == i
correspondence_displacement[i] = np.sum(px_joint_displacement[labels], 0) / np.sum(px_joint_displacement[labels] != 0)
correspondence_displacement = np.nan_to_num(correspondence_displacement)
# print "time:", time.time() - time_t1
# Likelihood
motion_variance = 500
prob_motion = np.exp(-np.mean(np.sum((pose-skel_previous)**2,1)/motion_variance**2))
if inference == 'PF':
correspondence_variance = 40
prob_coor = np.exp(-np.mean(np.sum(correspondence_displacement**2,1)/correspondence_variance**2))
prob = prob_motion * prob_coor
prob = prob_motion
# Viz correspondences
# x = im_pos.copy()*0
# x[mask_box] = px_joint_displacement
# for i in range(3):
# subplot(1,4,i+1)
# imshow(x[:,:,i])
# axis('off')
# subplot(1,4,4)
# imshow((px_label+1)*mask_box)
# # embed()
# # for j in range(3):
# # for i in range(14):
# # subplot(3,14,j*14+i+1)
# # imshow(x[:,:,j]*((px_label==i)*mask_box))
# # axis('off')
# show()
# prob = link_length_probability(skel_current, constraint_links, constraint_values, 100)
# print frame_count
# print "Prob:", np.mean(prob)#, np.min(prob), prob
# thresh = .05
# if np.min(prob) < thresh:
# # print 'Resetting pose'
# for c in constraint_links[prob<thresh]:
# for cc in c:
# skel_current_uv[c] = pose_uv[c] - [box[0].start, box[1].start, 0]
# skel_current[c] = pose[c]
# skel_current_uv = pose_uv.copy() - [box[0].start, box[1].start, 0]
# skel_current = pose.copy()
skel_current_uv = skel_current_uv + [box[0].start, box[1].start, 0]
skel_current = cam.camera_model.im2world(skel_current_uv, cam.depthIm.shape)
# print 'Error:', np.sqrt(np.sum((pose_truth-skel_current)**2, 0))
pose_weights[pose_i] = prob
# pose_updates += [skel_current.copy()]
# pose_updates_uv += [skel_current_uv.copy()]
pose_updates += [pose.copy()]
pose_updates_uv += [pose_uv.copy()]
if cam.colorIm is not None:
cam.colorIm = display_skeletons(cam.colorIm, skel_current_uv, skel_type='Kinect', color=(0,0,pose_i*40+50))
else:
cam.depthIm = display_skeletons(cam.depthIm, skel_current_uv, skel_type='Kinect', color=(0,0,pose_i*40+50))
# cam.colorIm = display_skeletons(cam.colorIm, pose_uv, skel_type='Kinect', color=(0,pose_i*40+50,pose_i*40+50))
# print "Tracking time:", time.time() - time_t0
# Update for next round
pose_ind = np.argmax(pose_weights)
# print "Pickled:", pose_ind
skel_previous = pose_updates[pose_ind].copy()
skel_previous_uv = pose_updates_uv[pose_ind].copy()
# print pose_weights
else:
pose = poses[0]
skel_previous = pose.copy()
pose_uv = cam.camera_model.world2im(skel_previous, cam.depthIm.shape)
skel_current_uv = pose_uv.copy()
skel_previous_uv = pose_uv.copy()
''' ---- Accuracy ---- '''
if ground_truth:
error_track = pose_truth - skel_previous
error_track *= np.any(pose_truth!=0, 1)[:,None]
error_l2_track = np.sqrt(np.sum(error_track**2, 1))
joint_accuracy_track += [error_l2_track]
accuracy_track = np.sum(error_l2_track < 150) / float(n_joints)
accuracy_all_track += [accuracy_track]
print "Current track: {}% {} mm".format(accuracy_track, error_l2_track.mean())
print "Running avg (track):", np.mean(accuracy_all_track)
# print "Joint avg (overall track):", np.mean(joint_accuracy_track)
print ""
''' --- Visualization --- '''
if visualize:
display_markers(cam.colorIm, hand_markers[:2], box, color=(0,250,0))
if len(hand_markers) > 2:
display_markers(cam.colorIm, [hand_markers[2]], box, color=(0,200,0))
display_markers(cam.colorIm, geodesic_markers, box, color=(200,0,0))
# display_markers(cam.colorIm, curve_markers, box, color=(0,100,100))
# display_markers(cam.colorIm, lop_markers, box, color=(0,0,200))
if ground_truth:
cam.colorIm = display_skeletons(cam.colorIm, pose_truth_uv, skel_type='Kinect', color=(0,255,0))
cam.colorIm = display_skeletons(cam.colorIm, skel_current_uv, skel_type='Kinect', color=(255,0,0))
cam.visualize(color=True, depth=False)
# ------------------------------------------------------------
# video_writer.write((geo_clf_map/float(geo_clf_map.max())*255.).astype(np.uint8))
# video_writer.write(cam.colorIm[:,:,[2,1,0]])
frame_count += frame_rate
print "Frame:", frame_count
except:
traceback.print_exc(file=sys.stdout)
pass
try:
print "-- Results for subject {:d} action {:d}".format(subjects[0],actions[0])
except:
pass
# print "Running avg (db):", np.mean(accuracy_all_db)
print "Running mean (track):", np.mean(accuracy_all_track)
# print "Joint avg (overall db):", np.mean(joint_accuracy_db)
print "Joint mean (overall track):", np.mean(joint_accuracy_track)
print "Joint median (overall track):", np.median(joint_accuracy_track)
# print 'Done'
embed()
if learn:
pose_database.save()
elif save_results:
# Save results:
results['subject'] += [subjects[0]]
results['action'] += [actions[0]]
results['accuracy_all'] += [accuracy_all_track]
results['accuracy_mean'] += [np.mean(accuracy_all_track)]
results['joints_all'] += [joint_accuracy_track]
results['joint_mean'] += [np.mean(joint_accuracy_track)]
results['joint_median'] += [np.median(joint_accuracy_track)]
pickle.dump(results, open('/Users/colin/Data/BerkeleyMHAD/Accuracy_Results.pkl', 'w'))
if __name__=="__main__":
parser = optparse.OptionParser()
parser.add_option('-v', '--visualize', dest='viz', action="store_true", default=False, help='Enable visualization')
parser.add_option('-l', '--learn', dest='learn', action="store_true", default=False, help='Training phase')
parser.add_option('-a', '--actions', dest='actions', type='int', action='append', default=[], help='Training phase')
parser.add_option('-s', '--subjects', dest='subjects', type='int', action='append', default=[], help='Training phase')
(opt, args) = parser.parse_args()
main(visualize=opt.viz, learn=opt.learn, actions=opt.actions, subjects=opt.subjects)
| 37.27381
| 188
| 0.672857
|
4a1749e9fcb294fb2e4e76ac15fb20c74c5f8ae9
| 2,430
|
py
|
Python
|
security_monkey/task_scheduler/util.py
|
boladmin/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
[
"Apache-2.0"
] | 4,258
|
2015-01-04T22:06:10.000Z
|
2022-03-31T23:40:27.000Z
|
security_monkey/task_scheduler/util.py
|
boladmin/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
[
"Apache-2.0"
] | 1,013
|
2015-01-12T02:31:03.000Z
|
2021-09-16T19:09:03.000Z
|
security_monkey/task_scheduler/util.py
|
boladmin/security_monkey
|
c28592ffd518fa399527d26262683fc860c30eef
|
[
"Apache-2.0"
] | 965
|
2015-01-11T21:06:07.000Z
|
2022-03-17T16:53:57.000Z
|
"""
.. module: security_monkey.task_scheduler.util
:platform: Unix
:synopsis: Instantiates the Celery object for use with task scheduling.
.. version:: $$VERSION$$
.. moduleauthor:: Mike Grima <mgrima@netflix.com>
"""
from celery import Celery
from security_monkey import app
from security_monkey.common.utils import find_modules, load_plugins
import os
import importlib
from security_monkey.exceptions import InvalidCeleryConfigurationType
def get_celery_config_file():
"""This gets the Celery configuration file as a module that Celery uses"""
return importlib.import_module("security_monkey.{}".format(os.environ.get("SM_CELERY_CONFIG", "celeryconfig")),
"security_monkey")
def make_celery(app):
"""
Recommended from Flask's documentation to set up the Celery object.
:param app:
:return:
"""
celery = Celery(app.import_name)
# Determine which Celery configuration to load:
# The order is:
# 1. `SM_CELERY_CONFIG` Environment Variable
# 2. The default "celeryconfig.py"
celery.config_from_object(get_celery_config_file())
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
return celery
def setup():
"""Load the required data for scheduling tasks"""
find_modules('alerters')
find_modules('watchers')
find_modules('auditors')
load_plugins('security_monkey.plugins')
def get_sm_celery_config_value(celery_config, variable_name, variable_type):
"""
This returns a celery configuration value of a given type back.
If it's not set, it will return None.
:param celery_config: The Celery configuration module to read from.
:param variable_name: The name of the Celery configuration variable to obtain.
:param variable_type: The type of the value, such as `list`, `dict`, etc.
:return:
"""
try:
# Directly load the config that Celery is configured to use:
value = getattr(celery_config, variable_name, None)
if value is None:
return
if not isinstance(value, variable_type):
raise InvalidCeleryConfigurationType(variable_name, variable_type, type(value))
except KeyError as _:
return
return value
CELERY = make_celery(app)
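# Illustrative usage (example only, not part of this module): tasks defined
# on the CELERY object run inside the Flask application context via the
# ContextTask subclass above, so Flask extensions resolve normally. The
# task below is hypothetical.
#
# @CELERY.task()
# def example_task():
#     from security_monkey import db  # works: app context is active
#     db.session.execute("SELECT 1")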
| 27.613636
| 115
| 0.688889
|
4a1749ff734c1190f08fde79430f1b43298e3152
| 7,122
|
py
|
Python
|
.venv/lib/python3.8/site-packages/pandas/tests/indexes/timedeltas/test_timedelta.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 115
|
2020-06-18T15:00:58.000Z
|
2022-03-02T10:13:19.000Z
|
.venv/lib/python3.8/site-packages/pandas/tests/indexes/timedeltas/test_timedelta.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 21
|
2021-04-13T01:17:40.000Z
|
2022-03-11T16:06:50.000Z
|
.venv/lib/python3.8/site-packages/pandas/tests/indexes/timedeltas/test_timedelta.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 60
|
2020-07-22T14:53:10.000Z
|
2022-03-23T10:17:59.000Z
|
from datetime import timedelta
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
Index,
Int64Index,
Series,
Timedelta,
TimedeltaIndex,
date_range,
timedelta_range,
)
import pandas._testing as tm
from ..datetimelike import DatetimeLike
randn = np.random.randn
class TestTimedeltaIndex(DatetimeLike):
_holder = TimedeltaIndex
@pytest.fixture
def index(self):
return tm.makeTimedeltaIndex(10)
def create_index(self) -> TimedeltaIndex:
index = pd.to_timedelta(range(5), unit="d")._with_freq("infer")
assert index.freq == "D"
ret = index + pd.offsets.Hour(1)
assert ret.freq == "D"
return ret
def test_numeric_compat(self):
# Dummy method to override super's version; this test is now done
# in test_arithmetic.py
pass
def test_shift(self):
pass # this is handled in test_arithmetic.py
def test_pickle_compat_construction(self):
pass
def test_pickle_after_set_freq(self):
tdi = timedelta_range("1 day", periods=4, freq="s")
tdi = tdi._with_freq(None)
res = tm.round_trip_pickle(tdi)
tm.assert_index_equal(res, tdi)
def test_isin(self):
index = tm.makeTimedeltaIndex(4)
result = index.isin(index)
assert result.all()
result = index.isin(list(index))
assert result.all()
tm.assert_almost_equal(
index.isin([index[2], 5]), np.array([False, False, True, False])
)
def test_factorize(self):
idx1 = TimedeltaIndex(["1 day", "1 day", "2 day", "2 day", "3 day", "3 day"])
exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
exp_idx = TimedeltaIndex(["1 day", "2 day", "3 day"])
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
arr, idx = idx1.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# freq must be preserved
idx3 = timedelta_range("1 day", periods=4, freq="s")
exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
arr, idx = idx3.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
def test_sort_values(self):
idx = TimedeltaIndex(["4d", "1d", "2d"])
ordered = idx.sort_values()
assert ordered.is_monotonic
ordered = idx.sort_values(ascending=False)
assert ordered[::-1].is_monotonic
ordered, dexer = idx.sort_values(return_indexer=True)
assert ordered.is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0]), check_dtype=False)
ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
assert ordered[::-1].is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1]), check_dtype=False)
def test_argmin_argmax(self):
idx = TimedeltaIndex(["1 day 00:00:05", "1 day 00:00:01", "1 day 00:00:02"])
assert idx.argmin() == 1
assert idx.argmax() == 0
def test_misc_coverage(self):
rng = timedelta_range("1 day", periods=5)
result = rng.groupby(rng.days)
assert isinstance(list(result.values())[0][0], Timedelta)
idx = TimedeltaIndex(["3d", "1d", "2d"])
assert not idx.equals(list(idx))
non_td = Index(list("abc"))
assert not idx.equals(list(non_td))
def test_map(self):
# test_map_dictlike generally tests this behavior
rng = timedelta_range("1 day", periods=10)
f = lambda x: x.days
result = rng.map(f)
exp = Int64Index([f(x) for x in rng])
tm.assert_index_equal(result, exp)
def test_pass_TimedeltaIndex_to_index(self):
rng = timedelta_range("1 days", "10 days")
idx = Index(rng, dtype=object)
expected = Index(rng.to_pytimedelta(), dtype=object)
tm.assert_numpy_array_equal(idx.values, expected.values)
def test_append_numpy_bug_1681(self):
td = timedelta_range("1 days", "10 days", freq="2D")
a = DataFrame()
c = DataFrame({"A": "foo", "B": td}, index=td)
str(c)
result = a.append(c)
assert (result["B"] == td).all()
def test_fields(self):
rng = timedelta_range("1 days, 10:11:12.100123456", periods=2, freq="s")
tm.assert_index_equal(rng.days, Index([1, 1], dtype="int64"))
tm.assert_index_equal(
rng.seconds,
Index([10 * 3600 + 11 * 60 + 12, 10 * 3600 + 11 * 60 + 13], dtype="int64"),
)
tm.assert_index_equal(
rng.microseconds, Index([100 * 1000 + 123, 100 * 1000 + 123], dtype="int64")
)
tm.assert_index_equal(rng.nanoseconds, Index([456, 456], dtype="int64"))
msg = "'TimedeltaIndex' object has no attribute '{}'"
with pytest.raises(AttributeError, match=msg.format("hours")):
rng.hours
with pytest.raises(AttributeError, match=msg.format("minutes")):
rng.minutes
with pytest.raises(AttributeError, match=msg.format("milliseconds")):
rng.milliseconds
# with nat
s = Series(rng)
s[1] = np.nan
tm.assert_series_equal(s.dt.days, Series([1, np.nan], index=[0, 1]))
tm.assert_series_equal(
s.dt.seconds, Series([10 * 3600 + 11 * 60 + 12, np.nan], index=[0, 1])
)
# preserve name (GH15589)
rng.name = "name"
assert rng.days.name == "name"
def test_freq_conversion(self):
# doc example
# series
td = Series(date_range("20130101", periods=4)) - Series(
date_range("20121201", periods=4)
)
td[2] += timedelta(minutes=5, seconds=3)
td[3] = np.nan
result = td / np.timedelta64(1, "D")
expected = Series([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
tm.assert_series_equal(result, expected)
result = td.astype("timedelta64[D]")
expected = Series([31, 31, 31, np.nan])
tm.assert_series_equal(result, expected)
result = td / np.timedelta64(1, "s")
expected = Series([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
tm.assert_series_equal(result, expected)
result = td.astype("timedelta64[s]")
tm.assert_series_equal(result, expected)
# tdi
td = TimedeltaIndex(td)
result = td / np.timedelta64(1, "D")
expected = Index([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
tm.assert_index_equal(result, expected)
result = td.astype("timedelta64[D]")
expected = Index([31, 31, 31, np.nan])
tm.assert_index_equal(result, expected)
result = td / np.timedelta64(1, "s")
expected = Index([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
tm.assert_index_equal(result, expected)
result = td.astype("timedelta64[s]")
tm.assert_index_equal(result, expected)
| 30.698276
| 88
| 0.597304
|
4a174a23b052e19b13851f09284c3fa2a7841751
| 88,227
|
py
|
Python
|
openmc/surface.py
|
simondrichards/openmc
|
91db1c94636884d8fc15a8edbdfb533850fe22b7
|
[
"MIT"
] | 1
|
2019-08-27T19:49:57.000Z
|
2019-08-27T19:49:57.000Z
|
openmc/surface.py
|
ilhamv/openmc
|
46b42d5eadef701c024e04a94be510ffb1d7aa2d
|
[
"MIT"
] | null | null | null |
openmc/surface.py
|
ilhamv/openmc
|
46b42d5eadef701c024e04a94be510ffb1d7aa2d
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from collections import OrderedDict
from collections.abc import Iterable
from copy import deepcopy
import math
from numbers import Real
from xml.etree import ElementTree as ET
from warnings import warn, catch_warnings, simplefilter
import numpy as np
from .checkvalue import check_type, check_value, check_length
from .mixin import IDManagerMixin, IDWarning
from .region import Region, Intersection, Union
_BOUNDARY_TYPES = ['transmission', 'vacuum', 'reflective', 'periodic', 'white']
_WARNING_UPPER = """\
"{}(...) accepts an argument named '{}', not '{}'. Future versions of OpenMC \
will not accept the capitalized version.\
"""
_WARNING_KWARGS = """\
"{}(...) accepts keyword arguments only for '{}'. Future versions of OpenMC \
will not accept positional parameters for superclass arguments.\
"""
class SurfaceCoefficient:
"""Descriptor class for surface coefficients.
Parameters
-----------
value : float or str
Value of the coefficient (float) or the name of the coefficient that
it is equivalent to (str).
"""
def __init__(self, value):
self.value = value
def __get__(self, instance, owner=None):
if instance is None:
return self
else:
if isinstance(self.value, str):
return instance._coefficients[self.value]
else:
return self.value
def __set__(self, instance, value):
if isinstance(self.value, Real):
raise AttributeError('This coefficient is read-only')
check_type(f'{self.value} coefficient', value, Real)
instance._coefficients[self.value] = value
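# Example (illustrative, not part of the original source): a descriptor
# built with a string key, e.g. `x0 = SurfaceCoefficient('x0')`, reads and
# writes the owning instance's _coefficients dict, while one built with a
# number, e.g. `a = SurfaceCoefficient(1.)`, acts as a read-only constant
# and raises AttributeError on assignment.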
def _future_kwargs_warning_helper(cls, *args, **kwargs):
# Warn if Surface parameters are passed by position, not by keyword
argsdict = dict(zip(('boundary_type', 'name', 'surface_id'), args))
for k in argsdict:
warn(_WARNING_KWARGS.format(cls.__name__, k), FutureWarning)
kwargs.update(argsdict)
return kwargs
def get_rotation_matrix(rotation, order='xyz'):
r"""Generate a 3x3 rotation matrix from input angles
.. versionadded:: 0.12
Parameters
----------
rotation : 3-tuple of float
A 3-tuple of angles :math:`(\phi, \theta, \psi)` in degrees where the
first element is the rotation about the x-axis in the fixed laboratory
frame, the second element is the rotation about the y-axis in the fixed
laboratory frame, and the third element is the rotation about the
z-axis in the fixed laboratory frame. The rotations are active
rotations.
order : str, optional
A string of 'x', 'y', and 'z' in some order specifying which rotation
to perform first, second, and third. Defaults to 'xyz', which means the
rotation by angle :math:`\phi` about x will be applied first, followed
by :math:`\theta` about y and then :math:`\psi` about z. This
corresponds to an x-y-z extrinsic rotation as well as a z-y'-x''
intrinsic rotation using Tait-Bryan angles :math:`(\phi, \theta, \psi)`.
"""
check_type('surface rotation', rotation, Iterable, Real)
check_length('surface rotation', rotation, 3)
phi, theta, psi = np.array(rotation)*(math.pi/180.)
cx, sx = math.cos(phi), math.sin(phi)
cy, sy = math.cos(theta), math.sin(theta)
cz, sz = math.cos(psi), math.sin(psi)
R = {
'x': np.array([[1., 0., 0.], [0., cx, -sx], [0., sx, cx]]),
'y': np.array([[cy, 0., sy], [0., 1., 0.], [-sy, 0., cy]]),
'z': np.array([[cz, -sz, 0.], [sz, cz, 0.], [0., 0., 1.]]),
}
R1, R2, R3 = (R[xi] for xi in order)
return R3 @ R2 @ R1
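# Illustrative check (example only, not part of the original module): a
# 90-degree active rotation about z carries the x unit vector onto y.
_R_example = get_rotation_matrix((0., 0., 90.))
assert np.allclose(_R_example @ [1., 0., 0.], [0., 1., 0.])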
class Surface(IDManagerMixin, ABC):
"""An implicit surface with an associated boundary condition.
An implicit surface is defined as the set of zeros of a function of the
three Cartesian coordinates. Surfaces in OpenMC are limited to a set of
algebraic surfaces, i.e., surfaces that are polynomial in x, y, and z.
Parameters
----------
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface. Note that periodic boundary conditions
can only be applied to x-, y-, and z-planes, and only axis-aligned
periodicity is supported.
name : str, optional
Name of the surface. If not specified, the name will be the empty
string.
Attributes
----------
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
next_id = 1
used_ids = set()
_atol = 1.e-12
def __init__(self, surface_id=None, boundary_type='transmission', name=''):
self.id = surface_id
self.name = name
self.boundary_type = boundary_type
# A dictionary of the quadratic surface coefficients
# Key - coefficient name
# Value - coefficient value
self._coefficients = {}
def __neg__(self):
return Halfspace(self, '-')
def __pos__(self):
return Halfspace(self, '+')
def __repr__(self):
string = 'Surface\n'
string += '{0: <16}{1}{2}\n'.format('\tID', '=\t', self._id)
string += '{0: <16}{1}{2}\n'.format('\tName', '=\t', self._name)
string += '{0: <16}{1}{2}\n'.format('\tType', '=\t', self._type)
string += '{0: <16}{1}{2}\n'.format('\tBoundary', '=\t', self._boundary_type)
coefficients = '{0: <16}'.format('\tCoefficients') + '\n'
for coeff in self._coefficients:
coefficients += '{0: <16}{1}{2}\n'.format(
coeff, '=\t', self._coefficients[coeff])
string += coefficients
return string
@property
def name(self):
return self._name
@property
def type(self):
return self._type
@property
def boundary_type(self):
return self._boundary_type
@property
def coefficients(self):
return self._coefficients
@name.setter
def name(self, name):
if name is not None:
check_type('surface name', name, str)
self._name = name
else:
self._name = ''
@boundary_type.setter
def boundary_type(self, boundary_type):
check_type('boundary type', boundary_type, str)
check_value('boundary type', boundary_type, _BOUNDARY_TYPES)
self._boundary_type = boundary_type
def bounding_box(self, side):
"""Determine an axis-aligned bounding box.
An axis-aligned bounding box for surface half-spaces is represented by
its lower-left and upper-right coordinates. If the half-space is
unbounded in a particular direction, numpy.inf is used to represent
infinity.
Parameters
----------
side : {'+', '-'}
Indicates the negative or positive half-space
Returns
-------
numpy.ndarray
Lower-left coordinates of the axis-aligned bounding box for the
desired half-space
numpy.ndarray
Upper-right coordinates of the axis-aligned bounding box for the
desired half-space
"""
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
def clone(self, memo=None):
"""Create a copy of this surface with a new unique ID.
Parameters
----------
memo : dict or None
A nested dictionary of previously cloned objects. This parameter
is used internally and should not be specified by the user.
Returns
-------
clone : openmc.Surface
The clone of this surface
"""
if memo is None:
memo = {}
# If no memoized clone exists, instantiate one
if self not in memo:
clone = deepcopy(self)
clone.id = None
# Memoize the clone
memo[self] = clone
return memo[self]
def normalize(self, coeffs=None):
"""Normalize coefficients by first nonzero value
.. versionadded:: 0.12
Parameters
----------
coeffs : tuple, optional
Tuple of surface coefficients to normalize. Defaults to None. If no
coefficients are supplied then the coefficients will be taken from
the current Surface.
Returns
-------
tuple of normalized coefficients
"""
if coeffs is None:
coeffs = self._get_base_coeffs()
coeffs = np.asarray(coeffs)
nonzeros = ~np.isclose(coeffs, 0., rtol=0., atol=self._atol)
norm_factor = np.abs(coeffs[nonzeros][0])
return tuple([c/norm_factor for c in coeffs])
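    # Example (illustrative): for coefficients (0., -2., 4., 6.) the first
    # nonzero entry is -2., so normalize() divides through by abs(-2.) and
    # returns (0., -1., 2., 3.).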
def is_equal(self, other):
"""Determine if this Surface is equivalent to another
Parameters
----------
other : instance of openmc.Surface
Instance of openmc.Surface that should be compared to the current
surface
"""
coeffs1 = self.normalize(self._get_base_coeffs())
coeffs2 = self.normalize(other._get_base_coeffs())
return np.allclose(coeffs1, coeffs2, rtol=0., atol=self._atol)
@abstractmethod
def _get_base_coeffs(self):
"""Return polynomial coefficients representing the implicit surface
equation.
"""
@abstractmethod
def evaluate(self, point):
"""Evaluate the surface equation at a given point.
Parameters
----------
point : 3-tuple of float
The Cartesian coordinates, :math:`(x',y',z')`, at which the surface
equation should be evaluated.
Returns
-------
float
Evaluation of the surface polynomial at point :math:`(x',y',z')`
"""
@abstractmethod
def translate(self, vector, inplace=False):
"""Translate surface in given direction
Parameters
----------
vector : iterable of float
Direction in which surface should be translated
inplace : boolean
Whether or not to return a new instance of this Surface or to
modify the coefficients of this Surface. Defaults to False
Returns
-------
instance of openmc.Surface
Translated surface
"""
@abstractmethod
def rotate(self, rotation, pivot=(0., 0., 0.), order='xyz', inplace=False):
r"""Rotate surface by angles provided or by applying matrix directly.
.. versionadded:: 0.12
Parameters
----------
rotation : 3-tuple of float, or 3x3 iterable
A 3-tuple of angles :math:`(\phi, \theta, \psi)` in degrees where
the first element is the rotation about the x-axis in the fixed
laboratory frame, the second element is the rotation about the
y-axis in the fixed laboratory frame, and the third element is the
rotation about the z-axis in the fixed laboratory frame. The
rotations are active rotations. Additionally a 3x3 rotation matrix
can be specified directly either as a nested iterable or array.
pivot : iterable of float, optional
(x, y, z) coordinates for the point to rotate about. Defaults to
(0., 0., 0.)
order : str, optional
A string of 'x', 'y', and 'z' in some order specifying which
rotation to perform first, second, and third. Defaults to 'xyz'
which means the rotation by angle :math:`\phi` about x will be
applied first, followed by :math:`\theta` about y and then
:math:`\psi` about z. This corresponds to an x-y-z extrinsic
rotation as well as a z-y'-x'' intrinsic rotation using Tait-Bryan
angles :math:`(\phi, \theta, \psi)`.
inplace : boolean
Whether or not to return a new instance of Surface or to modify the
coefficients of this Surface in place. Defaults to False.
Returns
-------
openmc.Surface
Rotated surface
"""
def to_xml_element(self):
"""Return XML representation of the surface
Returns
-------
element : xml.etree.ElementTree.Element
XML element containing source data
"""
element = ET.Element("surface")
element.set("id", str(self._id))
if len(self._name) > 0:
element.set("name", str(self._name))
element.set("type", self._type)
if self.boundary_type != 'transmission':
element.set("boundary", self.boundary_type)
element.set("coeffs", ' '.join([str(self._coefficients.setdefault(key, 0.0))
for key in self._coeff_keys]))
return element
@staticmethod
def from_xml_element(elem):
"""Generate surface from an XML element
Parameters
----------
elem : xml.etree.ElementTree.Element
XML element
Returns
-------
openmc.Surface
Instance of a surface subclass
"""
# Determine appropriate class
surf_type = elem.get('type')
cls = _SURFACE_CLASSES[surf_type]
# Determine ID, boundary type, coefficients
kwargs = {}
kwargs['surface_id'] = int(elem.get('id'))
kwargs['boundary_type'] = elem.get('boundary', 'transmission')
kwargs['name'] = elem.get('name')
coeffs = [float(x) for x in elem.get('coeffs').split()]
kwargs.update(dict(zip(cls._coeff_keys, coeffs)))
return cls(**kwargs)
@staticmethod
def from_hdf5(group):
"""Create surface from HDF5 group
Parameters
----------
group : h5py.Group
Group in HDF5 file
Returns
-------
openmc.Surface
Instance of surface subclass
"""
# If this is a DAGMC surface, do nothing for now
geom_type = group.get('geom_type')
if geom_type and geom_type[()].decode() == 'dagmc':
return
surface_id = int(group.name.split('/')[-1].lstrip('surface '))
name = group['name'][()].decode() if 'name' in group else ''
bc = group['boundary_type'][()].decode()
coeffs = group['coefficients'][...]
kwargs = {'boundary_type': bc, 'name': name, 'surface_id': surface_id}
surf_type = group['type'][()].decode()
cls = _SURFACE_CLASSES[surf_type]
return cls(*coeffs, **kwargs)
class PlaneMixin:
"""A Plane mixin class for all operations on order 1 surfaces"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self._periodic_surface = None
@property
def periodic_surface(self):
return self._periodic_surface
@periodic_surface.setter
def periodic_surface(self, periodic_surface):
check_type('periodic surface', periodic_surface, Plane)
self._periodic_surface = periodic_surface
periodic_surface._periodic_surface = self
def _get_base_coeffs(self):
return (self.a, self.b, self.c, self.d)
def _get_normal(self):
a, b, c = self._get_base_coeffs()[:3]
return np.array((a, b, c)) / math.sqrt(a*a + b*b + c*c)
def bounding_box(self, side):
"""Determine an axis-aligned bounding box.
An axis-aligned bounding box for Plane half-spaces is represented by
its lower-left and upper-right coordinates. If the half-space is
unbounded in a particular direction, numpy.inf is used to represent
infinity.
Parameters
----------
side : {'+', '-'}
Indicates the negative or positive half-space
Returns
-------
numpy.ndarray
Lower-left coordinates of the axis-aligned bounding box for the
desired half-space
numpy.ndarray
Upper-right coordinates of the axis-aligned bounding box for the
desired half-space
"""
# Compute the bounding box based on the normal vector to the plane
nhat = self._get_normal()
ll = np.array([-np.inf, -np.inf, -np.inf])
ur = np.array([np.inf, np.inf, np.inf])
# If the plane is axis aligned, find the proper bounding box
if np.any(np.isclose(np.abs(nhat), 1., rtol=0., atol=self._atol)):
sign = nhat.sum()
a, b, c, d = self._get_base_coeffs()
vals = [d/val if not np.isclose(val, 0., rtol=0., atol=self._atol)
else np.nan for val in (a, b, c)]
if side == '-':
if sign > 0:
ur = np.array([v if not np.isnan(v) else np.inf for v in vals])
else:
ll = np.array([v if not np.isnan(v) else -np.inf for v in vals])
elif side == '+':
if sign > 0:
ll = np.array([v if not np.isnan(v) else -np.inf for v in vals])
else:
ur = np.array([v if not np.isnan(v) else np.inf for v in vals])
return (ll, ur)
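    # Example (illustrative): for an XPlane at x0 = 3 the unit normal is +x,
    # so the negative half-space is bounded above by x = 3 and
    # bounding_box('-') returns ll = (-inf, -inf, -inf), ur = (3., inf, inf).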
def evaluate(self, point):
"""Evaluate the surface equation at a given point.
Parameters
----------
point : 3-tuple of float
The Cartesian coordinates, :math:`(x',y',z')`, at which the surface
equation should be evaluated.
Returns
-------
float
:math:`Ax' + By' + Cz' - D`
"""
x, y, z = point
a, b, c, d = self._get_base_coeffs()
return a*x + b*y + c*z - d
def translate(self, vector, inplace=False):
"""Translate surface in given direction
Parameters
----------
vector : iterable of float
Direction in which surface should be translated
inplace : boolean
Whether or not to return a new instance of a Plane or to modify the
coefficients of this plane. Defaults to False
Returns
-------
openmc.Plane
Translated surface
"""
if np.allclose(vector, 0., rtol=0., atol=self._atol):
return self
a, b, c, d = self._get_base_coeffs()
d = d + np.dot([a, b, c], vector)
surf = self if inplace else self.clone()
setattr(surf, surf._coeff_keys[-1], d)
return surf
def rotate(self, rotation, pivot=(0., 0., 0.), order='xyz', inplace=False):
pivot = np.asarray(pivot)
rotation = np.asarray(rotation, dtype=float)
# Allow rotation matrix to be passed in directly, otherwise build it
if rotation.ndim == 2:
check_length('surface rotation', rotation.ravel(), 9)
Rmat = rotation
else:
Rmat = get_rotation_matrix(rotation, order=order)
# Translate surface to pivot
surf = self.translate(-pivot, inplace=inplace)
a, b, c, d = surf._get_base_coeffs()
# Compute new rotated coefficients a, b, c
a, b, c = Rmat @ [a, b, c]
kwargs = {'boundary_type': surf.boundary_type, 'name': surf.name}
if inplace:
kwargs['surface_id'] = surf.id
surf = Plane(a=a, b=b, c=c, d=d, **kwargs)
return surf.translate(pivot, inplace=inplace)
def to_xml_element(self):
"""Return XML representation of the surface
Returns
-------
element : xml.etree.ElementTree.Element
XML element containing source data
"""
element = super().to_xml_element()
# Add periodic surface pair information
if self.boundary_type == 'periodic':
if self.periodic_surface is not None:
element.set("periodic_surface_id",
str(self.periodic_surface.id))
return element
class Plane(PlaneMixin, Surface):
"""An arbitrary plane of the form :math:`Ax + By + Cz = D`.
Parameters
----------
a : float, optional
The 'A' parameter for the plane. Defaults to 1.
b : float, optional
The 'B' parameter for the plane. Defaults to 0.
c : float, optional
The 'C' parameter for the plane. Defaults to 0.
d : float, optional
The 'D' parameter for the plane. Defaults to 0.
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the plane. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
a : float
The 'A' parameter for the plane
b : float
The 'B' parameter for the plane
c : float
The 'C' parameter for the plane
d : float
The 'D' parameter for the plane
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
periodic_surface : openmc.Surface
If a periodic boundary condition is used, the surface with which this
one is periodic
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'plane'
_coeff_keys = ('a', 'b', 'c', 'd')
def __init__(self, a=1., b=0., c=0., d=0., *args, **kwargs):
# *args should ultimately be limited to a, b, c, d as specified in
# __init__, but to preserve the API it is allowed to accept Surface
# parameters for now, but will raise warnings if this is done.
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
# Warn if capital letter arguments are passed
capdict = {}
for k in 'ABCD':
val = kwargs.pop(k, None)
if val is not None:
warn(_WARNING_UPPER.format(type(self), k.lower(), k),
FutureWarning)
capdict[k.lower()] = val
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (a, b, c, d)):
setattr(self, key, val)
for key, val in capdict.items():
setattr(self, key, val)
@classmethod
def __subclasshook__(cls, c):
if cls is Plane and c in (XPlane, YPlane, ZPlane):
return True
return NotImplemented
a = SurfaceCoefficient('a')
b = SurfaceCoefficient('b')
c = SurfaceCoefficient('c')
d = SurfaceCoefficient('d')
@classmethod
def from_points(cls, p1, p2, p3, **kwargs):
"""Return a plane given three points that pass through it.
Parameters
----------
p1, p2, p3 : 3-tuples
Points that pass through the plane
kwargs : dict
Keyword arguments passed to the :class:`Plane` constructor
Returns
-------
Plane
Plane that passes through the three points
"""
# Convert to numpy arrays
p1 = np.asarray(p1)
p2 = np.asarray(p2)
p3 = np.asarray(p3)
# Find normal vector to plane by taking cross product of two vectors
# connecting p1->p2 and p1->p3
n = np.cross(p2 - p1, p3 - p1)
# The equation of the plane is given by n·(<x,y,z> - p1) = 0. Determine
# coefficients a, b, c, and d based on that
a, b, c = n
d = np.dot(n, p1)
return cls(a=a, b=b, c=c, d=d, **kwargs)
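# Illustrative usage (example only, not part of the original file): three
# points lying in the z = 5 plane yield coefficients proportional to
# (0, 0, 1, 5).
_example_plane = Plane.from_points((0., 0., 5.), (1., 0., 5.), (0., 1., 5.))
assert _example_plane.normalize() == (0., 0., 1., 5.)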
class XPlane(PlaneMixin, Surface):
"""A plane perpendicular to the x axis of the form :math:`x - x_0 = 0`
Parameters
----------
x0 : float, optional
Location of the plane. Defaults to 0.
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface. Only axis-aligned periodicity is
supported, i.e., x-planes can only be paired with x-planes.
name : str, optional
Name of the plane. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
Location of the plane
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
periodic_surface : openmc.Surface
If a periodic boundary condition is used, the surface with which this
one is periodic
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'x-plane'
_coeff_keys = ('x0',)
def __init__(self, x0=0., *args, **kwargs):
# work around for accepting Surface kwargs as positional parameters
# until they are deprecated
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
self.x0 = x0
x0 = SurfaceCoefficient('x0')
a = SurfaceCoefficient(1.)
b = SurfaceCoefficient(0.)
c = SurfaceCoefficient(0.)
d = x0
def evaluate(self, point):
return point[0] - self.x0
class YPlane(PlaneMixin, Surface):
"""A plane perpendicular to the y axis of the form :math:`y - y_0 = 0`
Parameters
----------
y0 : float, optional
Location of the plane. Defaults to 0.
boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface. Only axis-aligned periodicity is
supported, i.e., x-planes can only be paired with x-planes.
name : str, optional
Name of the plane. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
y0 : float
Location of the plane
    boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}
        Boundary condition that defines the behavior for particles hitting the
        surface.
    periodic_surface : openmc.Surface
        If a periodic boundary condition is used, the surface with which this
        one is periodic
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'y-plane'
_coeff_keys = ('y0',)
def __init__(self, y0=0., *args, **kwargs):
# work around for accepting Surface kwargs as positional parameters
# until they are deprecated
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
self.y0 = y0
y0 = SurfaceCoefficient('y0')
a = SurfaceCoefficient(0.)
b = SurfaceCoefficient(1.)
c = SurfaceCoefficient(0.)
d = y0
def evaluate(self, point):
return point[1] - self.y0
class ZPlane(PlaneMixin, Surface):
"""A plane perpendicular to the z axis of the form :math:`z - z_0 = 0`
Parameters
----------
z0 : float, optional
Location of the plane. Defaults to 0.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}, optional
        Boundary condition that defines the behavior for particles hitting the
        surface. Defaults to transmissive boundary condition where particles
        freely pass through the surface. Only axis-aligned periodicity is
        supported, i.e., z-planes can only be paired with z-planes.
name : str, optional
Name of the plane. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
z0 : float
Location of the plane
    boundary_type : {'transmission', 'vacuum', 'reflective', 'periodic', 'white'}
        Boundary condition that defines the behavior for particles hitting the
        surface.
    periodic_surface : openmc.Surface
        If a periodic boundary condition is used, the surface with which this
        one is periodic
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'z-plane'
_coeff_keys = ('z0',)
def __init__(self, z0=0., *args, **kwargs):
# work around for accepting Surface kwargs as positional parameters
# until they are deprecated
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
self.z0 = z0
z0 = SurfaceCoefficient('z0')
a = SurfaceCoefficient(0.)
b = SurfaceCoefficient(0.)
c = SurfaceCoefficient(1.)
d = z0
def evaluate(self, point):
return point[2] - self.z0
class QuadricMixin:
"""A Mixin class implementing common functionality for quadric surfaces"""
@property
def _origin(self):
return np.array((self.x0, self.y0, self.z0))
@property
def _axis(self):
axis = np.array((self.dx, self.dy, self.dz))
return axis / np.linalg.norm(axis)
    def get_Abc(self, coeffs=None):
        """Compute matrix, vector, and scalar coefficients for this surface or
        for a specified set of coefficients, i.e. the A, b, and c in
        :math:`x^T A x + b^T x + c = 0`.
        Parameters
        ----------
        coeffs : tuple, optional
            Tuple of coefficients from which to compute the quadric elements.
            If none are supplied the coefficients of this surface will be used.
        Returns
        -------
        A, bvec, k : 3x3 numpy array, length-3 numpy array, and float
            Matrix, vector, and scalar coefficients of the quadric form
        """
if coeffs is None:
a, b, c, d, e, f, g, h, j, k = self._get_base_coeffs()
else:
a, b, c, d, e, f, g, h, j, k = coeffs
A = np.array([[a, d/2, f/2], [d/2, b, e/2], [f/2, e/2, c]])
bvec = np.array([g, h, j])
return A, bvec, k
def eigh(self, coeffs=None):
"""Wrapper method for returning eigenvalues and eigenvectors of this
quadric surface which is used for transformations.
Parameters
----------
coeffs : tuple, optional
Tuple of coefficients from which to compute the quadric elements.
If none are supplied the coefficients of this surface will be used.
Returns
-------
w, v : tuple of numpy arrays with shapes (3,) and (3,3) respectively
Returns the eigenvalues and eigenvectors of the quadric matrix A
that represents the supplied coefficients. The vector w contains
the eigenvalues in ascending order and the matrix v contains the
eigenvectors such that v[:,i] is the eigenvector corresponding to
the eigenvalue w[i].
"""
return np.linalg.eigh(self.get_Abc(coeffs=coeffs)[0])
def evaluate(self, point):
"""Evaluate the surface equation at a given point.
Parameters
----------
point : 3-tuple of float
The Cartesian coordinates, :math:`(x',y',z')`, at which the surface
equation should be evaluated.
Returns
-------
float
:math:`Ax'^2 + By'^2 + Cz'^2 + Dx'y' + Ey'z' + Fx'z' + Gx' + Hy' +
Jz' + K = 0`
"""
x = np.asarray(point)
A, b, c = self.get_Abc()
return x.T @ A @ x + b.T @ x + c
def translate(self, vector, inplace=False):
"""Translate surface in given direction
Parameters
----------
vector : iterable of float
Direction in which surface should be translated
        inplace : boolean
            Whether to modify this Surface in place rather than returning a
            translated clone. Defaults to False
Returns
-------
openmc.Surface
Translated surface
"""
vector = np.asarray(vector)
if np.allclose(vector, 0., rtol=0., atol=self._atol):
return self
surf = self if inplace else self.clone()
if hasattr(self, 'x0'):
for vi, xi in zip(vector, ('x0', 'y0', 'z0')):
val = getattr(surf, xi)
try:
setattr(surf, xi, val + vi)
except AttributeError:
                    # That attribute is read-only, e.g. x0 for XCylinder
pass
else:
A, bvec, cnst = self.get_Abc()
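            # Substituting r -> r - v into f(r) = r^T A r + b^T r + k shifts
            # the linear coefficients to b - 2 A v and the constant to
            # k + v^T A v - b^T v; the next two lines compute exactly that.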
g, h, j = bvec - 2*vector.T @ A
k = cnst + vector.T @ A @ vector - bvec.T @ vector
for key, val in zip(('g', 'h', 'j', 'k'), (g, h, j, k)):
setattr(surf, key, val)
return surf
def rotate(self, rotation, pivot=(0., 0., 0.), order='xyz', inplace=False):
# Get pivot and rotation matrix
pivot = np.asarray(pivot)
rotation = np.asarray(rotation, dtype=float)
        # Allow rotation matrix to be passed in directly, otherwise build it
if rotation.ndim == 2:
check_length('surface rotation', rotation.ravel(), 9)
Rmat = rotation
else:
Rmat = get_rotation_matrix(rotation, order=order)
# Translate surface to the pivot point
tsurf = self.translate(-pivot, inplace=inplace)
# If the surface is already generalized just clone it
if type(tsurf) is tsurf._virtual_base:
surf = tsurf if inplace else tsurf.clone()
else:
base_cls = type(tsurf)._virtual_base
# Copy necessary surface attributes to new kwargs dictionary
kwargs = {'boundary_type': tsurf.boundary_type, 'name': tsurf.name}
if inplace:
kwargs['surface_id'] = tsurf.id
kwargs.update({k: getattr(tsurf, k) for k in base_cls._coeff_keys})
# Create new instance of the virtual base class
surf = base_cls(**kwargs)
# Perform rotations on axis, origin, or quadric coefficients
if hasattr(surf, 'dx'):
for key, val in zip(('dx', 'dy', 'dz'), Rmat @ tsurf._axis):
setattr(surf, key, val)
if hasattr(surf, 'x0'):
for key, val in zip(('x0', 'y0', 'z0'), Rmat @ tsurf._origin):
setattr(surf, key, val)
else:
A, bvec, k = surf.get_Abc()
Arot = Rmat @ A @ Rmat.T
a, b, c = np.diagonal(Arot)
d, e, f = 2*Arot[0, 1], 2*Arot[1, 2], 2*Arot[0, 2]
g, h, j = Rmat @ bvec
for key, val in zip(surf._coeff_keys, (a, b, c, d, e, f, g, h, j, k)):
setattr(surf, key, val)
# translate back to the original frame and return the surface
return surf.translate(pivot, inplace=inplace)
class Cylinder(QuadricMixin, Surface):
"""A cylinder with radius r, centered on the point (x0, y0, z0) with an
axis specified by the line through points (x0, y0, z0) and (x0+dx, y0+dy,
z0+dz)
Parameters
----------
x0 : float, optional
x-coordinate for the origin of the Cylinder. Defaults to 0
y0 : float, optional
y-coordinate for the origin of the Cylinder. Defaults to 0
z0 : float, optional
z-coordinate for the origin of the Cylinder. Defaults to 0
r : float, optional
Radius of the cylinder. Defaults to 1.
dx : float, optional
x-component of the vector representing the axis of the cylinder.
Defaults to 0.
dy : float, optional
y-component of the vector representing the axis of the cylinder.
Defaults to 0.
dz : float, optional
z-component of the vector representing the axis of the cylinder.
Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cylinder. If not specified, the name will be the empty
string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate for the origin of the Cylinder
y0 : float
y-coordinate for the origin of the Cylinder
z0 : float
z-coordinate for the origin of the Cylinder
r : float
Radius of the cylinder
dx : float
x-component of the vector representing the axis of the cylinder
dy : float
y-component of the vector representing the axis of the cylinder
dz : float
z-component of the vector representing the axis of the cylinder
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'cylinder'
_coeff_keys = ('x0', 'y0', 'z0', 'r', 'dx', 'dy', 'dz')
def __init__(self, x0=0., y0=0., z0=0., r=1., dx=0., dy=0., dz=1., *args,
**kwargs):
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, r, dx, dy, dz)):
setattr(self, key, val)
@classmethod
def __subclasshook__(cls, c):
if cls is Cylinder and c in (XCylinder, YCylinder, ZCylinder):
return True
return NotImplemented
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r = SurfaceCoefficient('r')
dx = SurfaceCoefficient('dx')
dy = SurfaceCoefficient('dy')
dz = SurfaceCoefficient('dz')
def bounding_box(self, side):
if side == '-':
r = self.r
ll = [xi - r if np.isclose(dxi, 0., rtol=0., atol=self._atol)
else -np.inf for xi, dxi in zip(self._origin, self._axis)]
ur = [xi + r if np.isclose(dxi, 0., rtol=0., atol=self._atol)
else np.inf for xi, dxi in zip(self._origin, self._axis)]
return (np.array(ll), np.array(ur))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
def _get_base_coeffs(self):
# Get x, y, z coordinates of two points
x1, y1, z1 = self._origin
x2, y2, z2 = self._origin + self._axis
r = self.r
# Define intermediate terms
dx = x2 - x1
dy = y2 - y1
dz = z2 - z1
cx = y1*z2 - y2*z1
cy = x2*z1 - x1*z2
cz = x1*y2 - x2*y1
# Given p=(x,y,z), p1=(x1, y1, z1), p2=(x2, y2, z2), the equation
# for the cylinder can be derived as
# r = |(p - p1) ⨯ (p - p2)| / |p2 - p1|.
# Expanding out all terms and grouping according to what Quadric
# expects gives the following coefficients.
a = dy*dy + dz*dz
b = dx*dx + dz*dz
c = dx*dx + dy*dy
d = -2*dx*dy
e = -2*dy*dz
f = -2*dx*dz
g = 2*(cy*dz - cz*dy)
h = 2*(cz*dx - cx*dz)
j = 2*(cx*dy - cy*dx)
k = cx*cx + cy*cy + cz*cz - (dx*dx + dy*dy + dz*dz)*r*r
return (a, b, c, d, e, f, g, h, j, k)
@classmethod
def from_points(cls, p1, p2, r=1., **kwargs):
"""Return a cylinder given points that define the axis and a radius.
.. versionadded:: 0.12
Parameters
----------
p1, p2 : 3-tuples
            Points that define the axis of the cylinder; p1 will be used as
            (x0, y0, z0)
r : float, optional
Radius of the cylinder. Defaults to 1.
kwargs : dict
Keyword arguments passed to the :class:`Cylinder` constructor
Returns
-------
Cylinder
Cylinder that has an axis through the points p1 and p2, and a
radius r.
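        Examples
        --------
        A minimal sketch: a cylinder of radius 2 whose axis runs through the
        origin and (0, 0, 1), i.e. a cylinder aligned with the z-axis.
        >>> cyl = Cylinder.from_points((0., 0., 0.), (0., 0., 1.), r=2.)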
"""
# Convert to numpy arrays
p1 = np.asarray(p1)
p2 = np.asarray(p2)
x0, y0, z0 = p1
dx, dy, dz = p2 - p1
return cls(x0=x0, y0=y0, z0=z0, r=r, dx=dx, dy=dy, dz=dz, **kwargs)
def to_xml_element(self):
"""Return XML representation of the surface
Returns
-------
element : xml.etree.ElementTree.Element
XML element containing source data
"""
# This method overrides Surface.to_xml_element to generate a Quadric
# since the C++ layer doesn't support Cylinders right now
with catch_warnings():
simplefilter('ignore', IDWarning)
kwargs = {'boundary_type': self.boundary_type, 'name': self.name,
'surface_id': self.id}
quad_rep = Quadric(*self._get_base_coeffs(), **kwargs)
return quad_rep.to_xml_element()
class XCylinder(QuadricMixin, Surface):
"""An infinite cylinder whose length is parallel to the x-axis of the form
:math:`(y - y_0)^2 + (z - z_0)^2 = r^2`.
Parameters
----------
y0 : float, optional
y-coordinate for the origin of the Cylinder. Defaults to 0
z0 : float, optional
z-coordinate for the origin of the Cylinder. Defaults to 0
r : float, optional
Radius of the cylinder. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cylinder. If not specified, the name will be the empty
string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
y0 : float
y-coordinate for the origin of the Cylinder
z0 : float
z-coordinate for the origin of the Cylinder
r : float
Radius of the cylinder
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'x-cylinder'
_coeff_keys = ('y0', 'z0', 'r')
def __init__(self, y0=0., z0=0., r=1., *args, **kwargs):
R = kwargs.pop('R', None)
if R is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r', 'R'),
FutureWarning)
r = R
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (y0, z0, r)):
setattr(self, key, val)
x0 = SurfaceCoefficient(0.)
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r = SurfaceCoefficient('r')
dx = SurfaceCoefficient(1.)
dy = SurfaceCoefficient(0.)
dz = SurfaceCoefficient(0.)
def _get_base_coeffs(self):
y0, z0, r = self.y0, self.z0, self.r
a = d = e = f = g = 0.
b = c = 1.
h, j, k = -2*y0, -2*z0, y0*y0 + z0*z0 - r*r
return (a, b, c, d, e, f, g, h, j, k)
def bounding_box(self, side):
if side == '-':
return (np.array([-np.inf, self.y0 - self.r, self.z0 - self.r]),
np.array([np.inf, self.y0 + self.r, self.z0 + self.r]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
def evaluate(self, point):
y = point[1] - self.y0
z = point[2] - self.z0
return y*y + z*z - self.r**2
class YCylinder(QuadricMixin, Surface):
"""An infinite cylinder whose length is parallel to the y-axis of the form
:math:`(x - x_0)^2 + (z - z_0)^2 = r^2`.
Parameters
----------
x0 : float, optional
x-coordinate for the origin of the Cylinder. Defaults to 0
z0 : float, optional
z-coordinate for the origin of the Cylinder. Defaults to 0
r : float, optional
Radius of the cylinder. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cylinder. If not specified, the name will be the empty
string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate for the origin of the Cylinder
z0 : float
z-coordinate for the origin of the Cylinder
r : float
Radius of the cylinder
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'y-cylinder'
_coeff_keys = ('x0', 'z0', 'r')
def __init__(self, x0=0., z0=0., r=1., *args, **kwargs):
R = kwargs.pop('R', None)
if R is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r', 'R'),
FutureWarning)
r = R
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, z0, r)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient(0.)
z0 = SurfaceCoefficient('z0')
r = SurfaceCoefficient('r')
dx = SurfaceCoefficient(0.)
dy = SurfaceCoefficient(1.)
dz = SurfaceCoefficient(0.)
def _get_base_coeffs(self):
x0, z0, r = self.x0, self.z0, self.r
b = d = e = f = h = 0.
a = c = 1.
g, j, k = -2*x0, -2*z0, x0*x0 + z0*z0 - r*r
return (a, b, c, d, e, f, g, h, j, k)
def bounding_box(self, side):
if side == '-':
return (np.array([self.x0 - self.r, -np.inf, self.z0 - self.r]),
np.array([self.x0 + self.r, np.inf, self.z0 + self.r]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
def evaluate(self, point):
x = point[0] - self.x0
z = point[2] - self.z0
return x*x + z*z - self.r**2
class ZCylinder(QuadricMixin, Surface):
"""An infinite cylinder whose length is parallel to the z-axis of the form
:math:`(x - x_0)^2 + (y - y_0)^2 = r^2`.
Parameters
----------
x0 : float, optional
x-coordinate for the origin of the Cylinder. Defaults to 0
y0 : float, optional
y-coordinate for the origin of the Cylinder. Defaults to 0
r : float, optional
Radius of the cylinder. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cylinder. If not specified, the name will be the empty
string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate for the origin of the Cylinder
y0 : float
y-coordinate for the origin of the Cylinder
r : float
Radius of the cylinder
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'z-cylinder'
_coeff_keys = ('x0', 'y0', 'r')
def __init__(self, x0=0., y0=0., r=1., *args, **kwargs):
R = kwargs.pop('R', None)
if R is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r', 'R'),
FutureWarning)
r = R
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, r)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient(0.)
r = SurfaceCoefficient('r')
dx = SurfaceCoefficient(0.)
dy = SurfaceCoefficient(0.)
dz = SurfaceCoefficient(1.)
def _get_base_coeffs(self):
x0, y0, r = self.x0, self.y0, self.r
c = d = e = f = j = 0.
a = b = 1.
g, h, k = -2*x0, -2*y0, x0*x0 + y0*y0 - r*r
return (a, b, c, d, e, f, g, h, j, k)
def bounding_box(self, side):
if side == '-':
return (np.array([self.x0 - self.r, self.y0 - self.r, -np.inf]),
np.array([self.x0 + self.r, self.y0 + self.r, np.inf]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
return x*x + y*y - self.r**2
class Sphere(QuadricMixin, Surface):
"""A sphere of the form :math:`(x - x_0)^2 + (y - y_0)^2 + (z - z_0)^2 = r^2`.
Parameters
----------
x0 : float, optional
x-coordinate of the center of the sphere. Defaults to 0.
y0 : float, optional
y-coordinate of the center of the sphere. Defaults to 0.
z0 : float, optional
z-coordinate of the center of the sphere. Defaults to 0.
r : float, optional
Radius of the sphere. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the sphere. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate of the center of the sphere
y0 : float
y-coordinate of the center of the sphere
z0 : float
z-coordinate of the center of the sphere
r : float
Radius of the sphere
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'sphere'
_coeff_keys = ('x0', 'y0', 'z0', 'r')
def __init__(self, x0=0., y0=0., z0=0., r=1., *args, **kwargs):
R = kwargs.pop('R', None)
if R is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r', 'R'),
FutureWarning)
r = R
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, r)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r = SurfaceCoefficient('r')
def _get_base_coeffs(self):
x0, y0, z0, r = self.x0, self.y0, self.z0, self.r
a = b = c = 1.
d = e = f = 0.
g, h, j = -2*x0, -2*y0, -2*z0
k = x0*x0 + y0*y0 + z0*z0 - r*r
return (a, b, c, d, e, f, g, h, j, k)
def bounding_box(self, side):
if side == '-':
return (np.array([self.x0 - self.r, self.y0 - self.r,
self.z0 - self.r]),
np.array([self.x0 + self.r, self.y0 + self.r,
self.z0 + self.r]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
return x*x + y*y + z*z - self.r**2
class Cone(QuadricMixin, Surface):
"""A conical surface parallel to the x-, y-, or z-axis.
Parameters
----------
x0 : float, optional
x-coordinate of the apex. Defaults to 0.
y0 : float, optional
y-coordinate of the apex. Defaults to 0.
z0 : float, optional
z-coordinate of the apex. Defaults to 0.
r2 : float, optional
        Parameter related to the aperture. Defaults to 1.
dx : float, optional
x-component of the vector representing the axis of the cone.
Defaults to 0.
dy : float, optional
y-component of the vector representing the axis of the cone.
Defaults to 0.
dz : float, optional
z-component of the vector representing the axis of the cone.
Defaults to 1.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str
Name of the cone. If not specified, the name will be the empty string.
Attributes
----------
x0 : float
x-coordinate of the apex
y0 : float
y-coordinate of the apex
z0 : float
z-coordinate of the apex
r2 : float
        Parameter related to the aperture
dx : float
x-component of the vector representing the axis of the cone.
dy : float
y-component of the vector representing the axis of the cone.
dz : float
z-component of the vector representing the axis of the cone.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'cone'
_coeff_keys = ('x0', 'y0', 'z0', 'r2', 'dx', 'dy', 'dz')
def __init__(self, x0=0., y0=0., z0=0., r2=1., dx=0., dy=0., dz=1., *args,
**kwargs):
R2 = kwargs.pop('R2', None)
if R2 is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r2', 'R2'),
FutureWarning)
r2 = R2
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, r2, dx, dy, dz)):
setattr(self, key, val)
@classmethod
def __subclasshook__(cls, c):
if cls is Cone and c in (XCone, YCone, ZCone):
return True
return NotImplemented
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r2 = SurfaceCoefficient('r2')
dx = SurfaceCoefficient('dx')
dy = SurfaceCoefficient('dy')
dz = SurfaceCoefficient('dz')
def _get_base_coeffs(self):
# The equation for a general cone with vertex at point p = (x0, y0, z0)
# and axis specified by the unit vector d = (dx, dy, dz) and opening
# half angle theta can be described by the equation
#
# (d*(r - p))^2 - (r - p)*(r - p)cos^2(theta) = 0
#
        # where * is the dot product and the vector r is the evaluation point
# r = (x, y, z)
#
# The argument r2 for cones is actually tan^2(theta) so that
# cos^2(theta) = 1 / (1 + r2)
x0, y0, z0 = self._origin
dx, dy, dz = self._axis
cos2 = 1 / (1 + self.r2)
a = cos2 - dx*dx
b = cos2 - dy*dy
c = cos2 - dz*dz
d = -2*dx*dy
e = -2*dy*dz
f = -2*dx*dz
g = 2*(dx*(dy*y0 + dz*z0) - a*x0)
h = 2*(dy*(dx*x0 + dz*z0) - b*y0)
j = 2*(dz*(dx*x0 + dy*y0) - c*z0)
k = a*x0*x0 + b*y0*y0 + c*z0*z0 - 2*(dx*dy*x0*y0 + dy*dz*y0*z0 +
dx*dz*x0*z0)
return (a, b, c, d, e, f, g, h, j, k)
def to_xml_element(self):
"""Return XML representation of the surface
Returns
-------
element : xml.etree.ElementTree.Element
XML element containing source data
"""
# This method overrides Surface.to_xml_element to generate a Quadric
# since the C++ layer doesn't support Cones right now
with catch_warnings():
simplefilter('ignore', IDWarning)
kwargs = {'boundary_type': self.boundary_type, 'name': self.name,
'surface_id': self.id}
quad_rep = Quadric(*self._get_base_coeffs(), **kwargs)
return quad_rep.to_xml_element()
class XCone(QuadricMixin, Surface):
"""A cone parallel to the x-axis of the form :math:`(y - y_0)^2 + (z - z_0)^2 =
r^2 (x - x_0)^2`.
Parameters
----------
x0 : float, optional
x-coordinate of the apex. Defaults to 0.
y0 : float, optional
y-coordinate of the apex. Defaults to 0.
z0 : float, optional
z-coordinate of the apex. Defaults to 0.
r2 : float, optional
        Parameter related to the aperture. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cone. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate of the apex
y0 : float
y-coordinate of the apex
z0 : float
z-coordinate of the apex
r2 : float
        Parameter related to the aperture
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'x-cone'
_coeff_keys = ('x0', 'y0', 'z0', 'r2')
def __init__(self, x0=0., y0=0., z0=0., r2=1., *args, **kwargs):
R2 = kwargs.pop('R2', None)
if R2 is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r2', 'R2'),
FutureWarning)
r2 = R2
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, r2)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r2 = SurfaceCoefficient('r2')
dx = SurfaceCoefficient(1.)
dy = SurfaceCoefficient(0.)
dz = SurfaceCoefficient(0.)
def _get_base_coeffs(self):
x0, y0, z0, r2 = self.x0, self.y0, self.z0, self.r2
a = -r2
b = c = 1.
d = e = f = 0.
g, h, j = 2*x0*r2, -2*y0, -2*z0
k = y0*y0 + z0*z0 - r2*x0*x0
return (a, b, c, d, e, f, g, h, j, k)
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
return y*y + z*z - self.r2*x*x
class YCone(QuadricMixin, Surface):
"""A cone parallel to the y-axis of the form :math:`(x - x_0)^2 + (z - z_0)^2 =
r^2 (y - y_0)^2`.
Parameters
----------
x0 : float, optional
x-coordinate of the apex. Defaults to 0.
y0 : float, optional
y-coordinate of the apex. Defaults to 0.
z0 : float, optional
z-coordinate of the apex. Defaults to 0.
r2 : float, optional
        Parameter related to the aperture. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cone. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate of the apex
y0 : float
y-coordinate of the apex
z0 : float
z-coordinate of the apex
r2 : float
        Parameter related to the aperture
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'y-cone'
_coeff_keys = ('x0', 'y0', 'z0', 'r2')
def __init__(self, x0=0., y0=0., z0=0., r2=1., *args, **kwargs):
R2 = kwargs.pop('R2', None)
if R2 is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r2', 'R2'),
FutureWarning)
r2 = R2
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, r2)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r2 = SurfaceCoefficient('r2')
dx = SurfaceCoefficient(0.)
dy = SurfaceCoefficient(1.)
dz = SurfaceCoefficient(0.)
def _get_base_coeffs(self):
x0, y0, z0, r2 = self.x0, self.y0, self.z0, self.r2
b = -r2
a = c = 1.
d = e = f = 0.
g, h, j = -2*x0, 2*y0*r2, -2*z0
k = x0*x0 + z0*z0 - r2*y0*y0
return (a, b, c, d, e, f, g, h, j, k)
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
return x*x + z*z - self.r2*y*y
class ZCone(QuadricMixin, Surface):
    """A cone parallel to the z-axis of the form :math:`(x - x_0)^2 + (y - y_0)^2 =
    r^2 (z - z_0)^2`.
Parameters
----------
x0 : float, optional
x-coordinate of the apex. Defaults to 0.
y0 : float, optional
y-coordinate of the apex. Defaults to 0.
z0 : float, optional
z-coordinate of the apex. Defaults to 0.
r2 : float, optional
        Parameter related to the aperture. Defaults to 1.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the cone. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
x0 : float
x-coordinate of the apex
y0 : float
y-coordinate of the apex
z0 : float
z-coordinate of the apex
r2 : float
        Parameter related to the aperture
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'z-cone'
_coeff_keys = ('x0', 'y0', 'z0', 'r2')
def __init__(self, x0=0., y0=0., z0=0., r2=1., *args, **kwargs):
R2 = kwargs.pop('R2', None)
if R2 is not None:
warn(_WARNING_UPPER.format(type(self).__name__, 'r2', 'R2'),
FutureWarning)
r2 = R2
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, r2)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
r2 = SurfaceCoefficient('r2')
dx = SurfaceCoefficient(0.)
dy = SurfaceCoefficient(0.)
dz = SurfaceCoefficient(1.)
def _get_base_coeffs(self):
x0, y0, z0, r2 = self.x0, self.y0, self.z0, self.r2
c = -r2
a = b = 1.
d = e = f = 0.
g, h, j = -2*x0, -2*y0, 2*z0*r2
k = x0*x0 + y0*y0 - r2*z0*z0
return (a, b, c, d, e, f, g, h, j, k)
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
return x*x + y*y - self.r2*z*z
class Quadric(QuadricMixin, Surface):
"""A surface of the form :math:`Ax^2 + By^2 + Cz^2 + Dxy + Eyz + Fxz + Gx + Hy +
Jz + K = 0`.
Parameters
----------
a, b, c, d, e, f, g, h, j, k : float, optional
coefficients for the surface. All default to 0.
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}, optional
Boundary condition that defines the behavior for particles hitting the
surface. Defaults to transmissive boundary condition where particles
freely pass through the surface.
name : str, optional
Name of the surface. If not specified, the name will be the empty string.
surface_id : int, optional
Unique identifier for the surface. If not specified, an identifier will
automatically be assigned.
Attributes
----------
a, b, c, d, e, f, g, h, j, k : float
coefficients for the surface
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'quadric'
_coeff_keys = ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k')
def __init__(self, a=0., b=0., c=0., d=0., e=0., f=0., g=0., h=0., j=0.,
k=0., *args, **kwargs):
kwargs = _future_kwargs_warning_helper(type(self), *args, **kwargs)
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (a, b, c, d, e, f, g, h, j, k)):
setattr(self, key, val)
a = SurfaceCoefficient('a')
b = SurfaceCoefficient('b')
c = SurfaceCoefficient('c')
d = SurfaceCoefficient('d')
e = SurfaceCoefficient('e')
f = SurfaceCoefficient('f')
g = SurfaceCoefficient('g')
h = SurfaceCoefficient('h')
j = SurfaceCoefficient('j')
k = SurfaceCoefficient('k')
def _get_base_coeffs(self):
return tuple(getattr(self, c) for c in self._coeff_keys)
class TorusMixin:
"""A Mixin class implementing common functionality for torus surfaces"""
_coeff_keys = ('x0', 'y0', 'z0', 'a', 'b', 'c')
def __init__(self, x0=0., y0=0., z0=0., a=0., b=0., c=0., **kwargs):
super().__init__(**kwargs)
for key, val in zip(self._coeff_keys, (x0, y0, z0, a, b, c)):
setattr(self, key, val)
x0 = SurfaceCoefficient('x0')
y0 = SurfaceCoefficient('y0')
z0 = SurfaceCoefficient('z0')
a = SurfaceCoefficient('a')
b = SurfaceCoefficient('b')
c = SurfaceCoefficient('c')
def translate(self, vector, inplace=False):
surf = self if inplace else self.clone()
surf.x0 += vector[0]
surf.y0 += vector[1]
surf.z0 += vector[2]
return surf
def rotate(self, rotation, pivot=(0., 0., 0.), order='xyz', inplace=False):
pivot = np.asarray(pivot)
rotation = np.asarray(rotation, dtype=float)
# Allow rotation matrix to be passed in directly, otherwise build it
if rotation.ndim == 2:
check_length('surface rotation', rotation.ravel(), 9)
Rmat = rotation
else:
Rmat = get_rotation_matrix(rotation, order=order)
        # Only trivial (signed axis-permutation) rotation matrices can be
        # handled; a generic rotation would take the torus out of its
        # axis-aligned form.
close = np.isclose
if not np.all(close(Rmat, -1.0) | close(Rmat, 0.0) | close(Rmat, 1.0)):
raise NotImplementedError('Torus surfaces cannot handle generic rotations')
# Translate surface to pivot
surf = self.translate(-pivot, inplace=inplace)
# Determine "center" of torus and a point above it (along main axis)
center = [surf.x0, surf.y0, surf.z0]
above_center = center.copy()
index = ['x-torus', 'y-torus', 'z-torus'].index(surf._type)
above_center[index] += 1
# Compute new rotated torus center
center = Rmat @ center
# Figure out which axis should be used after rotation
above_center = Rmat @ above_center
new_index = np.where(np.isclose(np.abs(above_center - center), 1.0))[0][0]
cls = [XTorus, YTorus, ZTorus][new_index]
# Create rotated torus
kwargs = {
'boundary_type': surf.boundary_type, 'name': surf.name,
'a': surf.a, 'b': surf.b, 'c': surf.c
}
if inplace:
kwargs['surface_id'] = surf.id
surf = cls(x0=center[0], y0=center[1], z0=center[2], **kwargs)
return surf.translate(pivot, inplace=inplace)
def _get_base_coeffs(self):
raise NotImplementedError
class XTorus(TorusMixin, Surface):
r"""A torus of the form :math:`(x - x_0)^2/B^2 + (\sqrt{(y - y_0)^2 + (z -
z_0)^2} - A)^2/C^2 - 1 = 0`.
Parameters
----------
x0 : float
x-coordinate of the center of the axis of revolution
y0 : float
y-coordinate of the center of the axis of revolution
z0 : float
z-coordinate of the center of the axis of revolution
a : float
Major radius of the torus
b : float
Minor radius of the torus (parallel to axis of revolution)
c : float
Minor radius of the torus (perpendicular to axis of revolution)
kwargs : dict
Keyword arguments passed to the :class:`Surface` constructor
Attributes
----------
x0 : float
x-coordinate of the center of the axis of revolution
y0 : float
y-coordinate of the center of the axis of revolution
z0 : float
z-coordinate of the center of the axis of revolution
a : float
Major radius of the torus
b : float
Minor radius of the torus (parallel to axis of revolution)
c : float
Minor radius of the torus (perpendicular to axis of revolution)
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'x-torus'
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
a = self.a
b = self.b
c = self.c
return (x*x)/(b*b) + (math.sqrt(y*y + z*z) - a)**2/(c*c) - 1
def bounding_box(self, side):
x0, y0, z0 = self.x0, self.y0, self.z0
a, b, c = self.a, self.b, self.c
if side == '-':
return (np.array([x0 - b, y0 - a - c, z0 - a - c]),
np.array([x0 + b, y0 + a + c, z0 + a + c]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
class YTorus(TorusMixin, Surface):
r"""A torus of the form :math:`(y - y_0)^2/B^2 + (\sqrt{(x - x_0)^2 + (z -
z_0)^2} - A)^2/C^2 - 1 = 0`.
Parameters
----------
x0 : float
x-coordinate of the center of the axis of revolution
y0 : float
y-coordinate of the center of the axis of revolution
z0 : float
z-coordinate of the center of the axis of revolution
a : float
Major radius of the torus
b : float
Minor radius of the torus (parallel to axis of revolution)
c : float
Minor radius of the torus (perpendicular to axis of revolution)
kwargs : dict
Keyword arguments passed to the :class:`Surface` constructor
Attributes
----------
x0 : float
x-coordinate of the center of the axis of revolution
y0 : float
y-coordinate of the center of the axis of revolution
z0 : float
z-coordinate of the center of the axis of revolution
a : float
Major radius of the torus
b : float
Minor radius of the torus (parallel to axis of revolution)
c : float
Minor radius of the torus (perpendicular to axis of revolution)
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'y-torus'
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
a = self.a
b = self.b
c = self.c
return (y*y)/(b*b) + (math.sqrt(x*x + z*z) - a)**2/(c*c) - 1
def bounding_box(self, side):
x0, y0, z0 = self.x0, self.y0, self.z0
a, b, c = self.a, self.b, self.c
if side == '-':
return (np.array([x0 - a - c, y0 - b, z0 - a - c]),
np.array([x0 + a + c, y0 + b, z0 + a + c]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
class ZTorus(TorusMixin, Surface):
r"""A torus of the form :math:`(z - z_0)^2/B^2 + (\sqrt{(x - x_0)^2 + (y -
y_0)^2} - A)^2/C^2 - 1 = 0`.
Parameters
----------
x0 : float
x-coordinate of the center of the axis of revolution
y0 : float
y-coordinate of the center of the axis of revolution
z0 : float
z-coordinate of the center of the axis of revolution
a : float
Major radius of the torus
b : float
Minor radius of the torus (parallel to axis of revolution)
c : float
Minor radius of the torus (perpendicular to axis of revolution)
kwargs : dict
Keyword arguments passed to the :class:`Surface` constructor
Attributes
----------
x0 : float
x-coordinate of the center of the axis of revolution
y0 : float
y-coordinate of the center of the axis of revolution
z0 : float
z-coordinate of the center of the axis of revolution
a : float
Major radius of the torus
b : float
Minor radius of the torus (parallel to axis of revolution)
c : float
Minor radius of the torus (perpendicular to axis of revolution)
    boundary_type : {'transmission', 'vacuum', 'reflective', 'white'}
Boundary condition that defines the behavior for particles hitting the
surface.
coefficients : dict
Dictionary of surface coefficients
id : int
Unique identifier for the surface
name : str
Name of the surface
type : str
Type of the surface
"""
_type = 'z-torus'
def evaluate(self, point):
x = point[0] - self.x0
y = point[1] - self.y0
z = point[2] - self.z0
a = self.a
b = self.b
c = self.c
return (z*z)/(b*b) + (math.sqrt(x*x + y*y) - a)**2/(c*c) - 1
def bounding_box(self, side):
x0, y0, z0 = self.x0, self.y0, self.z0
a, b, c = self.a, self.b, self.c
if side == '-':
return (np.array([x0 - a - c, y0 - a - c, z0 - b]),
np.array([x0 + a + c, y0 + a + c, z0 + b]))
elif side == '+':
return (np.array([-np.inf, -np.inf, -np.inf]),
np.array([np.inf, np.inf, np.inf]))
class Halfspace(Region):
"""A positive or negative half-space region.
    A half-space is either of the two parts into which a two-dimensional surface
divides the three-dimensional Euclidean space. If the equation of the
surface is :math:`f(x,y,z) = 0`, the region for which :math:`f(x,y,z) < 0`
is referred to as the negative half-space and the region for which
:math:`f(x,y,z) > 0` is referred to as the positive half-space.
Instances of Halfspace are generally not instantiated directly. Rather, they
can be created from an existing Surface through the __neg__ and __pos__
operators, as the following example demonstrates:
>>> sphere = openmc.Sphere(surface_id=1, r=10.0)
>>> inside_sphere = -sphere
>>> outside_sphere = +sphere
>>> type(inside_sphere)
<class 'openmc.surface.Halfspace'>
Parameters
----------
surface : openmc.Surface
Surface which divides Euclidean space.
side : {'+', '-'}
Indicates whether the positive or negative half-space is used.
Attributes
----------
surface : openmc.Surface
Surface which divides Euclidean space.
side : {'+', '-'}
Indicates whether the positive or negative half-space is used.
bounding_box : tuple of numpy.ndarray
Lower-left and upper-right coordinates of an axis-aligned bounding box
"""
def __init__(self, surface, side):
self.surface = surface
self.side = side
def __and__(self, other):
if isinstance(other, Intersection):
return Intersection([self] + other[:])
else:
return Intersection((self, other))
def __or__(self, other):
if isinstance(other, Union):
return Union([self] + other[:])
else:
return Union((self, other))
def __invert__(self):
return -self.surface if self.side == '+' else +self.surface
def __contains__(self, point):
"""Check whether a point is contained in the half-space.
Parameters
----------
point : 3-tuple of float
Cartesian coordinates, :math:`(x',y',z')`, of the point
Returns
-------
bool
Whether the point is in the half-space
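        Examples
        --------
        A short illustration using the sphere from the class-level example:
        >>> inside_sphere = -openmc.Sphere(r=10.0)
        >>> (0., 0., 0.) in inside_sphere
        True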
"""
val = self.surface.evaluate(point)
return val >= 0. if self.side == '+' else val < 0.
@property
def surface(self):
return self._surface
@surface.setter
def surface(self, surface):
check_type('surface', surface, Surface)
self._surface = surface
@property
def side(self):
return self._side
@side.setter
def side(self, side):
check_value('side', side, ('+', '-'))
self._side = side
@property
def bounding_box(self):
return self.surface.bounding_box(self.side)
def __str__(self):
return '-' + str(self.surface.id) if self.side == '-' \
else str(self.surface.id)
def get_surfaces(self, surfaces=None):
"""
Returns the surface that this is a halfspace of.
Parameters
----------
surfaces: collections.OrderedDict, optional
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
Returns
-------
surfaces: collections.OrderedDict
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
"""
if surfaces is None:
surfaces = OrderedDict()
surfaces[self.surface.id] = self.surface
return surfaces
def remove_redundant_surfaces(self, redundant_surfaces):
"""Recursively remove all redundant surfaces referenced by this region
Parameters
----------
redundant_surfaces : dict
Dictionary mapping redundant surface IDs to surface IDs for the
:class:`openmc.Surface` instances that should replace them.
"""
surf = redundant_surfaces.get(self.surface.id)
if surf is not None:
self.surface = surf
def clone(self, memo=None):
"""Create a copy of this halfspace, with a cloned surface with a
unique ID.
Parameters
----------
memo : dict or None
A nested dictionary of previously cloned objects. This parameter
is used internally and should not be specified by the user.
Returns
-------
clone : openmc.Halfspace
The clone of this halfspace
"""
if memo is None:
            memo = {}
clone = deepcopy(self)
clone.surface = self.surface.clone(memo)
return clone
def translate(self, vector, memo=None):
"""Translate half-space in given direction
Parameters
----------
vector : iterable of float
Direction in which region should be translated
memo : dict or None
Dictionary used for memoization
Returns
-------
openmc.Halfspace
Translated half-space
"""
if memo is None:
memo = {}
# If translated surface not in memo, add it
key = (self.surface, tuple(vector))
if key not in memo:
memo[key] = self.surface.translate(vector)
# Return translated half-space
return type(self)(memo[key], self.side)
def rotate(self, rotation, pivot=(0., 0., 0.), order='xyz', inplace=False,
memo=None):
r"""Rotate surface by angles provided or by applying matrix directly.
.. versionadded:: 0.12
Parameters
----------
rotation : 3-tuple of float, or 3x3 iterable
A 3-tuple of angles :math:`(\phi, \theta, \psi)` in degrees where
the first element is the rotation about the x-axis in the fixed
laboratory frame, the second element is the rotation about the
y-axis in the fixed laboratory frame, and the third element is the
rotation about the z-axis in the fixed laboratory frame. The
rotations are active rotations. Additionally a 3x3 rotation matrix
can be specified directly either as a nested iterable or array.
pivot : iterable of float, optional
(x, y, z) coordinates for the point to rotate about. Defaults to
(0., 0., 0.)
order : str, optional
A string of 'x', 'y', and 'z' in some order specifying which
rotation to perform first, second, and third. Defaults to 'xyz'
which means, the rotation by angle :math:`\phi` about x will be
applied first, followed by :math:`\theta` about y and then
:math:`\psi` about z. This corresponds to an x-y-z extrinsic
rotation as well as a z-y'-x'' intrinsic rotation using Tait-Bryan
angles :math:`(\phi, \theta, \psi)`.
inplace : boolean
Whether or not to return a new instance of Surface or to modify the
coefficients of this Surface in place. Defaults to False.
memo : dict or None
Dictionary used for memoization
Returns
-------
openmc.Halfspace
            Rotated half-space
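        Examples
        --------
        A sketch: rotating the positive half-space of the plane :math:`x = 1`
        by 90 degrees about the z-axis.
        >>> region = +openmc.XPlane(x0=1.0)
        >>> rotated = region.rotate((0., 0., 90.))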
"""
if memo is None:
memo = {}
# If rotated surface not in memo, add it
key = (self.surface, tuple(rotation), tuple(pivot), order, inplace)
if key not in memo:
memo[key] = self.surface.rotate(rotation, pivot=pivot, order=order,
inplace=inplace)
# Return rotated half-space
return type(self)(memo[key], self.side)
_SURFACE_CLASSES = {cls._type: cls for cls in Surface.__subclasses__()}
# Set virtual base classes for "casting" up the hierarchy
Plane._virtual_base = Plane
XPlane._virtual_base = Plane
YPlane._virtual_base = Plane
ZPlane._virtual_base = Plane
Cylinder._virtual_base = Cylinder
XCylinder._virtual_base = Cylinder
YCylinder._virtual_base = Cylinder
ZCylinder._virtual_base = Cylinder
Cone._virtual_base = Cone
XCone._virtual_base = Cone
YCone._virtual_base = Cone
ZCone._virtual_base = Cone
Sphere._virtual_base = Sphere
Quadric._virtual_base = Quadric
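# Illustrative sketch (not part of the original module): a non-trivial
# rotation promotes an axis-aligned surface to its virtual base class, e.g.
#
#     cyl = XCylinder(y0=0., z0=0., r=1.)
#     rotated = cyl.rotate((0., 0., 45.))  # returns a general Cylinder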
avg_line_length: 33.205495 | max_line_length: 90 | alphanum_fraction: 0.584538
hexsha: 4a174cd1ea672e3d680ebe9698c178f961c70447 | size: 1483 | ext: py | lang: Python
max_stars_repo_path: python_code/vnev/Lib/site-packages/jdcloud_sdk/services/cdn/apis/QueryRefreshTaskByIdsRequest.py
max_stars_repo_name: Ureimu/weather-robot | max_stars_repo_head_hexsha: 7634195af388538a566ccea9f8a8534c5fb0f4b6
max_stars_repo_licenses: ["MIT"] | max_stars_count: 14 | max_stars_repo_stars_event_min_datetime: 2018-04-19T09:53:56.000Z | max_stars_repo_stars_event_max_datetime: 2022-01-27T06:05:48.000Z
max_issues_repo_path: python_code/vnev/Lib/site-packages/jdcloud_sdk/services/cdn/apis/QueryRefreshTaskByIdsRequest.py
max_issues_repo_name: Ureimu/weather-robot | max_issues_repo_head_hexsha: 7634195af388538a566ccea9f8a8534c5fb0f4b6
max_issues_repo_licenses: ["MIT"] | max_issues_count: 15 | max_issues_repo_issues_event_min_datetime: 2018-09-11T05:39:54.000Z | max_issues_repo_issues_event_max_datetime: 2021-07-02T12:38:02.000Z
max_forks_repo_path: python_code/vnev/Lib/site-packages/jdcloud_sdk/services/cdn/apis/QueryRefreshTaskByIdsRequest.py
max_forks_repo_name: Ureimu/weather-robot | max_forks_repo_head_hexsha: 7634195af388538a566ccea9f8a8534c5fb0f4b6
max_forks_repo_licenses: ["MIT"] | max_forks_count: 33 | max_forks_repo_forks_event_min_datetime: 2018-04-20T05:29:16.000Z | max_forks_repo_forks_event_max_datetime: 2022-02-17T09:10:05.000Z
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class QueryRefreshTaskByIdsRequest(JDCloudRequest):
"""
    Query refresh/prewarm tasks by taskIds
"""
def __init__(self, parameters, header=None, version="v1"):
super(QueryRefreshTaskByIdsRequest, self).__init__(
'/task:queryByIds', 'POST', header, version)
self.parameters = parameters
class QueryRefreshTaskByIdsParameters(object):
    def __init__(self):
"""
"""
self.taskIds = None
self.keyword = None
def setTaskIds(self, taskIds):
"""
        :param taskIds: (Optional) List of task IDs to query; at most 10 can be queried at once
"""
self.taskIds = taskIds
def setKeyword(self, keyword):
"""
        :param keyword: (Optional) Fuzzy-search keyword for task URLs
"""
self.keyword = keyword
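# Hypothetical usage sketch (the task IDs below are placeholders; actually
# sending the request requires a configured jdcloud_sdk client, omitted here):
#
#     params = QueryRefreshTaskByIdsParameters()
#     params.setTaskIds(['task-id-1', 'task-id-2'])
#     request = QueryRefreshTaskByIdsRequest(params)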
avg_line_length: 27.462963 | max_line_length: 75 | alphanum_fraction: 0.683075
hexsha: 4a174d1d6a4a96113e4d6890c26917c517d7441f | size: 1305 | ext: py | lang: Python
max_stars_repo_path: picshrink/mail_util.py | max_stars_repo_name: wtttc/apkshrink
max_stars_repo_head_hexsha: 50b61c38b64c4a4fe0b11e686045852a3c685f3d | max_stars_repo_licenses: ["MIT"] | max_stars_count: 3
max_stars_repo_stars_event_min_datetime: 2015-09-01T10:46:15.000Z | max_stars_repo_stars_event_max_datetime: 2016-05-20T09:29:42.000Z
max_issues_repo_path: picshrink/mail_util.py | max_issues_repo_name: wtttc/apkshrink
max_issues_repo_head_hexsha: 50b61c38b64c4a4fe0b11e686045852a3c685f3d | max_issues_repo_licenses: ["MIT"] | max_issues_count: null
max_forks_repo_path: picshrink/mail_util.py | max_forks_repo_name: wtttc/apkshrink
max_forks_repo_head_hexsha: 50b61c38b64c4a4fe0b11e686045852a3c685f3d | max_forks_repo_licenses: ["MIT"] | max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2020-12-08T10:37:32.000Z | max_forks_repo_forks_event_max_datetime: 2020-12-08T10:37:32.000Z
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib
__author__ = 'tiantong'
# mail_content should be a plain-text string; it is wrapped in MIMEText below
def sendMail(mail_content, subject,email_from, email_to, user_name, password, smtpurl, smtpport, list_to_send):
emailto = email_to
emailfrom = email_from
message = MIMEMultipart('alternative')
message['To'] = ", ".join(emailto)
message['From'] = emailfrom
message['Subject'] = subject
    # Wrap the plain-text body once and attach it. The original code
    # serialized the MIMEText part with unicode() and wrapped it again,
    # which embeds MIME headers inside the message body.
    storeplain = MIMEText(mail_content, 'plain')
    message.attach(storeplain)
if list_to_send is not None:
for key in list_to_send:
            with open(key, 'rb') as attachment:
                part = MIMEApplication(attachment.read())
part.add_header('Content-Disposition', 'attachment', filename=key)
message.attach(part)
deetsurl = smtplib.SMTP(smtpurl, smtpport)
deetsuser = user_name
deetspassword = password
deetsurl.ehlo()
deetsurl.starttls()
deetsurl.ehlo()
deetsurl.login(deetsuser, deetspassword)
deetsurl.sendmail(emailfrom, emailto, message.as_string())
deetsurl.quit()
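# Hypothetical usage sketch (all addresses, credentials, and the SMTP host
# are placeholders):
#
#     sendMail(mail_content=u'report body', subject=u'daily report',
#              email_from='bot@example.com', email_to=['dev@example.com'],
#              user_name='bot@example.com', password='app-password',
#              smtpurl='smtp.example.com', smtpport=587,
#              list_to_send=['report.txt'])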
avg_line_length: 29.659091 | max_line_length: 111 | alphanum_fraction: 0.70728
hexsha: 4a174d585a32843e35f9a317fe2219fcbf6eaace | size: 7453 | ext: py | lang: Python
max_stars_repo_path: paddlenlp/taskflow/models/sentiment_analysis_model.py
max_stars_repo_name: mukaiu/PaddleNLP | max_stars_repo_head_hexsha: 0315365dbafa6e3b1c7147121ba85e05884125a5
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null
max_issues_repo_path: paddlenlp/taskflow/models/sentiment_analysis_model.py
max_issues_repo_name: mukaiu/PaddleNLP | max_issues_repo_head_hexsha: 0315365dbafa6e3b1c7147121ba85e05884125a5
max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null
max_forks_repo_path: paddlenlp/taskflow/models/sentiment_analysis_model.py
max_forks_repo_name: mukaiu/PaddleNLP | max_forks_repo_head_hexsha: 0315365dbafa6e3b1c7147121ba85e05884125a5
max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null
# coding:utf-8
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddlenlp.seq2vec.encoder import BoWEncoder, LSTMEncoder
from paddlenlp.transformers import SkepPretrainedModel
class BoWModel(nn.Layer):
"""
    This class implements the Bag of Words Classification Network model to classify texts.
    At a high level, the model starts by embedding the tokens and running them through
    a word embedding. Then, we encode these representations with a `BoWEncoder`.
    Lastly, we take the output of the encoder to create a final representation,
    which is passed through some feed-forward layers to output logits (`output_layer`).
    Args:
        vocab_size(int): The vocab size used to create the embedding.
        num_classes(int): The number of classes for the classifier.
        emb_dim(int, optional): The size of the embedding, default value is 128.
        padding_idx(int, optional): The padding value in the embedding; the embedding value at
            padding_idx will not be updated. The default value is 0.
        hidden_size(int, optional): The output size of the linear layer after the bow encoder, default value is 128.
        fc_hidden_size(int, optional): The output size of the linear layer after the first linear, default value is 96.
"""
def __init__(self,
vocab_size,
num_classes,
emb_dim=128,
padding_idx=0,
hidden_size=128,
fc_hidden_size=96):
super().__init__()
self.embedder = nn.Embedding(vocab_size,
emb_dim,
padding_idx=padding_idx)
self.bow_encoder = BoWEncoder(emb_dim)
self.fc1 = nn.Linear(self.bow_encoder.get_output_dim(), hidden_size)
self.fc2 = nn.Linear(hidden_size, fc_hidden_size)
self.output_layer = nn.Linear(fc_hidden_size, num_classes)
def forward(self, text, seq_len=None):
# Shape: (batch_size, num_tokens, embedding_dim)
embedded_text = self.embedder(text)
# Shape: (batch_size, embedding_dim)
summed = self.bow_encoder(embedded_text)
encoded_text = paddle.tanh(summed)
# Shape: (batch_size, hidden_size)
fc1_out = paddle.tanh(self.fc1(encoded_text))
# Shape: (batch_size, fc_hidden_size)
fc2_out = paddle.tanh(self.fc2(fc1_out))
# Shape: (batch_size, num_classes)
logits = self.output_layer(fc2_out)
return logits
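# A minimal usage sketch for BoWModel (the vocab size and batch shape below are
# illustrative assumptions, not from the source):
#     model = BoWModel(vocab_size=1000, num_classes=2)
#     token_ids = paddle.randint(low=1, high=1000, shape=[4, 16])  # 4 sequences, 16 tokens each
#     logits = model(token_ids)  # Shape: (4, 2)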
class LSTMModel(nn.Layer):
"""
    This class implements an LSTM-based Classification Network model to classify texts.
    At a high level, the model starts by embedding the tokens and running them through
    a word embedding. Then, we encode these representations with an `LSTMEncoder`.
    Lastly, we take the output of the encoder to create a final representation,
    which is passed through some feed-forward layers to output logits (`output_layer`).
    Args:
        vocab_size(int): The vocab size used to create the embedding.
        num_classes(int): The number of classes of the classifier.
        emb_dim(int, optional): The size of the embedding, default value is 128.
        padding_idx(int, optional): The padding value in the embedding; the embedding entry at
            `padding_idx` will not be updated, the default value is 0.
        lstm_hidden_size(int, optional): The output size of the lstm, default value is 198.
        direction(str, optional): The direction of the lstm, default value is `forward`.
        lstm_layers(int, optional): The number of lstm layers, default value is 1.
        dropout_rate(float, optional): The dropout rate of the lstm, default value is 0.0.
        pooling_type(str, optional): The pooling type of the lstm. Default value is None;
            if `pooling_type` is None, the LSTMEncoder will return the hidden state of the last time step at the last layer as a single vector.
"""
def __init__(self,
vocab_size,
num_classes,
emb_dim=128,
padding_idx=0,
lstm_hidden_size=198,
direction='forward',
lstm_layers=1,
dropout_rate=0.0,
pooling_type=None,
fc_hidden_size=96):
super().__init__()
self.embedder = nn.Embedding(num_embeddings=vocab_size,
embedding_dim=emb_dim,
padding_idx=padding_idx)
self.lstm_encoder = LSTMEncoder(emb_dim,
lstm_hidden_size,
num_layers=lstm_layers,
direction=direction,
dropout=dropout_rate,
pooling_type=pooling_type)
self.fc = nn.Linear(self.lstm_encoder.get_output_dim(), fc_hidden_size)
self.output_layer = nn.Linear(fc_hidden_size, num_classes)
def forward(self, text, seq_len):
# Shape: (batch_size, num_tokens, embedding_dim)
embedded_text = self.embedder(text)
# Shape: (batch_size, num_tokens, num_directions*lstm_hidden_size)
# num_directions = 2 if direction is 'bidirect'
# if not, num_directions = 1
text_repr = self.lstm_encoder(embedded_text, sequence_length=seq_len)
# Shape: (batch_size, fc_hidden_size)
fc_out = paddle.tanh(self.fc(text_repr))
# Shape: (batch_size, num_classes)
logits = self.output_layer(fc_out)
probs = F.softmax(logits, axis=1)
idx = paddle.argmax(probs, axis=1).numpy()
return idx, probs
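# A minimal usage sketch for LSTMModel (sizes below are illustrative assumptions):
#     model = LSTMModel(vocab_size=1000, num_classes=2)
#     token_ids = paddle.randint(low=1, high=1000, shape=[4, 16])
#     seq_len = paddle.full(shape=[4], fill_value=16, dtype='int64')
#     idx, probs = model(token_ids, seq_len)  # predicted class ids and softmax probabilities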
class SkepSequenceModel(SkepPretrainedModel):
def __init__(self, skep, num_classes=2, dropout=None):
super(SkepSequenceModel, self).__init__()
self.num_classes = num_classes
self.skep = skep # allow skep to be config
self.dropout = nn.Dropout(dropout if dropout is not None else self.skep.
config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.skep.config["hidden_size"],
num_classes)
self.apply(self.init_weights)
def forward(self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None):
_, pooled_output = self.skep(input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
probs = F.softmax(logits, axis=1)
idx = paddle.argmax(probs, axis=1)
return idx, probs
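# A hypothetical usage sketch for SkepSequenceModel (the checkpoint name is an
# assumption, and fetching it requires network access):
#     from paddlenlp.transformers import SkepModel
#     skep = SkepModel.from_pretrained('skep_ernie_1.0_large_ch')
#     model = SkepSequenceModel(skep, num_classes=2)
#     idx, probs = model(input_ids, token_type_ids=token_type_ids)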
| 46.291925
| 144
| 0.630753
|
4a174dc11fd2120273247b7fafcb23a97534d71b
| 1,680
|
py
|
Python
|
Layers/SMatirxLayer.py
|
Yottaxx/T-LSTM
|
92618d8c3ee2418b194a2e1592512548da955b77
|
[
"MIT"
] | 9
|
2020-05-23T05:40:27.000Z
|
2021-11-19T01:29:36.000Z
|
Layers/SMatirxLayer.py
|
Yottaxx/T-LSTM
|
92618d8c3ee2418b194a2e1592512548da955b77
|
[
"MIT"
] | 1
|
2020-11-29T04:35:52.000Z
|
2021-01-29T07:39:37.000Z
|
Layers/SMatirxLayer.py
|
Yottaxx/T-LSTM
|
92618d8c3ee2418b194a2e1592512548da955b77
|
[
"MIT"
] | 2
|
2020-10-26T13:42:49.000Z
|
2020-11-01T02:01:33.000Z
|
import torch.nn as nn
import torch.nn.functional as F
import torch
from torch_geometric.nn import GCNConv, GATConv
from torch_geometric.data import Data
# graph functional
class SentenceMatrixLayer(nn.Module):
    def __init__(self, in_size, out_size=1, p_Asem=0.8):
        super(SentenceMatrixLayer, self).__init__()
        self.in_size = in_size
        self.out_size = out_size
        self.p_Asem = p_Asem
        self.linear = nn.Linear(in_size * 2, out_size)
def forward(self, x, adj):
# x batch*node*emb
# adj batch*node*node
        # build a dense batch*node*node*(2*emb) tensor whose (i, j) entry is the
        # concatenation [x_i, x_j]
        # new_adj = adj.unsqueeze(-1)
        # new_adj = new_adj.expand(new_adj.shape[0], new_adj.shape[1], new_adj.shape[2], x.shape[-1] * 2)
        # xi: batch*n*1*emb, expanded along dim 2 so that dim 1 selects x[i]
        xi = x.unsqueeze(-2)
        xi = xi.expand(xi.shape[0], xi.shape[1], xi.shape[1], xi.shape[-1])
        # xj: batch*1*n*emb, expanded along dim 1 so that dim 2 selects x[j]
        xj = x.unsqueeze(1)
        xj = xj.expand(xj.shape[0], xj.shape[2], xj.shape[2], xj.shape[-1])
# cat [xi,xj]
xij = torch.cat((xi, xj), -1)
        # a ReZero-style learnable residual weight could be tried here instead of the fixed p_Asem
        A_esm = self.p_Asem * torch.sigmoid(self.linear(xij).squeeze()) + (1 - self.p_Asem) * adj
return A_esm
##test
# edge_index = torch.tensor([[0, 1, 1, 2],
# [1, 0, 2, 1]], dtype=torch.long)
# x = torch.rand((3, 100))
# tri = torch.rand((1, 72))
# data = Data(x=x, edge_index=edge_index)
# device = torch.device('cuda')
# data = data.to(device)
# tri = tri.to(device)
# model = FRGN(100, 1)
# model.cuda()
# test = model(data)
# print(test)
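# A minimal smoke test for SentenceMatrixLayer (shapes are illustrative
# assumptions), guarded so it only runs when the module is executed directly:
if __name__ == '__main__':
    layer = SentenceMatrixLayer(in_size=100)
    x_demo = torch.rand(2, 5, 100)  # batch of 2 graphs, 5 nodes, 100-dim embeddings
    adj_demo = torch.rand(2, 5, 5)  # dense adjacency matrices
    print(layer(x_demo, adj_demo).shape)  # expected: torch.Size([2, 5, 5])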
| 30.545455
| 105
| 0.599405
|
4a174e0fae202f498a6cb7b9a29f87b1170469a5
| 1,857
|
py
|
Python
|
aliyun-python-sdk-elasticsearch/aliyunsdkelasticsearch/request/v20170613/GetTransferableNodesRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-elasticsearch/aliyunsdkelasticsearch/request/v20170613/GetTransferableNodesRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-elasticsearch/aliyunsdkelasticsearch/request/v20170613/GetTransferableNodesRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
from aliyunsdkelasticsearch.endpoint import endpoint_data
class GetTransferableNodesRequest(RoaRequest):
def __init__(self):
		RoaRequest.__init__(self, 'elasticsearch', '2017-06-13', 'GetTransferableNodes', 'elasticsearch')
self.set_uri_pattern('/openapi/instances/[InstanceId]/transferable-nodes')
self.set_method('GET')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
	def get_InstanceId(self):
		return self.get_path_params().get('InstanceId')
	def set_InstanceId(self, InstanceId):
		self.add_path_param('InstanceId', InstanceId)
	def get_nodeType(self):
		return self.get_query_params().get('nodeType')
	def set_nodeType(self, nodeType):
		self.add_query_param('nodeType', nodeType)
	def get_count(self):
		return self.get_query_params().get('count')
	def set_count(self, count):
		self.add_query_param('count', count)
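# A hypothetical usage sketch (credentials, region and ids are placeholders,
# not from the source):
#     from aliyunsdkcore.client import AcsClient
#     client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#     request = GetTransferableNodesRequest()
#     request.set_InstanceId('<your-instance-id>')
#     request.set_nodeType('WORKER')
#     response = client.do_action_with_exception(request)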
| 36.411765
| 99
| 0.76629
|
4a174e34da2e9a6ba93f110f110315864edcda38
| 22,902
|
py
|
Python
|
rl_agents/trainer/log_creator.py
|
rvalienter90/rl-agents
|
ad6be08f9a7e2f0ec0daf6f557bd9f476bb9e4da
|
[
"MIT"
] | null | null | null |
rl_agents/trainer/log_creator.py
|
rvalienter90/rl-agents
|
ad6be08f9a7e2f0ec0daf6f557bd9f476bb9e4da
|
[
"MIT"
] | null | null | null |
rl_agents/trainer/log_creator.py
|
rvalienter90/rl-agents
|
ad6be08f9a7e2f0ec0daf6f557bd9f476bb9e4da
|
[
"MIT"
] | null | null | null |
import time
import csv
import os
import numpy as np
import copy
class LogCreator():
RAW_LOG_FOLDER = 'raw_logfiles'
TIMESTEP_LOG_FOLDER = 'timestep_logs'
EPISODE_LOGFILE = 'episode_logfile'
TIMESTEP_LOGFILE = 'timestep_logfile'
    EPISODE_FIELD_NAMES = ['episode', 'episode_reward', 'episode_length', 'episode_average_speed_all',
                           'episode_average_speed_controlled', 'episode_average_speed_human',
                           'episode_average_distance_all', 'episode_average_distance_controlled',
                           'episode_average_distance_human', 'mission_time', 'crashed_hv', 'crashed_av', 'scenario']
    EPISODE_INDIVIDUAL_FIELD_NAMES = ['episode', 'vehicle_id', 'vehicle_is_controlled', 'episode_reward',
                                      'episode_length', 'vehicle_average_speed', 'vehicle_average_distance',
                                      'mission_time', 'crashed_hv', 'crashed_av', 'scenario']
    EPISODE_MISSION_FIELD_NAMES = ['episode', 'vehicle_id', 'vehicle_is_controlled', 'episode_reward',
                                   'episode_length', 'vehicle_average_speed', 'vehicle_average_distance',
                                   'mission_time', 'crashed_hv', 'crashed_av', 'scenario']
# common field or different ? TIMESTEP_FIELD_NAMES_CONTROLLED ?
# TIMESTEP_FIELD_NAMES = ['timestep', 'is_controlled', 'vehicle_id', 'timestep_reward', 'vehicle_speed',
# 'vehicle_distance', 'mission_accomplished']
TIMESTEP_FIELD_NAMES = ['timestep', 'is_controlled', 'vehicle_id', 'timestep_reward', 'vehicle_speed',
'vehicle_distance', 'mission_accomplished']
def __init__(self, evaluation):
self.evaluation = evaluation
self.run_directory = self.evaluation.run_directory
self.controlled_vehicles_count = len(self.evaluation.env.controlled_vehicles)
self.vehicles_count = len(self.evaluation.env.road.vehicles)
self.humans_count = self.vehicles_count - self.controlled_vehicles_count
# TODO
# self.mission_vehicle_id = self.evaluation.env.config['scenario']['mission_vehicle_id']
self.mission_vehicle_id = -1
self.mission_time = None
self.average_episode_logfile_name = self.get_logfile_name('episode_average')
self.create_raw_log_folder()
self.rewards_keys = []
self.rewards_keys_episode = []
self.update_field_once = 1
self.TIMESTEP_FIELD_NAMES_EXTRA = copy.deepcopy(self.TIMESTEP_FIELD_NAMES)
self.mission_type = self.evaluation.env.config['scenario']['mission_type']
if self.mission_type == 'none' or self.evaluation.env.scenario.random_scenario is True:
            self.mission_log = False
else:
self.mission_log = True
self.log_reward = True
self.log_distance = True
def create_raw_log_folder(self):
log_folder_path = os.path.join(self.run_directory, self.RAW_LOG_FOLDER)
if not os.path.exists(log_folder_path):
os.makedirs(log_folder_path)
def create_timestep_log_folder(self, episode, vehicle_id):
vehicle_folder_name = "vehicle_" + str(vehicle_id)
log_folder_path = os.path.join(self.run_directory, self.RAW_LOG_FOLDER, self.TIMESTEP_LOG_FOLDER,
vehicle_folder_name)
if not os.path.exists(log_folder_path):
os.makedirs(log_folder_path)
def create_episode_logfiles(self):
with open(self.average_episode_logfile_name, 'a') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=self.EPISODE_FIELD_NAMES)
writer.writeheader()
def get_logfile_name(self, log_type, **kwargs):
vehicle_id = str(kwargs.get('vehicle_id', 0))
        assert (log_type == 'episode_average' or log_type == 'episode_mission' or
                log_type == 'episode_individual' or log_type == 'timestep'), \
            "'get_logfile_name()' only accepts 'episode_average', 'episode_mission', 'episode_individual' or 'timestep' as 'log_type'"
logfile_name = None
if log_type == 'episode_average':
logfile_name = self.EPISODE_LOGFILE + '_average.csv'
elif log_type == 'episode_individual':
logfile_name = self.EPISODE_LOGFILE + '_individual_' + vehicle_id + '.csv'
elif log_type == 'episode_mission':
logfile_name = self.EPISODE_LOGFILE + '_mission' + '.csv'
elif log_type == 'timestep':
timestep = str(kwargs.get('timestep', 0))
episode = str(kwargs.get('episode', 0))
logfile_name = self.TIMESTEP_LOGFILE + '_vehicle_' + vehicle_id + '_episode_' + episode + '.csv'
vehicle_folder_name = "vehicle_" + str(vehicle_id)
logfile_name = os.path.join(self.TIMESTEP_LOG_FOLDER,
vehicle_folder_name, logfile_name)
logfile_path = os.path.join(self.run_directory, self.RAW_LOG_FOLDER, logfile_name)
return logfile_path
def episode_info_logger(self, episode):
start_time = time.time()
############# Calculations
rewards_averaged_over_agents = self.evaluation.rewards_averaged_over_agents
rewards_individual_agents = self.evaluation.rewards
reward_total_episode = sum(rewards_averaged_over_agents)
episode_length = self.evaluation.episode_length
episode_info = self.evaluation.episode_info
# speed calculations
# this keeps the sum of speeds over timesteps of an episode for all vehicles
self.vehicles_count = len(self.evaluation.env.road.vehicles)
speeds_container = np.zeros(self.vehicles_count)
# this keeps the sum of rewards components over timesteps of an episode for all controlled vehicles
reward_components_length = len(episode_info[0]["reward_info"][0])
rewards_container = np.zeros((self.controlled_vehicles_count, reward_components_length))
# distance calculations
# keeps the sum over timesteps
distances_container = np.zeros(self.vehicles_count)
# counts how many non-None distances have occured for each vehicle
distances_counter = np.zeros(self.vehicles_count)
# Information available for logging
if len(episode_info[0]["reward_info"]) <= 1:
self.log_reward = False
if episode_info[0]['vehicle_distances'][0] is None:
self.log_distance = False
if self.update_field_once:
self.rewards_keys = list(episode_info[0]["reward_info"][0].keys())
if self.log_reward:
self.rewards_keys_episode = ["episode_average_" + char for char in self.rewards_keys]
self.TIMESTEP_FIELD_NAMES_EXTRA.extend(self.rewards_keys)
self.EPISODE_FIELD_NAMES.extend(self.rewards_keys_episode)
self.EPISODE_INDIVIDUAL_FIELD_NAMES.extend(self.rewards_keys)
self.EPISODE_MISSION_FIELD_NAMES.extend(self.rewards_keys)
if len(episode_info[0]["vehicle_info_debug"]) > 0:
self.TIMESTEP_FIELD_NAMES_EXTRA.extend(list(episode_info[0]["vehicle_info_debug"][0].keys()))
self.create_episode_logfiles()
self.update_field_once = 0
# -1 means mission was never accomplished
self.mission_time = -1
vehicles_speeds_avg_by_step = []
controlled_vehicles_speeds_avg_by_step = []
human_vehicles_speeds_avg_by_step = []
for step in range(episode_length):
# TODO: this is currently only for merging but should be general
info_at_timestep = episode_info[step]
timestep = info_at_timestep['timestep']
rewards_at_timestep = rewards_individual_agents[step]
try:
                vehicles_speeds = info_at_timestep['vehicle_speeds']
                if self.evaluation.env.scenario.random_scenario is False:
                    speeds_container = np.add(speeds_container, vehicles_speeds)
                vehicles_speeds_avg_by_step.append(np.average(vehicles_speeds))
                mask = np.array(info_at_timestep['vehicle_is_controlled'])
                controlled_vehicles_speeds = np.array(vehicles_speeds)[mask == 1]
                controlled_vehicles_speeds_avg_by_step.append(np.average(controlled_vehicles_speeds))
                human_vehicles_speeds = np.array(vehicles_speeds)[mask == 0]
                human_vehicles_speeds_avg_by_step.append(np.average(human_vehicles_speeds))
except:
print(" Error updating speed container")
if self.log_reward:
reward_values = [np.array(list(rewards.values())) for rewards in info_at_timestep["reward_info"]]
reward_values = np.array(reward_values)
rewards_container = np.add(rewards_container, reward_values)
if self.log_distance:
for i, distance in enumerate(info_at_timestep['vehicle_distances']):
                    if distance is not None:
distances_counter[i] += 1
distances_container[i] += distance
# checking if the goal is accomplished
if (info_at_timestep['mission_accomplished'] and self.mission_time == -1):
self.mission_time = timestep
# creating timestep logs
if self.evaluation.create_timestep_log:
vehicle_ids = info_at_timestep['vehicle_ids']
for i, vehicle_id in enumerate(vehicle_ids):
self.create_timestep_log_folder(episode, vehicle_id)
vehicle_timestep_reward = 0
# if vehicle_id in info_at_timestep['reward_ids']:
# j = np.where(np.array(info_at_timestep['reward_ids']) == vehicle_id)[0][0]
# vehicle_timestep_reward = rewards_at_timestep[j]
individual_timestep_log = {
'timestep': timestep,
'is_controlled': info_at_timestep['vehicle_is_controlled'][i],
'vehicle_id': vehicle_id,
'timestep_reward': vehicle_timestep_reward,
'vehicle_speed': info_at_timestep['vehicle_speeds'][i],
'vehicle_distance': info_at_timestep['vehicle_distances'][i],
'mission_accomplished': info_at_timestep['mission_accomplished']}
individual_timestep_log_name = self.get_logfile_name('timestep', episode=episode,
vehicle_id=vehicle_id, timestep=timestep)
with open(individual_timestep_log_name, 'a') as csvfile:
# if vehicle_id in info_at_timestep['reward_ids']:
# # writer = csv.DictWriter(csvfile, fieldnames=self.TIMESTEP_FIELD_NAMES_CONTROLLED)
# j = info_at_timestep['reward_ids'].index(vehicle_id)
# individual_timestep_log.update(info_at_timestep["reward_info"][j])
if len(episode_info[0]["vehicle_info_debug"]) > 0:
individual_timestep_log.update(info_at_timestep["vehicle_info_debug"][i])
# else:
# writer = csv.DictWriter(csvfile, fieldnames=self.TIMESTEP_FIELD_NAMES)
writer = csv.DictWriter(csvfile, fieldnames=self.TIMESTEP_FIELD_NAMES_EXTRA)
if timestep == 1:
writer.writeheader()
writer.writerow(individual_timestep_log)
### Calculating average values (averaged over the timesteps of an episode)
if self.evaluation.env.scenario.random_scenario is False and (self.evaluation.env.scenario.road_type == "road_merge" or self.evaluation.env.scenario.road_type == "road_exit"):
mask = episode_info[0]['vehicle_is_controlled']
## Speeds
# for all vehicles separately
try:
vehicles_average_speeds = speeds_container / episode_length
except:
vehicles_average_speeds = 0
try:
controlled_average_speeds = np.multiply(vehicles_average_speeds, mask)
controlled_average_speeds_error = False
except:
controlled_average_speeds_error = True
controlled_average_speeds = 0
human_average_speeds = vehicles_average_speeds - controlled_average_speeds
# averaged over all vehicles
episode_average_speed_all = sum(vehicles_average_speeds) / self.vehicles_count
if controlled_average_speeds_error:
                episode_average_speed_controlled = -1
else:
episode_average_speed_controlled = sum(controlled_average_speeds) / self.controlled_vehicles_count
episode_average_speed_human = sum(human_average_speeds) / self.humans_count
else:
episode_average_speed_all = np.average(vehicles_speeds_avg_by_step)
episode_average_speed_human = np.average(human_vehicles_speeds_avg_by_step)
episode_average_speed_controlled = np.average(controlled_vehicles_speeds_avg_by_step)
## Distances
# here we remove the entries that have a distance_counter==0 because that means they never had a vehicle in
# front of them and hence should not be considered in the averaging
if self.log_distance:
no_distance_indices = np.where(distances_counter == 0)
distances_counter_masked = np.delete(distances_counter, no_distance_indices)
distances_container_masked = np.delete(distances_container, no_distance_indices)
mask = np.delete(mask, no_distance_indices)
            # numpy arrays are ambiguous in a boolean context, so check emptiness explicitly
            vehicles_average_distances = 0 if distances_counter_masked.size == 0 else distances_container_masked / distances_counter_masked
controlled_average_distances = np.delete(vehicles_average_distances, np.argwhere(1 * np.logical_not(mask)))
human_average_distances = np.delete(vehicles_average_distances, np.argwhere(mask))
# averaged over all vehicles
            episode_average_distance_all = 0 if np.size(vehicles_average_distances) == 0 else np.average(
                vehicles_average_distances)
            episode_average_distance_controlled = 0 if np.size(controlled_average_distances) == 0 else np.average(
                controlled_average_distances)
            episode_average_distance_human = 0 if np.size(human_average_distances) == 0 else np.average(human_average_distances)
if self.log_reward:
rewards_container_average = rewards_container / episode_length
episode_rewards_components_average = np.average(rewards_container_average, axis=0)
# average over all controlled vehicles reward components
episode_average_reward_log = {self.rewards_keys_episode[i]: episode_rewards_components_average[i] for i in
range(0, len(self.rewards_keys_episode))}
crashed_hv = int(any(vehicle.crashed for vehicle in self.evaluation.env.road.vehicles))
crashed_av = int(any(vehicle.crashed for vehicle in self.evaluation.env.controlled_vehicles))
scenario = self.evaluation.env.scenario.road_types_idx
episode_average_log = {'episode': episode,
'episode_reward': reward_total_episode,
'episode_length': episode_length,
'episode_average_speed_all': episode_average_speed_all,
'episode_average_speed_controlled': episode_average_speed_controlled,
'episode_average_speed_human': episode_average_speed_human,
# 'episode_average_distance_all': episode_average_distance_all,
# 'episode_average_distance_controlled': episode_average_distance_controlled,
# 'episode_average_distance_human': episode_average_distance_human,
'mission_time': self.mission_time,
'crashed_hv': crashed_hv,
'crashed_av': crashed_av,
'scenario': scenario,
}
if self.log_reward:
episode_average_log.update(episode_average_reward_log)
with open(self.average_episode_logfile_name, 'a') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=self.EPISODE_FIELD_NAMES)
writer.writerow(episode_average_log)
#### Individual Logs
vehicle_ids = np.array(episode_info[0]['vehicle_ids'])
vehicle_is_controlled_arr = np.array(episode_info[0]['vehicle_is_controlled'])
vehicle_reward_ids = np.array(episode_info[0]['reward_ids'])
if len(self.evaluation.env.controlled_vehicles) > 1 and (
self.evaluation.individual_episode_log_level == 2 or self.evaluation.individual_episode_log_level == 3):
vehicle_id = None
vehicle_is_controlled = None
vehicle_reward = None
vehicle_average_speed = None
controlled_indices = np.argwhere(vehicle_is_controlled_arr)
vehicle_rewards = np.sum(rewards_individual_agents, axis=0)
for i in controlled_indices:
i = i[0]
vehicle_id = vehicle_ids[i]
if vehicle_id == self.mission_vehicle_id:
continue
# TODO: here check if it's controlled, if not reward = None
vehicle_reward_index = np.where(vehicle_reward_ids == vehicle_id)[0][0]
# vehicle_reward_indexv = episode_info[0]['reward_ids'].index(vehicle_id)
vehicle_reward = vehicle_rewards[vehicle_reward_index]
vehicle_average_speed = vehicles_average_speeds[i]
vehicle_is_controlled = vehicle_is_controlled_arr[i]
vehicle_average_distance = float("inf")
if self.log_distance:
if i not in no_distance_indices[0]:
vehicle_average_distance = distances_container[i] / distances_counter[i]
                    episode_individual_reward_log = {}
                    if self.log_reward:
                        # rewards_container_average is only defined when reward logging is enabled
                        rewards_container_average_vehicle = rewards_container_average[vehicle_reward_index, :]
                        episode_individual_reward_log = {self.rewards_keys[j]: rewards_container_average_vehicle[j]
                                                         for j in range(0, len(self.rewards_keys))}
episode_individual_log = {'episode': episode,
'vehicle_id': vehicle_id,
'vehicle_is_controlled': vehicle_is_controlled,
'episode_reward': vehicle_reward,
'episode_length': episode_length,
'vehicle_average_speed': vehicle_average_speed,
'vehicle_average_distance': vehicle_average_distance,
'mission_time': self.mission_time}
episode_individual_log.update(episode_individual_reward_log)
individual_log_name = self.get_logfile_name('episode_individual', vehicle_id=vehicle_id)
with open(individual_log_name, 'a') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=self.EPISODE_INDIVIDUAL_FIELD_NAMES)
if episode == 0:
writer.writeheader()
writer.writerow(episode_individual_log)
### Log for mission vehicle
if self.mission_vehicle_id in vehicle_ids and self.mission_log:
mission_vehicle_index = np.where(vehicle_ids == self.mission_vehicle_id)[0][0]
mission_vehicle_average_speed = vehicles_average_speeds[mission_vehicle_index]
mission_vehicle_is_controlled = vehicle_is_controlled_arr[mission_vehicle_index]
mission_vehicle_average_distance = float("inf")
if self.log_distance:
if mission_vehicle_index not in no_distance_indices[0]:
mission_vehicle_average_distance = distances_container[mission_vehicle_index] \
/ distances_counter[mission_vehicle_index]
mission_vehicle_reward = None
if mission_vehicle_is_controlled:
vehicle_rewards = np.sum(rewards_individual_agents, axis=0)
mission_reward_index = np.where(vehicle_reward_ids == self.mission_vehicle_id)[0][0]
mission_vehicle_reward = vehicle_rewards[mission_reward_index]
episode_mission_log = {'episode': episode,
'vehicle_id': self.mission_vehicle_id,
'vehicle_is_controlled': mission_vehicle_is_controlled,
'episode_reward': mission_vehicle_reward,
'episode_length': episode_length,
'vehicle_average_speed': mission_vehicle_average_speed,
'vehicle_average_distance': mission_vehicle_average_distance,
'mission_time': self.mission_time,
'crashed_hv': crashed_hv,
'crashed_av': crashed_av,
'scenario': scenario,
}
if self.mission_vehicle_id in episode_info[0]['reward_ids']:
mission_vehicle_reward_index = episode_info[0]['reward_ids'].index(self.mission_vehicle_id)
rewards_container_average_vehicle = rewards_container_average[mission_vehicle_reward_index, :]
episode_mission_reward_log = {self.rewards_keys[j]: rewards_container_average_vehicle[j] for j
in range(0, len(self.rewards_keys))}
episode_mission_log.update(episode_mission_reward_log)
individual_log_name = self.get_logfile_name('episode_mission')
with open(individual_log_name, 'a') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=self.EPISODE_MISSION_FIELD_NAMES)
if episode == 0:
writer.writeheader()
writer.writerow(episode_mission_log)
logging_time = time.time() - start_time
# print(">>>>>>>>>>>>>> LOG FILE SUCCESSFULLY UPDATED IN = {:5f} ms".format(1000*logging_time))
return episode_average_log
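# A hypothetical wiring sketch (the `evaluation` object and the attributes it
# must expose are inferred from the accesses above, not from the source):
#     logger = LogCreator(evaluation)
#     for episode in range(num_episodes):
#         # ... run the episode so evaluation.episode_info is populated ...
#         summary = logger.episode_info_logger(episode)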
| 54.528571
| 183
| 0.628286
|
4a174f27a3f7ef01b3eecc32917350a9708c65d7
| 6,476
|
py
|
Python
|
salt/modules/debian_service.py
|
jkur/salt
|
3e62675550f9869d550d7787800270e632955d2f
|
[
"Apache-2.0"
] | 3
|
2015-04-16T18:42:35.000Z
|
2017-10-30T16:57:49.000Z
|
salt/modules/debian_service.py
|
jkur/salt
|
3e62675550f9869d550d7787800270e632955d2f
|
[
"Apache-2.0"
] | 16
|
2015-11-18T00:44:03.000Z
|
2018-10-29T20:48:27.000Z
|
salt/modules/debian_service.py
|
jkur/salt
|
3e62675550f9869d550d7787800270e632955d2f
|
[
"Apache-2.0"
] | 1
|
2020-10-19T11:49:50.000Z
|
2020-10-19T11:49:50.000Z
|
# -*- coding: utf-8 -*-
'''
Service support for Debian systems (uses update-rc.d and /sbin/service)
'''
from __future__ import absolute_import
# Import python libs
import logging
import glob
import re
# Import 3rd-party libs
# pylint: disable=import-error
from salt.ext.six.moves import shlex_quote as _cmd_quote
# pylint: enable=import-error
# Import salt libs
import salt.utils.systemd
__func_alias__ = {
'reload_': 'reload'
}
# Define the module's virtual name
__virtualname__ = 'service'
log = logging.getLogger(__name__)
_DEFAULT_VER = '7.0.0'
def __virtual__():
'''
Only work on Debian and when systemd isn't running
'''
if __grains__['os'] in ('Debian', 'Raspbian') and not salt.utils.systemd.booted(__context__):
return __virtualname__
return False
def _service_cmd(*args):
osmajor = _osrel()[0]
if osmajor < '6':
cmd = '/etc/init.d/{0} {1}'.format(args[0], ' '.join(args[1:]))
else:
cmd = 'service {0} {1}'.format(args[0], ' '.join(args[1:]))
return cmd
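# For illustration: _service_cmd('ssh', 'restart') yields '/etc/init.d/ssh restart'
# on Debian releases before 6 and 'service ssh restart' on 6 and later.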
def _get_runlevel():
'''
returns the current runlevel
'''
out = __salt__['cmd.run']('runlevel')
# unknown can be returned while inside a container environment, since
# this is due to a lack of init, it should be safe to assume runlevel
# 2, which is Debian's default. If not, all service related states
# will throw an out of range exception here which will cause
# other functions to fail.
if 'unknown' in out:
return '2'
else:
return out.split()[1]
def get_enabled():
'''
    Return a list of services that are enabled on boot
CLI Example:
.. code-block:: bash
salt '*' service.get_enabled
'''
prefix = '/etc/rc[S{0}].d/S'.format(_get_runlevel())
ret = set()
lines = glob.glob('{0}*'.format(prefix))
for line in lines:
ret.add(re.split(prefix + r'\d+', line)[1])
return sorted(ret)
def get_disabled():
'''
Return a set of services that are installed but disabled
CLI Example:
.. code-block:: bash
salt '*' service.get_disabled
'''
return sorted(set(get_all()) - set(get_enabled()))
def available(name):
'''
Returns ``True`` if the specified service is available, otherwise returns
``False``.
CLI Example:
.. code-block:: bash
salt '*' service.available sshd
'''
return name in get_all()
def missing(name):
'''
The inverse of service.available.
Returns ``True`` if the specified service is not available, otherwise returns
``False``.
CLI Example:
.. code-block:: bash
salt '*' service.missing sshd
'''
return name not in get_all()
def get_all():
'''
Return all available boot services
CLI Example:
.. code-block:: bash
salt '*' service.get_all
'''
ret = set()
lines = glob.glob('/etc/init.d/*')
for line in lines:
service = line.split('/etc/init.d/')[1]
# Remove README. If it's an enabled service, it will be added back in.
if service != 'README':
ret.add(service)
return sorted(ret | set(get_enabled()))
def start(name):
'''
Start the specified service
CLI Example:
.. code-block:: bash
salt '*' service.start <service name>
'''
cmd = _service_cmd(name, 'start')
return not __salt__['cmd.retcode'](cmd)
def stop(name):
'''
Stop the specified service
CLI Example:
.. code-block:: bash
salt '*' service.stop <service name>
'''
cmd = _service_cmd(name, 'stop')
return not __salt__['cmd.retcode'](cmd)
def restart(name):
'''
Restart the named service
CLI Example:
.. code-block:: bash
salt '*' service.restart <service name>
'''
cmd = _service_cmd(name, 'restart')
return not __salt__['cmd.retcode'](cmd)
def reload_(name):
'''
Reload the named service
CLI Example:
.. code-block:: bash
salt '*' service.reload <service name>
'''
cmd = _service_cmd(name, 'reload')
return not __salt__['cmd.retcode'](cmd)
def force_reload(name):
'''
Force-reload the named service
CLI Example:
.. code-block:: bash
salt '*' service.force_reload <service name>
'''
cmd = _service_cmd(name, 'force-reload')
return not __salt__['cmd.retcode'](cmd)
def status(name, sig=None):
'''
Return the status for a service, pass a signature to use to find
the service via ps
CLI Example:
.. code-block:: bash
salt '*' service.status <service name>
'''
if sig:
return bool(__salt__['status.pid'](sig))
cmd = _service_cmd(name, 'status')
return not __salt__['cmd.retcode'](cmd)
def _osrel():
osrel = __grains__.get('osrelease', _DEFAULT_VER)
if not osrel:
osrel = _DEFAULT_VER
return osrel
def enable(name, **kwargs):
'''
Enable the named service to start at boot
CLI Example:
.. code-block:: bash
salt '*' service.enable <service name>
'''
osmajor = _osrel()[0]
if osmajor < '6':
cmd = 'update-rc.d -f {0} defaults 99'.format(_cmd_quote(name))
else:
cmd = 'update-rc.d {0} enable'.format(_cmd_quote(name))
try:
if int(osmajor) >= 6:
cmd = 'insserv {0} && '.format(_cmd_quote(name)) + cmd
except ValueError:
if osmajor == 'testing/unstable' or osmajor == 'unstable':
cmd = 'insserv {0} && '.format(_cmd_quote(name)) + cmd
return not __salt__['cmd.retcode'](cmd, python_shell=True)
def disable(name, **kwargs):
'''
Disable the named service to start at boot
CLI Example:
.. code-block:: bash
salt '*' service.disable <service name>
'''
osmajor = _osrel()[0]
if osmajor < '6':
cmd = 'update-rc.d -f {0} remove'.format(name)
else:
cmd = 'update-rc.d {0} disable'.format(name)
return not __salt__['cmd.retcode'](cmd)
def enabled(name, **kwargs):
'''
Return True if the named service is enabled, false otherwise
CLI Example:
.. code-block:: bash
salt '*' service.enabled <service name>
'''
return name in get_enabled()
def disabled(name):
'''
    Return True if the named service is disabled, false otherwise
CLI Example:
.. code-block:: bash
salt '*' service.disabled <service name>
'''
return name in get_disabled()
| 21.094463
| 97
| 0.605003
|
4a174fad33085dd552c43128ad902c80233d97d9
| 2,543
|
py
|
Python
|
common/interwebs.py
|
jmcollis/GitSavvy
|
153dca03bfd63db8248c1f9ee03bb6f2ebef545a
|
[
"MIT"
] | 1
|
2019-06-19T14:58:32.000Z
|
2019-06-19T14:58:32.000Z
|
common/interwebs.py
|
jmcollis/GitSavvy
|
153dca03bfd63db8248c1f9ee03bb6f2ebef545a
|
[
"MIT"
] | null | null | null |
common/interwebs.py
|
jmcollis/GitSavvy
|
153dca03bfd63db8248c1f9ee03bb6f2ebef545a
|
[
"MIT"
] | null | null | null |
"""
A simple HTTP interface for making GET, PUT and POST requests.
"""
import http.client
import json
from urllib.parse import urlparse, urlencode, quote # NOQA
from base64 import b64encode
from functools import partial
from collections import namedtuple
Response = namedtuple("Response", ("payload", "headers", "status", "is_json"))
def request(verb, host, port, path, payload=None, https=False, headers=None, auth=None, redirect=True):
"""
Make an HTTP(S) request with the provided HTTP verb, host FQDN, port number, path,
payload, protocol, headers, and auth information. Return a response object with
payload, headers, JSON flag, and HTTP status number.
"""
if not headers:
headers = {}
headers["User-Agent"] = "GitSavvy Sublime Plug-in"
if auth:
# use basic authentication
username_password = "{}:{}".format(*auth).encode("ascii")
headers["Authorization"] = "Basic {}".format(b64encode(username_password).decode("ascii"))
connection = (http.client.HTTPSConnection(host, port)
if https
else http.client.HTTPConnection(host, port))
connection.request(verb, path, body=payload, headers=headers)
response = connection.getresponse()
response_payload = response.read()
response_headers = dict(response.getheaders())
status = response.status
is_json = "application/json" in response_headers["Content-Type"]
if is_json:
response_payload = json.loads(response_payload.decode("utf-8"))
response.close()
connection.close()
    if redirect and verb == "GET" and status in (301, 302):
return request_url(
verb,
response_headers["Location"],
headers=headers,
auth=auth
)
return Response(response_payload, response_headers, status, is_json)
def request_url(verb, url, payload=None, headers=None, auth=None):
parsed = urlparse(url)
https = parsed.scheme == "https"
return request(
verb,
parsed.hostname,
        parsed.port or (443 if https else 80),
parsed.path,
payload=payload,
https=https,
headers=headers,
auth=([parsed.username, parsed.password]
if parsed.username and parsed.password
else None)
)
get = partial(request, "GET")
post = partial(request, "POST")
put = partial(request, "PUT")
get_url = partial(request_url, "GET")
post_url = partial(request_url, "POST")
put_url = partial(request_url, "PUT")
| 31.012195
| 103
| 0.657491
|
4a17507daca7562c6e81d5a4df737f43cbeb4147
| 3,696
|
py
|
Python
|
docs/conf.py
|
MrBartusek/corkus.py
|
031c11e3e251f0bddbcb67415564357460fe7fea
|
[
"MIT"
] | 5
|
2021-09-10T14:20:15.000Z
|
2022-01-09T11:27:49.000Z
|
docs/conf.py
|
MrBartusek/corkus.py
|
031c11e3e251f0bddbcb67415564357460fe7fea
|
[
"MIT"
] | 11
|
2021-08-15T09:39:09.000Z
|
2022-01-12T14:11:24.000Z
|
docs/conf.py
|
MrBartusek/corkus.py
|
031c11e3e251f0bddbcb67415564357460fe7fea
|
[
"MIT"
] | 2
|
2021-12-01T23:33:14.000Z
|
2022-01-12T11:08:18.000Z
|
import sys
import os
from datetime import datetime
import re
import glob
sys.path.insert(0, ".")
sys.path.insert(1, "..")
from corkus import __version__
copyright = datetime.today().strftime("%Y, MrBartusek")
exclude_patterns = ["_build"]
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx_autodoc_typehints",
"sphinxext.opengraph",
"sphinx_copybutton"
]
html_static_path = ["_static"]
html_css_files = ['colors.css']
html_favicon = '_static/favicon.ico'
html_theme = 'furo'
htmlhelp_basename = "Corkus.py"
intersphinx_mapping = {"python": ("https://docs.python.org", None)}
master_doc = "index"
nitpicky = True
project = "Corkus.py"
pygments_style = "sphinx"
release = __version__
source_suffix = ".rst"
suppress_warnings = ["image.nonlocal_uri"]
version = ".".join(__version__.split(".", 2)[:2])
autodoc_member_order = "bysource"
autodoc_typehints = "none"
autoclass_content = "class"
html_logo = "_static/logo.png"
autodoc_class_signature = "separated"
set_type_checking_flag = True
html_title = f"Corkus.py {__version__}"
ogp_site_url = "https://corkuspy.readthedocs.io"
ogp_site_name = "Corkus.py Documentation"
ogp_image = "https://corkuspy.readthedocs.io/en/stable/_static/logo.png"
ogp_custom_meta_tags = [
'<meta name="google-site-verification" content="hIrkOqiXAYM8rbacCCcHQSAL83yd49nzfUwV7OY0POo" />'
'<meta name="description" content="Asynchronous, feature-rich and easy to use Python wrapper for Public Wynncraft API."/>',
]
def to_camel_case(string):
string = string.replace("UUID", "Uuid")
return re.sub(r'(?<!^)(?=[A-Z])', '_', string).lower()
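# For illustration: to_camel_case("PartialMember") returns "partial_member" and,
# via the UUID special case, to_camel_case("CorkusUUID") returns "corkus_uuid"
# (despite its name, the helper converts CamelCase to snake_case).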
corkus_objects = []
with open('../corkus/objects/__init__.py', 'r') as objects:
search = re.findall(r'(?m)^(?:from[ ]+(\S+)[ ]+)?import[ ]+([\S, ]+)[ ]*$', objects.read())
for result in search:
for item in result[1].split(", "):
corkus_objects.append(item)
for f in glob.glob('code_overview/objects/*'):
os.remove(f)
corkus_objects = sorted(corkus_objects)
for obj in corkus_objects:
with open("code_overview/objects/" + to_camel_case(obj) + ".rst", "w") as f:
f.write(".." + "\n")
f.write(" This file is auto-generated" + "\n")
f.write("\n")
f.write(".. py:currentmodule:: corkus.objects" + "\n")
f.write("\n")
f.write(obj + "\n")
f.write("=" * len(obj) + "\n")
if obj.startswith("Partial"):
f.write(".. include:: ../note_partial_object.rst" + "\n")
f.write("\n")
f.write(".. autoclass:: " + obj + "\n")
f.write(" :inherited-members:" + "\n")
if obj != "CorkusUUID":
f.write(" :undoc-members:" + "\n")
with open("code_overview/corkus_objects.rst", "w") as f:
f.write(".." + "\n")
f.write(" This file is auto-generated" + "\n")
f.write("\n")
f.write("Working with Corkus Objects" + "\n")
f.write("==========================="+ "\n")
f.write("\n")
f.write(".. include:: corkus_objects_info.rst" + "\n")
f.write("\n")
f.write(".. toctree::" + "\n")
f.write(" :maxdepth: 2" + "\n")
f.write(" :caption: Objects" + "\n")
f.write("\n")
for obj in corkus_objects:
f.write(" objects/" + to_camel_case(obj) + "\n")
def autodoc_skip_member(app, what, name, obj, skip, options):
exclusions = (
'__init__',
'__new__',
'from_items_api',
'from_ingredient_api',
'to_items_api',
'to_ingredient_api',
'with_traceback',
'from_type'
)
exclude = name in exclusions
return True if exclude else None
def setup(app):
app.connect('autodoc-skip-member', autodoc_skip_member)
| 31.862069
| 127
| 0.619589
|
4a175087a99fe476d9387ad0f15fbe6e357dc3b8
| 29,492
|
py
|
Python
|
python/ccxt/bithumb.py
|
orikalinski/ccxt_new
|
318caa4f8db7ffb719edab2c060a0989d2a9cd28
|
[
"MIT"
] | 1
|
2019-09-26T09:16:37.000Z
|
2019-09-26T09:16:37.000Z
|
python/ccxt/bithumb.py
|
orikalinski/ccxt_new
|
318caa4f8db7ffb719edab2c060a0989d2a9cd28
|
[
"MIT"
] | 1
|
2020-09-03T10:11:29.000Z
|
2020-09-03T10:11:29.000Z
|
python/ccxt/bithumb.py
|
orikalinski/ccxt_new
|
318caa4f8db7ffb719edab2c060a0989d2a9cd28
|
[
"MIT"
] | 3
|
2019-09-26T09:17:26.000Z
|
2021-02-01T11:51:49.000Z
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import base64
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import DECIMAL_PLACES
from ccxt.base.decimal_to_precision import SIGNIFICANT_DIGITS
class bithumb(Exchange):
def describe(self):
return self.deep_extend(super(bithumb, self).describe(), {
'id': 'bithumb',
'name': 'Bithumb',
'countries': ['KR'], # South Korea
'rateLimit': 500,
'has': {
'cancelOrder': True,
'CORS': True,
'createMarketOrder': True,
'createOrder': True,
'fetchBalance': True,
'fetchMarkets': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTrades': True,
'withdraw': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/30597177-ea800172-9d5e-11e7-804c-b9d4fa9b56b0.jpg',
'api': {
'public': 'https://api.bithumb.com/public',
'private': 'https://api.bithumb.com',
},
'www': 'https://www.bithumb.com',
'doc': 'https://apidocs.bithumb.com',
'fees': 'https://en.bithumb.com/customer_support/info_fee',
},
'api': {
'public': {
'get': [
'ticker/{currency}',
'ticker/all',
'orderbook/{currency}',
'orderbook/all',
'transaction_history/{currency}',
'transaction_history/all',
],
},
'private': {
'post': [
'info/account',
'info/balance',
'info/wallet_address',
'info/ticker',
'info/orders',
'info/user_transactions',
'info/order_detail',
'trade/place',
'trade/cancel',
'trade/btc_withdrawal',
'trade/krw_deposit',
'trade/krw_withdrawal',
'trade/market_buy',
'trade/market_sell',
],
},
},
'fees': {
'trading': {
'maker': 0.25 / 100,
'taker': 0.25 / 100,
},
},
'precisionMode': SIGNIFICANT_DIGITS,
'exceptions': {
'Bad Request(SSL)': BadRequest,
'Bad Request(Bad Method)': BadRequest,
'Bad Request.(Auth Data)': AuthenticationError, # {"status": "5100", "message": "Bad Request.(Auth Data)"}
'Not Member': AuthenticationError,
'Invalid Apikey': AuthenticationError, # {"status":"5300","message":"Invalid Apikey"}
'Method Not Allowed.(Access IP)': PermissionDenied,
'Method Not Allowed.(BTC Adress)': InvalidAddress,
'Method Not Allowed.(Access)': PermissionDenied,
'Database Fail': ExchangeNotAvailable,
'Invalid Parameter': BadRequest,
'5600': ExchangeError,
'Unknown Error': ExchangeError,
'After May 23th, recent_transactions is no longer, hence users will not be able to connect to recent_transactions': ExchangeError, # {"status":"5100","message":"After May 23th, recent_transactions is no longer, hence users will not be able to connect to recent_transactions"}
},
})
def amount_to_precision(self, symbol, amount):
return self.decimal_to_precision(amount, TRUNCATE, self.markets[symbol]['precision']['amount'], DECIMAL_PLACES)
def fetch_markets(self, params={}):
response = self.publicGetTickerAll(params)
data = self.safe_value(response, 'data')
currencyIds = list(data.keys())
result = []
quote = self.safe_currency_code('KRW')
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
if currencyId == 'date':
continue
market = data[currencyId]
base = self.safe_currency_code(currencyId)
symbol = currencyId + '/' + quote
active = True
if isinstance(market, list):
numElements = len(market)
if numElements == 0:
active = False
result.append({
'id': currencyId,
'symbol': symbol,
'base': base,
'quote': quote,
'info': market,
'active': active,
'precision': {
'amount': 4,
'price': 4,
},
'limits': {
'amount': {
'min': None,
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': 500,
'max': 5000000000,
},
},
'baseId': None,
'quoteId': None,
})
return result
def fetch_balance(self, params={}):
self.load_markets()
request = {
'currency': 'ALL',
}
response = self.privatePostInfoBalance(self.extend(request, params))
result = {'info': response}
balances = self.safe_value(response, 'data')
codes = list(self.currencies.keys())
for i in range(0, len(codes)):
code = codes[i]
account = self.account()
currency = self.currency(code)
lowerCurrencyId = self.safe_string_lower(currency, 'id')
account['total'] = self.safe_float(balances, 'total_' + lowerCurrencyId)
account['used'] = self.safe_float(balances, 'in_use_' + lowerCurrencyId)
account['free'] = self.safe_float(balances, 'available_' + lowerCurrencyId)
result[code] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'currency': market['base'],
}
if limit is not None:
request['count'] = limit # default 30, max 30
response = self.publicGetOrderbookCurrency(self.extend(request, params))
#
# {
# "status":"0000",
# "data":{
# "timestamp":"1587621553942",
# "payment_currency":"KRW",
# "order_currency":"BTC",
# "bids":[
# {"price":"8652000","quantity":"0.0043"},
# {"price":"8651000","quantity":"0.0049"},
# {"price":"8650000","quantity":"8.4791"},
# ],
# "asks":[
# {"price":"8654000","quantity":"0.119"},
# {"price":"8655000","quantity":"0.254"},
# {"price":"8658000","quantity":"0.119"},
# ]
# }
# }
#
data = self.safe_value(response, 'data', {})
timestamp = self.safe_integer(data, 'timestamp')
return self.parse_order_book(data, timestamp, 'bids', 'asks', 'price', 'quantity')
def parse_ticker(self, ticker, market=None):
#
# fetchTicker, fetchTickers
#
# {
# "opening_price":"227100",
# "closing_price":"228400",
# "min_price":"222300",
# "max_price":"230000",
# "units_traded":"82618.56075337",
# "acc_trade_value":"18767376138.6031",
# "prev_closing_price":"227100",
# "units_traded_24H":"151871.13484676",
# "acc_trade_value_24H":"34247610416.8974",
# "fluctate_24H":"8700",
# "fluctate_rate_24H":"3.96",
# "date":"1587710327264", # fetchTickers inject self
# }
#
timestamp = self.safe_integer(ticker, 'date')
symbol = None
if market is not None:
symbol = market['symbol']
open = self.safe_float(ticker, 'opening_price')
close = self.safe_float(ticker, 'closing_price')
change = None
percentage = None
average = None
if (close is not None) and (open is not None):
change = close - open
if open > 0:
percentage = change / open * 100
average = self.sum(open, close) / 2
baseVolume = self.safe_float(ticker, 'units_traded_24H')
quoteVolume = self.safe_float(ticker, 'acc_trade_value_24H')
vwap = None
if quoteVolume is not None and baseVolume is not None:
vwap = quoteVolume / baseVolume
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'max_price'),
'low': self.safe_float(ticker, 'min_price'),
'bid': self.safe_float(ticker, 'buy_price'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'sell_price'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': close,
'last': close,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
response = self.publicGetTickerAll(params)
#
# {
# "status":"0000",
# "data":{
# "BTC":{
# "opening_price":"9045000",
# "closing_price":"9132000",
# "min_price":"8938000",
# "max_price":"9168000",
# "units_traded":"4619.79967497",
# "acc_trade_value":"42021363832.5187",
# "prev_closing_price":"9041000",
# "units_traded_24H":"8793.5045804",
# "acc_trade_value_24H":"78933458515.4962",
# "fluctate_24H":"530000",
# "fluctate_rate_24H":"6.16"
# },
# "date":"1587710878669"
# }
# }
#
result = {}
data = self.safe_value(response, 'data', {})
timestamp = self.safe_integer(data, 'date')
tickers = self.omit(data, 'date')
ids = list(tickers.keys())
for i in range(0, len(ids)):
id = ids[i]
symbol = id
market = None
if id in self.markets_by_id:
market = self.markets_by_id[id]
symbol = market['symbol']
ticker = tickers[id]
isArray = isinstance(ticker, list)
if not isArray:
ticker['date'] = timestamp
result[symbol] = self.parse_ticker(ticker, market)
return result
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'currency': market['base'],
}
response = self.publicGetTickerCurrency(self.extend(request, params))
#
# {
# "status":"0000",
# "data":{
# "opening_price":"227100",
# "closing_price":"228400",
# "min_price":"222300",
# "max_price":"230000",
# "units_traded":"82618.56075337",
# "acc_trade_value":"18767376138.6031",
# "prev_closing_price":"227100",
# "units_traded_24H":"151871.13484676",
# "acc_trade_value_24H":"34247610416.8974",
# "fluctate_24H":"8700",
# "fluctate_rate_24H":"3.96",
# "date":"1587710327264"
# }
# }
#
data = self.safe_value(response, 'data', {})
return self.parse_ticker(data, market)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "transaction_date":"2020-04-23 22:21:46",
# "type":"ask",
# "units_traded":"0.0125",
# "price":"8667000",
# "total":"108337"
# }
#
# fetchOrder(private)
#
# {
# "transaction_date": "1572497603902030",
# "price": "8601000",
# "units": "0.005",
# "fee_currency": "KRW",
# "fee": "107.51",
# "total": "43005"
# }
#
# a workaround for their bug in date format, hours are not 0-padded
timestamp = None
transactionDatetime = self.safe_string(trade, 'transaction_date')
if transactionDatetime is not None:
parts = transactionDatetime.split(' ')
numParts = len(parts)
if numParts > 1:
transactionDate = parts[0]
transactionTime = parts[1]
if len(transactionTime) < 8:
transactionTime = '0' + transactionTime
timestamp = self.parse8601(transactionDate + ' ' + transactionTime)
else:
timestamp = self.safe_integer_product(trade, 'transaction_date', 0.001)
if timestamp is not None:
timestamp -= 9 * 3600000 # they report UTC + 9 hours, server in Korean timezone
type = None
side = self.safe_string(trade, 'type')
side = 'sell' if (side == 'ask') else 'buy'
id = self.safe_string(trade, 'cont_no')
symbol = None
if market is not None:
symbol = market['symbol']
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'units_traded')
cost = self.safe_float(trade, 'total')
if cost is None:
if amount is not None:
if price is not None:
cost = price * amount
fee = None
feeCost = self.safe_float(trade, 'fee')
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'fee_currency')
feeCurrencyCode = self.common_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': None,
'type': type,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'currency': market['base'],
}
        if limit is not None:
request['count'] = limit # default 20, max 100
response = self.publicGetTransactionHistoryCurrency(self.extend(request, params))
#
# {
# "status":"0000",
# "data":[
# {
# "transaction_date":"2020-04-23 22:21:46",
# "type":"ask",
# "units_traded":"0.0125",
# "price":"8667000",
# "total":"108337"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'order_currency': market['id'],
'Payment_currency': market['quote'],
'units': amount,
}
method = 'privatePostTradePlace'
if type == 'limit':
request['price'] = price
request['type'] = 'bid' if (side == 'buy') else 'ask'
else:
method = 'privatePostTradeMarket' + self.capitalize(side)
response = getattr(self, method)(self.extend(request, params))
id = self.safe_string(response, 'order_id')
if id is None:
raise InvalidOrder(self.id + ' createOrder did not return an order id')
return {
'info': response,
'symbol': symbol,
'type': type,
'side': side,
'id': id,
}
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'order_id': id,
'count': 1,
'order_currency': market['base'],
'payment_currency': market['quote'],
}
response = self.privatePostInfoOrderDetail(self.extend(request, params))
#
# {
# "status": "0000",
# "data": {
# "transaction_date": "1572497603668315",
# "type": "bid",
# "order_status": "Completed",
# "order_currency": "BTC",
# "payment_currency": "KRW",
# "order_price": "8601000",
# "order_qty": "0.007",
# "cancel_date": "",
# "cancel_type": "",
# "contract": [
# {
# "transaction_date": "1572497603902030",
# "price": "8601000",
# "units": "0.005",
# "fee_currency": "KRW",
# "fee": "107.51",
# "total": "43005"
# },
# ]
# }
# }
#
data = self.safe_value(response, 'data')
return self.parse_order(self.extend(data, {'order_id': id}), market)
def parse_order_status(self, status):
statuses = {
'Pending': 'open',
'Completed': 'closed',
'Cancel': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# fetchOrder
#
# {
# "transaction_date": "1572497603668315",
# "type": "bid",
# "order_status": "Completed",
# "order_currency": "BTC",
# "payment_currency": "KRW",
# "order_price": "8601000",
# "order_qty": "0.007",
# "cancel_date": "",
# "cancel_type": "",
# "contract": [
# {
# "transaction_date": "1572497603902030",
# "price": "8601000",
# "units": "0.005",
# "fee_currency": "KRW",
# "fee": "107.51",
# "total": "43005"
# },
# ]
# }
#
# fetchOpenOrders
#
# {
# "order_currency": "BTC",
# "payment_currency": "KRW",
# "order_id": "C0101000007408440032",
# "order_date": "1571728739360570",
# "type": "bid",
# "units": "5.0",
# "units_remaining": "5.0",
# "price": "501000",
# }
#
timestamp = self.safe_integer_product(order, 'order_date', 0.001)
sideProperty = self.safe_value_2(order, 'type', 'side')
side = 'buy' if (sideProperty == 'bid') else 'sell'
status = self.parse_order_status(self.safe_string(order, 'order_status'))
price = self.safe_float_2(order, 'order_price', 'price')
type = 'limit'
if price == 0:
price = None
type = 'market'
amount = self.safe_float_2(order, 'order_qty', 'units')
remaining = self.safe_float(order, 'units_remaining')
if remaining is None:
if status == 'closed':
remaining = 0
else:
remaining = amount
filled = None
if (amount is not None) and (remaining is not None):
filled = amount - remaining
symbol = None
baseId = self.safe_string(order, 'order_currency')
quoteId = self.safe_string(order, 'payment_currency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
if (base is not None) and (quote is not None):
symbol = base + '/' + quote
if (symbol is None) and (market is not None):
symbol = market['symbol']
rawTrades = self.safe_value(order, 'contract')
trades = None
id = self.safe_string(order, 'order_id')
if rawTrades is not None:
trades = self.parse_trades(rawTrades, market, None, None, {
'side': side,
'symbol': symbol,
'order': id,
})
return {
'info': order,
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'side': side,
'price': price,
'amount': amount,
'cost': None,
'average': None,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': None,
'trades': trades,
}
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders requires a symbol argument')
self.load_markets()
market = self.market(symbol)
if limit is None:
limit = 100
request = {
'count': limit,
'order_currency': market['base'],
'payment_currency': market['quote'],
}
if since is not None:
request['after'] = since
response = self.privatePostInfoOrders(self.extend(request, params))
#
# {
# "status": "0000",
# "data": [
# {
# "order_currency": "BTC",
# "payment_currency": "KRW",
# "order_id": "C0101000007408440032",
# "order_date": "1571728739360570",
# "type": "bid",
# "units": "5.0",
# "units_remaining": "5.0",
# "price": "501000",
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, since, limit)
def cancel_order(self, id, symbol=None, params={}):
side_in_params = ('side' in params)
if not side_in_params:
raise ArgumentsRequired(self.id + ' cancelOrder requires a `symbol` argument and a `side` parameter(sell or buy)')
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires a `symbol` argument and a `side` parameter(sell or buy)')
market = self.market(symbol)
side = 'bid' if (params['side'] == 'buy') else 'ask'
params = self.omit(params, ['side', 'currency'])
# https://github.com/ccxt/ccxt/issues/6771
request = {
'order_id': id,
'type': side,
'order_currency': market['base'],
'payment_currency': market['quote'],
}
return self.privatePostTradeCancel(self.extend(request, params))
def cancel_unified_order(self, order, params={}):
request = {
'side': order['side'],
}
return self.cancel_order(order['id'], order['symbol'], self.extend(request, params))
def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
self.load_markets()
currency = self.currency(code)
request = {
'units': amount,
'address': address,
'currency': currency['id'],
}
        if code == 'XRP' or code == 'XMR':  # compare the unified code, not the currency dict
destination = self.safe_string(params, 'destination')
if (tag is None) and (destination is None):
raise ArgumentsRequired(self.id + ' ' + code + ' withdraw() requires a tag argument or an extra destination param')
elif tag is not None:
request['destination'] = tag
response = self.privatePostTradeBtcWithdrawal(self.extend(request, params))
return {
'info': response,
'id': None,
}
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
endpoint = '/' + self.implode_params(path, params)
url = self.urls['api'][api] + endpoint
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
body = self.urlencode(self.extend({
'endpoint': endpoint,
}, query))
nonce = str(self.nonce())
auth = endpoint + "\0" + body + "\0" + nonce # eslint-disable-line quotes
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha512)
signature64 = self.decode(base64.b64encode(self.encode(signature)))
headers = {
'Accept': 'application/json',
'Content-Type': 'application/x-www-form-urlencoded',
'Api-Key': self.apiKey,
'Api-Sign': str(signature64),
'Api-Nonce': nonce,
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
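    # Sketch of the private signing scheme above (hypothetical values):
    #   endpoint = '/info/orders'
    #   body     = 'endpoint=%2Finfo%2Forders&order_currency=BTC'
    #   nonce    = '1571728739360'
    #   auth     = endpoint + "\0" + body + "\0" + nonce
    #   Api-Sign = base64(hex(HMAC-SHA512(secret, auth)))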
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
if 'status' in response:
#
# {"status":"5100","message":"After May 23th, recent_transactions is no longer, hence users will not be able to connect to recent_transactions"}
#
status = self.safe_string(response, 'status')
message = self.safe_string(response, 'message')
if status is not None:
if status == '0000':
return # no error
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions, status, feedback)
self.throw_exactly_matched_exception(self.exceptions, message, feedback)
raise ExchangeError(feedback)
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if 'status' in response:
if response['status'] == '0000':
return response
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| 39.114058
| 292
| 0.478842
|
4a1750b4a5755fc7101e7d0743520a8704d9cfaa
| 10,246
|
py
|
Python
|
perfkitbenchmarker/providers/gcp/google_kubernetes_engine.py
|
inflatador/PerfKitBenchmarker
|
9a12f44aa0c3fe6873e57a7920b1d13c006073e3
|
[
"Apache-2.0"
] | null | null | null |
perfkitbenchmarker/providers/gcp/google_kubernetes_engine.py
|
inflatador/PerfKitBenchmarker
|
9a12f44aa0c3fe6873e57a7920b1d13c006073e3
|
[
"Apache-2.0"
] | 1
|
2021-02-23T12:07:44.000Z
|
2021-02-23T12:07:44.000Z
|
perfkitbenchmarker/providers/gcp/google_kubernetes_engine.py
|
isabella232/PerfKitBenchmarker
|
8dd509ac0e024b7deeccd74266c8e6211a69529e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains classes/functions related to GKE (Google Kubernetes Engine)."""
import json
import logging
import os
import re
from perfkitbenchmarker import container_service
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
from perfkitbenchmarker import flags
from perfkitbenchmarker import kubernetes_helper
from perfkitbenchmarker import providers
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.gcp import gce_virtual_machine
from perfkitbenchmarker.providers.gcp import util
FLAGS = flags.FLAGS
FLAGS.kubernetes_anti_affinity = False
NVIDIA_DRIVER_SETUP_DAEMON_SET_SCRIPT = 'https://raw.githubusercontent.com/GoogleCloudPlatform/container-engine-accelerators/master/nvidia-driver-installer/cos/daemonset-preloaded.yaml'
NVIDIA_UNRESTRICTED_PERMISSIONS_DAEMON_SET = 'nvidia_unrestricted_permissions_daemonset.yml'
DEFAULT_CONTAINER_VERSION = 'latest'
SERVICE_ACCOUNT_PATTERN = r'.*((?<!iam)|{project}.iam).gserviceaccount.com'
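# Illustrative behavior of the pattern above (assumed, not exhaustive):
#   '12345-compute@developer.gserviceaccount.com'  -> matches (GCP-managed)
#   'robot@other-project.iam.gserviceaccount.com'  -> no match; the negative
#   lookbehind rejects '.iam.gserviceaccount.com' accounts of other projects.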
class GoogleContainerRegistry(container_service.BaseContainerRegistry):
"""Class for building and storing container images on GCP."""
CLOUD = providers.GCP
def __init__(self, registry_spec):
super(GoogleContainerRegistry, self).__init__(registry_spec)
self.project = self.project or util.GetDefaultProject()
def GetFullRegistryTag(self, image):
"""Gets the full tag of the image."""
region = util.GetMultiRegionFromRegion(util.GetRegionFromZone(self.zone))
hostname = '{region}.gcr.io'.format(region=region)
full_tag = '{hostname}/{project}/{name}'.format(
hostname=hostname, project=self.project, name=image)
return full_tag
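  # Example (illustrative): image 'nginx' in project 'my-proj' with a zone in
  # the US multi-region yields 'us.gcr.io/my-proj/nginx'.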
def Login(self):
"""No-op because Push() handles its own auth."""
pass
def Push(self, image):
"""Push a locally built image to the registry."""
full_tag = self.GetFullRegistryTag(image.name)
tag_cmd = ['docker', 'tag', image.name, full_tag]
vm_util.IssueCommand(tag_cmd)
# vm_util.IssueCommand() is used here instead of util.GcloudCommand()
# because gcloud flags cannot be appended to the command since they
# are interpreted as docker args instead.
push_cmd = [
FLAGS.gcloud_path, '--project', self.project,
'docker', '--', 'push', full_tag
]
vm_util.IssueCommand(push_cmd)
def RemoteBuild(self, image):
"""Build the image remotely."""
full_tag = self.GetFullRegistryTag(image.name)
build_cmd = util.GcloudCommand(self, 'builds', 'submit',
'--tag', full_tag, image.directory)
del build_cmd.flags['zone']
build_cmd.Issue()
class GkeCluster(container_service.KubernetesCluster):
"""Class representing a Google Kubernetes Engine cluster."""
CLOUD = providers.GCP
def __init__(self, spec):
super(GkeCluster, self).__init__(spec)
self.project = spec.vm_spec.project
self.cluster_version = (FLAGS.container_cluster_version or
DEFAULT_CONTAINER_VERSION)
self.use_application_default_credentials = True
def GetResourceMetadata(self):
"""Returns a dict containing metadata about the cluster.
Returns:
dict mapping string property key to value.
"""
result = super(GkeCluster, self).GetResourceMetadata()
result['project'] = self.project
result['container_cluster_version'] = self.cluster_version
result['boot_disk_type'] = self.vm_config.boot_disk_type
result['boot_disk_size'] = self.vm_config.boot_disk_size
if self.vm_config.max_local_disks:
result['gce_local_ssd_count'] = self.vm_config.max_local_disks
# TODO(pclay): support NVME when it leaves alpha
# Also consider moving FLAGS.gce_ssd_interface into the vm_spec.
result['gce_local_ssd_interface'] = gce_virtual_machine.SCSI
return result
def _Create(self):
"""Creates the cluster."""
cmd = util.GcloudCommand(self, 'container', 'clusters', 'create', self.name)
cmd.flags['cluster-version'] = self.cluster_version
if FLAGS.gke_enable_alpha:
cmd.args.append('--enable-kubernetes-alpha')
cmd.args.append('--no-enable-autorepair')
cmd.args.append('--no-enable-autoupgrade')
user = util.GetDefaultUser()
if FLAGS.gcp_service_account:
cmd.flags['service-account'] = FLAGS.gcp_service_account
# Matches service accounts that either definitely belongs to this project or
# are a GCP managed service account like the GCE default service account,
# which we can't tell to which project they belong.
elif re.match(SERVICE_ACCOUNT_PATTERN, user):
logging.info(
'Re-using configured service-account for GKE Cluster: %s', user)
cmd.flags['service-account'] = user
self.use_application_default_credentials = False
else:
logging.info('Using default GCE service account for GKE cluster')
cmd.flags['scopes'] = 'cloud-platform'
if self.vm_config.gpu_count:
cmd.flags['accelerator'] = (
gce_virtual_machine.GenerateAcceleratorSpecString(
self.vm_config.gpu_type, self.vm_config.gpu_count))
if self.vm_config.min_cpu_platform:
cmd.flags['min-cpu-platform'] = self.vm_config.min_cpu_platform
if self.vm_config.boot_disk_size:
cmd.flags['disk-size'] = self.vm_config.boot_disk_size
if self.vm_config.boot_disk_type:
cmd.flags['disk-type'] = self.vm_config.boot_disk_type
if self.vm_config.max_local_disks:
# TODO(pclay): Switch to local-ssd-volumes which support NVME when it
# leaves alpha. See
# https://cloud.google.com/sdk/gcloud/reference/alpha/container/clusters/create
cmd.flags['local-ssd-count'] = self.vm_config.max_local_disks
if self.min_nodes != self.num_nodes or self.max_nodes != self.num_nodes:
cmd.args.append('--enable-autoscaling')
cmd.flags['max-nodes'] = self.max_nodes
cmd.flags['min-nodes'] = self.min_nodes
cmd.flags['num-nodes'] = self.num_nodes
if self.vm_config.machine_type is None:
cmd.flags['machine-type'] = 'custom-{0}-{1}'.format(
self.vm_config.cpus, self.vm_config.memory_mib)
else:
cmd.flags['machine-type'] = self.vm_config.machine_type
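    # Custom machine type example (illustrative): 4 vCPUs and 4096 MiB of
    # memory produce the flag value 'custom-4-4096'.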
cmd.flags['metadata'] = util.MakeFormattedDefaultTags()
cmd.flags['labels'] = util.MakeFormattedDefaultTags()
# This command needs a long timeout due to the many minutes it
# can take to provision a large GPU-accelerated GKE cluster.
_, stderr, retcode = cmd.Issue(timeout=1200, raise_on_failure=False)
if retcode:
# Log specific type of failure, if known.
if 'ZONE_RESOURCE_POOL_EXHAUSTED' in stderr:
logging.exception('Container resources exhausted: %s', stderr)
raise errors.Benchmarks.InsufficientCapacityCloudFailure(
'Container resources exhausted in zone %s: %s' %
(self.zone, stderr))
util.CheckGcloudResponseKnownFailures(stderr, retcode)
raise errors.Resource.CreationError(stderr)
def _PostCreate(self):
"""Acquire cluster authentication."""
super(GkeCluster, self)._PostCreate()
cmd = util.GcloudCommand(
self, 'container', 'clusters', 'get-credentials', self.name)
env = os.environ.copy()
env['KUBECONFIG'] = FLAGS.kubeconfig
cmd.IssueRetryable(env=env)
self._AddTags()
if self.vm_config.gpu_count:
kubernetes_helper.CreateFromFile(NVIDIA_DRIVER_SETUP_DAEMON_SET_SCRIPT)
kubernetes_helper.CreateFromFile(
data.ResourcePath(NVIDIA_UNRESTRICTED_PERMISSIONS_DAEMON_SET))
def _AddTags(self):
"""Tags all VMs in the cluster."""
vms_in_cluster = []
for instance_group in self._GetInstanceGroups():
vms_in_cluster.extend(self._GetInstancesFromInstanceGroup(instance_group))
for vm_name in vms_in_cluster:
cmd = util.GcloudCommand(self, 'compute', 'instances', 'add-metadata',
vm_name)
cmd.flags['metadata'] = util.MakeFormattedDefaultTags()
cmd.Issue()
cmd = util.GcloudCommand(self, 'compute', 'disks', 'add-labels', vm_name)
cmd.flags['labels'] = util.MakeFormattedDefaultTags()
cmd.Issue()
def _GetInstanceGroups(self):
cmd = util.GcloudCommand(self, 'container', 'node-pools', 'list')
cmd.flags['cluster'] = self.name
stdout, _, _ = cmd.Issue()
json_output = json.loads(stdout)
instance_groups = []
for node_pool in json_output:
for group_url in node_pool['instanceGroupUrls']:
instance_groups.append(group_url.split('/')[-1]) # last url part
return instance_groups
def _GetInstancesFromInstanceGroup(self, instance_group_name):
cmd = util.GcloudCommand(self, 'compute', 'instance-groups',
'list-instances', instance_group_name)
stdout, _, _ = cmd.Issue()
json_output = json.loads(stdout)
instances = []
for instance in json_output:
instances.append(instance['instance'].split('/')[-1])
return instances
def _IsDeleting(self):
cmd = util.GcloudCommand(
self, 'container', 'clusters', 'describe', self.name)
stdout, _, _ = cmd.Issue(raise_on_failure=False)
    return bool(stdout)
def _Delete(self):
"""Deletes the cluster."""
cmd = util.GcloudCommand(
self, 'container', 'clusters', 'delete', self.name)
cmd.args.append('--async')
cmd.Issue(raise_on_failure=False)
def _Exists(self):
"""Returns True if the cluster exits."""
cmd = util.GcloudCommand(
self, 'container', 'clusters', 'describe', self.name)
_, _, retcode = cmd.Issue(suppress_warning=True, raise_on_failure=False)
return retcode == 0
| 39.713178
| 185
| 0.71179
|
4a1750d1485d085e088f75a654ad1eaaa132bb7b
| 1,219
|
py
|
Python
|
python/leetcode/526.py
|
ParkinWu/leetcode
|
b31312bdefbb2be795f3459e1a76fbc927cab052
|
[
"MIT"
] | null | null | null |
python/leetcode/526.py
|
ParkinWu/leetcode
|
b31312bdefbb2be795f3459e1a76fbc927cab052
|
[
"MIT"
] | null | null | null |
python/leetcode/526.py
|
ParkinWu/leetcode
|
b31312bdefbb2be795f3459e1a76fbc927cab052
|
[
"MIT"
] | null | null | null |
# Suppose you have the N integers from 1 to N. We call an array built from
# these N numbers a beautiful arrangement if, for every position i
# (1 <= i <= N), at least one of the following holds:
#
# the number at position i is divisible by i
# i is divisible by the number at position i
# Given an integer N, how many beautiful arrangements can you construct?
#
# Example 1:
#
# Input: 2
# Output: 2
# Explanation:
#
# The first beautiful arrangement is [1, 2]:
# the number at position 1 (i=1) is 1, and 1 is divisible by i (i=1)
# the number at position 2 (i=2) is 2, and 2 is divisible by i (i=2)
#
# The second beautiful arrangement is [2, 1]:
# the number at position 1 (i=1) is 2, and 2 is divisible by i (i=1)
# the number at position 2 (i=2) is 1, and i (i=2) is divisible by 1
# Note:
#
# N is a positive integer and will not exceed 15.
#
# Source: LeetCode (LeetCode-CN)
# Link: https://leetcode-cn.com/problems/beautiful-arrangement
# Copyright belongs to LeetCode-CN. Contact them for authorization before any
# commercial reuse; credit the source for non-commercial reuse.
from typing import List
class Solution:
def __init__(self):
self.ans = 0
    def __dfs(self, start: int, N: int, visit: List[int]):
        # All positions 1..N have been filled with a valid number.
        if start == N + 1:
            self.ans += 1
            return
        for i in range(1, N + 1):
            if visit[i]:
                continue
            # Place i at position `start` only if one divides the other.
            if i % start == 0 or start % i == 0:
                visit[i] = 1
                self.__dfs(start + 1, N, visit)
                visit[i] = 0  # backtrack
def countArrangement(self, N: int) -> int:
visit = [0] * (N + 1)
self.__dfs(1, N, visit)
return self.ans
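# Alternative sketch (not part of the original solution): the same count via
# bitmask memoization; positions are filled left to right and `used` tracks
# which numbers are already taken.
from functools import lru_cache
def count_arrangement_dp(N: int) -> int:
    @lru_cache(maxsize=None)
    def go(pos: int, used: int) -> int:
        if pos == N + 1:
            return 1  # every position filled -> one beautiful arrangement
        total = 0
        for i in range(1, N + 1):
            if not used & (1 << i) and (i % pos == 0 or pos % i == 0):
                total += go(pos + 1, used | (1 << i))
        return total
    return go(1, 0)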
if __name__ == '__main__':
s = Solution()
print(s.countArrangement(15))
| 20.316667
| 100
| 0.539787
|
4a1750f17f03a34785bc56c7c2adbd375e29d24b
| 17,109
|
py
|
Python
|
selfdrive/car/honda/carstate.py
|
Arkantium/ArnePilot
|
2fe3eba61e763df8a5997ac40aec1fc77b501352
|
[
"MIT"
] | null | null | null |
selfdrive/car/honda/carstate.py
|
Arkantium/ArnePilot
|
2fe3eba61e763df8a5997ac40aec1fc77b501352
|
[
"MIT"
] | null | null | null |
selfdrive/car/honda/carstate.py
|
Arkantium/ArnePilot
|
2fe3eba61e763df8a5997ac40aec1fc77b501352
|
[
"MIT"
] | null | null | null |
from cereal import car
from collections import defaultdict
from common.numpy_fast import interp
from opendbc.can.can_define import CANDefine
from opendbc.can.parser import CANParser
from selfdrive.config import Conversions as CV
from selfdrive.car.interfaces import CarStateBase
from selfdrive.car.honda.values import CAR, DBC, STEER_THRESHOLD, SPEED_FACTOR, HONDA_BOSCH
def calc_cruise_offset(offset, speed):
  # heuristic formula so that speed is controlled to ~ 0.3m/s below pid_speed
  # constraints to solve for _K0, _K1, _K2 are:
  # - speed = 0m/s, out = -0.3
  # - speed = 34m/s, offset = 2.0, out = -0.25
  # - speed = 34m/s, offset = -2.5, out = -1.8
  _K0 = -0.3
  _K1 = -0.01879
  _K2 = 0.01013
  return min(_K0 + _K1 * speed + _K2 * speed * offset, 0.)
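# Sanity check of the constraints above (illustrative, values rounded):
#   calc_cruise_offset(0, 0)      -> -0.30
#   calc_cruise_offset(2.0, 34)   -> -0.25
#   calc_cruise_offset(-2.5, 34)  -> -1.80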
def get_can_signals(CP):
  # this function generates the parser inputs: signals as
  # (name, message, default value) and checks as (message, expected frequency)
signals = [
("XMISSION_SPEED", "ENGINE_DATA", 0),
("WHEEL_SPEED_FL", "WHEEL_SPEEDS", 0),
("WHEEL_SPEED_FR", "WHEEL_SPEEDS", 0),
("WHEEL_SPEED_RL", "WHEEL_SPEEDS", 0),
("WHEEL_SPEED_RR", "WHEEL_SPEEDS", 0),
("STEER_ANGLE", "STEERING_SENSORS", 0),
("STEER_ANGLE_RATE", "STEERING_SENSORS", 0),
("MOTOR_TORQUE", "STEER_MOTOR_TORQUE", 0),
("STEER_TORQUE_SENSOR", "STEER_STATUS", 0),
("LEFT_BLINKER", "SCM_FEEDBACK", 0),
("RIGHT_BLINKER", "SCM_FEEDBACK", 0),
("GEAR", "GEARBOX", 0),
("SEATBELT_DRIVER_LAMP", "SEATBELT_STATUS", 1),
("SEATBELT_DRIVER_LATCHED", "SEATBELT_STATUS", 0),
("BRAKE_PRESSED", "POWERTRAIN_DATA", 0),
("BRAKE_SWITCH", "POWERTRAIN_DATA", 0),
("CRUISE_BUTTONS", "SCM_BUTTONS", 0),
("ESP_DISABLED", "VSA_STATUS", 1),
("USER_BRAKE", "VSA_STATUS", 0),
("BRAKE_HOLD_ACTIVE", "VSA_STATUS", 0),
("STEER_STATUS", "STEER_STATUS", 5),
("GEAR_SHIFTER", "GEARBOX", 0),
("PEDAL_GAS", "POWERTRAIN_DATA", 0),
("CRUISE_SETTING", "SCM_BUTTONS", 0),
("ACC_STATUS", "POWERTRAIN_DATA", 0),
]
checks = [
("ENGINE_DATA", 100),
("WHEEL_SPEEDS", 50),
("STEERING_SENSORS", 100),
("SEATBELT_STATUS", 10),
("CRUISE", 10),
("POWERTRAIN_DATA", 100),
("VSA_STATUS", 50),
]
if CP.carFingerprint == CAR.ODYSSEY_CHN:
checks += [
("SCM_FEEDBACK", 25),
("SCM_BUTTONS", 50),
]
else:
checks += [
("SCM_FEEDBACK", 10),
("SCM_BUTTONS", 25),
]
if CP.carFingerprint in (CAR.CRV_HYBRID, CAR.CIVIC_BOSCH_DIESEL):
checks += [
("GEARBOX", 50),
]
else:
checks += [
("GEARBOX", 100),
]
if CP.carFingerprint in HONDA_BOSCH:
    # The Civic is the only Bosch car that uses the same brake message as other Hondas.
if CP.carFingerprint not in (CAR.ACCORDH, CAR.CIVIC_BOSCH, CAR.CIVIC_BOSCH_DIESEL, CAR.CRV_HYBRID, CAR.INSIGHT):
signals += [("BRAKE_PRESSED", "BRAKE_MODULE", 0)]
checks += [("BRAKE_MODULE", 50)]
signals += [("CAR_GAS", "GAS_PEDAL_2", 0),
("MAIN_ON", "SCM_FEEDBACK", 0),
("CRUISE_CONTROL_LABEL", "ACC_HUD", 0),
("EPB_STATE", "EPB_STATUS", 0),
("CRUISE_SPEED", "ACC_HUD", 0)]
checks += [("GAS_PEDAL_2", 100)]
# TODO: Find brake error bits for CRV_HYBRID
    if CP.openpilotLongitudinalControl and CP.carFingerprint != CAR.CRV_HYBRID:
signals += [("BRAKE_ERROR_1", "STANDSTILL", 1),
("BRAKE_ERROR_2", "STANDSTILL", 1)]
checks += [("STANDSTILL", 50)]
else:
# Nidec signals.
signals += [("BRAKE_ERROR_1", "STANDSTILL", 1),
("BRAKE_ERROR_2", "STANDSTILL", 1),
("CRUISE_SPEED_PCM", "CRUISE", 0),
("CRUISE_SPEED_OFFSET", "CRUISE_PARAMS", 0)]
checks += [("STANDSTILL", 50)]
if CP.carFingerprint == CAR.ODYSSEY_CHN:
checks += [("CRUISE_PARAMS", 10)]
else:
checks += [("CRUISE_PARAMS", 50)]
if CP.carFingerprint in (CAR.ACCORD, CAR.ACCORD_15, CAR.ACCORDH, CAR.CIVIC_BOSCH, CAR.CIVIC_BOSCH_DIESEL, CAR.CRV_HYBRID, CAR.INSIGHT):
signals += [("DRIVERS_DOOR_OPEN", "SCM_FEEDBACK", 1)]
elif CP.carFingerprint == CAR.ODYSSEY_CHN:
signals += [("DRIVERS_DOOR_OPEN", "SCM_BUTTONS", 1)]
elif CP.carFingerprint == CAR.HRV:
signals += [("DRIVERS_DOOR_OPEN", "SCM_BUTTONS", 1),
("WHEELS_MOVING", "STANDSTILL", 1)]
else:
signals += [("DOOR_OPEN_FL", "DOORS_STATUS", 1),
("DOOR_OPEN_FR", "DOORS_STATUS", 1),
("DOOR_OPEN_RL", "DOORS_STATUS", 1),
("DOOR_OPEN_RR", "DOORS_STATUS", 1),
("WHEELS_MOVING", "STANDSTILL", 1)]
checks += [("DOORS_STATUS", 3)]
if CP.carFingerprint == CAR.CIVIC:
signals += [("CAR_GAS", "GAS_PEDAL_2", 0),
("MAIN_ON", "SCM_FEEDBACK", 0),
("IMPERIAL_UNIT", "HUD_SETTING", 0),
("EPB_STATE", "EPB_STATUS", 0)]
elif CP.carFingerprint == CAR.ACURA_ILX:
signals += [("CAR_GAS", "GAS_PEDAL_2", 0),
("MAIN_ON", "SCM_BUTTONS", 0)]
elif CP.carFingerprint in (CAR.CRV, CAR.CRV_EU, CAR.ACURA_RDX, CAR.PILOT_2019, CAR.RIDGELINE):
signals += [("MAIN_ON", "SCM_BUTTONS", 0)]
elif CP.carFingerprint in (CAR.FIT, CAR.HRV):
signals += [("CAR_GAS", "GAS_PEDAL_2", 0),
("MAIN_ON", "SCM_BUTTONS", 0),
("BRAKE_HOLD_ACTIVE", "VSA_STATUS", 0)]
  elif CP.carFingerprint == CAR.HRV:
    # NOTE: unreachable - HRV is already handled by the (FIT, HRV) branch above.
    signals += [("CAR_GAS", "GAS_PEDAL", 0),
                ("MAIN_ON", "SCM_BUTTONS", 0),
                ("BRAKE_HOLD_ACTIVE", "VSA_STATUS", 0)]
elif CP.carFingerprint == CAR.ODYSSEY:
signals += [("MAIN_ON", "SCM_FEEDBACK", 0),
("EPB_STATE", "EPB_STATUS", 0)]
checks += [("EPB_STATUS", 50)]
elif CP.carFingerprint == CAR.PILOT:
signals += [("MAIN_ON", "SCM_BUTTONS", 0),
("CAR_GAS", "GAS_PEDAL_2", 0)]
elif CP.carFingerprint == CAR.ODYSSEY_CHN:
signals += [("MAIN_ON", "SCM_BUTTONS", 0),
("EPB_STATE", "EPB_STATUS", 0)]
checks += [("EPB_STATUS", 50)]
# add gas interceptor reading if we are using it
if CP.enableGasInterceptor:
signals.append(("INTERCEPTOR_GAS", "GAS_SENSOR", 0))
signals.append(("INTERCEPTOR_GAS2", "GAS_SENSOR", 0))
checks.append(("GAS_SENSOR", 50))
return signals, checks
class CarState(CarStateBase):
def __init__(self, CP):
super().__init__(CP)
can_define = CANDefine(DBC[CP.carFingerprint]['pt'])
self.shifter_values = can_define.dv["GEARBOX"]["GEAR_SHIFTER"]
self.steer_status_values = defaultdict(lambda: "UNKNOWN", can_define.dv["STEER_STATUS"]["STEER_STATUS"])
self.user_gas, self.user_gas_pressed = 0., 0
self.brake_switch_prev = 0
self.brake_switch_ts = 0
self.cruise_setting = 0
self.v_cruise_pcm_prev = 0
self.cruise_mode = 0
def update(self, cp, cp_cam):
ret = car.CarState.new_message()
# car params
v_weight_v = [0., 1.] # don't trust smooth speed at low values to avoid premature zero snapping
v_weight_bp = [1., 6.] # smooth blending, below ~0.6m/s the smooth speed snaps to zero
# update prevs, update must run once per loop
self.prev_cruise_buttons = self.cruise_buttons
self.prev_cruise_setting = self.cruise_setting
# ******************* parse out can *******************
# TODO: find wheels moving bit in dbc
if self.CP.carFingerprint in (CAR.ACCORD, CAR.ACCORD_15, CAR.ACCORDH, CAR.CIVIC_BOSCH, CAR.CIVIC_BOSCH_DIESEL, CAR.CRV_HYBRID, CAR.INSIGHT):
ret.standstill = cp.vl["ENGINE_DATA"]['XMISSION_SPEED'] < 0.1
ret.doorOpen = bool(cp.vl["SCM_FEEDBACK"]['DRIVERS_DOOR_OPEN'])
elif self.CP.carFingerprint == CAR.ODYSSEY_CHN:
ret.standstill = cp.vl["ENGINE_DATA"]['XMISSION_SPEED'] < 0.1
ret.doorOpen = bool(cp.vl["SCM_BUTTONS"]['DRIVERS_DOOR_OPEN'])
elif self.CP.carFingerprint == CAR.HRV:
ret.doorOpen = bool(cp.vl["SCM_BUTTONS"]['DRIVERS_DOOR_OPEN'])
else:
ret.standstill = not cp.vl["STANDSTILL"]['WHEELS_MOVING']
ret.doorOpen = any([cp.vl["DOORS_STATUS"]['DOOR_OPEN_FL'], cp.vl["DOORS_STATUS"]['DOOR_OPEN_FR'],
cp.vl["DOORS_STATUS"]['DOOR_OPEN_RL'], cp.vl["DOORS_STATUS"]['DOOR_OPEN_RR']])
ret.seatbeltUnlatched = bool(cp.vl["SEATBELT_STATUS"]['SEATBELT_DRIVER_LAMP'] or not cp.vl["SEATBELT_STATUS"]['SEATBELT_DRIVER_LATCHED'])
steer_status = self.steer_status_values[cp.vl["STEER_STATUS"]['STEER_STATUS']]
ret.steerError = steer_status not in ['NORMAL', 'NO_TORQUE_ALERT_1', 'NO_TORQUE_ALERT_2', 'LOW_SPEED_LOCKOUT', 'TMP_FAULT']
# NO_TORQUE_ALERT_2 can be caused by bump OR steering nudge from driver
self.steer_not_allowed = steer_status not in ['NORMAL', 'NO_TORQUE_ALERT_2']
# LOW_SPEED_LOCKOUT is not worth a warning
ret.steerWarning = steer_status not in ['NORMAL', 'LOW_SPEED_LOCKOUT', 'NO_TORQUE_ALERT_2']
    if not self.CP.openpilotLongitudinalControl or self.CP.carFingerprint == CAR.CRV_HYBRID:
self.brake_error = 0
else:
self.brake_error = cp.vl["STANDSTILL"]['BRAKE_ERROR_1'] or cp.vl["STANDSTILL"]['BRAKE_ERROR_2']
ret.espDisabled = cp.vl["VSA_STATUS"]['ESP_DISABLED'] != 0
speed_factor = SPEED_FACTOR[self.CP.carFingerprint]
ret.wheelSpeeds.fl = cp.vl["WHEEL_SPEEDS"]['WHEEL_SPEED_FL'] * CV.KPH_TO_MS * speed_factor
ret.wheelSpeeds.fr = cp.vl["WHEEL_SPEEDS"]['WHEEL_SPEED_FR'] * CV.KPH_TO_MS * speed_factor
ret.wheelSpeeds.rl = cp.vl["WHEEL_SPEEDS"]['WHEEL_SPEED_RL'] * CV.KPH_TO_MS * speed_factor
ret.wheelSpeeds.rr = cp.vl["WHEEL_SPEEDS"]['WHEEL_SPEED_RR'] * CV.KPH_TO_MS * speed_factor
v_wheel = (ret.wheelSpeeds.fl + ret.wheelSpeeds.fr + ret.wheelSpeeds.rl + ret.wheelSpeeds.rr)/4.
# blend in transmission speed at low speed, since it has more low speed accuracy
v_weight = interp(v_wheel, v_weight_bp, v_weight_v)
ret.vEgoRaw = (1. - v_weight) * cp.vl["ENGINE_DATA"]['XMISSION_SPEED'] * CV.KPH_TO_MS * speed_factor + v_weight * v_wheel
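    # Blending example: at v_wheel <= 1 m/s, v_weight is 0 (pure transmission
    # speed); at v_wheel >= 6 m/s, v_weight is 1 (pure wheel speed average).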
ret.vEgo, ret.aEgo = self.update_speed_kf(ret.vEgoRaw)
ret.steeringAngle = cp.vl["STEERING_SENSORS"]['STEER_ANGLE']
ret.steeringRate = cp.vl["STEERING_SENSORS"]['STEER_ANGLE_RATE']
self.cruise_setting = cp.vl["SCM_BUTTONS"]['CRUISE_SETTING']
self.cruise_buttons = cp.vl["SCM_BUTTONS"]['CRUISE_BUTTONS']
ret.leftBlinker = cp.vl["SCM_FEEDBACK"]['LEFT_BLINKER'] != 0
ret.rightBlinker = cp.vl["SCM_FEEDBACK"]['RIGHT_BLINKER'] != 0
self.brake_hold = cp.vl["VSA_STATUS"]['BRAKE_HOLD_ACTIVE']
if self.CP.carFingerprint in (CAR.CIVIC, CAR.ODYSSEY, CAR.CRV_5G, CAR.ACCORD, CAR.ACCORD_15, CAR.ACCORDH, CAR.CIVIC_BOSCH,
CAR.CIVIC_BOSCH_DIESEL, CAR.CRV_HYBRID, CAR.INSIGHT):
self.park_brake = cp.vl["EPB_STATUS"]['EPB_STATE'] != 0
main_on = cp.vl["SCM_FEEDBACK"]['MAIN_ON']
elif self.CP.carFingerprint == CAR.ODYSSEY_CHN:
self.park_brake = cp.vl["EPB_STATUS"]['EPB_STATE'] != 0
main_on = cp.vl["SCM_BUTTONS"]['MAIN_ON']
else:
self.park_brake = 0 # TODO
main_on = cp.vl["SCM_BUTTONS"]['MAIN_ON']
gear = int(cp.vl["GEARBOX"]['GEAR_SHIFTER'])
ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(gear, None))
self.pedal_gas = cp.vl["POWERTRAIN_DATA"]['PEDAL_GAS']
    # these cars don't expose CAR_GAS, so fall back to the PEDAL_GAS reading
if self.CP.carFingerprint in (CAR.CRV, CAR.CRV_EU, CAR.HRV, CAR.ODYSSEY, CAR.ACURA_RDX, CAR.RIDGELINE, CAR.PILOT_2019, CAR.ODYSSEY_CHN):
ret.gas = self.pedal_gas / 256.
else:
ret.gas = cp.vl["GAS_PEDAL_2"]['CAR_GAS'] / 256.
# this is a hack for the interceptor. This is now only used in the simulation
# TODO: Replace tests by toyota so this can go away
if self.CP.enableGasInterceptor:
self.user_gas = (cp.vl["GAS_SENSOR"]['INTERCEPTOR_GAS'] + cp.vl["GAS_SENSOR"]['INTERCEPTOR_GAS2']) / 2.
self.user_gas_pressed = self.user_gas > 1e-5 # this works because interceptor read < 0 when pedal position is 0. Once calibrated, this will change
ret.gasPressed = self.user_gas_pressed
else:
ret.gasPressed = self.pedal_gas > 1e-5
ret.steeringTorque = cp.vl["STEER_STATUS"]['STEER_TORQUE_SENSOR']
ret.steeringTorqueEps = cp.vl["STEER_MOTOR_TORQUE"]['MOTOR_TORQUE']
ret.steeringPressed = abs(ret.steeringTorque) > STEER_THRESHOLD[self.CP.carFingerprint]
self.brake_switch = cp.vl["POWERTRAIN_DATA"]['BRAKE_SWITCH'] != 0
if self.CP.carFingerprint in HONDA_BOSCH:
self.cruise_mode = cp.vl["ACC_HUD"]['CRUISE_CONTROL_LABEL']
ret.cruiseState.standstill = cp.vl["ACC_HUD"]['CRUISE_SPEED'] == 252.
ret.cruiseState.speedOffset = calc_cruise_offset(0, ret.vEgo)
if self.CP.carFingerprint in (CAR.CIVIC_BOSCH, CAR.CIVIC_BOSCH_DIESEL, CAR.ACCORDH, CAR.CRV_HYBRID, CAR.INSIGHT):
ret.brakePressed = cp.vl["POWERTRAIN_DATA"]['BRAKE_PRESSED'] != 0 or \
(self.brake_switch and self.brake_switch_prev and
cp.ts["POWERTRAIN_DATA"]['BRAKE_SWITCH'] != self.brake_switch_ts)
self.brake_switch_prev = self.brake_switch
self.brake_switch_ts = cp.ts["POWERTRAIN_DATA"]['BRAKE_SWITCH']
else:
# TODO: should anything use CS.brake_switch outside this file?
self.brake_switch = cp.vl["BRAKE_MODULE"]['BRAKE_PRESSED'] != 0
ret.brakePressed = cp.vl["BRAKE_MODULE"]['BRAKE_PRESSED'] != 0
      # Right after SET is pressed, CRUISE_SPEED briefly pulses between 254 and 255;
      # keep the previous set speed until a sane value (< 160) is reported.
ret.cruiseState.speed = self.v_cruise_pcm_prev if cp.vl["ACC_HUD"]['CRUISE_SPEED'] > 160.0 else cp.vl["ACC_HUD"]['CRUISE_SPEED'] * CV.KPH_TO_MS
self.v_cruise_pcm_prev = ret.cruiseState.speed
else:
ret.cruiseState.speedOffset = calc_cruise_offset(cp.vl["CRUISE_PARAMS"]['CRUISE_SPEED_OFFSET'], ret.vEgo)
ret.cruiseState.speed = cp.vl["CRUISE"]['CRUISE_SPEED_PCM'] * CV.KPH_TO_MS
# brake switch has shown some single time step noise, so only considered when
# switch is on for at least 2 consecutive CAN samples
ret.brakePressed = bool(cp.vl["POWERTRAIN_DATA"]['BRAKE_PRESSED'] or
(self.brake_switch and self.brake_switch_prev and
cp.ts["POWERTRAIN_DATA"]['BRAKE_SWITCH'] != self.brake_switch_ts))
self.brake_switch_prev = self.brake_switch
self.brake_switch_ts = cp.ts["POWERTRAIN_DATA"]['BRAKE_SWITCH']
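      # cp.ts holds the receive time of the last BRAKE_SWITCH sample; requiring
      # a changed timestamp proves the earlier 'on' reading came from a distinct
      # CAN sample, which gives the 2-consecutive-sample debounce noted above.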
ret.brake = cp.vl["VSA_STATUS"]['USER_BRAKE']
ret.cruiseState.enabled = cp.vl["POWERTRAIN_DATA"]['ACC_STATUS'] != 0
ret.cruiseState.available = bool(main_on)
ret.cruiseState.nonAdaptive = self.cruise_mode != 0
# Gets rid of Pedal Grinding noise when brake is pressed at slow speeds for some models
if self.CP.carFingerprint in (CAR.PILOT, CAR.PILOT_2019, CAR.RIDGELINE):
if ret.brake > 0.05:
ret.brakePressed = True
# TODO: discover the CAN msg that has the imperial unit bit for all other cars
    self.is_metric = not cp.vl["HUD_SETTING"]['IMPERIAL_UNIT'] if self.CP.carFingerprint == CAR.CIVIC else False
if self.CP.carFingerprint in HONDA_BOSCH:
ret.stockAeb = bool(cp_cam.vl["ACC_CONTROL"]["AEB_STATUS"] and cp_cam.vl["ACC_CONTROL"]["ACCEL_COMMAND"] < -1e-5)
else:
ret.stockAeb = bool(cp_cam.vl["BRAKE_COMMAND"]["AEB_REQ_1"] and cp_cam.vl["BRAKE_COMMAND"]["COMPUTER_BRAKE"] > 1e-5)
if self.CP.carFingerprint in HONDA_BOSCH:
self.stock_hud = False
ret.stockFcw = False
else:
ret.stockFcw = cp_cam.vl["BRAKE_COMMAND"]["FCW"] != 0
self.stock_hud = cp_cam.vl["ACC_HUD"]
self.stock_brake = cp_cam.vl["BRAKE_COMMAND"]
return ret
@staticmethod
def get_can_parser(CP):
signals, checks = get_can_signals(CP)
bus_pt = 1 if CP.isPandaBlack and CP.carFingerprint in HONDA_BOSCH else 0
return CANParser(DBC[CP.carFingerprint]['pt'], signals, checks, bus_pt)
@staticmethod
def get_can_parser_init(CP):
signals, checks = get_can_signals(CP)
bus_pt = 1 if CP.isPandaBlack and CP.carFingerprint in HONDA_BOSCH else 0
return CANParser(DBC[CP.carFingerprint]['pt'], signals, checks, bus_pt)
@staticmethod
def get_cam_can_parser(CP):
signals = []
if CP.carFingerprint in HONDA_BOSCH:
signals += [("ACCEL_COMMAND", "ACC_CONTROL", 0),
("AEB_STATUS", "ACC_CONTROL", 0)]
else:
signals += [("COMPUTER_BRAKE", "BRAKE_COMMAND", 0),
("AEB_REQ_1", "BRAKE_COMMAND", 0),
("FCW", "BRAKE_COMMAND", 0),
("CHIME", "BRAKE_COMMAND", 0),
("FCM_OFF", "ACC_HUD", 0),
("FCM_OFF_2", "ACC_HUD", 0),
("FCM_PROBLEM", "ACC_HUD", 0),
("ICONS", "ACC_HUD", 0)]
# all hondas except CRV, RDX and 2019 Odyssey@China use 0xe4 for steering
checks = [(0xe4, 100)]
if CP.carFingerprint in [CAR.CRV, CAR.CRV_EU, CAR.ACURA_RDX, CAR.ODYSSEY_CHN]:
checks = [(0x194, 100)]
bus_cam = 1 if CP.carFingerprint in HONDA_BOSCH and not CP.isPandaBlack else 2
return CANParser(DBC[CP.carFingerprint]['pt'], signals, checks, bus_cam)
| 46.240541
| 153
| 0.651178
|
4a1751b331d21c6593c27dcff9d7bb2357bc7f34
| 19,379
|
py
|
Python
|
python/paddle/hapi/callbacks.py
|
Ray2020BD/Paddle
|
994087188816575d456c2f9c2a6c90aad83b4e71
|
[
"Apache-2.0"
] | 1
|
2020-10-29T13:54:19.000Z
|
2020-10-29T13:54:19.000Z
|
python/paddle/hapi/callbacks.py
|
Ray2020BD/Paddle
|
994087188816575d456c2f9c2a6c90aad83b4e71
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/hapi/callbacks.py
|
Ray2020BD/Paddle
|
994087188816575d456c2f9c2a6c90aad83b4e71
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import numbers
from paddle.fluid.dygraph.parallel import ParallelEnv
from paddle.utils import try_import
from .progressbar import ProgressBar
__all__ = ['Callback', 'ProgBarLogger', 'ModelCheckpoint', 'VisualDL']
def config_callbacks(callbacks=None,
model=None,
batch_size=None,
epochs=None,
steps=None,
log_freq=2,
verbose=2,
save_freq=1,
save_dir=None,
metrics=None,
mode='train'):
cbks = callbacks or []
cbks = cbks if isinstance(cbks, (list, tuple)) else [cbks]
if not any(isinstance(k, ProgBarLogger) for k in cbks) and verbose:
cbks = [ProgBarLogger(log_freq, verbose=verbose)] + cbks
if not any(isinstance(k, ModelCheckpoint) for k in cbks):
cbks = cbks + [ModelCheckpoint(save_freq, save_dir)]
cbk_list = CallbackList(cbks)
cbk_list.set_model(model)
    metrics = (metrics or []) if mode != 'test' else []
params = {
'batch_size': batch_size,
'epochs': epochs,
'steps': steps,
'verbose': verbose,
'metrics': metrics,
}
cbk_list.set_params(params)
return cbk_list
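# Illustrative use (hypothetical Model instance; mirrors how paddle.Model
# wires this up internally):
#   cbk_list = config_callbacks(model=model, epochs=2, steps=100, verbose=1)
#   cbk_list.on_begin('train')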
class CallbackList(object):
def __init__(self, callbacks=None):
        # copy so later appends don't mutate the caller's list
self.callbacks = [c for c in callbacks]
self.params = {}
self.model = None
def append(self, callback):
self.callbacks.append(callback)
def __iter__(self):
return iter(self.callbacks)
def set_params(self, params):
for c in self.callbacks:
c.set_params(params)
def set_model(self, model):
for c in self.callbacks:
c.set_model(model)
def _call(self, name, *args):
for c in self.callbacks:
func = getattr(c, name)
func(*args)
def _check_mode(self, mode):
assert mode in ['train', 'eval', 'test'], \
'mode should be train, eval or test'
def on_begin(self, mode, logs=None):
self._check_mode(mode)
name = 'on_{}_begin'.format(mode)
self._call(name, logs)
def on_end(self, mode, logs=None):
self._check_mode(mode)
name = 'on_{}_end'.format(mode)
self._call(name, logs)
def on_epoch_begin(self, epoch=None, logs=None):
self._call('on_epoch_begin', epoch, logs)
def on_epoch_end(self, epoch=None, logs=None):
self._call('on_epoch_end', epoch, logs)
def on_batch_begin(self, mode, step=None, logs=None):
self._check_mode(mode)
name = 'on_{}_batch_begin'.format(mode)
self._call(name, step, logs)
def on_batch_end(self, mode, step=None, logs=None):
self._check_mode(mode)
name = 'on_{}_batch_end'.format(mode)
self._call(name, step, logs)
class Callback(object):
"""
Base class used to build new callbacks.
Examples:
.. code-block:: python
import paddle
# build a simple model checkpoint callback
class ModelCheckpoint(paddle.callbacks.Callback):
def __init__(self, save_freq=1, save_dir=None):
self.save_freq = save_freq
self.save_dir = save_dir
def on_epoch_end(self, epoch, logs=None):
if self.model is not None and epoch % self.save_freq == 0:
path = '{}/{}'.format(self.save_dir, epoch)
print('save checkpoint at {}'.format(path))
self.model.save(path)
"""
def __init__(self):
self.model = None
self.params = {}
def set_params(self, params):
"""
Set parameters, which is dict. The keys contain:
- 'batch_size': an integer. Number of samples per batch.
- 'epochs': an integer. Number of epochs.
- 'steps': an integer. Number of steps of one epoch.
- 'verbose': an integer. Verbose mode is 0, 1 or 2.
0 = silent, 1 = progress bar, 2 = one line per epoch.
- 'metrics': a list of str. Names of metrics, including 'loss'
and the names of paddle.metric.Metric.
"""
self.params = params
def set_model(self, model):
"""model is instance of paddle.Model.
"""
self.model = model
def on_train_begin(self, logs=None):
"""Called at the start of training.
Args:
logs (dict): The logs is a dict or None.
"""
def on_train_end(self, logs=None):
"""Called at the end of training.
Args:
logs (dict): The logs is a dict or None. The keys of logs
passed by paddle.Model contains 'loss', metric names and
`batch_size`.
"""
def on_eval_begin(self, logs=None):
"""Called at the start of evaluation.
Args:
logs (dict): The logs is a dict or None. The keys of logs
passed by paddle.Model contains 'steps' and 'metrics',
The `steps` is number of total steps of validation dataset.
The `metrics` is a list of str including 'loss' and the names
of paddle.metric.Metric.
"""
def on_eval_end(self, logs=None):
"""Called at the end of evaluation.
Args:
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is a dict contains 'loss', metrics and 'batch_size'
of last batch of validation dataset.
"""
def on_test_begin(self, logs=None):
"""Called at the beginning of predict.
Args:
logs (dict): The logs is a dict or None.
"""
def on_test_end(self, logs=None):
"""Called at the end of predict.
Args:
logs (dict): The logs is a dict or None.
"""
def on_epoch_begin(self, epoch, logs=None):
"""Called at the beginning of each epoch.
Args:
epoch (int): The index of epoch.
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is None.
"""
def on_epoch_end(self, epoch, logs=None):
"""Called at the end of each epoch.
Args:
epoch (int): The index of epoch.
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is a dict, contains 'loss', metrics and 'batch_size'
of last batch.
"""
def on_train_batch_begin(self, step, logs=None):
"""Called at the beginning of each batch in training.
Args:
step (int): The index of step (or iteration).
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is empty.
"""
def on_train_batch_end(self, step, logs=None):
"""Called at the end of each batch in training.
Args:
step (int): The index of step (or iteration).
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is a dict, contains 'loss', metrics and 'batch_size'
of current batch.
"""
def on_eval_batch_begin(self, step, logs=None):
"""Called at the beginning of each batch in evaluation.
Args:
step (int): The index of step (or iteration).
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is empty.
"""
def on_eval_batch_end(self, step, logs=None):
"""Called at the end of each batch in evaluation.
Args:
step (int): The index of step (or iteration).
logs (dict): The logs is a dict or None. The `logs` passed by
paddle.Model is a dict, contains 'loss', metrics and 'batch_size'
of current batch.
"""
def on_test_batch_begin(self, step, logs=None):
"""Called at the beginning of each batch in predict.
Args:
step (int): The index of step (or iteration).
logs (dict): The logs is a dict or None.
"""
def on_test_batch_end(self, step, logs=None):
"""Called at the end of each batch in predict.
Args:
step (int): The index of step (or iteration).
logs (dict): The logs is a dict or None.
"""
class ProgBarLogger(Callback):
"""Logger callback function
Args:
        log_freq (int): The frequency, in number of steps, at which logs such
            as `loss` and `metrics` are printed. Default: 1.
verbose (int): The verbosity mode, should be 0, 1, or 2.
0 = silent, 1 = progress bar, 2 = one line per epoch. Default: 2.
Examples:
.. code-block:: python
import paddle
from paddle.static import InputSpec
inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
labels = [InputSpec([None, 1], 'int64', 'label')]
train_dataset = paddle.vision.datasets.MNIST(mode='train')
lenet = paddle.vision.LeNet()
model = paddle.Model(lenet,
inputs, labels)
optim = paddle.optimizer.Adam(0.001, parameters=lenet.parameters())
model.prepare(optimizer=optim,
loss=paddle.nn.CrossEntropyLoss(),
metrics=paddle.metric.Accuracy())
callback = paddle.callbacks.ProgBarLogger(log_freq=10)
model.fit(train_dataset, batch_size=64, callbacks=callback)
"""
def __init__(self, log_freq=1, verbose=2):
self.epochs = None
self.steps = None
self.progbar = None
self.verbose = verbose
self.log_freq = log_freq
def _is_print(self):
return self.verbose and ParallelEnv().local_rank == 0
def on_train_begin(self, logs=None):
self.epochs = self.params['epochs']
assert self.epochs
self.train_metrics = self.params['metrics']
assert self.train_metrics
def on_epoch_begin(self, epoch=None, logs=None):
self.steps = self.params['steps']
self.epoch = epoch
self.train_step = 0
if self.epochs and self._is_print():
print('Epoch %d/%d' % (epoch + 1, self.epochs))
self.train_progbar = ProgressBar(num=self.steps, verbose=self.verbose)
def _updates(self, logs, mode):
values = []
metrics = getattr(self, '%s_metrics' % (mode))
progbar = getattr(self, '%s_progbar' % (mode))
steps = getattr(self, '%s_step' % (mode))
for k in metrics:
if k in logs:
values.append((k, logs[k]))
progbar.update(steps, values)
def on_train_batch_end(self, step, logs=None):
logs = logs or {}
self.train_step += 1
if self._is_print() and self.train_step % self.log_freq == 0:
if self.steps is None or self.train_step < self.steps:
self._updates(logs, 'train')
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
if self._is_print() and (self.steps is not None):
self._updates(logs, 'train')
def on_eval_begin(self, logs=None):
self.eval_steps = logs.get('steps', None)
self.eval_metrics = logs.get('metrics', [])
self.eval_step = 0
self.evaled_samples = 0
self.eval_progbar = ProgressBar(
num=self.eval_steps, verbose=self.verbose)
if self._is_print():
print('Eval begin...')
def on_eval_batch_end(self, step, logs=None):
logs = logs or {}
self.eval_step += 1
samples = logs.get('batch_size', 1)
self.evaled_samples += samples
if self._is_print() and self.eval_step % self.log_freq == 0:
if self.eval_steps is None or self.eval_step < self.eval_steps:
self._updates(logs, 'eval')
def on_test_begin(self, logs=None):
self.test_steps = logs.get('steps', None)
self.test_metrics = logs.get('metrics', [])
self.test_step = 0
self.tested_samples = 0
self.test_progbar = ProgressBar(
num=self.test_steps, verbose=self.verbose)
if self._is_print():
print('Predict begin...')
def on_test_batch_end(self, step, logs=None):
logs = logs or {}
self.test_step += 1
samples = logs.get('batch_size', 1)
self.tested_samples += samples
if self.test_step % self.log_freq == 0 and self._is_print():
if self.test_steps is None or self.test_step < self.test_steps:
self._updates(logs, 'test')
def on_eval_end(self, logs=None):
logs = logs or {}
if self._is_print() and (self.eval_steps is not None):
self._updates(logs, 'eval')
print('Eval samples: %d' % (self.evaled_samples))
def on_test_end(self, logs=None):
logs = logs or {}
if self._is_print():
if self.test_step % self.log_freq != 0 or self.verbose == 1:
self._updates(logs, 'test')
print('Predict samples: %d' % (self.tested_samples))
class ModelCheckpoint(Callback):
"""Model checkpoint callback function
Args:
        save_freq(int): The frequency, in number of epochs, at which model
            checkpoints are saved. Default: 1.
save_dir(str|None): The directory to save checkpoint during training.
If None, will not save checkpoint. Default: None.
Examples:
.. code-block:: python
import paddle
from paddle.static import InputSpec
inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
labels = [InputSpec([None, 1], 'int64', 'label')]
train_dataset = paddle.vision.datasets.MNIST(mode='train')
lenet = paddle.vision.LeNet()
model = paddle.Model(lenet,
inputs, labels)
optim = paddle.optimizer.Adam(0.001, parameters=lenet.parameters())
model.prepare(optimizer=optim,
loss=paddle.nn.CrossEntropyLoss(),
metrics=paddle.metric.Accuracy())
callback = paddle.callbacks.ModelCheckpoint(save_dir='./temp')
model.fit(train_dataset, batch_size=64, callbacks=callback)
"""
def __init__(self, save_freq=1, save_dir=None):
self.save_freq = save_freq
self.save_dir = save_dir
def on_epoch_begin(self, epoch=None, logs=None):
self.epoch = epoch
def _is_save(self):
return self.model and self.save_dir and ParallelEnv().local_rank == 0
def on_epoch_end(self, epoch, logs=None):
if self._is_save() and self.epoch % self.save_freq == 0:
path = '{}/{}'.format(self.save_dir, epoch)
print('save checkpoint at {}'.format(os.path.abspath(path)))
self.model.save(path)
def on_train_end(self, logs=None):
if self._is_save():
path = '{}/final'.format(self.save_dir)
print('save checkpoint at {}'.format(os.path.abspath(path)))
self.model.save(path)
class VisualDL(Callback):
"""VisualDL callback function
Args:
log_dir (str): The directory to save visualdl log file.
Examples:
.. code-block:: python
import paddle
from paddle.static import InputSpec
inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
labels = [InputSpec([None, 1], 'int64', 'label')]
train_dataset = paddle.vision.datasets.MNIST(mode='train')
eval_dataset = paddle.vision.datasets.MNIST(mode='test')
net = paddle.vision.LeNet()
model = paddle.Model(net, inputs, labels)
optim = paddle.optimizer.Adam(0.001, parameters=net.parameters())
model.prepare(optimizer=optim,
loss=paddle.nn.CrossEntropyLoss(),
metrics=paddle.metric.Accuracy())
## uncomment following lines to fit model with visualdl callback function
# callback = paddle.callbacks.VisualDL(log_dir='visualdl_log_dir')
# model.fit(train_dataset, eval_dataset, batch_size=64, callbacks=callback)
"""
def __init__(self, log_dir):
self.log_dir = log_dir
self.epochs = None
self.steps = None
self.epoch = 0
def _is_write(self):
return ParallelEnv().local_rank == 0
def on_train_begin(self, logs=None):
self.epochs = self.params['epochs']
assert self.epochs
self.train_metrics = self.params['metrics']
assert self.train_metrics
self._is_fit = True
self.train_step = 0
def on_epoch_begin(self, epoch=None, logs=None):
self.steps = self.params['steps']
self.epoch = epoch
def _updates(self, logs, mode):
if not self._is_write():
return
if not hasattr(self, 'writer'):
visualdl = try_import('visualdl')
self.writer = visualdl.LogWriter(self.log_dir)
metrics = getattr(self, '%s_metrics' % (mode))
current_step = getattr(self, '%s_step' % (mode))
if mode == 'train':
total_step = current_step
else:
total_step = self.epoch
for k in metrics:
if k in logs:
temp_tag = mode + '/' + k
if isinstance(logs[k], (list, tuple)):
temp_value = logs[k][0]
elif isinstance(logs[k], numbers.Number):
temp_value = logs[k]
else:
continue
self.writer.add_scalar(
tag=temp_tag, step=total_step, value=temp_value)
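        # Tag layout example (illustrative): a train-mode 'loss' value is
        # written as scalar tag 'train/loss' at step self.train_step.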
def on_train_batch_end(self, step, logs=None):
logs = logs or {}
self.train_step += 1
if self._is_write():
self._updates(logs, 'train')
def on_eval_begin(self, logs=None):
self.eval_steps = logs.get('steps', None)
self.eval_metrics = logs.get('metrics', [])
self.eval_step = 0
self.evaled_samples = 0
def on_train_end(self, logs=None):
if hasattr(self, 'writer'):
self.writer.close()
delattr(self, 'writer')
def on_eval_end(self, logs=None):
if self._is_write():
self._updates(logs, 'eval')
if (not hasattr(self, '_is_fit')) and hasattr(self, 'writer'):
self.writer.close()
delattr(self, 'writer')
| 33.183219
| 87
| 0.57392
|
4a1751fc9b4e896fffc3941b69e2e577800f2a6e
| 59
|
py
|
Python
|
messi/__main__.py
|
eerimoq/messi
|
3c0e0b227f7dec4970b0126cdb1a9005817f75fb
|
[
"MIT"
] | 7
|
2020-06-08T13:31:53.000Z
|
2021-11-23T11:51:45.000Z
|
messi/__main__.py
|
eerimoq/messager
|
396bc0112c43615ff5785616d2d710f38f0bb446
|
[
"MIT"
] | 17
|
2020-05-24T06:01:37.000Z
|
2020-10-19T05:12:05.000Z
|
messi/__main__.py
|
eerimoq/messager
|
396bc0112c43615ff5785616d2d710f38f0bb446
|
[
"MIT"
] | 1
|
2022-03-22T05:19:34.000Z
|
2022-03-22T05:19:34.000Z
|
# Execute as "python -m messi"
from . import main
main()
| 9.833333
| 30
| 0.661017
|
4a1752693db55974b98c578ca99cefe1acdf59da
| 4,634
|
py
|
Python
|
particle_sampler/factory.py
|
yesitsreallyme/Robotics
|
2a9232cf23933322d1352619810508a0a5e6733d
|
[
"MIT"
] | null | null | null |
particle_sampler/factory.py
|
yesitsreallyme/Robotics
|
2a9232cf23933322d1352619810508a0a5e6733d
|
[
"MIT"
] | null | null | null |
particle_sampler/factory.py
|
yesitsreallyme/Robotics
|
2a9232cf23933322d1352619810508a0a5e6733d
|
[
"MIT"
] | null | null | null |
"""
Module with factory methods for different objects (either real or simulation)
"""
class FactoryCreate:
"""Class to create objects which are related to the physical iRobot Create2 robot.
"""
def __init__(self):
"""Constructor.
"""
from robot import Create2Driver
self._create = Create2Driver("/dev/ttyS2", 87)
self._clientID = None
def close(self):
"""Clean-up
"""
self._create.drive_direct(0, 0)
self._create.digits_leds_ascii(bytes(" ", encoding='ascii'))
self._create.stop()
def create_create(self):
"""Instantiates a new create robot (only a single one is supported!)
Returns:
(robot.Create2Driver) instance of robot.Create2Driver
"""
return self._create
def create_time_helper(self):
"""Instantiates a new time object.
Returns:
(time) instance of time
"""
import time
return time
def create_sonar(self):
"""Instantiates a new sonar (only a single one is supported!)
Returns:
(robot.Sonar) instance of robot.Sonar
"""
from robot import Sonar
return Sonar(104)
def create_servo(self):
"""Instantiates a new servo (only a single one is supported!)
Returns:
(robot.Servo) instance of robot.Servo
"""
from robot import Servo
return Servo(0)
def create_virtual_create(self, hostname):
"""Instantiates a new virtual create for visualization (only a single one is supported!)
Returns:
(visualization.VirtualCreate) instance of visualization.VirtualCreate
"""
from vrep import vrep as vrep
vrep.simxFinish(-1) # just in case, close all opened connections
self._clientID = vrep.simxStart(hostname, 19997, True, True, 5000, 5) # Connect to V-REP
from visualization import VirtualCreate
return VirtualCreate(self._clientID)
class FactorySimulation:
"""Class to create objects which are simulated.
"""
def __init__(self):
"""Constructor.
"""
from vrep import vrep as vrep
vrep.simxFinish(-1) # just in case, close all opened connections
self._clientID = vrep.simxStart('127.0.0.1', 19997, True, True, 5000, 5) # Connect to V-REP
# enable the synchronous mode on the client:
vrep.simxSynchronous(self._clientID, True)
# start the simulation:
vrep.simxStartSimulation(self._clientID, vrep.simx_opmode_oneshot_wait)
def close(self):
"""Clean-up
"""
from vrep import vrep as vrep
# stop the simulation:
vrep.simxStopSimulation(self._clientID, vrep.simx_opmode_oneshot_wait)
# close the connection to V-REP:
vrep.simxFinish(self._clientID)
def create_create(self):
"""Instantiates a new create robot (only a single one is supported!)
Returns:
(simulation.Create2Vrep) instance of simulation.Create2Vrep
"""
from simulation import Create2Vrep
return Create2Vrep(self._clientID)
def create_time_helper(self):
"""Instantiates a new time object.
Returns:
(simulation.TimeHelper) instance of simulation.TimeHelper
"""
from simulation import TimeHelper
return TimeHelper(self._clientID)
def create_sonar(self):
"""Instantiates a new sonar (only a single one is supported!)
Returns:
(simulation.Sonar) instance of simulation.Sonar
"""
from simulation import Sonar
return Sonar(self._clientID)
def create_servo(self):
"""Instantiates a new servo (only a single one is supported!)
Returns:
(simulation.Servo) instance of simulation.Servo
"""
from simulation import Servo
return Servo(self._clientID)
def create_virtual_create(self):
"""Instantiates a new virtual create for visualization (only a single one is supported!)
Returns:
(visualization.VirtualCreate) instance of visualization.VirtualCreate
"""
from visualization import VirtualCreate
return VirtualCreate(self._clientID)
def create_kuka_lbr4p(self):
"""Instantiates a new robotic arm (only a single one is supported!)
Returns:
(simulation.KukaLBR4PlusVrep) instance of simulation.KukaLBR4PlusVrep
"""
from simulation import KukaLBR4PlusVrep
return KukaLBR4PlusVrep(self._clientID)
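# Illustrative use (hypothetical): the two factories expose the same interface,
# so robot logic can stay unchanged between hardware and simulation:
#   factory = FactorySimulation()  # or FactoryCreate() on real hardware
#   create = factory.create_create()
#   time_helper = factory.create_time_helper()
#   factory.close()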
| 30.688742
| 100
| 0.632067
|
4a17557a447b6a424e9c591e4508f20003dc956a
| 10,383
|
py
|
Python
|
app/participant/views.py
|
vicoociv/bread-and-roses
|
bf53988d670b2a1e19883b394e249be0a1fbe934
|
[
"MIT"
] | null | null | null |
app/participant/views.py
|
vicoociv/bread-and-roses
|
bf53988d670b2a1e19883b394e249be0a1fbe934
|
[
"MIT"
] | null | null | null |
app/participant/views.py
|
vicoociv/bread-and-roses
|
bf53988d670b2a1e19883b394e249be0a1fbe934
|
[
"MIT"
] | 1
|
2020-08-04T02:33:08.000Z
|
2020-08-04T02:33:08.000Z
|
import datetime
from flask import abort, flash, redirect, render_template, url_for, request
from flask_login import current_user, login_required
from .forms import NewDonorForm, TodoToAsking, AskingToPledged, PledgedToCompleted
from ..decorators import admin_required
from . import participant
from .. import db
from ..models import Donor, Demographic, DonorStatus, Candidate, User
@participant.route('/<int:part_id>/')
@participant.route('/', defaults={'part_id': None})
@login_required
def index(part_id):
    """Participant dashboard page."""
    user = current_user
    if part_id is not None:
        if not current_user.is_admin():
            return abort(403)
        user = User.query.filter_by(id=part_id).first()
donors_by_status = {
status.name: Donor.query.filter_by(
user_id=user.id, status=status).all()
for status in DonorStatus
}
def datestring(s):
return s.strftime('%b %d')
def datestring_alt(s):
return s.strftime('%b %d, %Y')
forms_by_donor = {}
for d in Donor.query.filter_by(user_id=user.id).all():
f = None
if d.status == DonorStatus.TODO:
f = TodoToAsking(donor=d.id)
elif d.status == DonorStatus.ASKING:
f = AskingToPledged(donor=d.id)
elif d.status == DonorStatus.PLEDGED:
f = PledgedToCompleted(donor=d.id)
else:
f = PledgedToCompleted(donor=d.id, amount_received=d.amount_received, date_received=d.date_received)
forms_by_donor[d.id] = f
return render_template('participant/index.html',
user=user,
donors_by_status=donors_by_status,
Status=DonorStatus,
datestring=datestring,
datestring_alt=datestring_alt,
part_id=part_id,
forms_by_donor=forms_by_donor,
current_user=current_user)
@participant.route('/profile')
@login_required
def profile():
"""Participant Profile page."""
asking_donors = Donor.query.filter_by(
user_id=current_user.id, status=1).all()
pledged_donors = Donor.query.filter_by(
user_id=current_user.id, status=2).all()
completed_donors = Donor.query.filter_by(
user_id=current_user.id, status=3).all()
todo_donors = Donor.query.filter_by(
user_id=current_user.id, status=0).all()
num_donors = len(completed_donors)
num_asks = len(asking_donors) + len(pledged_donors) + len(completed_donors)
ind_pledged = 0
is_candidate = False
term_participants = []
total_pledged = 0
total_raised = 0
    total_num_donors = 0
if current_user.candidate is not None and current_user.candidate.term_id is not None:
cohort_stats = Candidate.cohort_stats(current_user.candidate.term_id)
participant_stats = current_user.candidate.participant_stats()
amt_donated = current_user.candidate.amount_donated
else:
cohort_stats = {}
cohort_stats["amount_donated"] = "N/A (no cohort assigned)"
cohort_stats["total_donations"] = "N/A (no cohort assigned)"
cohort_stats["total_pledges"] = "N/A (no cohort assigned)"
cohort_stats["donor_count"] = "N/A (no cohort assigned)"
        participant_stats = {}
        # NOTE: no trailing commas here - they would turn these values into tuples
        participant_stats["asking_count"] = "N/A (no participant linked)"
        participant_stats["todo_count"] = "N/A (no participant linked)"
        participant_stats["pledged_count"] = "N/A (no participant linked)"
        participant_stats["completed_count"] = "N/A (no participant linked)"
        participant_stats["donor_count"] = "N/A (no participant linked)"
        participant_stats["total_donations"] = "N/A (no participant linked)"
        amt_donated = "N/A"
return render_template('participant/profile.html',
user=current_user,
is_candidate=current_user.candidate is not None,
ind_pledged=amt_donated,
num_asks=participant_stats["asking_count"],
total_todo=participant_stats["todo_count"],
total_pledged=participant_stats["pledged_count"],
total_completed=participant_stats["completed_count"],
total_num_donors=participant_stats["donor_count"],
total_raised=participant_stats["total_donations"],
cohort_raised=cohort_stats["amount_donated"],
cohort_donations=cohort_stats["total_donations"],
cohort_pledges=cohort_stats["total_pledges"],
cohort_donors=cohort_stats["donor_count"],
form=None)
@participant.route('/donor/ask/<int:donor_id>', methods=['POST'])
@login_required
def todo_to_asking(donor_id):
d = Donor.query.filter_by(id=donor_id).first()
part_id = None
    if current_user.is_admin() and d.user.id != current_user.id:
part_id = d.user.id
if d.user != current_user and not current_user.is_admin():
return abort(403)
f = TodoToAsking()
if f.validate_on_submit():
d.status = DonorStatus(int(f.status.data))
d.date_asking = f.date_asking.data
d.amount_asking_for = f.amount_asking_for.data
d.how_asking = f.how_asking.data
db.session.add(d)
db.session.commit()
flash('Successfully moved donor %s to %s.' % (d.first_name, d.status.name.lower()), 'success')
else:
flash('Error filling out form. Did you miss a field?', 'error')
return redirect(url_for('participant.index', part_id=part_id))
@participant.route('/donor/pledge/<int:donor_id>', methods=['POST'])
@login_required
def asking_to_pledged(donor_id):
d = Donor.query.filter_by(id=donor_id).first()
part_id = None
    if current_user.is_admin() and d.user.id != current_user.id:
part_id = d.user.id
if d.user != current_user and not current_user.is_admin():
return abort(403)
f = AskingToPledged()
if f.validate_on_submit():
d.status = DonorStatus(int(f.status.data))
d.pledged = f.pledged.data
d.amount_pledged = f.amount_pledged.data
db.session.add(d)
db.session.commit()
flash('Successfully moved donor %s to %s.' % (d.first_name, d.status.name.lower()), 'success')
else:
for e in f.errors:
flash('Error filling out %s field. %s' % (e.replace('_', ' ').title(), f.errors[e][0]), 'error')
return redirect(url_for('participant.index', part_id=part_id))
@participant.route('/donor/complete/<int:donor_id>', methods=['POST'])
@login_required
@admin_required
def pledged_to_completed(donor_id):
d = Donor.query.filter_by(id=donor_id).first()
part_id = None
    if current_user.is_admin() and d.user.id != current_user.id:
part_id = d.user.id
f = PledgedToCompleted()
if f.validate_on_submit():
d.status = DonorStatus(int(f.status.data))
d.amount_received = f.amount_received.data
d.date_received = f.date_received.data
db.session.add(d)
db.session.commit()
flash('Successfully moved donor %s to %s.' % (d.first_name, d.status.name.lower()), 'success')
else:
for e in f.errors:
flash('Error filling out %s field. %s' % (e.replace('_', ' ').title(), f.errors[e][0]), 'error')
return redirect(url_for('participant.index', part_id=part_id))
@participant.route('/<int:part_id>/donor/<int:donor_id>/_delete')
@participant.route('/donor/<int:donor_id>/_delete', defaults={'part_id': None})
@login_required
def delete_donor(part_id, donor_id):
"""Delete a participant."""
d = Donor.query.filter_by(id=donor_id).first()
    if d.user != current_user and not (
        current_user.is_admin() and d.user.id == part_id
    ):
return abort(403)
db.session.delete(d)
db.session.commit()
flash('Successfully deleted donor %s.' % d.first_name, 'success')
return redirect(url_for('participant.index', part_id=part_id))
@participant.route('/donor/<int:donor_id>/edit')
@login_required
def edit_donor(donor_id):
"""Edits a donor."""
d = Donor.query.filter_by(id=donor_id).first()
return redirect(url_for('participant.index'))
@participant.route('/new-donor', defaults={'part_id': None}, methods=['GET', 'POST'])
@participant.route('/<int:part_id>/new-donor', methods=['GET', 'POST'])
@login_required
def new_donor(part_id):
    """Create a new donor."""
    user = current_user
    if part_id is not None:
        if not current_user.is_admin():
            return abort(403)
        user = User.query.filter_by(id=part_id).first()
form = NewDonorForm()
if form.validate_on_submit():
demographic = Demographic(
race=form.demographic.race.data,
gender=form.demographic.gender.data,
age=form.demographic.age.data,
sexual_orientation=form.demographic.sexual_orientation.data,
soc_class=form.demographic.soc_class.data
)
donor = Donor(
user_id=user.id,
user=user,
first_name=form.first_name.data,
last_name=form.last_name.data,
contact_date=form.contact_date.data,
street_address=form.street_address.data,
city=form.city.data,
state=form.state.data,
zipcode=form.zipcode.data,
phone_number=form.phone_number.data,
email=form.email.data,
notes=form.notes.data,
interested_in_future_gp=form.interested_in_future_gp.data,
want_to_learn_about_brf_guarantees=form.want_to_learn_about_brf_guarantees.data,
interested_in_volunteering=form.interested_in_volunteering.data,
status=DonorStatus.TODO,
amount_pledged=0,
amount_received=0,
amount_asking_for=0,
demographic=demographic
)
db.session.add(donor)
db.session.commit()
flash('Donor {} successfully created'.format(donor.full_name()),
'form-success')
return render_template('participant/new_donor.html', form=form, part_id=part_id)
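# A minimal sketch of the donor pipeline the transition routes above implement
# (stage names come from the handlers; the exact DonorStatus enum values are
# an assumption):
#
#   TODO --(todo_to_asking)--> ASKING --(asking_to_pledged)--> PLEDGED
#        --(pledged_to_completed, admin only)--> COMPLETED
#
# Each transition validates its stage-specific form, copies the form fields
# onto the Donor row, commits, and redirects back to the participant index.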
| 37.348921
| 112
| 0.63421
|
4a1756ad178263aaac78d0496ba0c16a0aaec8ae
| 383
|
py
|
Python
|
core/commands/owner/test.py
|
stefano-mecocci/nebula8
|
668a5b9dc3a2a022a346fee391fccf6816072dea
|
[
"Apache-2.0"
] | null | null | null |
core/commands/owner/test.py
|
stefano-mecocci/nebula8
|
668a5b9dc3a2a022a346fee391fccf6816072dea
|
[
"Apache-2.0"
] | null | null | null |
core/commands/owner/test.py
|
stefano-mecocci/nebula8
|
668a5b9dc3a2a022a346fee391fccf6816072dea
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright SquirrelNetwork
import datetime
from core import decorators
from core.utilities.functions import chat_object
from core.database.repository.group import GroupRepository
@decorators.owner.init
def init(update, context):
chat = chat_object(update)
row = GroupRepository().getUpdatesByChat(chat.id)
print(row)
| 25.533333
| 58
| 0.75718
|
4a175841d94d1767a154bd2f14da1f3ea8ff23e4
| 1,156
|
py
|
Python
|
tests/unit/network_services/collector/test_publisher.py
|
mythwm/yardstick
|
ea13581f450c9c44f6f73d383e6a192697a95cc1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/network_services/collector/test_publisher.py
|
mythwm/yardstick
|
ea13581f450c9c44f6f73d383e6a192697a95cc1
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/network_services/collector/test_publisher.py
|
mythwm/yardstick
|
ea13581f450c9c44f6f73d383e6a192697a95cc1
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2016-2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Unittest for yardstick.network_services.collector.publisher
from __future__ import absolute_import
import unittest
from yardstick.network_services.collector import publisher
class PublisherTestCase(unittest.TestCase):
def setUp(self):
self.test_publisher = publisher.Publisher()
def test_successful_init(self):
pass
def test_unsuccessful_init(self):
pass
def test_start(self):
self.assertIsNone(self.test_publisher.start())
def test_stop(self):
self.assertIsNone(self.test_publisher.stop())
| 28.9
| 74
| 0.749135
|
4a1758c898c663bc0d1b8dc58a2478c6b51babcb
| 1,088
|
py
|
Python
|
web/service.py
|
w6688j/TripleIE
|
21b069c1a5cef4d5deba0ce6d4b662051d57bb95
|
[
"MIT"
] | null | null | null |
web/service.py
|
w6688j/TripleIE
|
21b069c1a5cef4d5deba0ce6d4b662051d57bb95
|
[
"MIT"
] | 1
|
2019-04-02T06:51:07.000Z
|
2019-04-02T11:14:38.000Z
|
web/service.py
|
w6688j/TripleIE
|
21b069c1a5cef4d5deba0ce6d4b662051d57bb95
|
[
"MIT"
] | 1
|
2019-04-02T02:11:08.000Z
|
2019-04-02T02:11:08.000Z
|
import sys
from flask import Flask, request, jsonify, render_template
sys.path.append('/home/httpd/TripleIE')
from cli_single_question import CliSingle
from web.models.question import Question
app = Flask(__name__)
@app.route('/', methods=["GET"])
def index():
return render_template('index.html')
@app.route('/get_triples', methods=["POST"])
def get_triples():
post = request.json
question = post['q']
triples, norm_questions = CliSingle(question).run()
    # Log the question
Question().save_question(question, norm_questions, triples)
return jsonify(code=200, message='ok', data={'triples': triples})
@app.route('/get_completion', methods=["POST"])
def get_completion():
post = request.json
    # Renamed to avoid shadowing the builtin `str`; no completion logic exists
    # yet, so echo the input back (the response shape here is an assumption).
    sentence = post['s']
    return jsonify(code=200, message='ok', data={'s': sentence})
@app.errorhandler(Exception)
def flask_global_exception_handler(e):
    return jsonify(code=200, message='err', data={'err': 'Request error'})
@app.route('/get_test', methods=["POST"])
def get_test():
return jsonify(code=200, message='ok', data={'triples': 111})
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080, debug=True)
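# A minimal client-side sketch for the /get_triples endpoint above, assuming
# the server runs locally on port 8080 (the `requests` library and the sample
# question are assumptions, not part of this service):
#
#   import requests
#   resp = requests.post('http://127.0.0.1:8080/get_triples',
#                        json={'q': 'sample question'})
#   print(resp.json()['data']['triples'])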
| 22.666667
| 69
| 0.688419
|
4a1759e407a598c84a093acddb71369f4e893809
| 3,144
|
py
|
Python
|
lldb/packages/Python/lldbsuite/test/functionalities/thread/multi_break/TestMultipleBreakpoints.py
|
tkf/opencilk-project
|
48265098754b785d1b06cb08d8e22477a003efcd
|
[
"MIT"
] | 1
|
2019-12-11T17:43:58.000Z
|
2019-12-11T17:43:58.000Z
|
lldb/packages/Python/lldbsuite/test/functionalities/thread/multi_break/TestMultipleBreakpoints.py
|
tkf/opencilk-project
|
48265098754b785d1b06cb08d8e22477a003efcd
|
[
"MIT"
] | 10
|
2018-05-27T23:16:42.000Z
|
2019-09-30T13:28:45.000Z
|
lldb/packages/Python/lldbsuite/test/functionalities/thread/multi_break/TestMultipleBreakpoints.py
|
tkf/opencilk-project
|
48265098754b785d1b06cb08d8e22477a003efcd
|
[
"MIT"
] | 3
|
2019-12-21T06:35:35.000Z
|
2020-06-07T23:18:58.000Z
|
"""
Test number of threads.
"""
from __future__ import print_function
import os
import time
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class MultipleBreakpointTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number for our breakpoint.
self.breakpoint = line_number('main.cpp', '// Set breakpoint here')
@expectedFailureAll(
oslist=["linux"],
bugnumber="llvm.org/pr15824 thread states not properly maintained")
@expectedFailureAll(
oslist=lldbplatformutil.getDarwinOSTriples(),
bugnumber="llvm.org/pr15824 thread states not properly maintained and <rdar://problem/28557237>")
@expectedFailureAll(
oslist=["freebsd"],
bugnumber="llvm.org/pr18190 thread states not properly maintained")
@skipIfWindows # This is flakey on Windows: llvm.org/pr24668, llvm.org/pr38373
@expectedFailureNetBSD
def test(self):
"""Test simultaneous breakpoints in multiple threads."""
self.build(dictionary=self.getBuildFlags())
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# This should create a breakpoint in the main thread.
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", self.breakpoint, num_expected_locations=1)
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
# The breakpoint may be hit in either thread 2 or thread 3.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# Get the target process
target = self.dbg.GetSelectedTarget()
process = target.GetProcess()
# Get the number of threads
num_threads = process.GetNumThreads()
# Make sure we see all three threads
self.assertTrue(
num_threads >= 3,
'Number of expected threads and actual threads do not match.')
# Get the thread objects
thread1 = process.GetThreadAtIndex(0)
thread2 = process.GetThreadAtIndex(1)
thread3 = process.GetThreadAtIndex(2)
        # Make sure all three threads are stopped
self.assertTrue(
thread1.IsStopped(),
"Primary thread didn't stop during breakpoint")
self.assertTrue(
thread2.IsStopped(),
"Secondary thread didn't stop during breakpoint")
self.assertTrue(
thread3.IsStopped(),
"Tertiary thread didn't stop during breakpoint")
# Delete the first breakpoint then continue
self.runCmd("breakpoint delete 1")
# Run to completion
self.runCmd("continue")
# At this point, the inferior process should have exited.
self.assertTrue(
process.GetState() == lldb.eStateExited,
PROCESS_EXITED)
| 33.446809
| 105
| 0.645992
|
4a175b0fc0ae5c4a170a650bb83643ec0710dcbd
| 4,010
|
py
|
Python
|
alipay/aop/api/request/AlipaySecurityProdHaiguanAuthCreateRequest.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/request/AlipaySecurityProdHaiguanAuthCreateRequest.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/request/AlipaySecurityProdHaiguanAuthCreateRequest.py
|
antopen/alipay-sdk-python-all
|
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipaySecurityProdHaiguanAuthCreateModel import AlipaySecurityProdHaiguanAuthCreateModel
class AlipaySecurityProdHaiguanAuthCreateRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipaySecurityProdHaiguanAuthCreateModel):
self._biz_content = value
else:
self._biz_content = AlipaySecurityProdHaiguanAuthCreateModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.security.prod.haiguan.auth.create'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
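# A minimal usage sketch, assuming a populated biz model instance (field
# values are hypothetical; the SDK client that signs and transmits the
# request is omitted):
#
#   model = AlipaySecurityProdHaiguanAuthCreateModel()
#   req = AlipaySecurityProdHaiguanAuthCreateRequest(biz_model=model)
#   req.notify_url = 'https://example.com/notify'  # hypothetical callback URL
#   params = req.get_params()  # plain dict, ready for signing and transport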
| 27.655172
| 148
| 0.64788
|
4a175b67889bbab2bd333c75d9a8acdd4c622e5c
| 4,112
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/subnet.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/subnet.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
azure-mgmt-network/azure/mgmt/network/v2018_07_01/models/subnet.py
|
JonathanGailliez/azure-sdk-for-python
|
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
|
[
"MIT"
] | 1
|
2018-08-28T14:36:47.000Z
|
2018-08-28T14:36:47.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class Subnet(SubResource):
"""Subnet in a virtual network resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:param address_prefix: The address prefix for the subnet.
:type address_prefix: str
:param network_security_group: The reference of the NetworkSecurityGroup
resource.
:type network_security_group:
~azure.mgmt.network.v2018_07_01.models.NetworkSecurityGroup
:param route_table: The reference of the RouteTable resource.
:type route_table: ~azure.mgmt.network.v2018_07_01.models.RouteTable
:param service_endpoints: An array of service endpoints.
:type service_endpoints:
list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPropertiesFormat]
:param service_endpoint_policies: An array of service endpoint policies.
:type service_endpoint_policies:
list[~azure.mgmt.network.v2018_07_01.models.ServiceEndpointPolicy]
:ivar ip_configurations: Gets an array of references to the network
interface IP configurations using subnet.
:vartype ip_configurations:
list[~azure.mgmt.network.v2018_07_01.models.IPConfiguration]
:param resource_navigation_links: Gets an array of references to the
external resources using subnet.
:type resource_navigation_links:
list[~azure.mgmt.network.v2018_07_01.models.ResourceNavigationLink]
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_validation = {
'ip_configurations': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
'route_table': {'key': 'properties.routeTable', 'type': 'RouteTable'},
'service_endpoints': {'key': 'properties.serviceEndpoints', 'type': '[ServiceEndpointPropertiesFormat]'},
'service_endpoint_policies': {'key': 'properties.serviceEndpointPolicies', 'type': '[ServiceEndpointPolicy]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[IPConfiguration]'},
'resource_navigation_links': {'key': 'properties.resourceNavigationLinks', 'type': '[ResourceNavigationLink]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(Subnet, self).__init__(**kwargs)
self.address_prefix = kwargs.get('address_prefix', None)
self.network_security_group = kwargs.get('network_security_group', None)
self.route_table = kwargs.get('route_table', None)
self.service_endpoints = kwargs.get('service_endpoints', None)
self.service_endpoint_policies = kwargs.get('service_endpoint_policies', None)
self.ip_configurations = None
self.resource_navigation_links = kwargs.get('resource_navigation_links', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
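# A minimal construction sketch, assuming direct use of this kwargs-based
# generated model (the address value is hypothetical; instances are normally
# built by the SDK's deserializer):
#
#   subnet = Subnet(name='default', address_prefix='10.0.0.0/24')
#   assert subnet.ip_configurations is None  # read-only, server-populated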
| 48.376471
| 119
| 0.684095
|
4a175bf0581f8833a8c8fe6b7ed32984b09144e6
| 792
|
py
|
Python
|
pyflux/setup.py
|
ThomasHoppe/pyflux
|
297f2afc2095acd97c12e827dd500e8ea5da0c0f
|
[
"BSD-3-Clause"
] | 2,091
|
2016-04-01T02:52:10.000Z
|
2022-03-29T11:38:15.000Z
|
pyflux/setup.py
|
EricSchles/pyflux
|
297f2afc2095acd97c12e827dd500e8ea5da0c0f
|
[
"BSD-3-Clause"
] | 160
|
2016-04-26T14:52:18.000Z
|
2022-03-15T02:09:07.000Z
|
pyflux/setup.py
|
EricSchles/pyflux
|
297f2afc2095acd97c12e827dd500e8ea5da0c0f
|
[
"BSD-3-Clause"
] | 264
|
2016-05-02T14:03:31.000Z
|
2022-03-29T07:48:20.000Z
|
import os
PACKAGE_NAME = 'pyflux'
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration(PACKAGE_NAME, parent_package, top_path)
config.add_subpackage('__check_build')
config.add_subpackage('arma')
config.add_subpackage('ensembles')
config.add_subpackage('families')
config.add_subpackage('garch')
config.add_subpackage('gas')
config.add_subpackage('gpnarx')
config.add_subpackage('inference')
config.add_subpackage('output')
config.add_subpackage('ssm')
config.add_subpackage('tests')
config.add_subpackage('var')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| 27.310345
| 66
| 0.727273
|
4a175c3d3d23909eb34196af98331d77a8790755
| 205
|
py
|
Python
|
pyuniqid/__init__.py
|
boriskurikhin/pyuniqid
|
d61477708adeeee34882a2f5ef359c0194405675
|
[
"MIT"
] | 1
|
2020-06-26T19:37:37.000Z
|
2020-06-26T19:37:37.000Z
|
pyuniqid/__init__.py
|
boriskurikhin/pyuniqid
|
d61477708adeeee34882a2f5ef359c0194405675
|
[
"MIT"
] | 1
|
2020-06-29T04:38:02.000Z
|
2020-06-29T05:38:25.000Z
|
pyuniqid/__init__.py
|
boriskurikhin/pyuniqid
|
d61477708adeeee34882a2f5ef359c0194405675
|
[
"MIT"
] | 2
|
2020-06-26T16:09:37.000Z
|
2020-06-29T02:59:10.000Z
|
"""Global pyuniqid module.
Main module for the pyuniqid package.
Typical example usage:
from pyuniqid import uniqid
my_id = uniqid()
"""
from pyuniqid.uniqid import uniqid
__all__ = ["uniqid"]
| 13.666667
| 37
| 0.721951
|
4a175c96c67da22908e96fe6935723954d9cdf8d
| 14,796
|
py
|
Python
|
src/tests/test_application.py
|
yuin/rays
|
62ce174fc46577d93fb6ee595baf8d91d77e89bd
|
[
"MIT"
] | null | null | null |
src/tests/test_application.py
|
yuin/rays
|
62ce174fc46577d93fb6ee595baf8d91d77e89bd
|
[
"MIT"
] | null | null | null |
src/tests/test_application.py
|
yuin/rays
|
62ce174fc46577d93fb6ee595baf8d91d77e89bd
|
[
"MIT"
] | 1
|
2019-04-17T08:20:59.000Z
|
2019-04-17T08:20:59.000Z
|
# vim: fileencoding=utf8
from __future__ import division, print_function
import sys
import itertools
from rays import *
from rays.compat import *
from .base import *
import_BytesIO()
import pytest
class TestApplication(Base):
def test_define_tls_property(self):
self.finish_app_config()
self.app.define_tls_property("prop", "test property")
assert self.app.prop == None
assert "prop" in self.app.tls_names
def test_copy_tls_property(self):
self.finish_app_config()
self.app.define_tls_property("prop", "test property")
tls = self.app.copy_tls_property()
assert len(tls) == 3
assert all(v in ["res", "req", "prop"] for v in tls)
tls["prop"] = "value"
self.app.copy_tls_property(tls)
assert len(tls) == 3
assert all(v in ["res", "req", "prop"] for v in tls)
assert self.app.prop == "value"
def test_get_renderer(self):
self.finish_app_config()
assert isinstance(self.app.renderer, Renderer)
def test_set_renderer(self):
self.app.renderer = None
assert self.app._renderer == None
def test_config(self):
check_dct = {}
class TestExtension(Extension):
@classmethod
def app_config(cls, app, dct):
check_dct["v"] = True
self.app.config([
("base", "/"),
("charset", "utf8"),
("debug", True),
("logger",True),
("renderer", {"template_dir": "./t"}),
("TestExtension", {}),
("app_ver", 1.0)
])
self.finish_app_config()
assert self.app.base == "/"
assert self.app.charset == "utf8"
assert self.app.debug == True
assert self.app.logger == True
assert self.app.renderer.template_dir == "./t"
assert self.app.vars.app_ver == 1.0
assert check_dct["v"]
def test_helper(self):
@self.app.helper
def helper_func(helper):
return "value"
self.finish_app_config()
assert self.app.renderer.template_globals["h"].helper_func() == "value"
def test_init_routes(self):
@self.app.get("")
def index():
pass
self.finish_app_config()
assert len(self.app.url_cache) == 0
assert "index" in self.app.actions_map
def test_get(self):
@self.app.get("")
def index():
return "ok"
self.finish_app_config()
assert b"ok" in self.browser.get(self.url("index")).body
assert self.browser.post(self.url("index"), expect_errors = True).status.startswith("405")
def test_post(self):
@self.app.post("")
def index():
return "ok"
self.finish_app_config()
assert b"ok" in self.browser.post(self.url("index")).body
assert self.browser.get(self.url("index"), expect_errors = True).status.startswith("405")
def test_put(self):
@self.app.put("")
def index():
return "ok"
self.finish_app_config()
assert b"ok" in self.browser.put(self.url("index")).body
assert self.browser.get(self.url("index"), expect_errors = True).status.startswith("405")
def test_delete(self):
@self.app.delete("")
def index():
return "ok"
self.finish_app_config()
assert b"ok" in self.browser.delete(self.url("index")).body
assert self.browser.get(self.url("index"), expect_errors = True).status.startswith("405")
def test_head(self):
@self.app.head("")
def index():
return "ok"
self.finish_app_config()
assert self.browser.head(self.url("index")).body.strip() == b""
assert self.browser.get(self.url("index"), expect_errors = True).status.startswith("405")
    def test_apply_filter_to_action(self):
def filter(*a, **k):
yield
@self.app.apply_filter(filter)
@self.app.get("")
def index():
return "ok"
self.finish_app_config()
assert len(self.app.actions_map["index"].filters) == 1
assert b"ok" in self.browser.get(self.url("index")).body
def test_apply_filter_to_function(self):
def filter(*a, **k):
yield
@self.app.get("")
@self.app.apply_filter(filter)
def index():
return "ok"
self.finish_app_config()
assert len(self.app.actions_map["index"].filters) == 1
assert b"ok" in self.browser.get(self.url("index")).body
def test_filter(self):
check_dict = {}
app = self.app
def filter_a(*args):
check_dict["filter_a_pre"] = True
yield
app.res.content = "aaa"
check_dict["filter_a_after"] = True
def filter_b(*args):
check_dict["filter_b_pre"] = True
yield
app.res.content = "bbb"
check_dict["filter_b_after"] = True
def filter_c(*args):
check_dict["filter_c_pre"] = True
yield
app.res.content = "ccc"
check_dict["filter_c_after"] = True
with app.filter(filter_a, [filter_b, {"except":["test_get1"]}]):
@app.get("test")
def test_get():
assert check_dict["filter_a_pre"]
assert "filter_a_after" not in check_dict
return ""
@app.get("test_error")
def test_get_error():
app.res.notfound()
with app.filter(filter_c):
@app.get("test1")
def test_get1():
return ""
@app.get("_test_without")
def _test_without():
return ""
@app.get("test_without")
def test_without():
return _test_without()
self.finish_app_config()
check_dict = {}
assert b"aaa" == self.browser.get(self.url("test_get")).body.strip()
assert check_dict["filter_a_pre"]
assert check_dict["filter_a_after"]
assert check_dict["filter_b_pre"]
assert check_dict["filter_b_after"]
assert "filter_c_pre" not in check_dict
assert "filter_c_after" not in check_dict
check_dict = {}
with pytest.raises(Exception):
self.browser.get(self.url("test_get_error"))
assert check_dict["filter_a_pre"]
assert "filter_a_after" not in check_dict
assert check_dict["filter_b_pre"]
assert "filter_b_after" not in check_dict
assert "filter_c_pre" not in check_dict
assert "filter_c_after" not in check_dict
check_dict = {}
assert b"aaa" == self.browser.get(self.url("test_get1")).body.strip()
assert check_dict["filter_a_pre"]
assert check_dict["filter_a_after"]
assert "filter_b_pre" not in check_dict
assert "filter_b_after" not in check_dict
assert check_dict["filter_c_pre"]
assert check_dict["filter_c_after"]
check_dict = {}
assert b"" == self.browser.get(self.url("test_without")).body.strip()
assert "filter_a_pre" not in check_dict
assert "filter_a_after" not in check_dict
assert "filter_b_pre" not in check_dict
assert "filter_b_after" not in check_dict
def test_filter_order(self):
app = self.app
buffer = []
def filter_a(*a, **k):
buffer.append(1)
yield
buffer.append(6)
def filter_b(*a, **k):
buffer.append(2)
yield
buffer.append(5)
def filter_c(*a, **k):
buffer.append(3)
yield
buffer.append(4)
with app.filter(filter_a, filter_b):
with app.filter(filter_c):
@app.get("test")
def test_get():
return "ok"
self.finish_app_config()
assert b"ok" in self.browser.get(self.url("test_get")).body
assert [1,2,3,4,5,6] == buffer
def test_before_hooks1(self):
app = self.app
check_dict = {}
@app.get("test")
def test_get():
return ""
@app.get("test1")
def test_get1():
            # raise an error
return foo
@app.get("test2")
def test_get2():
# abort
app.res.notfound()
self.finish_app_config()
check_dict = {}
@app.hook("before_call")
def hook1(env, start_response):
check_dict[0] = True
@app.hook("before_call")
def hook2(env, start_response):
check_dict[1] = True
raise Exception()
@app.hook("before_call")
def hook3(env, start_response):
check_dict[2] = True
with pytest.raises(Exception):
self.browser.get(self.url("test_get"))
assert check_dict[0]
assert check_dict[1]
assert 2 not in check_dict
def test_before_hooks2(self):
app = self.app
check_dict = {}
@app.get("test")
def test_get():
return ""
@app.get("test1")
def test_get1():
            # raise an error
return foo
@app.get("test2")
def test_get2():
# abort
app.res.notfound()
@app.hook("before_action")
def hook():
assert hasattr(app.req, "params")
assert hasattr(app.req, "action")
assert "" == app.res.content
check_dict[0] = True
return ""
self.finish_app_config()
self.browser.get(self.url("test_get"))
assert check_dict[0]
def test_after_hooks(self):
app = self.app
check_dict = DefaultAttrDict()
@app.get("test_success")
def test_get():
            check_dict.success = True
return ""
@app.get("test_error")
def test_get1():
check_dict.error = True
return foo
@app.get("test_abort")
def test_get2():
check_dict.abort = True
app.res.notfound()
@app.hook("before_start_response")
def hook():
if app.res.is_success:
check_dict.hook_success = True
elif app.res.is_abort:
check_dict.hook_abort = True
elif app.res.is_error:
check_dict.hook_error = True
self.finish_app_config()
check_dict.clear()
self.browser.get(self.url("test_get"))
        assert check_dict.success
assert check_dict.hook_success
        check_dict.clear()
        # Use pytest.raises (as elsewhere in this file) instead of the old
        # try/assert False/bare-except pattern, whose bare except swallowed
        # the AssertionError and let the test pass even without an exception.
        with pytest.raises(Exception):
            self.browser.get(self.url("test_get1"))
        assert check_dict.error
        assert check_dict.hook_error
        check_dict.clear()
        with pytest.raises(Exception):
            self.browser.get(self.url("test_get2"))
        assert check_dict.abort
        assert check_dict.hook_abort
def test_not_found(self):
app = self.app
@app.error(404)
def _404():
return "--notfound--"
self.finish_app_config()
response = self.browser.get("/unknwon_path", expect_errors=True)
assert response.status.startswith("404")
assert b"--notfound--" == response.body.strip()
def test_redirect(self):
app = self.app
@app.get("get")
def get():
return "ok"
@app.get("redirect")
def redirect():
app.res.redirect(app.url.get())
self.finish_app_config()
response = self.browser.get(self.url("redirect"))
response = response.follow()
assert b"ok" in response.body
def test_url_builder(self):
app = self.app
@app.get("get/(int:\d+)/(str:[^/]+)/(int:\d+)")
def get():
return "ok"
self.finish_app_config()
assert "http://localhost/get/10/%E3%83%91%E3%82%B9/9" == app.url.get(10, u_("パス"), 9)
assert "http://localhost/get/10/str/9?query" == app.url.get(10, "str", 9, _query="query")
assert "https://localhost/get/10/str/9?query" == app.url.get(10, "str", 9, _query="query", _ssl=True)
self.init_app({"wsgi.url_scheme": "https"})
app = self.app
@app.get("get/(int:\d+)/(str:[^/]+)/(int:\d+)")
def get():
return "ok"
self.finish_app_config()
assert "https://localhost/get/10/str/9" == app.url.get(10, "str", 9)
assert "http://localhost/get/10/str/9?query" == app.url.get(10, "str", 9, _query="query", _ssl=False)
def test_handle_exception_with_debugging(self):
app = self.app
@app.get("get1")
def get1():
app.res.status_code = 500
raise Abort("ERROR", 500)
@app.get("get2")
def get2():
app.res.status_code = 500
raise Abort(lambda : "ERROR", 500)
@app.get("get3")
def get3():
app.res.status_code = 500
raise Abort(lambda : return_response(lambda : "ERROR"), 500)
@app.get("get4")
def get4():
assert False
@app.get("get5")
def get5():
foo
self.finish_app_config()
for i in range(1,4):
response = self.browser.get(self.url("get%d"%i), expect_errors=True)
assert response.status.startswith("500")
assert b"ERROR" in response.body
with pytest.raises(AssertionError):
self.browser.get(self.url("get4"))
response = self.browser.get(self.url("get5"), expect_errors=True)
assert response.status.startswith("500")
assert b"NameError: global name" in response.body
def test_handle_exception_with_no_debugging_and_error_handlers(self):
app = self.app
app.debug = False
@app.get("get1")
def get1():
foo
@app.error(500)
def error_500():
return "MY ERROR MESSAGE"
self.finish_app_config()
response = self.browser.get(self.url("get1"), expect_errors=True)
assert response.status.startswith("500")
assert b"MY ERROR MESSAGE" in response.body
def test_handle_exception_with_no_debugging_and_no_error_handlers(self):
app = self.app
app.debug = False
@app.get("get1")
def get1():
foo
self.finish_app_config()
response = self.browser.get(self.url("get1"), expect_errors=True)
assert response.status.startswith("500")
assert b"500 Internal Server Error" in response.body
def test_convert_content(self):
app = self.app
@app.get("get1")
def get1():
return BytesIO(b"")
@app.get("get2")
def get2():
return b"bytes"
@app.get("get3")
def get3():
return u_("ユニコード")
def wrapper(v):
return [b"wrapped"]
self.finish_app_config()
response = self.browser.get(self.url("get1"), extra_environ={"wsgi.file_wrapper":wrapper})
assert b"wrapped" in response.body
response = self.browser.get(self.url("get2"))
assert b"bytes" in response.body
response = self.browser.get(self.url("get3"))
assert u_("ユニコード").encode("utf8") in response.body
def test_javascript_url_builder(self):
app = self.app
@app.get("get1/(int:\d+)")
def get1(id):
pass
@app.get("get2/(int:\d+)/(str:\s+)")
def get2(id, name):
pass
self.finish_app_config()
patterns = itertools.permutations(['"get1": ["/get1/", ""]', '"get2": ["/get2/", "/", ""]', '"_dummy": ["/_dummy"]'])
assert any(["""if(typeof(rays) == 'undefined'){ window.rays={};}(function(){var patterns={%s}, host="localhost";window.rays.url=function(name, args, _options){
var options = _options || {}; var parts = patterns[name]; var path = "";
if(parts.length == 1) { path = parts.join(""); }else{ for(var i = 0, l = args.length; i < l; i++){ path = path + parts[i] + args[i]; } path = path + parts[parts.length-1];}
var protocol = "http"; if(options.ssl || (!options.ssl && location.protocol == "https:")){ protocol = "https"; }
var url = protocol+"://"+host+path; if(options.query) { url = url+"?"+options.query } return url;
};})();"""%(", ".join(v)) == app.generate_javascript_url_builder() for v in patterns])
| 27.501859
| 178
| 0.623209
|
4a175c9d84f948bdf80e128328bd6ab747af8e83
| 6,041
|
py
|
Python
|
chris_backend/servicefiles/tests/test_serializers.py
|
rudolphpienaar/ChRIS_ultron_backEnd
|
5de4e255fb151ac7a6f900327704831da11dcd1f
|
[
"MIT"
] | 26
|
2016-05-26T14:09:35.000Z
|
2022-01-28T19:12:43.000Z
|
chris_backend/servicefiles/tests/test_serializers.py
|
rudolphpienaar/ChRIS_ultron_backEnd
|
5de4e255fb151ac7a6f900327704831da11dcd1f
|
[
"MIT"
] | 168
|
2016-06-24T11:07:15.000Z
|
2022-03-21T12:33:43.000Z
|
chris_backend/servicefiles/tests/test_serializers.py
|
rudolphpienaar/ChRIS_ultron_backEnd
|
5de4e255fb151ac7a6f900327704831da11dcd1f
|
[
"MIT"
] | 45
|
2017-08-16T16:41:40.000Z
|
2022-03-31T18:12:14.000Z
|
import logging
import time
import io
from unittest import mock
from django.test import TestCase, tag
from django.conf import settings
from rest_framework import serializers
from servicefiles.models import Service, ServiceFile
from servicefiles.serializers import ServiceFileSerializer
from servicefiles.serializers import SwiftManager
class ServiceFileSerializerTests(TestCase):
def setUp(self):
# avoid cluttered console output (for instance logging all the http requests)
logging.disable(logging.WARNING)
def tearDown(self):
# re-enable logging
logging.disable(logging.NOTSET)
def test_validate_service_name_failure_registered_service(self):
"""
Test whether overriden validate_name method validates whether submitted
unregistered service name is actually the name of a registered service.
"""
servicefiles_serializer = ServiceFileSerializer()
with self.assertRaises(serializers.ValidationError):
servicefiles_serializer.validate_service_name('PACS')
def test_validate_service_name_success(self):
"""
        Test whether the overridden validate_service_name method successfully
        returns a valid unregistered service name.
"""
Service.objects.get_or_create(identifier='NewService')
servicefiles_serializer = ServiceFileSerializer()
self.assertEqual(servicefiles_serializer.validate_service_name('MyService'),
'MyService')
self.assertEqual(servicefiles_serializer.validate_service_name('NewService'),
'NewService')
def test_validate_updates_validated_data(self):
"""
        Test whether the overridden validate method updates validated data with the descriptors
embedded in the path string.
"""
path = 'SERVICES/MyService/123456-crazy/brain_crazy_study/brain_crazy_mri/file1.dcm'
data = {'service_name': 'MyService', 'path': path}
servicefiles_serializer = ServiceFileSerializer()
with mock.patch.object(SwiftManager, 'obj_exists',
return_value=True) as obj_exists_mock:
new_data = servicefiles_serializer.validate(data)
self.assertIn('service', new_data)
self.assertNotIn('service_name', new_data)
self.assertEqual(new_data.get('path'), path.strip(' ').strip('/'))
obj_exists_mock.assert_called_with(new_data.get('path'))
def test_validate_failure_path_does_not_start_with_SERVICES_PACS(self):
"""
        Test whether the overridden validate method rejects a submitted path
        that does not start with the 'SERVICES/<service_name>/' string.
"""
path = 'SERVICES/Other/123456-crazy/brain_crazy_study/brain_crazy_mri/file1.dcm'
data = {'service_name': 'MyService', 'path': path}
servicefiles_serializer = ServiceFileSerializer()
with self.assertRaises(serializers.ValidationError):
servicefiles_serializer.validate(data)
def test_validate_failure_path_does_not_exist(self):
"""
        Test whether the overridden validate method validates that the
        submitted path exists in internal storage.
"""
path = 'SERVICES/MyService/123456-crazy/brain_crazy_study/brain_crazy_mri/file1.dcm'
data = {'service_name': 'MyService', 'path': path}
servicefiles_serializer = ServiceFileSerializer()
with mock.patch.object(SwiftManager, 'obj_exists',
return_value=False) as obj_exists_mock:
with self.assertRaises(serializers.ValidationError):
servicefiles_serializer.validate(data)
obj_exists_mock.assert_called_with(path.strip(' ').strip('/'))
@tag('integration')
def test_integration_validate_path_failure_does_not_exist(self):
"""
        Test whether the overridden validate method validates that the
        submitted path exists in internal storage.
"""
path = 'SERVICES/MyService/123456-crazy/brain_crazy_study/brain_crazy_mri/file1.dcm'
data = {'service_name': 'MyService', 'path': path}
servicefiles_serializer = ServiceFileSerializer()
with self.assertRaises(serializers.ValidationError):
servicefiles_serializer.validate(data)
@tag('integration')
def test_integration_validate_path_success(self):
"""
        Test whether the overridden validate method accepts a valid submitted path.
"""
path = 'SERVICES/MyService/123456-crazy/brain_crazy_study/brain_crazy_mri/file1.dcm'
data = {'service_name': 'MyService', 'path': path}
servicefiles_serializer = ServiceFileSerializer()
swift_manager = SwiftManager(settings.SWIFT_CONTAINER_NAME,
settings.SWIFT_CONNECTION_PARAMS)
# upload file to Swift storage
with io.StringIO("test file") as file1:
swift_manager.upload_obj(path, file1.read(), content_type='text/plain')
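        # Swift object listings are eventually consistent, so poll up to
        # 20 times (roughly 4 seconds) for the uploaded object to appear.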
for _ in range(20):
if swift_manager.obj_exists(path):
break
time.sleep(0.2)
self.assertEqual(servicefiles_serializer.validate(data).get('path'), path)
# delete file from Swift storage
swift_manager.delete_obj(path)
def test_validate_validates_path_has_not_already_been_registered(self):
"""
        Test whether the overridden validate method validates that the
        submitted path has not already been registered.
"""
path = 'SERVICES/MyService/123456-crazy/brain_crazy_study/brain_crazy_mri/file1.dcm'
data = {'service_name': 'MyService', 'path': path}
servicefiles_serializer = ServiceFileSerializer()
service = Service(identifier='MyService')
service.save()
service_file = ServiceFile(service=service)
service_file.fname.name = path
service_file.save()
with self.assertRaises(serializers.ValidationError):
servicefiles_serializer.validate(data)
| 43.775362
| 92
| 0.68631
|
4a175cdb03794731b266b004d59d65f668ebdc9f
| 443
|
py
|
Python
|
config/api_router.py
|
devnelmar/Pokeindex-application
|
6bfefacabf201713151407d1a87cf2fce4220884
|
[
"MIT"
] | null | null | null |
config/api_router.py
|
devnelmar/Pokeindex-application
|
6bfefacabf201713151407d1a87cf2fce4220884
|
[
"MIT"
] | null | null | null |
config/api_router.py
|
devnelmar/Pokeindex-application
|
6bfefacabf201713151407d1a87cf2fce4220884
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from rest_framework.routers import DefaultRouter, SimpleRouter
from pokeindexapi.users.api.views import UserViewSet
from pokeindexapi.apps.pokedex.api.views import PokemonViewSet
if settings.DEBUG:
router = DefaultRouter()
else:
router = SimpleRouter()
router.register("users", UserViewSet)
router.register("pokemon", PokemonViewSet, basename="pokemon")
app_name = "api"
urlpatterns = router.urls
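# Note: DefaultRouter also exposes a browsable API root view (and format
# suffix routes), which is why it is only used when settings.DEBUG is True.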
| 24.611111
| 62
| 0.79684
|
4a175ddeb569be6778d0ec734f7c6b360b064e4c
| 7,929
|
py
|
Python
|
backend/testfigmafeb28app_d_23627/settings.py
|
crowdbotics-dev/testfigmafeb28app-d-23627
|
e0613f6d8e5907a6416a15461108463a17a75f8b
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/testfigmafeb28app_d_23627/settings.py
|
crowdbotics-dev/testfigmafeb28app-d-23627
|
e0613f6d8e5907a6416a15461108463a17a75f8b
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/testfigmafeb28app_d_23627/settings.py
|
crowdbotics-dev/testfigmafeb28app-d-23627
|
e0613f6d8e5907a6416a15461108463a17a75f8b
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
"""
Django settings for testfigmafeb28app_d_23627 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import io
import environ
import logging
import google.auth
from google.cloud import secretmanager
from google.auth.exceptions import DefaultCredentialsError
from google.api_core.exceptions import PermissionDenied
from modules.manifest import get_modules
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
env_file = os.path.join(BASE_DIR, ".env")
env = environ.Env()
env.read_env(env_file)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
try:
# Pull secrets from Secret Manager
_, project = google.auth.default()
client = secretmanager.SecretManagerServiceClient()
settings_name = os.environ.get("SETTINGS_NAME", "django_settings")
name = client.secret_version_path(project, settings_name, "latest")
payload = client.access_secret_version(name=name).payload.data.decode("UTF-8")
env.read_env(io.StringIO(payload))
except (DefaultCredentialsError, PermissionDenied):
pass
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
]
MODULES_APPS = get_modules()
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS + MODULES_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'testfigmafeb28app_d_23627.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'web_build')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'testfigmafeb28app_d_23627.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'), os.path.join(BASE_DIR, 'web_build/static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
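# A hypothetical .env fragment that would enable the S3 branch above
# (placeholder values, not real credentials):
#
#   AWS_ACCESS_KEY_ID=AKIAEXAMPLE
#   AWS_SECRET_ACCESS_KEY=examplesecret
#   AWS_STORAGE_BUCKET_NAME=my-bucket
#   AWS_STORAGE_REGION=us-east-1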
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
# GCP config
GS_BUCKET_NAME = env.str("GS_BUCKET_NAME", "")
if GS_BUCKET_NAME:
DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
STATICFILES_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
GS_DEFAULT_ACL = "publicRead"
| 30.496154
| 112
| 0.737798
|
4a175e1d994e868880641c77c6f7503cb0c62c88
| 1,333
|
py
|
Python
|
Software/Estadística/MCMC/HS/Cosas_viejas/analsis_cadenas_2params.py
|
matiasleize/tesis_licenciatura
|
5df6e341314583702b466b8ed7977d410f0ee457
|
[
"MIT"
] | null | null | null |
Software/Estadística/MCMC/HS/Cosas_viejas/analsis_cadenas_2params.py
|
matiasleize/tesis_licenciatura
|
5df6e341314583702b466b8ed7977d410f0ee457
|
[
"MIT"
] | null | null | null |
Software/Estadística/MCMC/HS/Cosas_viejas/analsis_cadenas_2params.py
|
matiasleize/tesis_licenciatura
|
5df6e341314583702b466b8ed7977d410f0ee457
|
[
"MIT"
] | null | null | null |
import numpy as np
import emcee
from matplotlib import pyplot as plt
import corner
import sys
import os
import time
from pc_path import definir_path
path_git, path_datos_global = definir_path()
os.chdir(path_git)
sys.path.append('./Software/Funcionales/')
from funciones_analisis_cadenas import graficar_cadenas,graficar_contornos,graficar_taus_vs_n
#%%
os.chdir(path_git+'/Software/Estadística/Resultados_simulaciones/')
with np.load('valores_medios_cronom_2params.npz') as data:
sol = data['sol']
#%%
os.chdir(path_datos_global+'/Resultados_cadenas/')
#filename = 'sample_supernovas_M_b_101.h5'
filename = 'sample_cron_omega_b_1.h5'
reader = emcee.backends.HDFBackend(filename)
# Some values
tau = reader.get_autocorr_time()
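# emcee heuristic: discard ~2x the largest integrated autocorrelation time as
# burn-in and thin by ~half the smallest, so the kept samples are roughly
# independent draws.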
burnin = int(2 * np.max(tau))
thin = int(0.5 * np.min(tau))
samples = reader.get_chain(discard=burnin, flat=True, thin=thin)
print(tau)
#%%
# IPython cell magic: this script is intended to be run cell by cell (note the #%% markers)
%matplotlib qt5
graficar_cadenas(reader,
#labels = ['M_abs','b'])
labels = ['omega_m','b'])
#%%
burnin=300
#burnin = int(2 * np.max(tau))
#thin = int(0.5 * np.min(tau))
graficar_contornos(reader,params_truths=sol,discard=burnin,#thin=thin
#labels= ['M_abs','b'])
labels = ['omega_m','b'])
#%%
plt.figure()
graficar_taus_vs_n(reader,num_param=0)
graficar_taus_vs_n(reader,num_param=1)
| 28.978261
| 93
| 0.72018
|
4a175f1110de4fa3d778d2490f0083bc29077bc0
| 69
|
py
|
Python
|
pylinear/grism/instruments/__init__.py
|
Russell-Ryan/pyLINEAR
|
d68e44bc64d302b816db69d2becc4de3b15059f9
|
[
"MIT"
] | 2
|
2019-08-07T19:57:04.000Z
|
2021-01-21T22:54:13.000Z
|
pylinear/grism/instruments/__init__.py
|
Russell-Ryan/pyLINEAR
|
d68e44bc64d302b816db69d2becc4de3b15059f9
|
[
"MIT"
] | 1
|
2019-10-02T03:18:26.000Z
|
2019-10-02T03:18:26.000Z
|
pylinear/grism/instruments/__init__.py
|
Russell-Ryan/pyLINEAR
|
d68e44bc64d302b816db69d2becc4de3b15059f9
|
[
"MIT"
] | 5
|
2019-09-03T17:01:10.000Z
|
2020-08-05T17:49:42.000Z
|
from .config import Config
from .load_detector import load_detector
| 17.25
| 40
| 0.84058
|
4a175f2418bce11f806bce5cee352edd765aea0c
| 3,558
|
py
|
Python
|
PSO.py
|
Sheeran-Tsingtao/Optimization-Algorithm
|
ef1ed3d41c7bc130d798673dbc3e67a5e3f99686
|
[
"MIT"
] | 1
|
2021-06-15T03:16:00.000Z
|
2021-06-15T03:16:00.000Z
|
PSO.py
|
Sheeran-Tsingtao/Optimization-Algorithm
|
ef1ed3d41c7bc130d798673dbc3e67a5e3f99686
|
[
"MIT"
] | null | null | null |
PSO.py
|
Sheeran-Tsingtao/Optimization-Algorithm
|
ef1ed3d41c7bc130d798673dbc3e67a5e3f99686
|
[
"MIT"
] | null | null | null |
import math
import random
import numpy as np
import matplotlib.pyplot as plt
import pylab as mpl
flag = 1  # 1: minimize the objective (fitness is -f), 0: maximize it
class PSO:
def __init__(self, dimension, time, size, low, up, v_low, v_high):
self.dimension = dimension
self.time = time
self.size = size
self.bound = []
self.bound.append(low)
self.bound.append(up)
self.v_low = v_low
self.v_high = v_high
self.x = np.zeros((self.size, self.dimension))
self.v = np.zeros((self.size, self.dimension))
self.p_best = np.zeros((self.size, self.dimension))
self.g_best = np.zeros((1, self.dimension))[0]
temp = -1000000
for i in range(self.size):
for j in range(self.dimension):
self.x[i][j] = random.uniform(self.bound[0][j], self.bound[1][j])
self.v[i][j] = random.uniform(self.v_low, self.v_high)
self.p_best[i] = self.x[i]
fit = self.fitness(self.p_best[i])
if fit > temp:
self.g_best = self.p_best[i]
temp = fit
def fitness(self, x):
x1 = x[0]
x2 = x[1]
x3 = x[2]
if flag == 1:
y = -(2*x1**2 - 3*x2**2 - 4*x1 + 5*x2 + x3)
if flag == 0:
y = 2*x1**2 - 3*x2**2 - 4*x1 + 5*x2 + x3
# print(y)
return y
def update(self, size):
c1 = 2.0
c2 = 2.0
w = 0.8
for i in range(size):
self.v[i] = w * self.v[i] + c1 * random.uniform(0, 1) * (
self.p_best[i] - self.x[i]) + c2 * random.uniform(0, 1) * (self.g_best - self.x[i])
for j in range(self.dimension):
if self.v[i][j] < self.v_low:
self.v[i][j] = self.v_low
if self.v[i][j] > self.v_high:
self.v[i][j] = self.v_high
self.x[i] = self.x[i] + self.v[i]
for j in range(self.dimension):
if self.x[i][j] < self.bound[0][j]:
self.x[i][j] = self.bound[0][j]
if self.x[i][j] > self.bound[1][j]:
self.x[i][j] = self.bound[1][j]
if self.fitness(self.x[i]) > self.fitness(self.p_best[i]):
self.p_best[i] = self.x[i]
if self.fitness(self.x[i]) > self.fitness(self.g_best):
self.g_best = self.x[i]
def pso(self):
best = []
self.final_best = np.array([1, 2, 3])
for gen in range(self.time):
self.update(self.size)
if self.fitness(self.g_best) > self.fitness(self.final_best):
self.final_best = self.g_best.copy()
            print('current best position: {}'.format(self.final_best))
            temp = self.fitness(self.final_best)
            print('current best fitness: {}'.format(temp))
best.append(temp)
t = [i for i in range(self.time)]
        if flag == 1:
            # undo the sign flip used to turn minimization into maximization
            best = [-b for b in best]
plt.figure()
plt.plot(t, best, color='blue', marker=".")
plt.margins(0)
plt.xlabel(u"iteration")
plt.ylabel(u"fitneess")
plt.title(u"PSO")
plt.savefig('pso1.jpg')
if __name__ == '__main__':
time = 20
size = 100
dimension = 3
v_low = -1
v_high = 1
low = [0,0,0]
up = [15,15,15]
pso = PSO(dimension, time, size, low, up, v_low, v_high)
pso.pso()
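# The update() method above implements the canonical PSO rules, with inertia
# weight w and acceleration coefficients c1, c2 hard-coded in update() and
# r1, r2 ~ U(0, 1) drawn per particle:
#
#   v_i <- w * v_i + c1 * r1 * (p_best_i - x_i) + c2 * r2 * (g_best - x_i)
#   x_i <- x_i + v_i
#
# followed by clipping v_i and x_i to their respective bounds.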
| 31.767857
| 103
| 0.479202
|
4a176073201fe5dfe15f863c708fb12cd5c3de33
| 8,499
|
py
|
Python
|
monai/visualize/img2tensorboard.py
|
albarqounilab/MONAI
|
bb0b307d68021a243011a58fd82a1d275f00a51a
|
[
"Apache-2.0"
] | 1
|
2021-08-02T07:18:50.000Z
|
2021-08-02T07:18:50.000Z
|
monai/visualize/img2tensorboard.py
|
albarqounilab/MONAI
|
bb0b307d68021a243011a58fd82a1d275f00a51a
|
[
"Apache-2.0"
] | null | null | null |
monai/visualize/img2tensorboard.py
|
albarqounilab/MONAI
|
bb0b307d68021a243011a58fd82a1d275f00a51a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Union
import numpy as np
import torch
from monai.config import NdarrayTensor
from monai.transforms import rescale_array
from monai.utils import optional_import
PIL, _ = optional_import("PIL")
GifImage, _ = optional_import("PIL.GifImagePlugin", name="Image")
if TYPE_CHECKING:
from tensorboard.compat.proto.summary_pb2 import Summary
from torch.utils.tensorboard import SummaryWriter
else:
Summary, _ = optional_import("tensorboard.compat.proto.summary_pb2", name="Summary")
SummaryWriter, _ = optional_import("torch.utils.tensorboard", name="SummaryWriter")
__all__ = ["make_animated_gif_summary", "add_animated_gif", "add_animated_gif_no_channels", "plot_2d_or_3d_image"]
def _image3_animated_gif(tag: str, image: Union[np.ndarray, torch.Tensor], scale_factor: float = 1.0) -> Summary:
"""Function to actually create the animated gif.
Args:
tag: Data identifier
image: 3D image tensors expected to be in `HWD` format
scale_factor: amount to multiply values by. if the image data is between 0 and 1, using 255 for this value will
scale it to displayable range
"""
if len(image.shape) != 3:
raise AssertionError("3D image tensors expected to be in `HWD` format, len(image.shape) != 3")
ims = [(np.asarray((image[:, :, i])) * scale_factor).astype(np.uint8) for i in range(image.shape[2])]
ims = [GifImage.fromarray(im) for im in ims]
img_str = b""
for b_data in PIL.GifImagePlugin.getheader(ims[0])[0]:
img_str += b_data
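    # GIF Application Extension (0x21 0xFF, block size 0x0B) carrying the
    # "NETSCAPE2.0" identifier with a loop count of 0, i.e. loop forever.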
img_str += b"\x21\xFF\x0B\x4E\x45\x54\x53\x43\x41\x50" b"\x45\x32\x2E\x30\x03\x01\x00\x00\x00"
for i in ims:
for b_data in PIL.GifImagePlugin.getdata(i):
img_str += b_data
img_str += b"\x3B"
summary_image_str = Summary.Image(height=10, width=10, colorspace=1, encoded_image_string=img_str)
image_summary = Summary.Value(tag=tag, image=summary_image_str)
return Summary(value=[image_summary])
def make_animated_gif_summary(
tag: str,
image: Union[np.ndarray, torch.Tensor],
max_out: int = 3,
animation_axes: Sequence[int] = (3,),
image_axes: Sequence[int] = (1, 2),
other_indices: Optional[Dict] = None,
scale_factor: float = 1.0,
) -> Summary:
"""Creates an animated gif out of an image tensor in 'CHWD' format and returns Summary.
Args:
tag: Data identifier
image: The image, expected to be in CHWD format
max_out: maximum number of slices to animate through
animation_axes: axis to animate on (not currently used)
image_axes: axes of image (not currently used)
other_indices: (not currently used)
scale_factor: amount to multiply values by.
if the image data is between 0 and 1, using 255 for this value will scale it to displayable range
"""
suffix = "/image" if max_out == 1 else "/image/{}"
if other_indices is None:
other_indices = {}
axis_order = [0] + list(animation_axes) + list(image_axes)
slicing = []
for i in range(len(image.shape)):
if i in axis_order:
slicing.append(slice(None))
else:
other_ind = other_indices.get(i, 0)
slicing.append(slice(other_ind, other_ind + 1))
image = image[tuple(slicing)]
for it_i in range(min(max_out, list(image.shape)[0])):
one_channel_img: Union[torch.Tensor, np.ndarray] = (
image[it_i, :, :, :].squeeze(dim=0) if isinstance(image, torch.Tensor) else image[it_i, :, :, :]
)
        summary_op = _image3_animated_gif(tag + suffix.format(it_i), one_channel_img, scale_factor)
    # NOTE: if max_out > 1, only the summary for the last processed channel is returned
    return summary_op
def add_animated_gif(
writer: SummaryWriter,
tag: str,
image_tensor: Union[np.ndarray, torch.Tensor],
max_out: int,
scale_factor: float,
global_step: Optional[int] = None,
) -> None:
"""Creates an animated gif out of an image tensor in 'CHWD' format and writes it with SummaryWriter.
Args:
writer: Tensorboard SummaryWriter to write to
tag: Data identifier
image_tensor: tensor for the image to add, expected to be in CHWD format
max_out: maximum number of slices to animate through
scale_factor: amount to multiply values by. If the image data is between 0 and 1, using 255 for this value will
scale it to displayable range
global_step: Global step value to record
"""
writer._get_file_writer().add_summary(
make_animated_gif_summary(
tag, image_tensor, max_out=max_out, animation_axes=[1], image_axes=[2, 3], scale_factor=scale_factor
),
global_step,
)
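# Illustrative usage sketch (log directory, tensor shape and tag are
# assumptions, not part of this module):
#
#   writer = SummaryWriter(log_dir="runs/demo")
#   volume = torch.rand(1, 64, 64, 32)  # CHWD, values in [0, 1]
#   add_animated_gif(writer, "demo/volume", volume, max_out=1,
#                    scale_factor=255, global_step=0)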
def add_animated_gif_no_channels(
writer: SummaryWriter,
tag: str,
image_tensor: Union[np.ndarray, torch.Tensor],
max_out: int,
scale_factor: float,
global_step: Optional[int] = None,
) -> None:
"""Creates an animated gif out of an image tensor in 'HWD' format that does not have
a channel dimension and writes it with SummaryWriter. This is similar to the "add_animated_gif"
after inserting a channel dimension of 1.
Args:
writer: Tensorboard SummaryWriter to write to
tag: Data identifier
image_tensor: tensor for the image to add, expected to be in HWD format
max_out: maximum number of slices to animate through
scale_factor: amount to multiply values by. If the image data is between 0 and 1,
using 255 for this value will scale it to displayable range
global_step: Global step value to record
"""
writer._get_file_writer().add_summary(
make_animated_gif_summary(
tag, image_tensor, max_out=max_out, animation_axes=[1], image_axes=[1, 2], scale_factor=scale_factor
),
global_step,
)
def plot_2d_or_3d_image(
data: Union[NdarrayTensor, List[NdarrayTensor]],
step: int,
writer: SummaryWriter,
index: int = 0,
max_channels: int = 1,
max_frames: int = 64,
tag: str = "output",
) -> None:
"""Plot 2D or 3D image on the TensorBoard, 3D image will be converted to GIF image.
Note:
        Plot 3D or 2D images (with more than 3 channels) as separate images.
Args:
data: target data to be plotted as image on the TensorBoard.
The data is expected to have 'NCHW[D]' dimensions or a list of data with `CHW[D]` dimensions,
and only plot the first in the batch.
step: current step to plot in a chart.
writer: specify TensorBoard SummaryWriter to plot the image.
index: plot which element in the input data batch, default is the first element.
max_channels: number of channels to plot.
max_frames: number of frames for 2D-t plot.
tag: tag of the plotted image on TensorBoard.
"""
data_index = data[index]
d: np.ndarray = data_index.detach().cpu().numpy() if isinstance(data_index, torch.Tensor) else data_index
if d.ndim == 2:
d = rescale_array(d, 0, 1)
dataformats = "HW"
writer.add_image(f"{tag}_{dataformats}", d, step, dataformats=dataformats)
return
if d.ndim == 3:
if d.shape[0] == 3 and max_channels == 3: # RGB
dataformats = "CHW"
writer.add_image(f"{tag}_{dataformats}", d, step, dataformats=dataformats)
return
dataformats = "HW"
for j, d2 in enumerate(d[:max_channels]):
d2 = rescale_array(d2, 0, 1)
writer.add_image(f"{tag}_{dataformats}_{j}", d2, step, dataformats=dataformats)
return
if d.ndim >= 4:
spatial = d.shape[-3:]
for j, d3 in enumerate(d.reshape([-1] + list(spatial))[:max_channels]):
d3 = rescale_array(d3, 0, 255)
add_animated_gif(writer, f"{tag}_HWD_{j}", d3[None], max_frames, 1.0, step)
return
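# Minimal runnable demo appended for illustration only (not part of the
# original module); the log directory name and tensor shape are assumptions.
if __name__ == "__main__":
    demo_writer = SummaryWriter(log_dir="runs/img2tensorboard_demo")
    # NCHWD batch: 1 sample, 1 channel, 64x64 spatial, 32 slices
    demo_batch = torch.rand(1, 1, 64, 64, 32)
    plot_2d_or_3d_image(demo_batch, step=0, writer=demo_writer, tag="demo_volume")
    demo_writer.close()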
| 39.901408
| 119
| 0.668785
|
4a1760bcf6bbea372e0fd62dad17a2566686db7d
| 8,641
|
py
|
Python
|
NewsMLG2/partmeta.py
|
iptc/python-newsmlg2
|
5914c9db36d64674586e63879d476fdd2ca1c07f
|
[
"MIT"
] | null | null | null |
NewsMLG2/partmeta.py
|
iptc/python-newsmlg2
|
5914c9db36d64674586e63879d476fdd2ca1c07f
|
[
"MIT"
] | null | null | null |
NewsMLG2/partmeta.py
|
iptc/python-newsmlg2
|
5914c9db36d64674586e63879d476fdd2ca1c07f
|
[
"MIT"
] | null | null | null |
"""
partMeta support
"""
from .attributegroups import CommonPowerAttributes, I18NAttributes
from .concepts import QualPropType
from .contentmeta import (
AdministrativeMetadataGroup, DescriptiveMetadataGroup, Icon
)
from .extensionproperties import Flex2ExtPropType
from .itemmanagement import EdNote, Signal
from .link import Link
class TimeDelim(CommonPowerAttributes):
"""
A delimiter for a piece of streaming media content, expressed in various
time formats
"""
attributes = {
# The start time of the part in a timeline. The expressed time unit is
# excluded. Using the Edit Unit requires the frame rate or sampling rate
# to be known, this must be defined by the referenced rendition of the
# content.
'start': {
'xml_name': 'start',
'xml_type': 'xs:string',
'use': 'required'
},
# The end time of the part in a timeline. The expressed time unit is
# included. Using the Edit Unit requires the frame rate or sampling rate
# to be known, this must be defined by the referenced rendition of the
# content.
'end': {
'xml_name': 'end',
'xml_type': 'xs:string',
'use': 'required'
},
# The unit used for the start and end timestamps - expressed by a QCode
# either the timeunit or the timeunituri attribute MUST be used
'timeunit': {
'xml_name': 'timeunit',
'xml_type': 'QCodeType'
},
# The unit used for the start and end timestamps - expressed by a URI
# either the timeunit or the timeunituri attribute MUST be used
'timeunituri': {
'xml_name': 'timeunituri',
'xml_type': 'IRIType'
},
# Refers to the content rendition with this QCode as rendition attribute
# value - expressed by a QCode
'renditionref': {
'xml_name': 'renditionref',
'xml_type': 'QCodeType'
},
# Refers to the content rendition with this QCode as rendition attribute
# value - expressed by a URI
'renditionrefuri': {
'xml_name': 'renditionrefuri',
'xml_type': 'IRIType'
}
}
class RegionDelim(CommonPowerAttributes):
"""
A delimiter for a rectangular region in a piece of visual content
"""
attributes = {
# The x-axis coordinate of the side of the rectangle which has the
# smaller x-axis coordinate value in the current user coordinate system
'x': {
'xml_name': 'x',
'xml_type': 'xs:integer'
},
# The y-axis coordinate of the side of the rectangle which has the
# smaller y-axis coordinate value in the current user coordinate system
'y': {
'xml_name': 'y',
'xml_type': 'xs:integer'
},
        # The width of the rectangle
'width': {
'xml_name': 'width',
'xml_type': 'xs:integer'
},
        # The height of the rectangle
'height': {
'xml_name': 'height',
'xml_type': 'xs:nonNegativeInteger'
}
}
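# Illustrative sketch only (hypothetical class, not part of the NewsMLG2
# schema): a new delimiter type would follow the same declarative pattern,
# mapping descriptor dictionaries to XML attributes.
#
#   class PointDelim(CommonPowerAttributes):
#       """A delimiter for a single point in a piece of visual content."""
#       attributes = {
#           'x': {'xml_name': 'x', 'xml_type': 'xs:integer'},
#           'y': {'xml_name': 'y', 'xml_type': 'xs:integer'},
#       }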
class PartMetaRole(QualPropType):
"""
The role [of this part] in the overall content stream.
"""
class PartMetaExtProperty(Flex2ExtPropType):
"""
Extension Property; the semantics are defined by the concept referenced by
the rel attribute. The semantics of the Extension Property must have the
same scope as the parent property.
"""
class PartMetaPropType(I18NAttributes):
"""
A type representing the structure of a partMeta property
"""
elements = [
('icon', {
'type': 'array', 'xml_name': 'icon', 'element_class': Icon
}),
('timedelim', {
'type': 'array', 'xml_name': 'timeDelim',
'element_class': TimeDelim
}),
('regiondelim', {
'type': 'single', 'xml_name': 'regionDelim',
'element_class': RegionDelim
}),
        ('role', {
            'type': 'single', 'xml_name': 'role',
            'element_class': PartMetaRole
        })
] + AdministrativeMetadataGroup + DescriptiveMetadataGroup + [
('partmetaextproperty', {
'type': 'array', 'xml_name': 'partMetaExtProperty',
'element_class': PartMetaExtProperty
}),
('signal', {
'type': 'array', 'xml_name': 'signal', 'element_class': Signal
}),
('ednote', {
'type': 'array', 'xml_name': 'edNote', 'element_class': EdNote
}),
('link', {
'type': 'array', 'xml_name': 'link', 'element_class': Link
})
]
attributes = {
# The identifier of the part
'partid': {
'xml_name': 'partid',
'xml_type': 'xs:ID',
'use': 'optional'
},
# If the attribute is empty, specifies which entity (person,
# organisation or system) will edit the property - expressed by a QCode.
# If the attribute is non-empty, specifies which entity (person,
# organisation or system) has edited the property.
'creator': {
'xml_name': 'creator',
'xml_type': 'QCodeType',
'use': 'optional'
},
# If the attribute is empty, specifies which entity (person,
# organisation or system) will edit the property - expressed by a URI.
# If the attribute is non-empty, specifies which entity (person,
# organisation or system) has edited the property.
'creatoruri': {
'xml_name': 'creatoruri',
'xml_type': 'IRIType',
'use': 'optional'
},
# The date (and, optionally, the time) when the property was last
# modified. The initial value is the date (and, optionally, the time) of
# creation of the property.
'modified': {
'xml_name': 'modified',
'xml_type': 'DateOptTimeType',
'use': 'optional'
},
# If set to true the corresponding property was added to the G2 Item for
# a specific customer or group of customers only. The default value of
# this property is false which applies when this attribute is not used
# with the property.
'custom': {
'xml_name': 'custom',
'xml_type': 'xs:boolean',
'use': 'optional'
},
# Indicates by which means the value was extracted from the content -
# expressed by a QCode
'how': {
'xml_name': 'how',
'xml_type': 'QCodeType',
'use': 'optional'
},
# Indicates by which means the value was extracted from the content -
# expressed by a URI
'howuri': {
'xml_name': 'howuri',
'xml_type': 'IRIType',
'use': 'optional'
},
# Why the metadata has been included - expressed by a QCode
'why': {
'xml_name': 'why',
'xml_type': 'QCodeType',
'use': 'optional'
},
# Why the metadata has been included - expressed by a URI
'whyuri': {
'xml_name': 'whyuri',
'xml_type': 'IRIType',
'use': 'optional'
},
# The sequence number of the part
'seq': {
'xml_name': 'seq',
'xml_type': 'xs:nonNegativeInteger',
'use': 'optional'
},
# A list of identifiers of XML elements containing content which is
# described by this partMeta structure.
'contentrefs': {
'xml_name': 'contentrefs',
'xml_type': 'xs:IDREFS',
'use': 'optional'
}
}
class PartMeta(PartMetaPropType):
"""
A set of properties describing a specific part of the content of the Item.
The relationship of properties inside this partMeta and properties at a
higher hierarchical level of the content parts structure is:
- the semantic assertion of all properties at a higher level is inherited by
    this partMeta element as if these properties were its children
- a child property of a specific name wipes out for this partMeta element
any semantic assertions of properties of the same name at higher levels
- in this latter case: if the semantic assertion of a property at a higher
level should be reinstated for this part of the content then this property
has to appear again as child of this partMeta
"""
| 35.854772
| 80
| 0.576206
|
4a1760c9bc0512dee43f6491b5e47ea5c805895e
| 1,788
|
py
|
Python
|
papermerge/core/views/decorators.py
|
w-michal/papermerge
|
14703c3316deea06696da041b7adc4bd0b15270b
|
[
"Apache-2.0"
] | null | null | null |
papermerge/core/views/decorators.py
|
w-michal/papermerge
|
14703c3316deea06696da041b7adc4bd0b15270b
|
[
"Apache-2.0"
] | 1
|
2021-02-12T02:28:00.000Z
|
2021-02-24T04:08:34.000Z
|
papermerge/core/views/decorators.py
|
w-michal/papermerge
|
14703c3316deea06696da041b7adc4bd0b15270b
|
[
"Apache-2.0"
] | 2
|
2021-02-11T23:10:29.000Z
|
2021-02-13T09:06:49.000Z
|
import json
from django.http import (
HttpResponse,
HttpResponseRedirect
)
def smart_dump(value):
if isinstance(value, str):
return json.dumps({'msg': value})
if isinstance(value, dict):
return json.dumps(value)
return ""
def json_response(func):
"""
Decorates view to return application/json type response.
Argument function func is expected to return one of:
1. A string
in this case, body will be a json.dump({
'msg': returned_str
})
and status code will be 200
2. A dictionary
        same as above, but the response will dump the dictionary directly
json.dumps(returned_dict)
and status code will be 200
3. Two valued tuple
First value of the tuple must be either a string
or a dictionary. In this case above points 1 and 2 apply
        Second value of the tuple is the status code (as an integer)
"""
def inner(*args, **kwargs):
ret = func(*args, **kwargs)
status = 200
body = ""
        if isinstance(ret, (str, dict)):
body = smart_dump(ret)
elif isinstance(ret, tuple):
for_body = ret[0]
status = ret[1]
body = smart_dump(for_body)
elif isinstance(ret, HttpResponseRedirect):
# in case anonymous user access this view - return
# the HttpResponseRedirect object
return ret
else:
raise ValueError(
"Function must return str, dict or 2 valued tuple"
)
return HttpResponse(
body,
content_type="application/json",
status=status
)
return inner
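# Illustrative usage sketch (view names and payloads are assumptions, not
# part of this module):
#
#   @json_response
#   def ping(request):
#       return {"status": "ok"}        # -> 200, {"status": "ok"}
#
#   @json_response
#   def missing(request):
#       return "no such node", 404     # -> 404, {"msg": "no such node"}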
| 27.090909
| 72
| 0.568233
|
4a17610b1b7c41a2b1b76fc2c5ece1989c52fc4c
| 845
|
py
|
Python
|
polyaxon/signals/users.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
polyaxon/signals/users.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
polyaxon/signals/users.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
from hestia.decorators import ignore_raw
from rest_framework.authtoken.models import Token
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import Signal, receiver
import auditor
from event_manager.events.user import USER_REGISTERED, USER_UPDATED
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@ignore_raw
def create_auth_token(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
auditor.record(event_type=USER_REGISTERED, instance=instance)
else:
auditor.record(event_type=USER_UPDATED, instance=instance)
# A new user has registered.
user_registered = Signal(providing_args=["user", "request"])
# A user has activated his or her account.
user_activated = Signal(providing_args=["user", "request"])
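# Illustrative sketch (handler name is an assumption): consumers hook into
# these signals with Django's standard receiver mechanism, e.g.
#
#   @receiver(user_registered)
#   def handle_user_registered(sender, user, request, **kwargs):
#       ...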
| 29.137931
| 70
| 0.784615
|
4a1761681300c5e731809b32a97060891cc15a7f
| 967
|
py
|
Python
|
zeus/modules/__init__.py
|
georgefang/vega
|
977054e12dd3bc1c96bbe35f18d5db4bc82d0522
|
[
"MIT"
] | null | null | null |
zeus/modules/__init__.py
|
georgefang/vega
|
977054e12dd3bc1c96bbe35f18d5db4bc82d0522
|
[
"MIT"
] | null | null | null |
zeus/modules/__init__.py
|
georgefang/vega
|
977054e12dd3bc1c96bbe35f18d5db4bc82d0522
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Import and register modules automatically."""
from zeus.common.class_factory import ClassFactory
ClassFactory.lazy_register("zeus.modules", {
"module": ["network:Module"],
})
def register_modules():
"""Import and register modules automatically."""
from . import blocks
from . import cells
from . import connections
from . import operators
from . import preprocess
from . import loss
from . import getters
from . import necks
from . import backbones
from . import distillation
| 30.21875
| 72
| 0.722854
|
4a17618bfcf356a86372b26e66cb71bde18a62e6
| 1,113
|
py
|
Python
|
drones/filter.py
|
codacy-badger/01
|
edf4deb6de72533f784d2411d0bf10bcd5a68e74
|
[
"MIT"
] | null | null | null |
drones/filter.py
|
codacy-badger/01
|
edf4deb6de72533f784d2411d0bf10bcd5a68e74
|
[
"MIT"
] | null | null | null |
drones/filter.py
|
codacy-badger/01
|
edf4deb6de72533f784d2411d0bf10bcd5a68e74
|
[
"MIT"
] | null | null | null |
from rest_framework import filters
from django_filters import AllValuesFilter, DateTimeFilter, NumberFilter
from drones.models import Competition
class CompetitionFilter(filters.FilterSet):
from_achievement_date = DateTimeFilter(name='distance_achievement_date', lookup_expr='gte')
to_achievement_date = DateTimeFilter(name='distance_achievement_date', lookup_expr='lte')
min_distance_in_feet = NumberFilter(name='distance_in_feet', lookup_expr='gte')
max_distance_in_feet = NumberFilter(name='distance_in_feet', lookup_expr='lte')
drone_name = AllValuesFilter(name='drone__name')
pilot_name = AllValuesFilter(name='pilot__name')
class Meta:
model = Competition
fields = (
'distance_in_feet',
'from_achievement_date',
'to_achievement_date',
'min_distance_in_feet',
'max_distance_in_feet',
# drone__name will be accessed as drone_name
'drone_name',
# pilot__name will be accessed as pilot_name
'pilot_name',
)
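# Illustrative query sketch (endpoint path and values are assumptions): once
# this FilterSet backs a Competition list view, requests can filter like
#   GET /competitions/?min_distance_in_feet=500&drone_name=Atom&pilot_name=Penelope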
| 46.375
| 95
| 0.677448
|
4a1761bacf9cda6a828863d7ba7a2b9ff0c6f26a
| 49
|
py
|
Python
|
ShopOnline/payment/__init__.py
|
tupakisyao/ShopOnline-
|
4a6d10f7600b6d085903913216701d67464485e3
|
[
"MIT"
] | 3
|
2017-04-25T10:19:02.000Z
|
2017-06-07T12:50:30.000Z
|
online-shop/myshop/payment/__init__.py
|
EssaAlshammri/django-by-example
|
d1a1cba9308d4f19bbb1228dbd191ad5540b2c78
|
[
"MIT"
] | 12
|
2019-10-02T17:18:09.000Z
|
2022-03-11T23:54:53.000Z
|
online-shop/myshop/payment/__init__.py
|
EssaAlshammri/django-by-example
|
d1a1cba9308d4f19bbb1228dbd191ad5540b2c78
|
[
"MIT"
] | 1
|
2019-10-21T08:14:38.000Z
|
2019-10-21T08:14:38.000Z
|
default_app_config = 'payment.apps.PaymentConfig'
| 49
| 49
| 0.857143
|
4a17628a4c2daac3c5f67b8b834fd5bc6cb7b565
| 1,490
|
py
|
Python
|
aries_cloudagent/messaging/trustping/messages/ping.py
|
DibbsZA/aries-cloudagent-python
|
a094dd7697023721ac2a2fd4e58b04d4b37d1f44
|
[
"Apache-2.0"
] | 7
|
2020-07-07T15:44:41.000Z
|
2022-03-26T21:20:41.000Z
|
aries_cloudagent/messaging/trustping/messages/ping.py
|
totemprotocol/aries-fl
|
dd78dcebc771971abfee301b80cdd5d246c14840
|
[
"Apache-2.0"
] | null | null | null |
aries_cloudagent/messaging/trustping/messages/ping.py
|
totemprotocol/aries-fl
|
dd78dcebc771971abfee301b80cdd5d246c14840
|
[
"Apache-2.0"
] | 2
|
2019-12-02T18:59:07.000Z
|
2020-06-03T18:58:20.000Z
|
"""Represents a trust ping message."""
from marshmallow import fields
from ...agent_message import AgentMessage, AgentMessageSchema
from ..message_types import PING
HANDLER_CLASS = "aries_cloudagent.messaging.trustping.handlers.ping_handler.PingHandler"
class Ping(AgentMessage):
"""Class representing a trustping message."""
class Meta:
"""Ping metadata."""
handler_class = HANDLER_CLASS
message_type = PING
schema_class = "PingSchema"
def __init__(
self, *, response_requested: bool = True, comment: str = None, **kwargs
):
"""
Initialize a Ping message instance.
Args:
response_requested: A flag indicating that a response is requested
(defaults to True for the recipient if not included)
comment: An optional comment string
"""
super(Ping, self).__init__(**kwargs)
self.comment = comment
self.response_requested = response_requested
class PingSchema(AgentMessageSchema):
"""Schema for Ping class."""
class Meta:
"""PingSchema metadata."""
model_class = Ping
response_requested = fields.Bool(
default=True,
required=False,
description="Whether response is requested (default True)",
example=True,
)
comment = fields.Str(
required=False,
description="Optional comment to include",
example="Hello",
allow_none=True,
)
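# Illustrative sketch (the serialize() call is an assumption based on the
# AgentMessage base class, which is not shown here):
#
#   ping = Ping(response_requested=True, comment="Hello")
#   message_dict = ping.serialize()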
| 25.689655
| 88
| 0.64094
|
4a17629037b67bf303889fb224118051315a2beb
| 1,078
|
py
|
Python
|
ACM-Solution/PrimitiveDrawing.py
|
wasi0013/Python-CodeBase
|
4a7a36395162f68f84ded9085fa34cc7c9b19233
|
[
"MIT"
] | 2
|
2016-04-26T15:40:40.000Z
|
2018-07-18T10:16:42.000Z
|
ACM-Solution/PrimitiveDrawing.py
|
wasi0013/Python-CodeBase
|
4a7a36395162f68f84ded9085fa34cc7c9b19233
|
[
"MIT"
] | 1
|
2016-04-26T15:44:15.000Z
|
2016-04-29T14:44:40.000Z
|
pygame/PrimitiveDrawing.py
|
wasi0013/Python-CodeBase
|
4a7a36395162f68f84ded9085fa34cc7c9b19233
|
[
"MIT"
] | 1
|
2018-10-02T16:12:19.000Z
|
2018-10-02T16:12:19.000Z
|
import pygame, sys
from pygame.locals import *
pygame.init()
DISPLAYSURF = pygame.display.set_mode((500,400),0,32)
pygame.display.set_caption("Primitive Drawing")
#colour setup
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
#draw surface objects
DISPLAYSURF.fill(WHITE)
pygame.draw.polygon(DISPLAYSURF,GREEN,((146,0),(291,106),(236,277),(56,277),(0,106)))
pygame.draw.line(DISPLAYSURF,BLUE,(60,60),(120,60),4)
pygame.draw.line(DISPLAYSURF,BLUE,(120,60),(60,120))
pygame.draw.line(DISPLAYSURF,BLUE,(60,120),(120,120),4)
pygame.draw.circle(DISPLAYSURF,BLUE,(300,50),20,0)
pygame.draw.ellipse(DISPLAYSURF,RED,(300,250,40,80),1)
pixObj = pygame.PixelArray(DISPLAYSURF)
pixObj[480][380] = BLACK
pixObj[482][382] = BLACK
pixObj[484][384] = BLACK
pixObj[486][386] = BLACK
pixObj[488][388] = BLACK
del pixObj  # release the PixelArray so DISPLAYSURF is unlocked before updating
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
pygame.display.update()
| 16.089552
| 86
| 0.636364
|
4a1762f2e84f20b91c791155e3b93550237ffa3b
| 169
|
py
|
Python
|
pyensemblorthologues/__init__.py
|
Uauy-Lab/pyensemblorthologues
|
352119c30572bc493650db3fe22ae8855ca51949
|
[
"MIT"
] | null | null | null |
pyensemblorthologues/__init__.py
|
Uauy-Lab/pyensemblorthologues
|
352119c30572bc493650db3fe22ae8855ca51949
|
[
"MIT"
] | null | null | null |
pyensemblorthologues/__init__.py
|
Uauy-Lab/pyensemblorthologues
|
352119c30572bc493650db3fe22ae8855ca51949
|
[
"MIT"
] | null | null | null |
"""Top-level package for PyEnsemblOrthologues."""
__author__ = """Ricardo H. Ramirez-Gonzalez"""
__email__ = "ricardo.ramirez-gonzalez@jic.ac.uk"
__version__ = "0.1.2"
| 28.166667
| 49
| 0.727811
|
4a1763093e15353d17f34a9cdab845382fafbc92
| 23,532
|
py
|
Python
|
generate.py
|
Holmes7/library-checker-problems
|
d29be07a624955b20f4e9bcc3f14fe74d0a9e1cc
|
[
"Apache-2.0"
] | null | null | null |
generate.py
|
Holmes7/library-checker-problems
|
d29be07a624955b20f4e9bcc3f14fe74d0a9e1cc
|
[
"Apache-2.0"
] | null | null | null |
generate.py
|
Holmes7/library-checker-problems
|
d29be07a624955b20f4e9bcc3f14fe74d0a9e1cc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import sys
import argparse
import os
import platform
import shutil
import hashlib
import json
from datetime import datetime
from logging import Logger, basicConfig, getLogger, INFO
from os import getenv
from pathlib import Path
from subprocess import (DEVNULL, PIPE, STDOUT, CalledProcessError,
TimeoutExpired, call, check_call, check_output, run)
from tempfile import TemporaryDirectory
from typing import Any, Iterator, List, MutableMapping, Union, Optional
from enum import Enum
import toml
logger = getLogger(__name__) # type: Logger
CASENAME_LEN_LIMIT = 40
def casename(name: Union[str, Path], i: int) -> str:
# (random, 1) -> random_01
return Path(name).stem + '_' + str(i).zfill(2)
class UnknownTypeFile(Exception):
def __init__(self, message):
super().__init__()
self.message = message
def compile(src: Path, libdir: Path):
if src.suffix == '.cpp':
cxx = getenv('CXX', 'g++')
cxxflags_default = '-O2 -std=c++17 -Wall -Wextra -Werror -Wno-unused-result'
if platform.system() == 'Darwin':
cxxflags_default += ' -Wl,-stack_size,0x10000000' # 256MB
if platform.system() == 'Windows':
cxxflags_default += ' -Wl,-stack,0x10000000' # 256MB
cxxflags_default += ' -D__USE_MINGW_ANSI_STDIO' # avoid using MinGW's "unique" stdio, which doesn't recognize %lld
if platform.uname().system == 'Linux' and 'Microsoft' in platform.uname().release:
cxxflags_default += ' -fsplit-stack' # a workaround for the lack of ulimit in Windows Subsystem for Linux
cxxflags = getenv('CXXFLAGS', cxxflags_default).split()
cxxflags.extend(['-I', str(libdir / 'common')])
check_call([cxx] + cxxflags +
['-o', str(src.with_suffix(''))] + [str(src)])
elif src.suffix == '.in':
pass
else:
logger.error('Unknown type of file {}'.format(src))
raise UnknownTypeFile('Unknown file: {}'.format(src))
def execcmd(src: Path, arg: List[str] = []) -> List[str]:
# main.cpp -> ['main']
# example.in -> ['cat', 'example_00.in']
if src.suffix == '.cpp':
cmd = [str(src.with_suffix('' if platform.system() != 'Windows' else '.exe').resolve())]
cmd.extend(arg)
return cmd
elif src.suffix == '.in':
inpath = src.with_name(casename(src, int(arg[0])) + '.in')
if platform.system() == 'Windows': cmd = ['cmd', '/C', 'type', str(inpath)] # Windows' built-in command
else: cmd = ['cat', str(inpath)]
return cmd
else:
raise UnknownTypeFile('Unknown file: {} {}'.format(src, arg))
def check_call_to_file(command: List[str], outpath: Path, *args, **kwargs):
# same as subprocess.check_call(command, stdout=open(outpath, "w"), *args, **kwargs)
# but handles CRLF stuff on Windows
if platform.uname().system == 'Windows':
result = run(command, stdout=PIPE, check=True, *args, **kwargs)
with open(str(outpath), "w", newline='\n') as out_file:
out_file.write(result.stdout.decode('utf-8').replace(os.linesep, '\n'))
else:
check_call(command, stdout=open(str(outpath), "w"), *args, **kwargs)
def logging_result(result: str, start: datetime, end: datetime, message: str):
usemsec = (end - start).seconds*1000 + \
(end - start).microseconds // 1000
logger.info('{:>3s} {:6d} msecs : {}'.format(
result, usemsec, message))
class Problem:
def __init__(self, libdir: Path, basedir: Path):
self.libdir = libdir # type: Path
self.basedir = basedir # type: Path
tomlpath = basedir / 'info.toml'
self.config = toml.load(tomlpath) # type: MutableMapping[str, Any]
self.checker = basedir / self.config.get('checker', 'checker.cpp') # type: Path
self.verifier = basedir / self.config.get('verifier', 'verifier.cpp') # type: Path
self.ignore_warning = False # type: bool
def warning(self,message: str):
logger.warning(message)
if not self.ignore_warning:
raise RuntimeError(message)
def health_check(self):
if 'title' not in self.config:
self.warning('no title: {}'.format(self.basedir))
for test in self.config['tests']:
for i in range(test['number']):
cn = casename(test['name'], i) + '.in'
if len(cn) > CASENAME_LEN_LIMIT:
self.warning('too long casename: {}'.format(cn))
gendir = self.basedir / 'gen'
gens = []
for test in self.config['tests']:
gen = gendir / test['name']
if gen.suffix == '.cpp':
gens.append(str(gen))
elif gen.suffix == '.in':
for i in range(test['number']):
cn = casename(test['name'], i) + '.in'
gens.append(str(gendir / cn))
else:
logger.error('Unknown file: {}'.format(test['name']))
raise UnknownTypeFile('Unknown file: {}'.format(test['name']))
for name in self.basedir.glob('gen/*.cpp'):
if str(name) not in gens:
self.warning('Unused .cpp gen file: {}'.format(name))
for name in self.basedir.glob('gen/*.in'):
if str(name) not in gens:
self.warning('Unused .in gen file: {}'.format(name))
def generate_params_h(self):
logger.info('generate params.h')
with open(str(self.basedir / 'params.h'), 'w') as fh:
for key, value in self.config.get('params', {}).items():
if isinstance(value, int):
fh.write('#define {} (long long){}\n'.format(key, value))
elif isinstance(value, float):
fh.write('#define {} {}\n'.format(key, value))
elif isinstance(value, str):
# NOTE: this fails if value contains some chars like double quotations
fh.write('#define {} "{}"\n'.format(key, value))
else:
logger.error('Unsupported type of params: {}'.format(key))
exit(1)
def compile_correct(self):
logger.info('compile solution')
compile(self.basedir / 'sol' / 'correct.cpp', self.libdir)
def compile_verifier(self):
logger.info('compile verifier')
compile(self.verifier, self.libdir)
def compile_gens(self):
logger.info('compile generators')
for test in self.config['tests']:
name = test['name']
logger.info('compile {}'.format(name))
compile(self.basedir / 'gen' / name, self.libdir)
def compile_checker(self):
logger.info('compile checker')
compile(self.checker, self.libdir)
def compile_solutions(self):
for sol in self.config.get('solutions', []):
name = sol['name']
compile(self.basedir / 'sol' / name, self.libdir)
def make_inputs(self):
indir = self.basedir / 'in'
gendir = self.basedir / 'gen'
logger.info('clear input {}'.format(indir))
if indir.exists():
shutil.rmtree(str(indir))
indir.mkdir()
for test in self.config['tests']:
name = test['name']
num = test['number']
logger.info('gen {} {}cases'.format(name, num))
for i in range(num):
inpath = indir / (casename(name, i) + '.in')
check_call_to_file(execcmd(gendir / name, [str(i)]), inpath)
def verify_inputs(self):
indir = self.basedir / 'in'
for test in self.config['tests']:
name = test['name']
num = test['number']
logger.info('verify {} {}cases'.format(name, num))
for i in range(num):
inname = (casename(name, i) + '.in')
inpath = indir / inname
result = run(execcmd(self.verifier),
stdin=open(str(inpath), 'r'))
if result.returncode != 0:
logger.error('verify failed: {}'.format(inname))
exit(1)
def make_outputs(self, check):
indir = self.basedir / 'in'
outdir = self.basedir / 'out'
soldir = self.basedir / 'sol'
checker = self.checker
logger.info('clear output {}'.format(outdir))
if outdir.exists():
shutil.rmtree(str(outdir))
outdir.mkdir()
for test in self.config['tests']:
name = test['name']
num = test['number']
for i in range(num):
case = casename(name, i)
infile = indir / (case + '.in')
expected = outdir / (case + '.out')
start = datetime.now()
check_call_to_file(execcmd(soldir / 'correct.cpp'), expected, stdin=open(str(infile), 'r'))
end = datetime.now()
checker_output = bytes()
if check:
process = run(
execcmd(checker, [str(infile), str(expected), str(expected)]), stdout=PIPE, stderr=STDOUT, check=True)
checker_output = process.stdout
logging_result('ANS', start, end,
'{} : {}'.format(case, checker_output))
def is_testcases_already_generated(self) -> bool:
indir = self.basedir / 'in'
outdir = self.basedir / 'out'
# get the timestamp when generate.py was last run
testcases = set()
for test in self.config['tests']:
name = test['name']
num = test['number']
for i in range(num):
case = casename(name, i)
infile = indir / (case + '.in')
expected = outdir / (case + '.out')
if not infile.exists() or not expected.exists():
return False
testcases.add(infile)
testcases.add(expected)
# Here you should use min, not max. We want ensure that all testcases are newer than all source files.
latest_timestamp = min(datetime.fromtimestamp(
path.stat().st_mtime) for path in testcases)
# compare the timestamp with other files (including header files in common/)
for path in self.list_depending_files():
if latest_timestamp < datetime.fromtimestamp(path.stat().st_mtime):
return False
logger.info('Test cases are already generated')
return True
def is_checker_already_generated(self) -> bool:
checker_bin = self.checker.parent / self.checker.stem
if not checker_bin.exists():
return False
checker_timestamp = datetime.fromtimestamp(checker_bin.stat().st_mtime)
for path in self.list_depending_files():
if checker_timestamp < datetime.fromtimestamp(path.stat().st_mtime):
return False
logger.info('The checker is already compiled')
return True
def list_depending_files(self) -> Iterator[Path]:
yield Path(__file__)
for path in list(self.basedir.glob('**/*')) + list(self.libdir.glob('common/**/*')):
if (self.basedir / 'in').exists() and (self.basedir / 'in').resolve() in path.resolve().parents:
continue
if (self.basedir / 'out').exists() and (self.basedir / 'out').resolve() in path.resolve().parents:
continue
if not path.is_file():
continue # ignore directories
if path.suffix == '':
continue # ignore compiled binaries
if path.name.endswith('.html'):
continue # ignore generated HTML files
if path.name == 'params.h':
continue # ignore generated params.h
yield path
# return "version" of problem
def problem_version(self) -> str:
all_hash = hashlib.sha256()
for path in sorted(self.list_depending_files()):
all_hash.update(hashlib.sha256(open(str(path), 'rb').read()).digest())
return all_hash.hexdigest()
# return "version" of testcase
def testcase_version(self) -> str:
all_hash = hashlib.sha256()
all_hash.update(hashlib.sha256(open(str(self.checker), 'rb').read()).digest())
cases = json.load(open(str(self.basedir / 'hash.json'), 'r'))
        for name, sha in sorted(cases.items(), key=lambda x: x[0]):
all_hash.update(sha.encode('ascii'))
return all_hash.hexdigest()
def judge(self, src: Path, config: dict):
indir = self.basedir / 'in'
outdir = self.basedir / 'out'
_tmpdir = TemporaryDirectory()
tmpdir = _tmpdir.name
checker = self.checker
results = set()
logger.info('Start {}'.format(src.name))
for test in self.config['tests']:
name = test['name']
num = test['number']
for i in range(num):
case = casename(name, i)
infile = indir / (case + '.in')
expected = outdir / (case + '.out')
actual = Path(tmpdir) / (case + '.out')
start = datetime.now()
result = ''
checker_output = bytes()
try:
check_call_to_file(execcmd(src), actual,
stdin=open(str(infile), 'r'), timeout=self.config['timelimit'])
except TimeoutExpired:
result = 'TLE'
except CalledProcessError:
result = 'RE'
else:
process = run(
execcmd(checker, [str(infile), str(actual), str(expected)]), stdout=PIPE, stderr=STDOUT)
checker_output = process.stdout
if process.returncode:
result = 'WA'
else:
result = 'AC'
end = datetime.now()
results.add(result)
logging_result(result, start, end,
'{} : {}'.format(case, checker_output.decode('utf-8')))
if config.get('wrong', False):
if results == {'AC'}:
logger.error('wrong solution got accept: {}'.format(src))
exit(1)
else:
if 'WA' in results or 'RE' in results:
logger.error('correct solution got wa/re: {}'.format(src))
exit(1)
if not config.get('allow_tle', False) and 'TLE' in results:
logger.error('fast solution got tle: {}'.format(src))
exit(1)
def gen_html(self):
from htmlgen import ToHTMLConverter
# convert task
return ToHTMLConverter(self.basedir, self.config)
def write_html(self, htmldir: Optional[Path]):
# convert task
html = self.gen_html()
if not html.check_all_samples_used():
self.warning('all samples are not used')
path = (self.basedir / 'task.html') if not htmldir else htmldir / (self.basedir.name + '.html')
with open(str(path), 'w', encoding='utf-8') as f:
f.write(html.html)
def calc_hashes(self) -> MutableMapping[str, str]:
hashes = dict() # type: MutableMapping[str, str]
for name in self.basedir.glob('in/*.in'):
m = hashlib.sha256()
m.update(open(str(name), 'rb').read())
hashes[name.name] = m.hexdigest()
for name in self.basedir.glob('out/*.out'):
m = hashlib.sha256()
m.update(open(str(name), 'rb').read())
hashes[name.name] = m.hexdigest()
return hashes
def assert_hashes(self):
if not Path(self.basedir, 'hash.json').exists():
raise RuntimeError("hash.json doesn't exist")
expect = json.load(open(str(self.basedir / 'hash.json'), 'r'))
actual = self.calc_hashes()
if expect != actual:
logger.error('hashes are different')
logger.error('your hash: {}'.format(
json.dumps(actual, indent=2, sort_keys=True)))
raise RuntimeError("hashes are different")
def write_hashes(self):
actual = self.calc_hashes()
if not Path(self.basedir, 'hash.json').exists():
self.warning("hash.json doesn't exist, create")
else:
expect = json.load(open(str(self.basedir / 'hash.json'), 'r'))
if expect != actual:
self.warning('hashes are different, overwrite')
self.warning('your hash: {}'.format(
json.dumps(actual, indent=2, sort_keys=True)))
json.dump(self.calc_hashes(), open(
str(self.basedir / 'hash.json'), 'w'), indent=2, sort_keys=True)
class Mode(Enum):
DEFAULT = 1
DEV = 2
TEST = 3
def force_generate(self):
return self == self.DEV or self == self.TEST
def verify(self):
return self == self.DEV or self == self.TEST
def rewrite_hash(self):
return self == self.DEV
def generate_html(self):
return self == self.DEV or self == self.TEST
def generate(self, mode: Mode, html_dir: Optional[Path]):
if mode == self.Mode.DEV:
self.ignore_warning = True
logger.info('Start {}'.format(self.basedir.name))
# health check
self.health_check()
self.generate_params_h()
is_testcases_already_generated = self.is_testcases_already_generated()
is_checker_already_generated = self.is_checker_already_generated()
if not is_checker_already_generated or mode.force_generate():
self.compile_checker()
if not is_testcases_already_generated or mode.force_generate():
self.compile_correct()
self.compile_gens()
self.make_inputs()
if mode.verify():
self.compile_verifier()
self.verify_inputs()
if not is_testcases_already_generated or mode.force_generate():
self.make_outputs(mode.verify())
if mode.verify():
self.compile_solutions()
for sol in self.config.get('solutions', []):
self.judge(self.basedir / 'sol' / sol['name'], sol)
if mode.rewrite_hash():
self.write_hashes()
else:
self.assert_hashes()
if mode.generate_html():
self.write_html(html_dir)
def find_problem_dir(rootdir: Path, problem_name: Path) -> Optional[Path]:
tomls = list(rootdir.glob('**/{}/info.toml'.format(problem_name)))
if len(tomls) == 0:
logger.error('Cannot find problem: {}'.format(problem_name))
return None
if len(tomls) >= 2:
logger.error('Find multiple problem dirs: {}'.format(problem_name))
return None
return tomls[0].parent
def generate(
problem: Problem,
force_generate: bool,
ignore_warning: bool,
rewrite_hash: bool,
verify: bool,
generate_html: bool,
html_dir: Union[Path, None]):
problem.ignore_warning = ignore_warning
logger.info('Start {}'.format(problem.basedir.name))
# health check
problem.health_check()
is_testcases_already_generated = problem.is_testcases_already_generated()
is_checker_already_generated = problem.is_checker_already_generated()
problem.generate_params_h()
if not is_testcases_already_generated or force_generate:
problem.compile_correct()
problem.compile_gens()
problem.make_inputs()
if verify:
problem.compile_verifier()
problem.verify_inputs()
if not is_checker_already_generated or force_generate:
problem.compile_checker()
if not is_testcases_already_generated or force_generate:
problem.make_outputs(verify)
if verify:
problem.compile_solutions()
# TODO: problem.judge_solutions()?
for sol in problem.config.get('solutions', []):
problem.judge(problem.basedir / 'sol' / sol['name'], sol)
if rewrite_hash:
problem.write_hashes()
else:
problem.assert_hashes()
if generate_html:
problem.write_html(html_dir if html_dir else problem.basedir)
def main(args: List[str]):
try:
import colorlog
except ImportError:
basicConfig(
format="%(asctime)s [%(levelname)s] %(message)s",
datefmt="%H:%M:%S",
level=getenv('LOG_LEVEL', 'INFO'),
)
        logger.warning('Please install colorlog: pip3 install colorlog')
else:
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)s [%(levelname)s] %(message)s",
datefmt="%H:%M:%S",
log_colors={
'DEBUG': 'cyan',
'INFO': 'white',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
})
handler.setFormatter(formatter)
basicConfig(
level=getenv('LOG_LEVEL', 'INFO'),
handlers=[handler]
)
parser = argparse.ArgumentParser(description='Testcase Generator')
parser.add_argument('toml', nargs='*', help='Toml File')
parser.add_argument('-p', '--problem', nargs='*',
help='Generate problem', default=[])
parser.add_argument('--dev', action='store_true', help='Developer Mode')
parser.add_argument('--test', action='store_true', help='CI Mode')
parser.add_argument('--htmldir', help='Generate HTML', default=None)
parser.add_argument('--compile-checker',
action='store_true', help='Deprecated: Compile Checker')
opts = parser.parse_args(args)
if opts.dev and opts.test:
raise ValueError('only one of --dev and --test can be used')
if opts.compile_checker:
logger.warning(
'--compile-checker is deprecated. Checker is compiled in default')
libdir = Path(__file__).parent
problems = list() # type: List[Problem]
for tomlpath in opts.toml:
        toml.load(tomlpath)  # validate each file early; Problem() re-reads info.toml
problems.append(Problem(libdir, Path(tomlpath).parent))
for problem_name in opts.problem:
problem_dir = find_problem_dir(libdir, problem_name)
if problem_dir is None:
raise ValueError('Cannot find problem: {}'.format(problem_name))
problems.append(Problem(libdir, problem_dir))
if len(problems) == 0:
logger.warning('No problems')
if opts.htmldir:
logger.info('Make htmldir')
Path(opts.htmldir).mkdir(exist_ok=True, parents=True)
# suppress the annoying dialog appears when an application crashes on Windows
if platform.uname().system == 'Windows':
import ctypes
SEM_NOGPFAULTERRORBOX = 2 # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863(v=vs.85).aspx
ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX)
mode = Problem.Mode.DEFAULT
if opts.dev:
mode = Problem.Mode.DEV
if opts.test:
mode = Problem.Mode.TEST
for problem in problems:
problem.generate(mode, Path(opts.htmldir) if opts.htmldir else None)
if __name__ == '__main__':
main(sys.argv[1:])
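# Illustrative invocations (problem names are assumptions):
#   ./generate.py path/to/problem/info.toml         # generate from a toml file
#   ./generate.py -p aplusb --dev                   # locate problem by name, dev mode
#   ./generate.py -p aplusb --test --htmldir html   # CI mode, write HTML into ./html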
| 37.711538
| 126
| 0.570117
|
4a17634155efc1d39ffec204c43fb2a4586d1fc4
| 23,588
|
bzl
|
Python
|
swift/internal/xcode_swift_toolchain.bzl
|
LaudateCorpus1/rules_swift
|
f7c05b638f861f7a01c705c2c83e40c776543611
|
[
"Apache-2.0"
] | 2
|
2020-07-01T20:21:35.000Z
|
2021-04-28T21:28:50.000Z
|
swift/internal/xcode_swift_toolchain.bzl
|
LaudateCorpus1/rules_swift
|
f7c05b638f861f7a01c705c2c83e40c776543611
|
[
"Apache-2.0"
] | null | null | null |
swift/internal/xcode_swift_toolchain.bzl
|
LaudateCorpus1/rules_swift
|
f7c05b638f861f7a01c705c2c83e40c776543611
|
[
"Apache-2.0"
] | 2
|
2021-06-03T10:06:10.000Z
|
2022-02-02T14:23:52.000Z
|
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BUILD rules used to provide a Swift toolchain provided by Xcode on macOS.
The rules defined in this file are not intended to be used outside of the Swift
toolchain package. If you are looking for rules to build Swift code using this
toolchain, see `swift.bzl`.
"""
load("@bazel_skylib//lib:collections.bzl", "collections")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:partial.bzl", "partial")
load("@bazel_skylib//lib:types.bzl", "types")
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load(
":features.bzl",
"SWIFT_FEATURE_AUTOLINK_EXTRACT",
"SWIFT_FEATURE_BUNDLED_XCTESTS",
"SWIFT_FEATURE_DEBUG_PREFIX_MAP",
"SWIFT_FEATURE_ENABLE_BATCH_MODE",
"SWIFT_FEATURE_MODULE_MAP_HOME_IS_CWD",
"SWIFT_FEATURE_USE_RESPONSE_FILES",
"features_for_build_modes",
)
load(":providers.bzl", "SwiftToolchainInfo")
load(":wrappers.bzl", "SWIFT_TOOL_WRAPPER_ATTRIBUTES")
def _command_line_objc_copts(objc_fragment):
"""Returns copts that should be passed to `clang` from the `objc` fragment.
Args:
objc_fragment: The `objc` configuration fragment.
Returns:
A list of `clang` copts, each of which is preceded by `-Xcc` so that they can be passed
through `swiftc` to its underlying ClangImporter instance.
"""
# In general, every compilation mode flag from native `objc_*` rules should be passed, but `-g`
# seems to break Clang module compilation. Since this flag does not make much sense for module
# compilation and only touches headers, it's ok to omit.
clang_copts = objc_fragment.copts + objc_fragment.copts_for_current_compilation_mode
return collections.before_each("-Xcc", [copt for copt in clang_copts if copt != "-g"])
def _default_linker_opts(
apple_fragment,
apple_toolchain,
platform,
target,
xcode_config,
is_static,
is_test):
"""Returns options that should be passed by default to `clang` when linking.
This function is wrapped in a `partial` that will be propagated as part of the toolchain
provider. The first five arguments are pre-bound; the `is_static` and `is_test` arguments are
expected to be passed by the caller.
Args:
apple_fragment: The `apple` configuration fragment.
apple_toolchain: The `apple_common.apple_toolchain()` object.
platform: The `apple_platform` value describing the target platform.
target: The target triple.
xcode_config: The Xcode configuration.
is_static: `True` to link against the static version of the Swift runtime, or `False` to
link against dynamic/shared libraries.
is_test: `True` if the target being linked is a test target.
Returns:
The command line options to pass to `clang` to link against the desired variant of the Swift
runtime libraries.
"""
platform_framework_dir = apple_toolchain.platform_developer_framework_dir(apple_fragment)
linkopts = []
uses_runtime_in_os = _is_xcode_at_least_version(xcode_config, "10.2")
if uses_runtime_in_os:
# Starting with Xcode 10.2, Apple forbids statically linking to the Swift runtime. The
# libraries are distributed with the OS and located in /usr/lib/swift.
swift_subdir = "swift"
linkopts.append("-Wl,-rpath,/usr/lib/swift")
elif is_static:
# This branch and the branch below now only support Xcode 10.1 and below. Eventually,
# once we drop support for those versions, they can be deleted.
swift_subdir = "swift_static"
linkopts.extend([
"-Wl,-force_load_swift_libs",
"-framework",
"Foundation",
"-lstdc++",
])
else:
swift_subdir = "swift"
swift_lib_dir = (
"{developer_dir}/Toolchains/{toolchain}.xctoolchain/usr/lib/{swift_subdir}/{platform}"
).format(
developer_dir = apple_toolchain.developer_dir(),
platform = platform.name_in_plist.lower(),
swift_subdir = swift_subdir,
toolchain = "XcodeDefault",
)
# TODO(b/128303533): It's possible to run Xcode 10.2 on a version of macOS 10.14.x that does
# not yet include `/usr/lib/swift`. Later Xcode 10.2 betas have deleted the `swift_static`
# directory, so we must manually add the dylibs to the binary's rpath or those binaries won't
# be able to run at all. This is added after `/usr/lib/swift` above so the system versions
# will always be preferred if they are present.
# This workaround can be removed once Xcode 10.2 and macOS 10.14.4 are out of beta.
if uses_runtime_in_os and platform == apple_common.platform.macos:
linkopts.append("-Wl,-rpath,{}".format(swift_lib_dir))
linkopts.extend([
"-F{}".format(platform_framework_dir),
"-L{}".format(swift_lib_dir),
# TODO(b/112000244): These should get added by the C++ Skylark API, but we're using the
# "c++-link-executable" action right now instead of "objc-executable" because the latter
# requires additional variables not provided by cc_common. Figure out how to handle this
# correctly.
"-ObjC",
"-Wl,-objc_abi_version,2",
])
# XCTest.framework only lives in the Xcode bundle (its platform framework
# directory), so test binaries need to have that directory explicitly added to
# their rpaths.
if is_test:
linkopts.append("-Wl,-rpath,{}".format(platform_framework_dir))
return linkopts
def _default_swiftc_copts(apple_fragment, apple_toolchain, target):
"""Returns options that should be passed by default to `swiftc`.
Args:
apple_fragment: The `apple` configuration fragment.
apple_toolchain: The `apple_common.apple_toolchain()` object.
target: The target triple.
Returns:
A list of options that will be passed to any compile action created by this toolchain.
"""
copts = [
"-target",
target,
"-sdk",
apple_toolchain.sdk_dir(),
"-F",
apple_toolchain.platform_developer_framework_dir(apple_fragment),
]
bitcode_mode = str(apple_fragment.bitcode_mode)
if bitcode_mode == "embedded":
copts.append("-embed-bitcode")
elif bitcode_mode == "embedded_markers":
copts.append("-embed-bitcode-marker")
elif bitcode_mode != "none":
fail("Internal error: expected apple_fragment.bitcode_mode to be one of: " +
"['embedded', 'embedded_markers', 'none']")
return copts
def _is_macos(platform):
"""Returns `True` if the given platform is macOS.
Args:
platform: An `apple_platform` value describing the platform for which a
target is being built.
Returns:
`True` if the given platform is macOS.
"""
return platform.platform_type == apple_common.platform_type.macos
def _trim_version(version):
"""Trim the given version number down to a maximum of three components.
Args:
version: The version number to trim; either a string or a `DottedVersion` value.
Returns:
The trimmed version number as a `DottedVersion` value.
"""
version = str(version)
parts = version.split(".")
maxparts = min(len(parts), 3)
return apple_common.dotted_version(".".join(parts[:maxparts]))
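# Illustrative: _trim_version("10.2.1.5") yields the DottedVersion 10.2.1,
# which keeps comparisons against three-component minimum versions well defined.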
def _is_xcode_at_least_version(xcode_config, desired_version):
"""Returns True if we are building with at least the given Xcode version.
Args:
xcode_config: the `apple_common.XcodeVersionConfig` provider.
desired_version: The minimum desired Xcode version, as a dotted version string.
Returns:
True if the current target is being built with a version of Xcode at least as high as the
given version.
"""
current_version = xcode_config.xcode_version()
if not current_version:
fail("Could not determine Xcode version at all. This likely means Xcode isn't " +
"available; if you think this is a mistake, please file an issue.")
# TODO(b/131195460): DottedVersion comparison is broken for four-component versions that are
# returned by modern Xcodes. Work around it for now.
desired_version_value = _trim_version(desired_version)
return _trim_version(current_version) >= desired_version_value
def _modified_action_args(
action_args,
toolchain_env,
toolchain_execution_requirements):
"""Updates an argument dictionary with values from a toolchain.
Args:
action_args: The `kwargs` dictionary from a call to `actions.run` or `actions.run_shell`.
toolchain_env: The required environment from the toolchain.
toolchain_execution_requirements: The required execution requirements from the toolchain.
Returns:
A dictionary that can be passed as the `**kwargs` to a call to one of the action running
functions that has been modified to include the toolchain values.
"""
modified_args = dict(action_args)
# Note that we add the toolchain values second; we do not want the caller to ever be able to
# override those values. Note also that passing the default to `get` does not always work
# because `None` could be explicitly a value in the dictionary.
modified_args["env"] = dicts.add(modified_args.get("env") or {}, toolchain_env)
modified_args["execution_requirements"] = dicts.add(
modified_args.get("execution_requirements") or {},
toolchain_execution_requirements,
)
return modified_args
def _run_action(
toolchain_env,
toolchain_execution_requirements,
bazel_xcode_wrapper,
actions,
**kwargs):
"""Runs an action with the toolchain requirements.
This is the implementation of the `action_registrars.run` partial, where the first three
arguments are pre-bound to toolchain-specific values.
Args:
toolchain_env: The required environment from the toolchain.
toolchain_execution_requirements: The required execution requirements from the toolchain.
bazel_xcode_wrapper: A `File` representing the Bazel Xcode wrapper executable for the
action.
actions: The `Actions` object with which to register actions.
**kwargs: Additional arguments that are passed to `actions.run`.
"""
remaining_args = _modified_action_args(kwargs, toolchain_env, toolchain_execution_requirements)
# Get the user's arguments. If the caller gave us a list of strings instead of a list of `Args`
# objects, convert it to a list of `Args` because we're going to create our own `Args` that we
# prepend to it.
user_args = remaining_args.pop("arguments", [])
if user_args and types.is_string(user_args[0]):
user_args_strings = user_args
user_args_object = actions.args()
user_args_object.add_all(user_args_strings)
user_args = [user_args_object]
# Since we're executing the wrapper, make the user's desired executable the first argument to
# it.
user_executable = remaining_args.pop("executable")
wrapper_args = actions.args()
wrapper_args.add("/usr/bin/xcrun")
wrapper_args.add(user_executable)
# We also need to include the user executable in the "tools" argument of the action, since it
# won't be referenced by "executable" anymore.
user_tools = remaining_args.pop("tools", None)
if types.is_list(user_tools):
tools = [user_executable] + user_tools
elif type(user_tools) == type(depset()):
tools = depset(direct = [user_executable], transitive = [user_tools])
elif user_tools:
fail("'tools' argument must be a sequence or depset.")
elif not types.is_string(user_executable):
# Only add the user_executable to the "tools" list if it's a File, not a string.
tools = [user_executable]
else:
tools = []
actions.run(
arguments = [wrapper_args] + user_args,
executable = bazel_xcode_wrapper,
tools = tools,
**remaining_args
)
def _run_shell_action(
toolchain_env,
toolchain_execution_requirements,
bazel_xcode_wrapper,
actions,
**kwargs):
"""Runs a shell action with the toolchain requirements.
This is the implementation of the `action_registrars.run_shell` partial, where the first three
arguments are pre-bound to toolchain-specific values.
Args:
toolchain_env: The required environment from the toolchain.
toolchain_execution_requirements: The required execution requirements from the toolchain.
bazel_xcode_wrapper: A `File` representing the Bazel Xcode wrapper executable for the
action.
actions: The `Actions` object with which to register actions.
**kwargs: Additional arguments that are passed to `actions.run_shell`.
"""
remaining_args = _modified_action_args(kwargs, toolchain_env, toolchain_execution_requirements)
# We need to add the wrapper to the tools of the action so that we can reference its path in the
# new command line.
user_tools = remaining_args.pop("tools", [])
if types.is_list(user_tools):
tools = [bazel_xcode_wrapper] + user_tools
elif type(user_tools) == type(depset()):
tools = depset(direct = [bazel_xcode_wrapper], transitive = [user_tools])
else:
fail("'tools' argument must be a sequence or depset.")
# Prepend the wrapper executable to the command being executed.
user_command = remaining_args.pop("command", "")
if types.is_list(user_command):
command = [bazel_xcode_wrapper.path, "/usr/bin/xcrun"] + user_command
else:
command = "{wrapper_path} /usr/bin/xcrun {user_command}".format(
user_command = user_command,
wrapper_path = bazel_xcode_wrapper.path,
)
actions.run_shell(
command = command,
tools = tools,
**remaining_args
)
def _run_swift_action(
toolchain_env,
toolchain_execution_requirements,
bazel_xcode_wrapper,
swift_wrapper,
actions,
**kwargs):
"""Runs a Swift tool with the toolchain requirements.
This is the implementation of the `action_registrars.run_swift` partial, where the first four
arguments are pre-bound to toolchain-specific values.
Args:
toolchain_env: The required environment from the toolchain.
toolchain_execution_requirements: The required execution requirements from the toolchain.
bazel_xcode_wrapper: A `File` representing the Bazel Xcode wrapper executable for the
action.
swift_wrapper: A `File` representing the executable that wraps Swift tool invocations.
actions: The `Actions` object with which to register actions.
**kwargs: Additional arguments that are passed to `actions.run`.
"""
remaining_args = _modified_action_args(kwargs, toolchain_env, toolchain_execution_requirements)
# Get the user's arguments. If the caller gave us a list of strings instead of a list of `Args`
# objects, convert it to a list of `Args` because we're going to create our own `Args` that we
# prepend to it.
user_args = remaining_args.pop("arguments", [])
if user_args and types.is_string(user_args[0]):
user_args_strings = user_args
user_args_object = actions.args()
user_args_object.add_all(user_args_strings)
user_args = [user_args_object]
# The ordering that we want is `<bazel wrapper> <swift wrapper> xcrun <swift tool>`. This
# ensures that we ask `xcrun` to run the correct tool instead of having it get picked up
# from the system path.
swift_tool = remaining_args.pop("swift_tool")
wrapper_args = actions.args()
wrapper_args.add(swift_wrapper)
wrapper_args.add("/usr/bin/xcrun")
wrapper_args.add(swift_tool)
# We also need to include the Swift wrapper in the "tools" argument of the action.
user_tools = remaining_args.pop("tools", None)
if types.is_list(user_tools):
tools = [swift_wrapper] + user_tools
elif type(user_tools) == type(depset()):
tools = depset(direct = [swift_wrapper], transitive = [user_tools])
elif user_tools:
fail("'tools' argument must be a sequence or depset.")
    else:
        # No tools were provided by the caller, so the Swift wrapper is the only tool
        # the action needs.
        tools = [swift_wrapper]
actions.run(
arguments = [wrapper_args] + user_args,
executable = bazel_xcode_wrapper,
tools = tools,
**remaining_args
)
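
# Illustrative sketch: a Swift action registered through the partial above ends up with
# a command line shaped roughly like the following (tool name hypothetical):
#
#     bazel_xcode_wrapper swift_wrapper /usr/bin/xcrun swiftc <user arguments...>
#
# `bazel_xcode_wrapper` applies the Xcode environment, `swift_wrapper` post-processes
# the Swift invocation, and `xcrun` resolves the tool inside the selected toolchain.
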
def _swift_apple_target_triple(cpu, platform, version):
"""Returns a target triple string for an Apple platform.
Args:
cpu: The CPU of the target.
platform: The `apple_platform` value describing the target platform.
version: The target platform version as a dotted version string.
Returns:
A target triple string describing the platform.
"""
platform_string = str(platform.platform_type)
if platform_string == "macos":
platform_string = "macosx"
environment = ""
if not platform.is_device:
environment = "-simulator"
return "{cpu}-apple-{platform}{version}{environment}".format(
cpu = cpu,
environment = environment,
platform = platform_string,
version = version,
)
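
# For example (values illustrative), following the formatting above:
#
#     _swift_apple_target_triple("x86_64", <iOS simulator platform>, "12.1")
#         => "x86_64-apple-ios12.1-simulator"
#     _swift_apple_target_triple("arm64", <iOS device platform>, "12.1")
#         => "arm64-apple-ios12.1"
#     _swift_apple_target_triple("x86_64", <macOS platform>, "10.13")
#         => "x86_64-apple-macosx10.13"
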
def _xcode_env(xcode_config, platform):
"""Returns a dictionary containing Xcode-related environment variables.
Args:
xcode_config: The `XcodeVersionConfig` provider that contains information about the current
Xcode configuration.
platform: The `apple_platform` value describing the target platform being built.
Returns:
A `dict` containing Xcode-related environment variables that should be passed to Swift
compile and link actions.
"""
return dicts.add(
apple_common.apple_host_system_env(xcode_config),
apple_common.target_apple_env(xcode_config, platform),
)
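
# Illustrative sketch: the merged dictionary typically includes host entries such as
# "XCODE_VERSION_OVERRIDE" and target entries such as "APPLE_SDK_PLATFORM" and
# "APPLE_SDK_VERSION_OVERRIDE"; the exact keys are an implementation detail of
# `apple_common` and may vary across Bazel versions.
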
def _xcode_swift_toolchain_impl(ctx):
apple_fragment = ctx.fragments.apple
apple_toolchain = apple_common.apple_toolchain()
cpu = apple_fragment.single_arch_cpu
platform = apple_fragment.single_arch_platform
xcode_config = ctx.attr._xcode_config[apple_common.XcodeVersionConfig]
target_os_version = xcode_config.minimum_os_for_platform_type(platform.platform_type)
target = _swift_apple_target_triple(cpu, platform, target_os_version)
linker_opts_producer = partial.make(
_default_linker_opts,
apple_fragment,
apple_toolchain,
platform,
target,
xcode_config,
)
swiftc_copts = _default_swiftc_copts(apple_fragment, apple_toolchain, target)
    # Configure the action registrars that automatically prepend the Bazel Xcode wrapper
    # (which sets up the Xcode environment and invokes `xcrun`) to registered actions.
env = _xcode_env(xcode_config, platform)
swift_toolchain_env = {}
custom_toolchain = ctx.var.get("SWIFT_CUSTOM_TOOLCHAIN")
if custom_toolchain:
swift_toolchain_env["TOOLCHAINS"] = custom_toolchain
execution_requirements = {"requires-darwin": ""}
bazel_xcode_wrapper = ctx.executable._bazel_xcode_wrapper
action_registrars = struct(
run = partial.make(_run_action, env, execution_requirements, bazel_xcode_wrapper),
run_shell = partial.make(
_run_shell_action,
env,
execution_requirements,
bazel_xcode_wrapper,
),
run_swift = partial.make(
_run_swift_action,
dicts.add(env, swift_toolchain_env),
execution_requirements,
bazel_xcode_wrapper,
ctx.executable._swift_wrapper,
),
)
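    # Illustrative sketch: a consuming rule implementation would invoke one of these
    # registrars through skylib's `partial` (argument values hypothetical):
    #
    #     partial.call(
    #         action_registrars.run_swift,
    #         actions = ctx.actions,
    #         swift_tool = "swiftc",
    #         arguments = [compile_args],
    #         outputs = [object_file],
    #     )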
cc_toolchain = find_cpp_toolchain(ctx)
cc_toolchain_files = cc_toolchain.all_files
# Compute the default requested features and conditional ones based on Xcode version.
requested_features = features_for_build_modes(ctx, objc_fragment = ctx.fragments.objc)
requested_features.extend(ctx.features)
requested_features.append(SWIFT_FEATURE_BUNDLED_XCTESTS)
# Xcode 10.0 implies Swift 4.2.
if _is_xcode_at_least_version(xcode_config, "10.0"):
requested_features.append(SWIFT_FEATURE_ENABLE_BATCH_MODE)
requested_features.append(SWIFT_FEATURE_USE_RESPONSE_FILES)
# Xcode 10.2 implies Swift 5.0.
if _is_xcode_at_least_version(xcode_config, "10.2"):
requested_features.append(SWIFT_FEATURE_DEBUG_PREFIX_MAP)
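    # Illustrative note: these are ordinary Bazel features, so a target can still opt
    # out of one, e.g. with `features = ["-" + SWIFT_FEATURE_ENABLE_BATCH_MODE]` in its
    # attributes (sketch; the constant's string value is defined elsewhere in this
    # repository).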
command_line_copts = _command_line_objc_copts(ctx.fragments.objc) + ctx.fragments.swift.copts()
return [
SwiftToolchainInfo(
action_environment = env,
action_registrars = action_registrars,
cc_toolchain_files = cc_toolchain_files,
cc_toolchain_info = cc_toolchain,
clang_executable = None,
command_line_copts = command_line_copts,
cpu = cpu,
execution_requirements = execution_requirements,
implicit_deps = [],
linker_opts_producer = linker_opts_producer,
object_format = "macho",
requested_features = requested_features,
root_dir = None,
stamp = ctx.attr.stamp if _is_macos(platform) else None,
supports_objc_interop = True,
swiftc_copts = swiftc_copts,
swift_worker = ctx.executable._swift_worker,
system_name = "darwin",
unsupported_features = ctx.disabled_features + [
SWIFT_FEATURE_AUTOLINK_EXTRACT,
SWIFT_FEATURE_MODULE_MAP_HOME_IS_CWD,
],
),
    ]

xcode_swift_toolchain = rule(
attrs = dicts.add(SWIFT_TOOL_WRAPPER_ATTRIBUTES, {
"stamp": attr.label(
doc = """
A `CcInfo`-providing target that should be linked into any binaries that are built with stamping
enabled.
""",
providers = [[CcInfo]],
),
"_bazel_xcode_wrapper": attr.label(
cfg = "host",
default = Label(
"@build_bazel_rules_swift//tools/wrappers:bazel_xcode_wrapper",
),
executable = True,
),
"_cc_toolchain": attr.label(
default = Label("@bazel_tools//tools/cpp:current_cc_toolchain"),
doc = """
The C++ toolchain from which linking flags and other tools needed by the Swift toolchain (such as
`clang`) will be retrieved.
""",
),
"_xcode_config": attr.label(
default = configuration_field(
name = "xcode_config_label",
fragment = "apple",
),
),
}),
doc = "Represents a Swift compiler toolchain provided by Xcode.",
fragments = [
"apple",
"objc",
"swift",
],
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
implementation = _xcode_swift_toolchain_impl,
)
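
# Illustrative BUILD usage (load path and target name hypothetical):
#
#     load("//swift/internal:xcode_swift_toolchain.bzl", "xcode_swift_toolchain")
#
#     xcode_swift_toolchain(name = "toolchain")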