id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
6446315 | <filename>Toby/WGAN_Model.py
import numpy as np
import functools
import time
import matplotlib.pyplot as plt
import tensorflow as tf
import keras
from keras.optimizers import Adam
from network import build_critic, build_generator
from tensorflow import reduce_mean
from sklearn.preprocessing import *
class WGAN():
    """Wasserstein GAN with gradient penalty (WGAN-GP).

    Couples a generator and a critic (built in ``network.py``) and provides
    data preprocessing, the adversarial training loop with TensorBoard
    logging, periodic plotting of generator samples, and a prediction
    routine that optimizes latent codes to match given data points.

    NOTE(review): indentation was lost in the source dump; statement grouping
    in the loops below has been reconstructed — confirm against the original.
    """

    def __init__(self, n_features, latent_space=3, BATCH_SIZE=100, n_var=2, use_bias=False):
        # Model/data dimensions and training batch size.
        self.n_features = n_features
        self.n_var = n_var
        self.BATCH_SIZE = BATCH_SIZE
        self.latent_space = latent_space
        # Number of critic updates per generator update.
        self.n_critic = 8
        # building the components of the WGAN-GP
        self.generator = build_generator(self.latent_space, n_var, self.n_features, use_bias)
        self.critic = build_critic(n_var, use_bias)
        self.wgan = keras.models.Sequential([self.generator, self.critic])
        # setting hyperparemeters of the WGAN-GP
        self.generator_mean_loss = tf.keras.metrics.Mean(dtype=tf.float32)
        self.critic_mean_loss = tf.keras.metrics.Mean(dtype=tf.float32)
        # self.generator_optimizer = tf.keras.optimizers.RMSprop(lr=0.00005)
        # self.critic_optimizer = tf.keras.optimizers.RMSprop(lr=0.00005)
        self.generator_optimizer = tf.keras.optimizers.Adam(learning_rate=0.0001, beta_1=0.5, beta_2=0.9)
        self.critic_optimizer = tf.keras.optimizers.Adam(learning_rate=0.0001, beta_1=0.5, beta_2=0.9)
        # saving the events of the generator and critic
        generator_log_dir = './content/generator'
        critic_log_dir = './content/critic'
        self.generator_summary_writer = tf.summary.create_file_writer(generator_log_dir)
        self.critic_summary_writer = tf.summary.create_file_writer(critic_log_dir)
        # for prediction function
        self.mse = tf.keras.losses.MeanSquaredError()
        self.optimizer = tf.keras.optimizers.Adam(1e-2)

    ############################################################################
    ############################################################################
    # preprocessing
    def preproc(self, X_train, y_train, scaled):
        """
        Prepares the data for the WGAN-GP by splitting the data set
        into batches and normalizing it between -1 and 1.

        `scaled` selects the MinMaxScaler range: '-1-1' or '0-1'.
        NOTE(review): any other value leaves X_train_scaled unbound and
        raises UnboundLocalError below — confirm callers only pass those two.

        Returns (batched tf.data.Dataset, fitted scaler, scaled array).
        """
        sample_data = np.concatenate((X_train, y_train), axis=1)
        if scaled == '-1-1':
            scaler = MinMaxScaler(feature_range=(-1, 1))
            X_train_scaled = scaler.fit_transform(sample_data)
        elif scaled == '0-1':
            scaler = MinMaxScaler(feature_range=(0, 1))
            X_train_scaled = scaler.fit_transform(sample_data)
        train_dataset = X_train_scaled.reshape(-1, self.n_features).astype('float32')
        train_dataset = tf.data.Dataset.from_tensor_slices(train_dataset)
        train_dataset = train_dataset.shuffle(len(X_train_scaled))
        train_dataset = train_dataset.batch(self.BATCH_SIZE)
        num = 0
        # Debug pass over the batches; `num` is counted but not otherwise used.
        for data in train_dataset:
            print("every time the data shape", data.shape)
            num += 1
        return train_dataset, scaler, X_train_scaled

    ############################################################################
    ############################################################################
    # training
    def generator_loss(self, fake_output):
        """Generator Wasserstein loss: maximise the critic's score on fakes."""
        return -tf.reduce_mean(fake_output)

    def critic_loss(self, real_output, fake_output):
        """Critic Wasserstein loss: mean fake score minus mean real score."""
        return tf.reduce_mean(fake_output) - tf.reduce_mean(real_output)
        # return tf.reduce_mean(real_output)-tf.reduce_mean(fake_output)

    def gradient_penalty(self, f, real, fake):
        """
        WGAN-GP uses gradient penalty instead of the weight
        clipping to enforce the Lipschitz constraint.

        The `f` argument is accepted but unused here; the critic is called
        directly via self.critic.
        """
        # alpha = tf.random.normal([self.BATCH_SIZE, self.n_var], mean=0.0, stddev=0.1)
        # NOTE(review): alpha is drawn from U(-1, 1); the standard WGAN-GP recipe
        # samples U(0, 1) so the interpolate lies between real and fake — confirm.
        alpha = tf.random.uniform(shape=[self.BATCH_SIZE, self.n_var], minval=-1., maxval=1.)
        interpolated = real + alpha * (fake - real)
        with tf.GradientTape() as t:
            t.watch(interpolated)
            pred = self.critic(interpolated, training=True)
        grad = t.gradient(pred, interpolated)
        # NOTE(review): reduce_sum takes the norm over the whole batch rather than
        # per sample (reference implementations use a per-row norm) — confirm.
        norm = tf.sqrt(tf.reduce_sum(tf.square(grad)) + 1e-12)
        gp = tf.reduce_mean((norm - 1.)**2)
        return gp

    @tf.function
    def train_G(self, batch):
        """
        The training routine for the generator
        """
        # Match the noise batch to the (possibly short, final) data batch.
        if batch.shape[0] == self.BATCH_SIZE:
            noise = tf.random.normal([self.BATCH_SIZE, self.latent_space], mean=0.0, stddev=0.1)
        else:
            noise = tf.random.normal([batch.shape[0] % self.BATCH_SIZE, self.latent_space], mean=0.0, stddev=0.1)
        with tf.GradientTape() as gen_tape:
            generated_data = self.generator(noise, training=True)
            fake_output = self.critic(generated_data, training=True)
            gen_loss = self.generator_loss(fake_output)
        gradients_of_generator = gen_tape.gradient(gen_loss, self.generator.trainable_variables)
        self.generator_optimizer.apply_gradients(zip(gradients_of_generator, self.generator.trainable_variables))
        # return tf.math.abs(gen_loss)
        return gen_loss

    @tf.function
    def train_D(self, batch):
        """
        The training routine for the critic
        """
        # Match the noise batch to the (possibly short, final) data batch.
        if batch.shape[0] == self.BATCH_SIZE:
            noise = tf.random.normal([self.BATCH_SIZE, self.latent_space], mean=0.0, stddev=0.1)
        else:
            noise = tf.random.normal([batch.shape[0] % self.BATCH_SIZE, self.latent_space], mean=0.0, stddev=0.1)
        with tf.GradientTape() as disc_tape:
            generated_data = self.generator(noise, training=True)
            real_output = self.critic(batch, training=True)
            fake_output = self.critic(generated_data, training=True)
            disc_loss = self.critic_loss(real_output, fake_output)
            gp = self.gradient_penalty(functools.partial(self.critic, training=True), batch, generated_data)
            # disc_loss += (self.critic_loss(real_output, fake_output) + gp*10.0)
            # Gradient-penalty coefficient lambda = 10.
            disc_loss += gp*10.0
        gradients_of_critic = disc_tape.gradient(disc_loss, self.critic.trainable_variables)
        self.critic_optimizer.apply_gradients(zip(gradients_of_critic, self.critic.trainable_variables))
        # return tf.math.abs(disc_loss)
        return disc_loss

    def train(self, dataset, epochs, scaler, scaled, X_train, y_train):
        """
        Training the WGAN-GP

        Returns `hist`, a list of [generator_mean_loss, critic_mean_loss]
        per epoch. Every 500th epoch a sample plot is saved via plot_latent.
        """
        hist = []
        for epoch in range(epochs):
            start = time.time()
            print("Epoch {}/{}".format(epoch, epochs))
            for batch in dataset:
                for _ in range(self.n_critic):
                    self.train_D(batch)
                # NOTE(review): one extra critic update beyond the n_critic loop,
                # used only to record the loss — confirm intended.
                disc_loss = self.train_D(batch)
                self.critic_mean_loss(disc_loss)
                gen_loss = self.train_G(batch)
                self.generator_mean_loss(gen_loss)
                # NOTE(review): a second generator update per batch whose loss is
                # not logged — confirm intended.
                self.train_G(batch)
            with self.generator_summary_writer.as_default():
                tf.summary.scalar('generator_loss', self.generator_mean_loss.result(), step=epoch)
            with self.critic_summary_writer.as_default():
                tf.summary.scalar('critic_loss', self.critic_mean_loss.result(), step=epoch)
            hist.append([self.generator_mean_loss.result().numpy(), self.critic_mean_loss.result().numpy()])
            self.generator_mean_loss.reset_states()
            self.critic_mean_loss.reset_states()
            # outputting loss information
            print("critic: {:.6f}".format(hist[-1][1]), end=' - ')
            print("generator: {:.6f}".format(hist[-1][0]), end=' - ')
            print('{:.0f}s'.format(time.time()-start))
            if epoch % 500 == 0:
                # #save the model
                # self.wgan.save('./content/'+'wgan'+str(epoch)+'.h5')
                self.plot_latent(scaler, scaled, X_train, y_train, epoch)
        return hist

    def plot_latent(self, scaler, scaled, X_train, y_train, epoch):
        """Sample 1000 latent vectors, decode + un-scale them, and save a scatter plot."""
        latent_values = tf.random.normal([1000, self.latent_space], mean=0.0, stddev=0.1)
        # predicted_vals_1 = scaler.inverse_transform((self.generator.predict(tf.convert_to_tensor(latent_values)).reshape(1, self.n_features)))
        # predicted_values = self.generator.predict(latent_values)
        # NOTE(review): both branches below are identical; the split on `scaled`
        # presumably mirrors preproc() but currently has no effect — confirm.
        if scaled == '-1-1':
            predicted_values = scaler.inverse_transform((self.generator.predict(tf.convert_to_tensor(latent_values)).reshape(1000, self.n_features)))
        elif scaled == '0-1':
            predicted_values = scaler.inverse_transform((self.generator.predict(tf.convert_to_tensor(latent_values)).reshape(1000, self.n_features)))
        predicted_values = predicted_values.reshape(1000, self.n_features)
        # NOTE(review): drops the first generated sample — confirm intended.
        predicted_values = predicted_values[1:, :]
        plt.clf()
        plt.plot(X_train, y_train, 'o')
        plt.plot(predicted_values[:, 0], predicted_values[:, 1], 'o')
        plt.savefig('GANS/Random_test/GANS_test'+str(epoch)+'.png')

    ############################################################################
    ############################################################################
    # prediction
    def mse_loss(self, inp, outp):
        """
        Calculates the MSE loss between the x-coordinates
        """
        inp = tf.reshape(inp, [-1, self.n_features])
        outp = tf.reshape(outp, [-1, self.n_features])
        # Only column 0 (the x-coordinate) enters the loss.
        return self.mse(inp[:, 0], outp[:, 0])

    def opt_step(self, latent_values, real_coding):
        """
        Minimizes the loss between generated point and inputted point
        """
        with tf.GradientTape() as tape:
            tape.watch(latent_values)
            gen_output = self.generator(latent_values, training=False)
            loss = self.mse_loss(real_coding, gen_output)
        gradient = tape.gradient(loss, latent_values)
        self.optimizer.apply_gradients(zip([gradient], [latent_values]))
        return loss

    def optimize_coding(self, real_coding):
        """
        Optimizes the latent space values
        """
        latent_values = tf.random.normal([len(real_coding), self.latent_space], mean=0.0, stddev=0.1)
        latent_values = tf.Variable(latent_values)
        # loss = [100,90]
        # lr = 1e-2
        # while loss[-1] > 0.5:
        loss = []
        # Fixed budget of 2000 Adam steps on the latent variables.
        for epoch in range(2000):
            # #print(loss[-1])
            # if loss[-1] > loss[-2]:
            # lr = lr * 0.1
            # self.optimizer = tf.keras.optimizers.Adam(lr)
            loss.append(self.opt_step(latent_values, real_coding).numpy())
        return latent_values

    # def predict(self, input_data, scaler):
    # """
    # Optimizes the latent space of the input then produces a prediction from
    # the generator.
    # """
    # predicted_vals = np.zeros((1, self.n_features))
    #
    # for n in range(len(input_data)):
    # print("Optimizing latent space for point ", n, " / ", len(input_data))
    # real_coding = input_data[n].reshape(1, self.n_features)
    # real_coding = tf.constant(real_coding)
    # real_coding = tf.cast(real_coding, dtype=tf.float32)
    #
    #
    # latent_values = self.optimize_coding(real_coding)
    #
    # predicted_vals_1 = scaler.inverse_transform((self.generator.predict(tf.convert_to_tensor(latent_values)).reshape(1, self.n_features)))
    # predicted_vals_1 = predicted_vals_1.reshape(1, self.n_features)
    # predicted_vals = np.concatenate((predicted_vals, predicted_vals_1), axis=0)
    #
    # predicted_vals = predicted_vals[1:,:]
    # return predicted_vals

    # def predict(self, input_data, scaler):
    # """
    # Optimizes the latent space of the input then produces a prediction from
    # the generator.
    # """
    # predicted_vals = np.zeros((1, self.n_features))
    #
    # unscaled = scaler.inverse_transform(input_data)
    #
    # for n in range(len(input_data)):
    # print("Optimizing latent space for point ", n, " / ", len(input_data))
    # real_coding = input_data[n].reshape(1,-1)
    # real_coding = tf.constant(real_coding)
    # real_coding = tf.cast(real_coding, dtype=tf.float32)
    #
    # latent_values = self.optimize_coding(real_coding)
    #
    #
    # predicted_vals_1 = scaler.inverse_transform((self.generator.predict(tf.convert_to_tensor(latent_values)).reshape(l, self.n_features)))
    # predicted_vals_1 = predicted_vals_1.reshape(1, self.n_features)
    # # print(predicted_vals_1)
    # # predicted_vals_1[0,0] = unscaled[n,0]
    # # print(predicted_vals_1)
    # predicted_vals = np.concatenate((predicted_vals, predicted_vals_1), axis=0)
    #
    # predicted_vals = predicted_vals[1:,:]
    # return predicted_vals

    def predict(self, input_data, scaler):
        """
        Optimizes the latent space of the input then produces a prediction from
        the generator.
        """
        predicted_vals = np.zeros((1, self.n_features))
        # NOTE(review): `unscaled` is computed but never used — confirm.
        unscaled = scaler.inverse_transform(input_data)
        latent_values = self.optimize_coding(input_data)
        predicted_vals_1 = scaler.inverse_transform((self.generator.predict(tf.convert_to_tensor(latent_values)).reshape(len(input_data), self.n_features)))
        predicted_vals_1 = predicted_vals_1.reshape(len(input_data), self.n_features)
        # NOTE(review): [1:, :] looks like a leftover from the commented versions
        # above (which seeded predicted_vals with a zero row); here it silently
        # discards the first real prediction — confirm.
        predicted_vals = predicted_vals_1[1:, :]
        return predicted_vals

    # Single Input is implemented above for prediction across the whole range
    # please uncomment the function below and comment out the function with the
    # same name above for it to run
    # def mse_loss(self, inp, outp):
    # inp = tf.reshape(inp, [-1, self.n_features])
    # outp = tf.reshape(outp, [-1, self.n_features])
    # return self.mse(inp, outp)
| StarcoderdataPython |
6417954 | """Indicator model subclasses"""
from ipaddress import IPv4Address, IPv6Address
from typing import Any
from pydantic import HttpUrl, NameEmail
from ..types import MD5, SHA1, SHA256, Domain
from .base import Indicator, IndicatorType
class MD5Indicator(Indicator):
    """MD5 hash indicator"""
    # `value` is validated by the custom MD5 type imported from ..types.
    value: MD5
    type = IndicatorType.md5
class SHA255Indicator(Indicator):
    """SHA256 hash indicator.

    NOTE(review): the class name contains a typo ("255" instead of "256").
    The name is kept for backward compatibility with existing callers; a
    correctly spelled alias is provided below — prefer it in new code.
    """
    # `value` is validated by the custom SHA256 type imported from ..types.
    value: SHA256
    type = IndicatorType.sha256


# Correctly spelled, backward-compatible alias for SHA255Indicator.
SHA256Indicator = SHA255Indicator
class SHA1Indicator(Indicator):
    """SHA1 hash indicator"""
    # `value` is validated by the custom SHA1 type imported from ..types.
    value: SHA1
    type = IndicatorType.sha1
class IPv4Indicator(Indicator):
    """IPv4 address indicator"""
    # Parsed/validated by the stdlib ipaddress.IPv4Address type.
    value: IPv4Address
    type = IndicatorType.ipv4
class IPv6Indicator(Indicator):
    """IPv6 address indicator"""
    # Parsed/validated by the stdlib ipaddress.IPv6Address type.
    value: IPv6Address
    type = IndicatorType.ipv6
class DomainIndicator(Indicator):
    """Domain name indicator"""
    # `value` is validated by the custom Domain type imported from ..types.
    value: Domain
    type = IndicatorType.domain
class URLIndicator(Indicator):
    """URL indicator"""
    # Validated by pydantic's HttpUrl (http/https URLs only).
    value: HttpUrl
    type = IndicatorType.url
class FunctionIndicator(Indicator):
    """Function name indicator"""
    # Free-form string; no additional validation beyond `str`.
    value: str
    type = IndicatorType.function
class EmailIndicator(Indicator):
    """Email address indicator"""
    # pydantic NameEmail accepts both "Name <addr>" and bare address forms.
    value: NameEmail
    type = IndicatorType.email
class PublisherIndicator(Indicator):
    """Publisher name indicator"""
    # Free-form string; no additional validation beyond `str`.
    value: str
    type = IndicatorType.publisher
class KeywordIndicator(Indicator):
    """Generic keyword indicator"""
    # Free-form string; catch-all indicator kind.
    value: str
    type = IndicatorType.keyword
| StarcoderdataPython |
9738273 | <filename>dataset.py
#
# Video Action Recognition with Pytorch
#
# Paper citation
#
# Action Recognition in Video Sequences using
# Deep Bi-Directional LSTM With CNN Features
# 2017, <NAME> et al.
# Digital Object Identifier 10.1109/ACCESS.2017.2778011 @ IEEEAccess
#
# See also main.py
#
import requests
import os
import glob
import torch
def download_file(URL, destination):
    """Fetch *URL* over HTTP with streaming enabled and write the body to *destination*."""
    http = requests.Session()
    resp = http.get(URL, stream=True)
    save_response_content(resp, destination)
def save_response_content(response, destination):
    """Stream the body of *response* to the file *destination* in 32 KiB chunks."""
    chunk_size = 32768
    with open(destination, "wb") as out:
        for piece in response.iter_content(chunk_size):
            # Keep-alive chunks arrive as empty bytes; skip them.
            if piece:
                out.write(piece)
def prepare_dataset(colab):
    """Download and unpack the HMDB51 videos and their train/test split annotations.

    Args:
        colab: when True, use Google-Drive paths; otherwise use local ./ paths.

    Side effects only (creates directories, downloads archives, shells out to
    `unrar`); returns None. Each step is skipped if its output already exists.
    """
    if colab:
        base_path = '/content/drive/MyDrive/dataset'
        checkpoints_path = '/content/drive/MyDrive/checkpoints'
        results_path = '/content/drive/MyDrive/results'
    else:
        base_path = './dataset'
        # BUG FIX: this was assigned to `checkpoint_path` (singular), leaving
        # `checkpoints_path` undefined and raising NameError in the makedirs
        # call below whenever colab=False.
        checkpoints_path = './checkpoints'
        results_path = './results'
    if not os.path.isdir(base_path):
        os.makedirs(base_path)
        os.makedirs(checkpoints_path)
        os.makedirs(results_path)
    if not os.path.isfile(base_path + '/hmdb51_org.rar'):
        print('Downloading the dataset...')
        download_file('http://serre-lab.clps.brown.edu/wp-content/uploads/2013/10/hmdb51_org.rar', base_path + '/hmdb51_org.rar')
    if not os.path.isdir(base_path + '/video'):
        print('Unraring the dataset...')
        os.makedirs(base_path + '/video')
        os.system('unrar e ' + base_path + '/hmdb51_org.rar ' + base_path + '/video')
        filenames = glob.glob(base_path + '/video/*.rar')
        for file_name in filenames:
            os.system(('unrar x %s ' + base_path + '/video') % file_name)
    if not os.path.isfile(base_path + '/test_train_splits.rar'):
        print('Downloading annotations of the dataset...')
        download_file('http://serre-lab.clps.brown.edu/wp-content/uploads/2013/10/test_train_splits.rar', base_path + '/test_train_splits.rar')
    if not os.path.isdir(base_path + '/annotation'):
        print('Unraring annotations of the dataset...')
        os.makedirs(base_path + '/annotation')
        os.system('unrar e ' + base_path + '/test_train_splits.rar ' + base_path + '/annotation')
        filenames = glob.glob(base_path + '/annotation/*.rar')
        for file_name in filenames:
            # BUG FIX: the % operator previously bound only to '/annotation'
            # ('unrar x %s ' + base_path + ('/annotation' % file_name)), which
            # raises TypeError ("not all arguments converted"); format the whole
            # command string, matching the video branch above.
            os.system(('unrar x %s ' + base_path + '/annotation') % file_name)
def to_normalized_float_tensor(video):
    """Convert a (T, H, W, C) uint8 video tensor to float (T, C, H, W) scaled into [0, 1]."""
    channels_first = video.permute(0, 3, 1, 2)
    return channels_first.to(torch.float) / 255
class ToFloatTensorInZeroOne(object):
    """Callable transform wrapping :func:`to_normalized_float_tensor`."""
    def __call__(self, video):
        # Delegates to the module-level helper above.
        return to_normalized_float_tensor(video)
| StarcoderdataPython |
11252814 | <gh_stars>0
from celery.contrib.testing.worker import start_worker
from django.contrib.auth import get_user_model
from django.test import Client
from django.test import tag
from django.test import TestCase
from django.test import TransactionTestCase
from django.test.client import RequestFactory
from django.urls import reverse
from .forms import CommentForm
from .models import Category
from .models import Comment
from .models import Post
from .tasks import post_unpublished_to_telegram
from .views import PostDetailView
from core_config.celery import app
# Email used for the regular (non-admin) test user across all test cases.
# NOTE: '<EMAIL>' is a redacted placeholder from the source dump, not a real address.
REGULAR_USER_EMAIL = '<EMAIL>'
# 1. MODELS / MANAGERS
class ModelsTests(TestCase):
    """Model-layer tests for Post, Category and Comment."""

    # Class attributes populated in setUpTestData (declared up front for clarity).
    comment_content = None
    post_commenter = None
    comment_model = None
    post_content = None
    post_slug = None
    post_title = None
    post_author = None
    post_model = None
    post_category = None
    user_model = None

    # Tests are more readable and it’s more maintainable to create objects using the ORM
    @classmethod
    def setUpTestData(cls):
        cls.post_model = Post
        cls.user_model = get_user_model()
        cls.post_category = Category
        cls.comment_model = Comment
        cls.post_author = cls.user_model.objects.create_user(email=REGULAR_USER_EMAIL,
                                                             password='<PASSWORD>')
        cls.post_commenter = cls.user_model.objects.create_user(email='<EMAIL>',
                                                                password='<PASSWORD>')
        # NOTE(review): cls.post_category first holds the Category model class and
        # is rebound here to a Category *instance* — confirm intended.
        cls.post_category = cls.post_category.objects.create(name='cool_python')
        cls.post_title = 'The very first test post'
        cls.post_slug = 'any-slug-name'
        cls.post_content = '''
        This
        can be
        any
        lorem ipsum
        text.
        '''
        new_post = cls.post_model.objects.create(author=cls.post_author, title=cls.post_title, slug=cls.post_slug,
                                                 content=cls.post_content,
                                                 is_published_to_telegram=False)
        new_post.categories.add(cls.post_category)
        cls.comment_content = 'Very good post!'
        cls.new_comment = cls.comment_model.objects.create(post=new_post, author=cls.post_commenter,
                                                           content=cls.comment_content)

    @tag('on_creation')
    def test_create_post(self):
        # A created Post keeps every field it was given, and __str__ is its title.
        new_post = self.post_model.objects.get(id=1)
        self.assertEqual(new_post.author, self.post_author)
        self.assertEqual(new_post.title, self.post_title)
        self.assertEqual(new_post.slug, self.post_slug)
        self.assertEqual(new_post.content, self.post_content)
        self.assertFalse(new_post.is_published_to_telegram)
        self.assertIsInstance(new_post, Post)
        self.assertEqual(str(new_post), new_post.title)
        self.assertEqual(str(new_post.categories.get(name='cool_python')), str(self.post_category))

    def test_get_absolute_url(self):
        # get_absolute_url must match the reversed detail route, and the view must serve it.
        new_post = self.post_model.objects.get(id=1)
        self.path = reverse('blog:post-detail', kwargs={'slug': new_post.slug})
        self.request = RequestFactory().get(self.path)
        self.response = PostDetailView.as_view()(self.request, slug=new_post.slug)
        self.assertEqual(new_post.get_absolute_url(), self.path)
        self.assertEqual(self.response.status_code, 200)

    def test_count_comments_under_moderation(self):
        new_post = self.post_model.objects.get(id=1)
        self.assertEqual(new_post.count_comments_under_moderation, 1)

    def test_comments_under_moderation(self):
        new_post = self.post_model.objects.get(id=1)
        self.assertEqual(len(new_post.comments_under_moderation()), 1)

    def test_comment_str(self):
        new_post = self.post_model.objects.get(id=1)
        comment = new_post.comments.first()
        self.assertEqual(str(comment), f'Comment {comment.content} by {comment.commenter_name}')

    def test_comment_get_model_name(self):
        new_post = self.post_model.objects.get(id=1)
        comment = new_post.comments.first()
        self.assertEqual(comment.get_model_name(), 'Comment')
# 2. VIEWS
class PostListViewTests(TestCase):
    """The post list view exposes the expected queryset in its context."""
    # https://docs.djangoproject.com/en/3.2/topics/testing/tools/#fixture-loading
    fixtures = ['users.json', 'posts.json', 'categories.json', 'comments.json']

    def setUp(self):
        # Such client have context and templates that were rendered (cf. simpler RequestFactory)
        self.client = Client()

    @classmethod
    def setUpTestData(cls):
        # First five posts, used as the expected context value below.
        cls.posts = Post.objects.all()[:5]

    def test_post_list(self):
        response = self.client.get(reverse('blog:post-list'))
        self.assertQuerysetEqual(
            response.context['posts'],
            self.posts,
        )
class PostCreateViewTests(TestCase):
    """The create view renders for authenticated users and accepts POSTs."""

    @classmethod
    def setUpTestData(cls):
        cls.template_name = 'blog/post_create.html'
        cls.user = get_user_model().objects.create_user(email=REGULAR_USER_EMAIL, password='<PASSWORD>')

    def test_render_post_create_view(self):
        # 2. Authorize user
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        # 3. Visit blog:post-create url
        response = self.client.get(reverse('blog:post-create'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, self.template_name)

    def test_post_create_view_success(self):
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        new_post = {
            'author': self.user,
            'title': 'New cool post',
            'slug': 'new_cool_post',
            'content': 'lorem ipsum lorem ipsum lorem ipsum lorem ipsum',
            'categories': ['python-code']
        }
        # NOTE(review): `kwargs=` is not a Client.post() parameter (form data goes
        # in `data=`); as written the payload falls into **extra and is never
        # posted, so this effectively asserts only that an empty POST returns
        # 200 — confirm intent before changing.
        response = self.client.post(reverse('blog:post-create'), kwargs=new_post)
        self.assertEqual(response.status_code, 200)
class PostDetailViewTests(TestCase):
    """The detail view renders with comments, and CommentForm saves a new comment."""
    fixtures = ['users.json', 'posts.json', 'categories.json', 'comments.json']

    @classmethod
    def setUpTestData(cls):
        cls.template_name = 'blog/post_detail.html'
        cls.user = get_user_model().objects.create_user(email=REGULAR_USER_EMAIL, password='<PASSWORD>')

    def test_render_post_detail_view(self):
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        # Pick a post guaranteed to have at least one comment.
        post = Post.objects.filter(comments__isnull=False).first()
        response = self.client.get(reverse('blog:post-detail', kwargs={'slug': post.slug}))
        self.assertEqual(response.status_code, 200)
        self.assertIn('comments', response.context)

    def test_add_comment(self):
        user = get_user_model().objects.get(pk=1)
        post = Post.objects.get(pk=1)
        data = {
            'commenter_name': 'I\'ll not tell you mu name.',
            'post_id': post.id,
            'author': user,
            'content': 'cool article, yo!'
        }
        comment_form = CommentForm(data={**data})
        # Bind the comment to its post before the real save.
        new_comment = comment_form.save(commit=False)
        new_comment.post = post
        comment_form.save()
        self.assertTrue(comment_form.is_valid())
class PostUpdateViewTests(TestCase):
    """The update view renders and responds for authenticated users."""
    fixtures = ['users.json', 'posts.json', 'categories.json']

    @classmethod
    def setUpTestData(cls):
        cls.template_name = 'blog/post_update.html'
        cls.user = get_user_model().objects.create_user(email=REGULAR_USER_EMAIL, password='<PASSWORD>')

    def test_render_post_update_view(self):
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        response = self.client.get(reverse('blog:post-update', kwargs={'pk': 1}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, self.template_name)

    def test_post_update_view_success(self):
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        # Mutate the post via the ORM, then hit the update endpoint.
        post = Post.objects.get(pk=1)
        post.title = 'updated title'
        post.content = 'updated content'
        post.categories.add(2)
        post.save()
        response = self.client.put(reverse('blog:post-update', kwargs={'pk': post.pk}))
        self.assertEqual(response.status_code, 200)
class PostDeleteViewTests(TestCase):
    """The delete view renders and responds for authenticated users."""
    fixtures = ['users.json', 'posts.json', 'categories.json']

    @classmethod
    def setUpTestData(cls):
        cls.template_name = 'blog/post_delete.html'
        cls.user = get_user_model().objects.create_user(email=REGULAR_USER_EMAIL, password='<PASSWORD>')

    def test_render_post_delete_view(self):
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        response = self.client.get(reverse('blog:post-delete', kwargs={'pk': 1}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, self.template_name)

    def test_post_delete_view(self):
        is_authorized = self.client.login(username=REGULAR_USER_EMAIL, password='<PASSWORD>')
        self.assertTrue(is_authorized)
        # NOTE(review): this POSTs to 'blog:post-update', not 'blog:post-delete' —
        # looks like a copy-paste slip; confirm which endpoint was intended.
        response = self.client.post(reverse('blog:post-update', kwargs={'pk': 1}))
        self.assertEqual(response.status_code, 200)
class PostsByCategoryViewTests(TestCase):
    """The category listing renders the right template with posts in context."""
    fixtures = ['users.json', 'posts.json', 'categories.json']

    @classmethod
    def setUpTestData(cls):
        cls.template_name = 'blog/post_category.html'

    def test_posts_list_by_category(self):
        category = Category.objects.get(pk=1)
        response = self.client.get(reverse('blog:post-category', kwargs={'category': category.name}))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, self.template_name)
        self.assertIn('posts', response.context)
# TASKS
@tag('exclude')
class TasksTests(TransactionTestCase):
    """Invoking your Celery tasks inside your tests with the apply() method executes the task synchronously and
    locally. This allows you to write tests that look and feel very similar to the ones for your API endpoints."""
    celery_worker = None
    post_model = None
    databases = '__all__'
    fixtures = ['users.json', 'posts.json', 'categories.json', 'comments.json']

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.post_model = Post
        # Start up celery worker
        cls.celery_worker = start_worker(app, perform_ping_check=False)
        cls.celery_worker.__enter__()

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        # Close worker
        cls.celery_worker.__exit__(None, None, None)

    def test_post_unpublished_to_telegram_success(self):
        # With unpublished posts present, the task reports True.
        self.post_model.objects.all().update(is_published_to_telegram=False)
        self.task = post_unpublished_to_telegram.apply()
        self.result = self.task.get()
        self.assertTrue(self.result)

    def test_post_unpublished_to_telegram_no_fresh_posts(self):
        # With everything already published, the task reports False.
        self.post_model.objects.all().update(is_published_to_telegram=True)
        self.task = post_unpublished_to_telegram.apply()
        self.result = self.task.get()
        self.assertFalse(self.result)
| StarcoderdataPython |
6683906 | import random
import timeit
from decimal import Decimal
import h5py
import hdf5plugin
import numpy as np
import pandas as pd
import gym
from gym import logger
from gym import spaces
import matplotlib.pyplot as plt
import os
from decimal import getcontext
os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
# Show plain decimals instead of scientific notation when printing pandas output.
pd.set_option('display.float_format', lambda x: '%.10f' % x)
# NOTE(review): this only *reads* the Decimal precision — it is a no-op as
# written; confirm whether an assignment (getcontext().prec = ...) was intended.
getcontext().prec
class GymEnvBase(gym.Env):
def _pick_day_when_reset(self):
    """Choose the trading day to load on reset(); must be implemented by subclasses."""
    raise NotImplementedError
def _pick_start_index_and_time_when_reset(self):
    """Choose the tick index at which trading starts on reset(); implemented by subclasses."""
    raise NotImplementedError
def _if_done_when_step(self):
    """Return whether the episode should end after the current step(); implemented by subclasses."""
    raise NotImplementedError
# Reward is simply the profit: the change in account value over the last step.
def _calculate_reward(self):
    """Return the step reward: latest recorded amount minus the previous one."""
    return self.records['amount'][-1] - self.records['amount'][-2]
def __init__(self, **kwargs):
    """Build the futures tick-trading environment.

    Required kwargs:
        file_path: path to the HDF5 tick-data file (one group per trading day).
        capital: starting principal.
        date_start / date_end: inclusive trading-date range, e.g. "20220105".
    """
    random.seed(timeit.default_timer())
    params_seed = random.randint(0, 2 ** 32 - 1)
    # Gym, numpy and PyTorch all need their RNGs seeded.
    super(GymEnvBase, self).seed(params_seed)
    # --- constants ---
    """ 一些常量 """
    self.MAX_HOLD_SECONDS = 300  # max holding time per contract (seconds): 5 minutes
    self.MIN_ORDER_VOLUME = 1  # minimum resting volume required for an open order to fill
    self.MARGIN_RATE = Decimal('0.12')  # broker margin rate
    self.COMMISSION_PRE_LOT = Decimal('3.3')  # broker intraday commission per lot
    self.CONTRACT_SIZE = 10  # contract multiplier
    self.TIME_ONLY_CLOSE = pd.to_datetime('225500000', format='%H%M%S%f')  # last 6 minutes: close-only, no new opens
    self.TIME_CLOSE_ALL = pd.to_datetime('225900000', format='%H%M%S%f')  # last minute: force-close everything, then end
    # --- required parameters ---
    """ 必填参数 """
    # data file location
    assert 'file_path' in kwargs.keys(), 'Parameter [file_path] must be specified.'
    # principal
    assert 'capital' in kwargs.keys(), 'Parameter [capital] must be specified.'
    # date range; start/end are included when they are trading days, e.g. "20220105"
    assert 'date_start' in kwargs.keys(), 'Parameter [date_start] must be specified.'
    assert 'date_end' in kwargs.keys(), 'Parameter [date_end] must be specified.'
    date_start = kwargs['date_start']
    date_end = kwargs['date_end']
    file_path = kwargs['file_path']
    capital = Decimal(str(kwargs['capital']))
    # --- state initialisation ---
    """ 初始化参数 """
    self.file = h5py.File(file_path, 'r')  # open the data read-only
    days = pd.to_datetime(list(self.file.keys()))
    days = days[days.isin(pd.date_range(date_start, date_end))].strftime('%Y%m%d')
    self.possible_days = days  # candidate trading days
    self.done = False
    self.capital = capital  # principal
    self.closed_pl = Decimal('0')  # closed trade P/L
    self.commission = Decimal('0')  # commission spent so far
    # order records: list of Tuple(open_time, open_index, direction, open_price, close_price, close_time)
    self.order_list = []
    self.unclosed_order_index = 0
    self.current_position_info = None  # current position snapshot
    self.margin_pre_lot = Decimal('0')  # current margin per lot
    self.last_price = Decimal('0')  # current last price
    self.transaction_data = None  # tick data of the selected day
    self.time = None  # current time
    self.start_time = None  # session start time
    self.seconds_from_start = 0
    self.timeout_close_count = None
    self.undermargined_count = None
    self.min_observation_index = None  # minimum pointer (index of the starting tick)
    self.current_observation_index = None  # data pointer to the current tick (starts from minute 5, i.e. 9:05)
    self.max_observation_index = None  # maximum pointer
    self.records = {  # per-step trading records
        'amount': [],
        'position': [],
        'risk': [],
        'action': []
    }
    self.huge_blow = False  # sudden blow-up flag; heavily penalises the reward
    # --- observation and action spaces ---
    """ 状态空间和动作空间 """
    first_day = self.possible_days[0]  # pick an arbitrary day
    # fetch one observation so the observation space can be sized
    self.transaction_data = pd.DataFrame(self.file[first_day][()])
    self.last_price = Decimal('0')  # temporary value, only for building the observation space
    self.margin_pre_lot = Decimal('0')  # temporary value, only for building the observation space
    self.current_observation_index = 0
    # _observation() is defined in a subclass / elsewhere — not visible here.
    observation = self._observation()
    # set the observation space
    self.observation_space = spaces.Box(- float('inf'), float('inf'), observation.shape, dtype=np.float32)
    # action space (0 = hold, 1 = long, 2 = short)
    self.action_space = spaces.Discrete(3)
    # --- log environment load info ---
    """ 输出环境加载信息 """
    days_count = len(days.values)
    logger.info("Environment initialization complete!!! \n---\nFind %d possible days: %s"
                "\nMargin rate %.2f, commission %d, contract size %d. \nFile path: %s\n---"
                % (days_count,
                   str(days.values) if days_count < 10 else (
                           " ".join(str(x) for x in days.values[0:10]) + ' ...'),
                   self.MARGIN_RATE, self.COMMISSION_PRE_LOT, self.CONTRACT_SIZE, file_path))
def reset(self):
    """Reset per-episode state, pick a trading day and start index, and return the first observation."""
    random.seed(timeit.default_timer())
    self.done = False
    self.closed_pl = Decimal('0')
    self.commission = Decimal('0')
    self.huge_blow = False
    del self.order_list
    self.order_list = []
    self.unclosed_order_index = 0
    self.current_position_info = None
    del self.records
    self.records = {
        'amount': [self.capital],
        'position': [0],
        'risk': [0.],
        'action': [0],
    }
    self.timeout_close_count = 0
    self.undermargined_count = 0
    # pick the trading day
    day = self._pick_day_when_reset()
    # load its tick data
    self.transaction_data = pd.DataFrame(self.file[day][()])
    logger.info("| --> Load data size : %d" % len(self.transaction_data))
    self.max_observation_index = len(self.transaction_data) - 1
    # pick the index at which trading starts
    start_index = self._pick_start_index_and_time_when_reset()
    self.min_observation_index = start_index
    self.current_observation_index = start_index
    self.last_price = Decimal(str(self.transaction_data.iloc[self.current_observation_index]['last_price']))
    self.margin_pre_lot = Decimal(str(self.last_price)) * self.MARGIN_RATE * self.CONTRACT_SIZE
    self.start_time = pd.to_datetime(str(int(self.transaction_data.iloc[start_index]['time'])), format='%H%M%S%f')
    self.seconds_from_start = 0
    logger.info("| --> Market start at : %s" % self.start_time.strftime('%X'))
    observation = self._observation()
    return observation
# (Action table, translated: 0 = hold, 1 = open long / close short, 2 = open short / close long)
'''
Actions:
    Type: Discrete(3)
    Num   Action
    0     保持
    1     开多 或 平空
    2     开空 或 平多
'''
def step(self, action):
    """Advance one tick applying *action*; return (observation, reward, done, info)."""
    action = int(action)
    err_msg = "%r (%s) invalid" % (action, type(action))
    assert self.action_space.contains(action), err_msg
    assert self.current_observation_index < self.max_observation_index, "Already OVER !!!"
    if self.done:
        logger.warn("Already DONE !!!")
    last_transaction_data = self.transaction_data.iloc[self.current_observation_index]
    # --- advance time to the new tick and refresh parameters ---
    '''
    时间推进 新时间点,更新参数
    '''
    self.current_observation_index += 1  # push time forward
    current_transaction_data = self.transaction_data.iloc[self.current_observation_index]
    # latest price
    self.last_price = Decimal(str(current_transaction_data['last_price']))
    # latest margin per lot
    self.margin_pre_lot = Decimal(str(self.last_price)) * self.MARGIN_RATE * self.CONTRACT_SIZE
    # current timestamp
    self.time = pd.to_datetime(str(int(current_transaction_data['time'])), format='%H%M%S%f')
    self.seconds_from_start = (self.time - self.start_time).total_seconds()
    # resting order volumes
    ask_volume = current_transaction_data['ask_volume']
    bid_volume = current_transaction_data['bid_volume']
    # market order: executable ask price (the higher of this tick's and the previous tick's ask)
    market_ask_price = Decimal(str(max(last_transaction_data['ask'], current_transaction_data['ask'])))
    # market order: executable bid price (the lower of this tick's and the previous tick's bid)
    market_bid_price = Decimal(str(min(last_transaction_data['bid'], current_transaction_data['bid'])))
    # --- position maintenance ---
    '''
    仓位维护
    '''
    self._update_position(action, market_ask_price, market_bid_price, ask_volume, bid_volume)
    self._check_order_list(market_ask_price, market_bid_price, ask_volume, bid_volume)
    self._check_position_info(market_ask_price, market_bid_price, ask_volume, bid_volume)
    # --- record metric changes ---
    '''
    记录指标变化
    '''
    self.records['amount'].append(self.current_position_info['amount'])
    self.records['position'].append(self.current_position_info['position'])
    self.records['risk'].append(self.current_position_info['risk'])
    self.records['action'].append(action)
    # --- return observation, reward, done flag and extra info (position snapshot) ---
    '''
    返回 :状态,奖励,是否完成,和其他信息(持仓情况)
    '''
    # observation (object): agent's observation of the current environment
    observation = self._observation()
    # reward (float) : amount of reward returned after previous action
    reward = float(self._calculate_reward())
    # once done, the episode stays done
    # done (bool): whether the episode has ended, in which case further step() calls will return undefined results
    done = self._if_done_when_step() or self.done
    self.done = done
    if self.huge_blow and done:
        reward = -5000
    # info (dict): contains auxiliary diagnostic information (helpful for debugging, and sometimes learning)
    info = self.current_position_info.copy()
    info['commission'] = self.commission
    info['order_count'] = len(self.order_list)
    info['unclosed_index'] = self.unclosed_order_index
    info['timeout'] = self.timeout_close_count
    info['undermargined'] = self.undermargined_count
    info = self._decimal_to_float(info)
    return observation, reward, done, info
def get_order_history(self):
    """Return a shallow copy of the complete order list (closed and open)."""
    return list(self.order_list)
def render(self, mode="human"):
    """Plot last price, turnover, equity and position history in four stacked panels."""
    logger.info("You render the env")
    panels = [plt.subplot(4, 1, row) for row in (1, 2, 3, 4)]
    series = (
        self.transaction_data['last_price'],
        self.transaction_data['turnover'],
        self.records['amount'],
        self.records['position'],
    )
    for panel, data in zip(panels, series):
        panel.plot(data)
    plt.show()
def close(self):
    """Close the underlying data file handle and drop the reference to it."""
    handle = self.file
    handle.close()
    del self.file
def _update_position(self, action, market_ask_price, market_bid_price, ask_volume, bid_volume):
    """
    Rebalance the position for this step.
    ---
    First check whether the session is near its end and everything must be flattened.
    Otherwise, if a position is open, add to or reduce it according to ``action``;
    with no open position, open a new one according to ``action``.
    Timed-out orders are handled separately (see ``_check_order_list``).

    :param action: 1 = bullish (go/stay long), 2 = bearish (go/stay short),
                   any other value = hold.
    :param market_ask_price: executable ask price for a market order (Decimal)
    :param market_bid_price: executable bid price for a market order (Decimal)
    :param ask_volume: available ask-side volume
    :param bid_volume: available bid-side volume
    """
    if self.time > self.TIME_CLOSE_ALL \
            or self.max_observation_index - self.current_observation_index < 100:  # ~1 minute to close: flatten everything
        for i in range(self.unclosed_order_index, len(self.order_list)):
            self._close_earliest(market_ask_price, market_bid_price, ask_volume, bid_volume)
        self.done = True  # session is over
    else:
        if self.unclosed_order_index < len(self.order_list):
            # Position info -> Tuple(open_time, open_index, direction, open_price, close_price, close_time)
            _, _, direction, _, _, _ = self.order_list[self.unclosed_order_index]  # direction of current holding
            if action == 1:  # bullish
                if direction < 0:  # currently short
                    self._close_earliest(market_ask_price, market_bid_price, ask_volume, bid_volume)  # reduce
                else:
                    self._open_long(market_ask_price, ask_volume)  # add to long
            elif action == 2:  # bearish
                if direction > 0:  # currently long
                    self._close_earliest(market_ask_price, market_bid_price, ask_volume, bid_volume)  # reduce
                else:
                    self._open_short(market_bid_price, bid_volume)  # add to short
        else:  # no open position
            if action == 1:  # bullish
                self._open_long(market_ask_price, ask_volume)  # open long
            elif action == 2:  # bearish
                self._open_short(market_bid_price, bid_volume)  # open short
def _check_order_list(self, market_ask_price, market_bid_price, ask_volume, bid_volume):
    """
    Check order health: close orders held longer than ``MAX_HOLD_SECONDS``.

    Orders are stored in open order, so once one order is within the limit the
    loop stops — all later orders were opened later still.
    """
    # Look for orders that must be closed because they timed out.
    for i in range(self.unclosed_order_index, len(self.order_list)):
        open_time, open_index, direction, open_price, close_price, close_time = self.order_list[i]
        if (self.time - open_time).total_seconds() > self.MAX_HOLD_SECONDS:  # held past the max hold time
            # NOTE(review): the loop index i is unused; _close_earliest always
            # closes the earliest unclosed order and advances the pointer —
            # confirm this always matches order i.
            self._close_earliest(market_ask_price, market_bid_price, ask_volume, bid_volume)
            self.timeout_close_count += 1
        else:
            break
def _check_position_info(self, market_ask_price, market_bid_price, ask_volume, bid_volume):
    """
    Check position health and refresh ``self.current_position_info``.

    While the risk ratio exceeds 0.95 (margin call territory), force-close the
    earliest open order and recompute, then cache the final snapshot.
    """
    # Check whether the position is healthy.
    position_info = self._position_info()
    while position_info['risk'] > 0.95:  # margin call: force-liquidate until healthy
        self._close_earliest(market_ask_price, market_bid_price, ask_volume, bid_volume)
        logger.info("Margin closeout ...")
        self.undermargined_count += 1
        position_info = self._position_info()
    # Cache the refreshed position snapshot.
    self.current_position_info = position_info
def _open_long(self, ask_price, ask_volume):
    """
    Open a one-lot long position at ``ask_price``.

    The order is refused inside the no-new-positions window near session end
    or when free margin is insufficient; either way a commission is charged
    only when the order is actually appended.
    """
    if ask_volume < self.MIN_ORDER_VOLUME:  # not enough volume on the ask side
        logger.info('Insufficient order quantity.')
        # NOTE(review): no ``return`` here — the order is still opened after
        # this warning, unlike the time/margin checks below. Confirm intended.
    if self.time > self.TIME_ONLY_CLOSE \
            or self.max_observation_index - self.current_observation_index < 550:  # window where opening is forbidden
        logger.info('The position will not be opened '
                    'when it is less than six minutes from the end of the trading time.')
        return
    if not self._can_open_new_position():  # check free margin allows one more lot
        logger.info("Undermargined ...")
        self.undermargined_count += 1
        return
    direction = 1
    # Position info -> Tuple(open_time, open_index, direction, open_price, close_price, close_time)
    self.order_list.append((self.time,
                            self.current_observation_index,
                            direction,
                            ask_price,
                            None,
                            None))  # open long
    self.commission = self.commission + self.COMMISSION_PRE_LOT  # commission fee
    logger.info("[%s] Open long on %d" % (self.time.strftime('%X'), ask_price))
def _open_short(self, bid_price, bid_volume):
    """
    Open a one-lot short position at ``bid_price``.

    Mirror image of ``_open_long``: refused inside the no-new-positions window
    near session end or when free margin is insufficient.
    """
    if bid_volume < self.MIN_ORDER_VOLUME:  # not enough volume on the bid side
        logger.info('Insufficient order quantity.')
        # NOTE(review): no ``return`` here — the order is still opened after
        # this warning, unlike the time/margin checks below. Confirm intended.
    if self.time > self.TIME_ONLY_CLOSE \
            or self.max_observation_index - self.current_observation_index < 550:  # window where opening is forbidden
        logger.info('The position will not be opened '
                    'when it is less than six minutes from the end of the trading time.')
        return
    if not self._can_open_new_position():  # check free margin allows one more lot
        logger.info("Undermargined ...")
        self.undermargined_count += 1
        return
    direction = -1
    # Position info -> Tuple(open_time, open_index, direction, open_price, close_price, close_time)
    self.order_list.append((self.time,
                            self.current_observation_index,
                            direction,
                            bid_price,
                            None,
                            None))  # open short
    self.commission = self.commission + self.COMMISSION_PRE_LOT  # commission fee
    logger.info("[%s] Open short on %d" % (self.time.strftime('%X'), bid_price))
# Close the earliest (oldest) open order.
def _close_earliest(self, market_ask_price, market_bid_price, ask_volume, bid_volume):
    """
    Close the earliest unclosed order at the opposite market price.

    Longs close at the bid, shorts at the ask. When the book is thinner than
    ``MIN_ORDER_VOLUME`` the fill price is penalised by 5 points. Updates
    ``closed_pl``, records the close on the order tuple and advances
    ``unclosed_order_index``.
    """
    if self.unclosed_order_index >= len(self.order_list):  # nothing open: nothing to do
        return
    # Position info -> Tuple(open_time, open_index, direction, open_price, close_price, close_time)
    open_time, index, direction, open_price, _, _ = self.order_list[self.unclosed_order_index]  # first unclosed order
    if direction > 0:  # long: close by selling at the bid
        close_price = market_bid_price
        if bid_volume < self.MIN_ORDER_VOLUME:  # thin book: penalise fill by 5 points
            close_price = market_bid_price - 5
        self.closed_pl += (close_price - open_price) * self.CONTRACT_SIZE  # realised P/L
        logger.info("[%s] Close long on %d" % (self.time.strftime('%X'), market_bid_price))
        self.order_list[self.unclosed_order_index] = (open_time, index, direction, open_price,
                                                      close_price, self.time)
    elif direction < 0:  # short: close by buying at the ask
        close_price = market_ask_price
        if ask_volume < self.MIN_ORDER_VOLUME:  # thin book: penalise fill by 5 points
            close_price = market_ask_price + 5
        # BUG FIX: use the (possibly penalised) close_price, not market_ask_price,
        # so realised P/L matches the price recorded on the order — the long
        # branch already does this.
        self.closed_pl += (open_price - close_price) * self.CONTRACT_SIZE  # realised P/L
        logger.info("[%s] Close short on %d" % (self.time.strftime('%X'), market_ask_price))
        self.order_list[self.unclosed_order_index] = (open_time, index, direction, open_price,
                                                      close_price, self.time)
    # Advance the pointer: the order it referenced is now closed.
    self.unclosed_order_index += 1
# 观测状态 = 交易状态 + 持仓状态
def _observation(self):
# 获取交易数据
transaction_state = self.transaction_data.iloc[self.current_observation_index] # 前两位是日期数据不要
# 获取仓位数据
position_state = pd.Series(self._decimal_to_float(self._position_info()))
# 拼接数据并返回结果
return np.array(tuple(transaction_state)[2:] + tuple(position_state)) # 前两位是日期数据不要
def _position_info(self):
# 持仓保证金
margin = self.margin_pre_lot * (len(self.order_list) - self.unclosed_order_index)
# 仓位
position = 0
# 持仓盈亏 Floating P/L
floating_pl = Decimal('0') # 利润
for i in range(self.unclosed_order_index, len(self.order_list)):
# Position info -> Tuple(open_time, open_index, direction, open_price, close_price, close_time)
_, _, direction, open_price, _, _ = self.order_list[i]
delta = self.last_price - open_price
floating_pl += direction * delta # 只计算盈利点数
position += direction
floating_pl *= self.CONTRACT_SIZE # 盈利点数乘以合约规模就是利润
# 当前权益 = 本金(期初权益)+ 持仓盈亏 + 平仓盈亏 - 手续费
amount = self.capital + floating_pl + self.closed_pl - self.commission
# 可用资金 = 当前权益 - 持仓保证金
free_margin = amount - margin
# 风险度 = 保证金 / 当前权益
if amount == 0:
risk = 0
else:
risk = margin / amount
return {
'position': position,
'floating_pl': floating_pl,
'closed_pl': self.closed_pl,
'amount': amount,
'risk': risk,
'free_margin': free_margin,
'margin': margin
}
def _can_open_new_position(self):
# 更新持仓情况
position_info = self._position_info()
return position_info['free_margin'] * Decimal('0.8') > self.margin_pre_lot
@staticmethod
def _decimal_to_float(dict_of_decimal):
ret = {}
for key in dict_of_decimal:
if isinstance(dict_of_decimal[key], Decimal):
ret[key] = float(dict_of_decimal[key])
else:
ret[key] = dict_of_decimal[key]
return ret
| StarcoderdataPython |
9718655 | <gh_stars>0
#!/usr/bin/python
#This script is to visualize the txt file "map" which is the output from the mapping script
#Input: TXT file which will have [X Y Z DESCRIPTOR]
#Output: 3D visualization or 2D visualization of the features
# Author : <NAME>
# Contact : <EMAIL>
# Thesis source code, CVUT, Prague, Czech Republic
#Import Libraries
#==================
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import mpl_toolkits
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d import Axes3D
# Read the mapped feature points, keep those whose Z lies in a band, and plot them.
with open('map.txt', 'r') as map_file:  # close the file deterministically
    map_lines = map_file.readlines()
Xsnew = list()
Ysnew = list()
Zsnew = list()
output_list = []
fig = plt.figure()
for line in map_lines:
    tmp = line.strip().split(",")
    values = [float(v) for v in tmp]
    points4d = np.array(values).reshape((-1, 65))
    # Change below if you want to exclude W to 3
    points3d = points4d[:, :65]
    # Homogeneous -> Euclidean: divide each coordinate by W (column 3).
    Xs = points3d[:, 0]/points4d[:,3]
    Ys = points3d[:, 1]/points4d[:,3]
    Zs = points3d[:, 2]/points4d[:,3]
    Ws = points3d[:, 3]/points4d[:,3]
    for i in range(len(Xs)):
        # Keep only points inside the Z band of interest.
        if 2.8 < Zs[i] < 3.6:
            output_list.append([Xs[i], Ys[i], Zs[i]])
for values in output_list:
    Xsnew.append(values[0])
    Ysnew.append(values[1])
    Zsnew.append(values[2])
print("XNEW", Xsnew)
print("YNEW", Ysnew)
print("ZNEW", Zsnew)
#3D Visualization
#====================
ax = fig.add_subplot(111, projection = '3d')
# BUG FIX: the original passed the undefined name `my_new_Z_list` here
# (NameError at runtime); the filtered Z values live in `Zsnew`.
ax.scatter(Xsnew, Ysnew, Zsnew, c = 'r', marker = "o")
MAX = 3
# Plot invisible corner points so every axis spans [-MAX, MAX].
for direction in (-1, 1):
    for point in np.diag(direction * MAX * np.array([1,1,1])):
        ax.plot([point[0]], [point[1]], [point[2]], 'w')
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
plt.show()
#2D Visualization
#==================
# ax = fig.add_subplot(111)
# ax.scatter(Xsnew, Ysnew, my_new_Z_list, c = 'r', marker = "o")
# MAX = 3
# for direction in (-1, 1):
# for point in np.diag(direction * MAX * np.array([1,1,1])):
# ax.plot([point[0]], [point[1]], [point[2]], 'w')
# ax.set_xlabel('X')
# ax.set_ylabel('Y')
# plt.show()
| StarcoderdataPython |
1979326 | <filename>pygithublabeler/__main__.py<gh_stars>0
from .run import cli

# Guard the entry point so importing pygithublabeler.__main__ (e.g. by tooling)
# does not launch the CLI; `python -m pygithublabeler` still runs it.
if __name__ == "__main__":
    cli()
_base_config_ = ["base.py"]  # base config this file inherits from
# Generator: CSE-conditioned style mapping on all stages; z is fed to the
# style mapper (input_z=True) but not embedded directly (embed_z=False).
generator = dict(
    semantic_input_mode=None,
    use_norm=True,
    style_cfg=dict(
        type="CSEStyleMapper", encoder_modulator="CSELinear", decoder_modulator="CSELinear",middle_modulator="CSELinear",
        w_mapper=dict(input_z=True)),
    embed_z=False,
    use_cse=True
)
# Loss: GAN criterion with an auxiliary segmentation term (weight 0.1).
loss = dict(
    gan_criterion=dict(type="segmentation", seg_weight=.1)
)
# Discriminator prediction flags: semantic-only prediction, not CSE-only.
discriminator=dict(
    pred_only_cse=False,
    pred_only_semantic=True
)
1673568 | <gh_stars>1-10
"""
Copyright European Organization for Nuclear Research (CERN)
Licensed under the Apache License, Version 2.0 (the "License");
You may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Authors:
- <NAME>, <<EMAIL>>, 2017
- <NAME>, <<EMAIL>>, 2017
Gets current traffic for all the links.
"""
import sys
from rucio.client import Client
from rucio.db.sqla.session import get_session
def get_traffic_from_db():
    """
    Gets the size of the current requests
    for each link.

    Returns a list of dicts ``{'bytes', 'src_rse', 'dst_rse'}``, one per
    (source, destination) RSE pair, summing request sizes over the
    D/S/F/L request states. Exits the process on any database error.
    """
    session = get_session()
    collector = []
    # NOTE: this module uses Python 2 syntax (see the except clause below).
    query = '''SELECT
SUM(bytes),
atlas_rucio.id2rse(source_rse_id),
atlas_rucio.id2rse(dest_rse_id)
FROM atlas_rucio.requests WHERE
(state='D' or
state='S' or
state='F' or
state='L')
group by source_rse_id, dest_rse_id'''
    try:
        result = session.execute(query)
        for row in result:
            link = {'bytes': row[0], 'src_rse': row[1], 'dst_rse': row[2]}
            collector.append(link)
    except Exception, exception:
        print exception
        sys.exit()
    return collector
def create_site_map(rse_map):
    """
    Build a nested map of traffic between sites:
    ``{src_site: {dst_site: total_bytes}}``.

    Each RSE link is resolved to its site via the Rucio client, and byte
    counts are accumulated per (source site, destination site) pair.

    BUG FIX: the original initialised a new source entry as
    ``{src_site: traffic}`` instead of ``{dst_site: traffic}``, recording the
    first destination's traffic under the wrong key.
    """
    client = Client()
    traffic_map = {}
    for link in rse_map:
        src_site = client.list_rse_attributes(link['src_rse'])['site']
        dst_site = client.list_rse_attributes(link['dst_rse'])['site']
        traffic = int(link['bytes'])
        # Accumulate bytes per (src, dst) site pair.
        destinations = traffic_map.setdefault(src_site, {})
        destinations[dst_site] = destinations.get(dst_site, 0) + traffic
    return traffic_map
def get_link_traffic():
    """
    Return the current site-to-site traffic map built from the pending
    request sizes in the database (see ``create_site_map`` for its shape).
    """
    return create_site_map(get_traffic_from_db())
| StarcoderdataPython |
8080502 | import contextlib
import datetime
import itertools
import collections
from typing import AbstractSet, Iterable
import dateutil
import rich.align
import rich.box
import rich.console
import rich.padding
import rich.panel
import rich.rule
import rich.table
from cloclify import client
def timedelta_str(delta):
    """Render a timedelta as ``[D days, ]HH:MM:SS (decimal hours)``."""
    hours, remainder = divmod(delta.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    decimal_hours = hours + minutes / 60
    if delta.days != 0:
        day_part = f"{delta.days} days, "
    else:
        day_part = ""
    return f"{day_part}{hours:02}:{minutes:02}:{seconds:02} ({round(decimal_hours, 2)})"
def print_entries(
    console: rich.console.Console,
    date: datetime.date,
    entries: Iterable[client.Entry],
    *,
    debug: bool,
    workspace_name: str,
    highlight_ids: AbstractSet[str] = frozenset(),
    center: bool = False,
) -> None:
    """Render one day's time entries as a rich table plus per-project totals.

    Entries whose id is in ``highlight_ids`` get a sparkle icon and all
    others are dimmed (only when highlight ids are given). Entries without
    an end time are treated as running: shown with a clock icon and counted
    up to "now". ``center`` centers the table and the total lines.
    """
    console.print(f"[yellow]Workspace:[/yellow] {workspace_name}\n")
    date_str = date.strftime("%a, %Y-%m-%d")
    table = rich.table.Table(
        title=date_str,
        box=rich.box.ROUNDED,
    )
    table.add_column("Description", style="yellow")
    table.add_column("Start", style="cyan")
    table.add_column("End", style="cyan")
    table.add_column("Project")
    table.add_column("Tags", style="blue")
    table.add_column(":gear:")  # icons
    total = datetime.timedelta()
    project_totals = collections.defaultdict(datetime.timedelta)
    for entry in reversed(list(entries)):
        if debug:
            console.print(entry, highlight=True)
        data = []
        data.append(entry.description)
        assert entry.start is not None, entry
        data.append(entry.start.strftime("%H:%M"))
        if entry.end is None:
            # Still running: show a clock and measure elapsed time up to now.
            data.append(":clock3:")
            now = datetime.datetime.now(dateutil.tz.tzlocal())
            duration = now - entry.start
        else:
            data.append(entry.end.strftime("%H:%M"))
            duration = entry.end - entry.start
        total += duration
        # Accumulate per-project time, keyed by (project name, color).
        proj_key = (entry.project or "Other", entry.project_color or "default")
        project_totals[proj_key] += duration
        if entry.project is None:
            data.append("")
        else:
            data.append(f"[{entry.project_color}]{entry.project}[/{entry.project_color}]")
        data.append(", ".join(entry.tags))
        icon = ""
        if entry.eid in highlight_ids:
            icon += ":sparkles:"
        if entry.billable:
            icon += ":heavy_dollar_sign:"
        data.append(icon)
        style = None
        if highlight_ids and entry.eid not in highlight_ids:
            # Dim rows that are not part of the highlighted set.
            style = rich.style.Style(dim=True)
        table.add_row(*data, style=style)
    renderable = rich.align.Align(table, "center") if center else table
    console.print(renderable)
    justify = "center" if center else None
    console.print(f"[b]Total: {timedelta_str(total)}[/b]", justify=justify)
    for (proj, color), tag_total in sorted(project_totals.items()):
        console.print(
            f"[{color}]{proj}[/{color}]: {timedelta_str(tag_total)}",
            justify=justify,
        )
def conky(console, client, parser) -> None:
    """Output for conky's exec(i) with lemonbar.

    Prints a single status line: one colored project name per running entry
    plus a count of entries already finished today. With nothing tracked
    during rough working hours (Mon-Fri, 8-18), "none" is highlighted with
    the configured error color.
    """
    entries = list(client.get_entries_day(parser.date))
    running = [e for e in entries if e.end is None]
    parts = []
    if running:
        for entry in running:
            project = entry.project or "Other"
            # NOTE(review): "#fffff" has five hex digits — probably meant
            # "#ffffff"; confirm before changing the fallback color.
            color = entry.project_color or "#fffff"
            # lemonbar foreground markup: %{F<color>}text%{F-}
            parts.append('%{F' + color + '}' + project + '%{F-}')
    finished_count = len(entries) - len(running)
    now = datetime.datetime.now()
    if finished_count:
        if parts:
            parts.append("+")
        parts.append(str(finished_count))
    elif running:
        # don't need to append "none" if a task is running
        pass
    elif 0 <= now.date().weekday() < 5 and 8 <= now.time().hour <= 18:
        # roughly working hours
        parts.append("%{B<color>} none %{B-}".replace('<color>', parser.conky_error_color))
    else:
        # roughly non-working hours
        parts.append("none")
    console.print(' '.join(parts))
def dump(console, client, parser) -> None:
    """Dump all entries for the month given in 'date'.

    Groups the month's entries by calendar day, prints one centered table
    per day via ``print_entries`` separated by a rule, optionally through
    the console pager.
    """
    entries = client.get_entries_month(parser.dump)
    separator = rich.padding.Padding(rich.rule.Rule(), (1, 0))
    pager = console.pager(styles=True) if parser.pager else contextlib.nullcontext()
    with pager:
        # Entries arrive in one order; reversing twice keeps each day's
        # entries in their original order while iterating days newest-first.
        for date, day_entries in itertools.groupby(
            reversed(list(entries)), key=lambda e: e.start.date()
        ):
            print_entries(
                console,
                date,
                reversed(list(day_entries)),
                debug=parser.debug,
                center=True,
                workspace_name=client.workspace_name,
            )
            console.print(separator)
| StarcoderdataPython |
6664864 | '''
Given an integer array nums, move all 0's to the end of it while maintaining the relative order of the non-zero elements.
Note that you must do this in-place without making a copy of the array.
Example 1:
Input: nums = [0,1,0,3,12]
Output: [1,3,12,0,0]
Example 2:
Input: nums = [0]
Output: [0]
Constraints:
1 <= nums.length <= 104
-231 <= nums[i] <= 231 - 1
'''
class Solution:
    """In-place movement of all zeros to the end of a list (LeetCode 283)."""

    def moveZeroes(self, nums: List[int]) -> None:
        """
        Move all 0's to the end of ``nums`` while maintaining the relative
        order of the non-zero elements. Modifies ``nums`` in place.

        BUG FIX: the original two-pointer variant never advanced its swap
        target past already-placed non-zero elements, so it could move a zero
        *forward* (e.g. [1, 0, 2] -> [0, 1, 2]). The standard write-pointer
        sweep below is O(n) and order-preserving.
        """
        write = 0  # slot where the next non-zero element belongs
        for read in range(len(nums)):
            if nums[read] != 0:
                nums[write], nums[read] = nums[read], nums[write]
                write += 1
        return nums  # kept for backward compatibility with the original
class Colors:
    """Common RGB color constants as (red, green, blue) tuples."""
    RED = (255, 114, 111)
    GREEN = (0, 255, 0)
    BLUE = (0, 0, 255)
    YELLOW = (255, 255, 0)
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
    PURPLE = (161, 3, 252)
    ORANGE = (255, 165, 0)
    GREY = (128, 128, 128)
    TURQUOISE = (64, 224, 208)
| StarcoderdataPython |
1775404 | # st2
from ultron8.utils.misc import (
lowercase_value,
rstrip_last_char,
sanitize_output,
strip_shell_chars,
)
from ultron8.utils.ujson import fast_deepcopy
__all__ = ["MiscUtilTestCase"]
class TestMiscUtilTestCase:
    """Unit tests for ultron8's misc string helpers and fast_deepcopy."""

    def test_rstrip_last_char(self):
        """Only the single trailing occurrence of the char sequence is stripped."""
        # IDIOM FIX: compare to None with ``is`` rather than ``==`` (PEP 8 / E711).
        assert rstrip_last_char(None, "\n") is None
        assert rstrip_last_char("stuff", None) == "stuff"
        assert rstrip_last_char("", "\n") == ""
        assert rstrip_last_char("foo", "\n") == "foo"
        assert rstrip_last_char("foo\n", "\n") == "foo"
        assert rstrip_last_char("foo\n\n", "\n") == "foo\n"
        assert rstrip_last_char("foo\r", "\r") == "foo"
        assert rstrip_last_char("foo\r\r", "\r") == "foo\r"
        assert rstrip_last_char("foo\r\n", "\r\n") == "foo"
        assert rstrip_last_char("foo\r\r\n", "\r\n") == "foo\r"
        assert rstrip_last_char("foo\n\r", "\r\n") == "foo\n\r"

    def test_strip_shell_chars(self):
        """One trailing newline group (\\n, \\r or \\r\\n) is removed; inner ones stay."""
        assert strip_shell_chars(None) is None
        assert strip_shell_chars("foo") == "foo"
        assert strip_shell_chars("foo\r") == "foo"
        assert strip_shell_chars("fo\ro\r") == "fo\ro"
        assert strip_shell_chars("foo\n") == "foo"
        assert strip_shell_chars("fo\no\n") == "fo\no"
        assert strip_shell_chars("foo\r\n") == "foo"
        assert strip_shell_chars("fo\no\r\n") == "fo\no"
        assert strip_shell_chars("foo\r\n\r\n") == "foo\r\n"

    def test_lowercase_value(self):
        """Strings, lists of strings and dicts are lowercased recursively."""
        value = "TEST"
        expected_value = "test"
        assert expected_value == lowercase_value(value=value)

        value = ["testA", "TESTb", "TESTC"]
        expected_value = ["testa", "testb", "testc"]
        assert expected_value == lowercase_value(value=value)

        value = {"testA": "testB", "testC": "TESTD", "TESTE": "TESTE"}
        expected_value = {"testa": "testb", "testc": "testd", "teste": "teste"}
        assert expected_value == lowercase_value(value=value)

    def test_fast_deepcopy_success(self):
        """A fast_deepcopy result compares equal to its source for common types."""
        values = [
            "a",
            u"٩(̾●̮̮̃̾•̃̾)۶",
            1,
            [1, 2, "3", "b"],
            {"a": 1, "b": "3333", "c": "d"},
        ]
        expected_values = [
            "a",
            u"٩(̾●̮̮̃̾•̃̾)۶",
            1,
            [1, 2, "3", "b"],
            {"a": 1, "b": "3333", "c": "d"},
        ]

        for value, expected_value in zip(values, expected_values):
            result = fast_deepcopy(value)
            assert result == value
            assert result == expected_value

    def test_sanitize_output_use_pyt_false(self):
        # pty is not used, \r\n shouldn't be replaced with \n
        input_strs = [
            "foo",
            "foo\n",
            "foo\r\n",
            "foo\nbar\nbaz\n",
            "foo\r\nbar\r\nbaz\r\n",
        ]
        expected = ["foo", "foo", "foo", "foo\nbar\nbaz", "foo\r\nbar\r\nbaz"]

        for input_str, expected_output in zip(input_strs, expected):
            output = sanitize_output(input_str, uses_pty=False)
            assert expected_output == output

    def test_sanitize_output_use_pyt_true(self):
        # pty is used, \r\n should be replaced with \n
        input_strs = [
            "foo",
            "foo\n",
            "foo\r\n",
            "foo\nbar\nbaz\n",
            "foo\r\nbar\r\nbaz\r\n",
        ]
        expected = ["foo", "foo", "foo", "foo\nbar\nbaz", "foo\nbar\nbaz"]

        for input_str, expected_output in zip(input_strs, expected):
            output = sanitize_output(input_str, uses_pty=True)
            assert expected_output == output
| StarcoderdataPython |
3404667 | <reponame>majamassarini/knx-stack<gh_stars>1-10
from knx_stack.encode.layer.transport.t_data_group.encode import tl_encode as encode
from knx_stack.encode.layer.transport.t_data_group import req, ind
| StarcoderdataPython |
11377080 | import glob
import librosa
import os
import numpy as np
from .constant import *
import argparse
def audio_clip(data_dir, N, low, high, duration, output_dir):
    """
    Cut N random fixed-length clips from every speaker file in ``data_dir``.

    Every ``*.sph`` / ``*.wav`` file is loaded at SAMPLING_RATE; for each of
    the N clips a random start second k in [low, high) is drawn and the slice
    [k, k + duration) seconds is written to ``output_dir/<speaker_idx>/<j>.wav``.
    """
    speakers = glob.glob(os.path.join(data_dir, "*.sph"))
    speakers.extend(glob.glob(os.path.join(data_dir, "*.wav")))
    for i, speaker_path in enumerate(speakers):  # IDIOM: enumerate over range(len(...))
        clip_dir = os.path.join(output_dir, str(i))
        # IDIOM: exist_ok replaces the racy exists()+makedirs() pair.
        os.makedirs(clip_dir, exist_ok=True)
        y, _ = librosa.load(speaker_path, sr=SAMPLING_RATE)
        for j in range(N):
            k = int(np.random.randint(low, high, size=1))
            clip = y[k * SAMPLING_RATE:(k + duration) * SAMPLING_RATE]
            # NOTE(review): librosa.output.write_wav was removed in librosa 0.8;
            # soundfile.write is the modern replacement — confirm the pinned
            # librosa version before upgrading.
            librosa.output.write_wav(os.path.join(clip_dir, str(j)) + ".wav",
                                     clip, SAMPLING_RATE)
| StarcoderdataPython |
12822582 | import os
import sys
from copy import deepcopy
import shutil
from os.path import exists as _exists
from pprint import pprint
from time import time
from time import sleep
from datetime import datetime
import wepppy
from wepppy.nodb import (
Ron, Topaz, Watershed, Landuse, Soils, Climate, Wepp, SoilsMode, ClimateMode, ClimateSpatialMode, LanduseMode
)
from wepppy.nodb.mods.locations import LakeTahoe
from os.path import join as _join
from wepppy.nodb.mods.locations.lt.selectors import *
from wepppy.wepp.out import TotalWatSed
from wepppy.export import arc_export
from osgeo import gdal, osr
gdal.UseExceptions()
wd = None
def log_print(msg):
    """Print *msg* prefixed with a timestamp and the module-level run directory ``wd``."""
    global wd
    stamp = datetime.now()
    print('[{now}] {wd}: {msg}'.format(now=stamp, wd=wd, msg=msg))
if __name__ == '__main__':
os.chdir('/geodata/weppcloud_runs/')
watersheds = [
dict(watershed='0_Near_Burton_Creek', # Watershed_6
extent=[-120.25222778320314, 39.102091011833686, -120.01190185546876, 39.28834275351453],
map_center=[-120.13206481933595, 39.19527859633793],
map_zoom=12,
outlet=[-120.14460408169862, 39.17224134827233],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='1_Unnamed_Creek_at_Tahoe_City_State_Park', # Watershed_5
extent=[-120.25222778320314, 39.102091011833686, -120.01190185546876, 39.28834275351453],
map_center=[-120.13206481933595, 39.19527859633793],
map_zoom=12,
outlet=[-120.1402884859731, 39.175919130374645],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='2_Burton_Creek', # Watershed_4
extent=[-120.20605087280275, 39.15083019711799, -120.08588790893556, 39.243953257043124],
map_center=[-120.14596939086915, 39.19740715574304],
map_zoom=13,
outlet=[-120.12241504431637, 39.181379503672105],
# outlet=[-120.1233, 39.1816], # [-120.12241504431637, 39.181379503672105],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='3_Unnamed_Creek_near_Lake_Forest', # Watershed_3
extent=[-120.25497436523439, 39.072244930479926, -120.0146484375, 39.25857565711887],
map_center=[-120.1348114013672, 39.165471994238374],
map_zoom=12,
outlet=[-120.12165282292143, 39.18644160172608],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='4_Unnamed_Creek_at_Lake_Forest', # Watershed_2
extent=[-120.25497436523439, 39.072244930479926, -120.0146484375, 39.25857565711887],
map_center=[-120.1348114013672, 39.165471994238374],
map_zoom=12,
outlet=[-120.11460381632118, 39.18896973503106],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='5_Dollar_Creek', # Watershed_1
extent=[-120.25497436523439, 39.072244930479926, -120.0146484375, 39.25857565711887],
map_center=[-120.1348114013672, 39.165471994238374],
map_zoom=12,
outlet=[-120.09757304843217, 39.19773527084747],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='6_Unnamed_Creek_at_Cedar_Flat', # 150 ha Watershed_1
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.09622790374877, 39.20593567273984],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='6_Intervening_Area_Cedar_Flat', # 190 ha Watershed_2
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.09007219506651, 39.211997939797904],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='7_Watson_Creek', # 610 ha Watershed_3
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.08804679389792, 39.218974048542954],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='8_Carnelian_Bay_Creek', # 210 ha Watershed_4
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.08641274873328, 39.22487886998101],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='9_Intervening_Area_Carnelian_Bay_1', # 23 ha Watershed_5
extent=[-120.15626907348634, 39.17279175010029, -120.03610610961915, 39.265885713697195],
map_center=[-120.09618759155275, 39.21935416298406],
map_zoom=13,
outlet=[-120.08355157342717, 39.225167631789475],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='9_Intervening_Area_Carnelian_Bay_2', # 46 ha Watershed_6
extent=[-120.15626907348634, 39.17279175010029, -120.03610610961915, 39.265885713697195],
map_center=[-120.09618759155275, 39.21935416298406],
map_zoom=12,
outlet=[-120.08284617571803, 39.225420213254786],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='9_Carnelian_Creek', # 770 ha Watershed_7
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.07969459253908, 39.22768334903354],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='10_Intervening_Area_Agate_Bay', # 80 ha Watershed_8
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.05797912271075, 39.24010882250784],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='10_Snow_Creek', # 1200 ha Watershed_9
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.0396132899316, 39.23883229646565],
landuse=None,
cs=30, erod=0.000001,
surf_runoff=0.004, lateral_flow=0.005, baseflow=0.006, sediment=1000.0,
gwstorage=100, bfcoeff=0.04, dscoeff=0.00, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='11_Griff_Creek', # 1100 ha Watershed_10
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.03057579150752, 39.238872298828994],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.007, lateral_flow=0.008, baseflow=0.009, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='12_Intervening_Area_Griff_to_Baldy', # 76 ha Watershed_11
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.02654678811952, 39.2355263472678],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='12_Baldy_Creek', # 160 ha Watershed_12
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-120.02345678903843, 39.23463658610686],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='13_East_Stateline_Point', # 370 ha Watershed_13
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-119.99814918521255, 39.225068116460506],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='14_First_Creek', # 450 ha Watershed_14
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-119.98883774006968, 39.24779919914662],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='15_Second_Creek', # 400 ha Watershed_15
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-119.97838738323198, 39.248339060781475],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='16_Intervening_Area_Second_to_Wood', # 92 ha Watershed_16
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-119.97179085084468, 39.24816757762806],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='17_Wood_Creek', # 490 ha Watershed_17
extent=[-120.1619338989258, 39.14763521827571, -119.92160797119142, 39.33376633431887],
map_center=[-120.04177093505861, 39.2407625100131],
map_zoom=12,
outlet=[-119.95707223120424, 39.24291905726153],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=1000.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.013, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='18_Third_Creek', # 1600 ha Watershed_18_Third
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.94713185797971, 39.239460705991355],
landuse=None,
cs=25, erod=0.000001,
surf_runoff=0.008, lateral_flow=0.009, baseflow=0.010, sediment=700.0,
gwstorage=100, bfcoeff=0.013, dscoeff=0.0134, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='19_Incline_Creek', # 1700 ha Watershed_19_Incline
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.94500218172628, 39.2404858227834],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=1500.0,
gwstorage=100, bfcoeff=0.0019, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='20_Mill_Creek', # 500 ha Watershed_20
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.93519389103228, 39.234282368305905],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=1500.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='21_Tunnel_Creek', # 310 ha Watershed_21
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.92772893484674, 39.22219445266412],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=1500.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='22_Unnamed_creek_at_Sand_Harbor', # 230 ha Watershed_22
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.92780585626933, 39.21246741121267],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='23_Intervening_Area_Sand_Harbor_1', # 52 ha Watershed_23
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.92902337136051, 39.208445758549246],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='23_Intervening_Area_Sand_Harbor_2', # 25 ha Watershed_24
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.93014075643508, 39.19874614288978],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='23_Intervening_Area_Sand_Harbor_3', # 33 ha Watershed_25
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.92712434849918, 39.19623427594098],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='23_Intervening_Area_Sand_Harbor_4', # 140 ha Watershed_26
extent=[-120.04760742187501, 39.16839998800286, -119.80728149414064, 39.35447606884594],
map_center=[-119.92744445800783, 39.261499771230774],
map_zoom=12,
outlet=[-119.92704812670831, 39.190016450091775],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2500.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='24_Marlette_Creek', # 1300 ha Watershed_27
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.93270185597697, 39.16542835468725],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.012, lateral_flow=0.013, baseflow=0.014, sediment=2500.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='25_Intervening_Area_Marlette_to_Secret_Harbor', # 51 ha Watershed_28
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.93090374225022, 39.150834255220026],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.012, lateral_flow=0.013, baseflow=0.014, sediment=2500.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='25_Secret_Harbor_Creek', # 510 ha Watershed_29
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.93136079483678, 39.148367001968865],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=2500.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='26_Bliss_Creek', # 140 ha Watershed_30
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.93642518249555, 39.143635771481485],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=3000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='27_Intervening_Area_Deadman_Point', # 40 ha Watershed_31
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94066697186948, 39.14185550725643],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=3000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='28_Slaughterhouse_Creek', # 1600 ha Watershed_32
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94656529513026, 39.1017421575381],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=3000.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='29_Intervening_Area_Glenbrook_Bay_1', # 88 ha Watershed_33
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.9418433230724, 39.09864545553091],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.015, lateral_flow=0.016, baseflow=0.017, sediment=3200.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.002, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='29_Intervening_Area_Glenbrook_Bay_2', # 99 ha Watershed_34
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94062104067505, 39.09482976550799],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.015, lateral_flow=0.016, baseflow=0.017, sediment=3200.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.002, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='29_Glenbrook_Creek', # 1100 ha Watershed_35
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.93952733643079, 39.08804461546371],
landuse=None,
cs=35, erod=0.000001,
surf_runoff=0.015, lateral_flow=0.016, baseflow=0.017, sediment=3500.0,
gwstorage=100, bfcoeff=0.0018, dscoeff=0.0016, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='30_North_Logan_House_Creek', # 290 ha Watershed_36
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94139490719957, 39.068905396563665],
landuse=None,
cs=40, erod=0.000001,
surf_runoff=0.002, lateral_flow=0.003, baseflow=0.004, sediment=2300.0,
gwstorage=100, bfcoeff=0.002, dscoeff=0.002, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='31_Logan_House_Creek', # 530 ha Watershed_37
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.93525510941215, 39.066581990025206],
landuse=None,
cs=40, erod=0.000001,
surf_runoff=0.002, lateral_flow=0.003, baseflow=0.004, sediment=2300.0,
gwstorage=100, bfcoeff=0.0005, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=1.1, p_coeff=0.8),
dict(watershed='32_Intervening_Area_Logan_Shoals_1', # 31 ha Watershed_38
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94319963374946, 39.05922347741282],
landuse=None,
cs=40, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=2600.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='32_Intervening_Area_Logan_Shoals_2', # 36 ha Watershed_39
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.9445062251884, 39.05304180979089],
landuse=None,
cs=40, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=2600.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='32_Cave_Rock_Unnamed_Creek_at_Lincoln_Park', # 150 ha Watershed_40
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94636531738409, 39.05011770248522],
landuse=None,
cs=40, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=2500.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='33_Lincoln_Creek', # 700 ha Watershed_41
extent=[-120.01396179199219, 39.019450429324046, -119.77363586425783, 39.20592074849823],
map_center=[-119.89379882812501, 39.11274726579313],
map_zoom=12,
outlet=[-119.94819197052668, 39.039895724271986],
landuse=None,
cs=40, erod=0.000001,
surf_runoff=0.013, lateral_flow=0.014, baseflow=0.015, sediment=2600.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='35_North_Zephyr_Creek', # 680 ha Watershed_42
extent=[-120.06202697753908, 38.87045372777545, -119.8217010498047, 39.05731715424236],
map_center=[-119.94186401367189, 38.963947050281696],
map_zoom=12,
outlet=[-119.94876901553465, 39.01494787512556],
landuse=None,
cs=45, erod=0.000001,
surf_runoff=0.012, lateral_flow=0.013, baseflow=0.014, sediment=2700.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='37_Zephyr_Creek', # 430 ha Watershed_43
extent=[-120.06202697753908, 38.87045372777545, -119.8217010498047, 39.05731715424236],
map_center=[-119.94186401367189, 38.963947050281696],
map_zoom=12,
outlet=[-119.94804448075419, 39.007631888060544],
landuse=None,
cs=45, erod=0.000001,
surf_runoff=0.012, lateral_flow=0.013, baseflow=0.014, sediment=2700.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='38_McFaul_Creek', # 940 ha Watershed_44
extent=[-120.06202697753908, 38.87045372777545, -119.8217010498047, 39.05731715424236],
map_center=[-119.94186401367189, 38.963947050281696],
map_zoom=12,
outlet=[-119.95345634105546, 38.99453069976447],
landuse=None,
cs=45, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2700.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8),
dict(watershed='39_Burke_Creek', # 1200 ha Watershed_45
extent=[-120.06202697753908, 38.87045372777545, -119.8217010498047, 39.05731715424236],
map_center=[-119.94186401367189, 38.963947050281696],
map_zoom=12,
outlet=[-119.94974779877337, 38.97605598069683],
landuse=None,
cs=45, erod=0.000001,
surf_runoff=0.011, lateral_flow=0.012, baseflow=0.013, sediment=2700.0,
gwstorage=100, bfcoeff=0.001, dscoeff=0.001, bfthreshold=1.001,
mid_season_crop_coeff=0.95, p_coeff=0.8)
]
# Treatment / climate scenarios.  Each scenario is crossed with every
# watershed above; keys missing from a scenario fall back to the defaults
# applied in the project-building loop ('cfg', 'lc_lookup_fn', 'climate').
# landuse: None leaves the gridded landuse untouched; otherwise a list of
# (selector, dom) pairs — the selector picks hillslopes and dom is the
# landuse code they are remapped to (see the default_landuse handling below).
scenarios = [
    dict(scenario='SimFire.fccsFuels_obs_cli',
         landuse=None,
         lc_lookup_fn='ki5krcs.csv',
         cfg='lt-fire-snow',
         climate='copyCurCond'),
    dict(scenario='SimFire.landisFuels_obs_cli',
         landuse=None,
         lc_lookup_fn='ki5krcs.csv',
         cfg='lt-fire-future-snow',
         climate='copyCurCond'),
    dict(scenario='SimFire.landisFuels_fut_cli_A2',
         landuse=None,
         lc_lookup_fn='ki5krcs.csv',
         cfg='lt-fire-future-snow',
         climate='future'),
    # Baseline run: climate defaults to 'observed' and is built (not copied).
    dict(scenario='CurCond',
         landuse=None,
         lc_lookup_fn='ki5krcs.csv'),
    dict(scenario='PrescFire',
         landuse=[(not_shrub_selector, 110), (shrub_selector, 122)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),
    dict(scenario='LowSev',
         landuse=[(not_shrub_selector, 106), (shrub_selector, 121)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),
    dict(scenario='ModSev',
         landuse=[(not_shrub_selector, 118), (shrub_selector, 120)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),
    dict(scenario='HighSev',
         landuse=[(not_shrub_selector, 105), (shrub_selector, 119)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),
    dict(scenario='Thinn96',
         landuse=[(not_shrub_selector, 123)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),
    dict(scenario='Thinn93',
         landuse=[(not_shrub_selector, 115)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),
    dict(scenario='Thinn85',
         landuse=[(not_shrub_selector, 117)],
         lc_lookup_fn='ki5krcs.csv',
         climate='copyCurCond'),  # <- EXAMPLE FOR COPYING CLIMATE
]
# When True, projects that already have their exported channel shapefile are
# skipped instead of being rebuilt from scratch.
skip_completed = True

# Optional wildcard taken from the command line: a trailing argument restricts
# execution to working dirs containing that substring; when the last argument
# is the script itself (contains '.py') no filter is applied.
wc = sys.argv[-1]
if '.py' in wc:
    wc = None

# Cross every scenario with every watershed to build the full project list.
projects = []
for scenario in scenarios:
    for watershed in watersheds:
        proj = deepcopy(watershed)
        proj['cfg'] = scenario.get('cfg', 'lt-wepp_bd16b69-snow')
        proj['landuse'] = scenario['landuse']
        proj['lc_lookup_fn'] = scenario.get('lc_lookup_fn', 'landSoilLookup.csv')
        proj['climate'] = scenario.get('climate', 'observed')
        proj['scenario'] = scenario['scenario']
        proj['wd'] = 'lt_202012_%s_%s' % (watershed['watershed'], scenario['scenario'])
        projects.append(proj)
# Run every project end-to-end: DEM fetch, channel/subcatchment delineation,
# landuse, soils, climate, WEPP hillslope + watershed runs, and exports.
# Any project that raises is recorded in the 'failed' log before the
# exception is re-raised.
failed = open('failed', 'w')
for proj in projects:
    # Hoisted out of the try block so the except clause can always log it
    # (previously `wd` could be unbound on a first-iteration failure).
    wd = proj['wd']
    try:
        extent = proj['extent']
        map_center = proj['map_center']
        map_zoom = proj['map_zoom']
        outlet = proj['outlet']
        default_landuse = proj['landuse']
        cfg = proj['cfg']
        climate_mode = proj['climate']
        lc_lookup_fn = proj['lc_lookup_fn']
        watershed = proj['watershed']
        scenario = proj['scenario']

        # Honor the command-line wildcard filter, if any.
        if wc is not None:
            if wc not in wd:
                continue

        # A finished run already has the exported channel shapefile.
        if skip_completed:
            if _exists(_join(wd, 'export', 'arcmap', 'channels.shp')):
                log_print('has channels.shp... skipping.')
                continue

        log_print('cleaning dir')
        if _exists(wd):
            print()
            shutil.rmtree(wd)
        os.mkdir(wd)

        log_print('initializing project')
        ron = Ron(wd, "%s.cfg" % cfg)
        ron.name = wd
        ron.set_map(extent, map_center, zoom=map_zoom)

        log_print('fetching dem')
        ron.fetch_dem()

        log_print('building channels')
        topaz = Topaz.getInstance(wd)
        topaz.build_channels(csa=5, mcl=60)
        topaz.set_outlet(*outlet)
        sleep(0.5)

        log_print('building subcatchments')
        topaz.build_subcatchments()

        log_print('abstracting watershed')
        wat = Watershed.getInstance(wd)
        wat.abstract_watershed()
        translator = wat.translator_factory()
        topaz_ids = [top.split('_')[1] for top in translator.iter_sub_ids()]

        log_print('building landuse')
        landuse = Landuse.getInstance(wd)
        landuse.mode = LanduseMode.Gridded
        landuse.build()
        landuse = Landuse.getInstance(wd)

        # topaz_ids is a list of string ids e.g. ['22', '23']
        if default_landuse is not None:
            log_print('setting default landuse')
            tops = []
            for selector, dom in default_landuse:
                _topaz_ids = selector(landuse, None)
                # Never remap bare / sod grass / bunch grass hillslopes.
                bare_tops = bare_or_sodgrass_or_bunchgrass_selector(landuse, None)
                _topaz_ids = [top for top in _topaz_ids if top not in bare_tops]
                landuse.modify(_topaz_ids, dom)
                tops.extend(_topaz_ids)

        log_print('building soils')
        if _exists(_join(wd, 'lt.nodb')):
            lt = LakeTahoe.getInstance(wd)
            lt.lc_lookup_fn = lc_lookup_fn
        soils = Soils.getInstance(wd)
        soils.mode = SoilsMode.Gridded
        soils.build()

        log_print('building climate')
        if climate_mode == 'observed':
            climate = Climate.getInstance(wd)
            stations = climate.find_closest_stations()
            climate.input_years = 30
            climate.climatestation = stations[0]['id']
            climate.climate_mode = ClimateMode.Observed
            climate.climate_spatialmode = ClimateSpatialMode.Multiple
            climate.set_observed_pars(start_year=1990, end_year=2019)
        elif climate_mode == 'future':
            climate = Climate.getInstance(wd)
            stations = climate.find_closest_stations()
            climate.input_years = 30
            climate.climatestation = stations[0]['id']
            climate.climate_mode = ClimateMode.Future
            climate.climate_spatialmode = ClimateSpatialMode.Single
            climate.set_future_pars(start_year=2018, end_year=2018 + 30)
        elif climate_mode == 'vanilla':
            climate = Climate.getInstance(wd)
            stations = climate.find_closest_stations()
            climate.input_years = 30
            climate.climatestation = stations[0]['id']
            climate.climate_mode = ClimateMode.Vanilla
            climate.climate_spatialmode = ClimateSpatialMode.Single
        elif 'copy' in climate_mode:
            # e.g. 'copyCurCond' clones the already-built climate of the
            # 'CurCond' run for the same watershed instead of rebuilding it.
            src_wd = 'lt_202012_%s_%s' % (watershed, climate_mode[4:])
            shutil.rmtree(_join(wd, 'climate'))
            shutil.copytree(_join(src_wd, 'climate'), _join(wd, 'climate'))
            with open(_join(src_wd, 'climate.nodb')) as fp:
                contents = fp.read()
            with open(_join(wd, 'climate.nodb'), 'w') as fp:
                # Rewrite the serialized paths to point at this project.
                fp.write(contents.replace(src_wd, wd))
        else:
            raise Exception("Unknown climate_mode")

        if 'copy' not in climate_mode:
            climate.build(verbose=1)

        log_print('prepping wepp')
        wepp = Wepp.getInstance(wd)
        wepp.parse_inputs(proj)
        wepp.prep_hillslopes()

        log_print('running hillslopes')
        wepp.run_hillslopes()

        log_print('prepping watershed')
        wepp = Wepp.getInstance(wd)
        wepp.prep_watershed(erodibility=proj['erod'], critical_shear=proj['cs'])
        wepp._prep_pmet(mid_season_crop_coeff=proj['mid_season_crop_coeff'], p_coeff=proj['p_coeff'])

        log_print('running watershed')
        wepp.run_watershed()

        log_print('generating loss report')
        wepp.report_loss()  # side effect: generates the loss report

        log_print('generating totalwatsed report')
        fn = _join(ron.export_dir, 'totalwatsed.csv')
        totwatsed = TotalWatSed(_join(ron.output_dir, 'totalwatsed.txt'),
                                wepp.baseflow_opts, wepp.phosphorus_opts)
        totwatsed.export(fn)
        assert _exists(fn)

        log_print('exporting arcmap resources')
        arc_export(wd)
    except Exception:
        # Catch Exception — not a bare except — so KeyboardInterrupt and
        # SystemExit still abort immediately.  Flush so the failing project
        # is on disk even though the exception is re-raised right after.
        failed.write('%s\n' % wd)
        failed.flush()
        raise
| StarcoderdataPython |
26279 | <gh_stars>1-10
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
# Absolute path of the directory containing this config file.
basedir = os.path.abspath(os.path.dirname(__file__))

# CSRF protection flag (presumably read by Flask-WTF — confirm against the app setup).
CSRF_ENABLED = True
# NOTE(review): secret key is hard-coded; consider loading it from the
# environment in production.
SECRET_KEY = 'parayan-manasilla'
| StarcoderdataPython |
3248319 | <filename>olist_data_warehouse/project/src/data_warehouse/create_data_warehouse.py
# This file contains the functions that create the data warehouse schema, fact table, dimensional tables, etc.
def create_data_warehouse_schema(cursor):
    """
    Summary: Creates the Olist Data Warehouse Database Schema.
    Args:
        : cursor (DataBase cursor): Cursor of the connection with the database
    """
    schema_name = "olist_data_warehouse"  # Data Warehouse schema name
    # Idempotent: IF NOT EXISTS makes repeated runs safe.
    cursor.execute(f"CREATE SCHEMA IF NOT EXISTS {schema_name}")
def create_data_warehouse_tables(cursor):
    """
    Summary: Creates the Olist Data Warehouse Database tables.
    Args:
        : cursor (DataBase cursor): Cursor of the connection with the database
    """
    # DDL statements run in dependency order: Dim_customers must exist before
    # Orders_Fact, which references it.  All statements are idempotent.
    ddl_statements = (
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Dim_customers (
            customer_id VARCHAR(32) PRIMARY KEY,
            customer_unique_id VARCHAR(32),
            customer_zip_code_prefix VARCHAR(5) NOT NULL
        )""",
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Orders_Fact (
            order_id VARCHAR(32),
            customer_id VARCHAR(32) REFERENCES olist_data_warehouse.Dim_customers (customer_id),
            payment_value NUMERIC(12, 2) CHECK (payment_value >= 0) NOT NULL
        )""",
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Dim_payments (
            order_id VARCHAR(32),
            payment_installments VARCHAR(3) NOT NULL,
            payment_type VARCHAR(20) NOT NULL
        )""",
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Dim_geolocation (
            geolocation_zip_code_prefix VARCHAR(5),
            geolocation_lat VARCHAR(30) NOT NULL,
            geolocation_lng VARCHAR(30) NOT NULL,
            geolocation_city VARCHAR(40) NOT NULL,
            geolocation_state VARCHAR(2)
        )""",
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Dim_order_items (
            order_id VARCHAR(32),
            product_id VARCHAR(32) NOT NULL,
            product_category_name VARCHAR(60),
            order_item_id VARCHAR(2),
            price NUMERIC(12, 2) CHECK (price > 0) NOT NULL,
            freight_value NUMERIC(12, 2) CHECK (freight_value >= 0) NOT NULL,
            PRIMARY KEY (order_id, order_item_id)
        )""",
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Dim_date (
            order_id VARCHAR(32),
            order_status VARCHAR(20) NOT NULL,
            order_purchase_timestamp TIMESTAMP NOT NULL,
            order_delivered_customer_date TIMESTAMP,
            order_estimated_delivery_date TIMESTAMP
        )""",
        """CREATE TABLE IF NOT EXISTS olist_data_warehouse.Dim_review (
            order_id VARCHAR(32),
            order_review_score NUMERIC(2)
        )""",
    )
    for ddl in ddl_statements:
        cursor.execute(ddl)
| StarcoderdataPython |
6428308 | <gh_stars>1-10
from rich import box
from rich.align import Align
from rich.console import Group
from rich.panel import Panel
from rich.table import Table
from config import get_elasticsearch_url
from client.elasticsearch import get_plain_response
def display_information_widget() -> Table:
    """Build the top banner panel: current cluster URL plus the cluster-health table."""
    cluster_line = Align.center(
        "Current Elasticsearch cluster: [bold green]" + get_elasticsearch_url() + "[/]"
    )
    body = Group(cluster_line, Align.center(get_health_table()))
    return Panel(
        Align.center(body, vertical="top"),
        box=box.ROUNDED,
        padding=(1, 2),
        title="[bold]ElasticSearch Cluster Watcher[/]",
        border_style="bright_green",
    )
def get_health_table() -> Table:
    """Render the output of Elasticsearch's `_cat/health?v` endpoint as a rich Table.

    The first line of the response is the header row; the second line holds
    the values.  The `status` cell is colorized via prettyStatus().
    """
    response = get_plain_response('_cat/health?v')
    lines = response.splitlines()
    table = Table(show_footer=False)

    column_names = lines[0].split()
    for column_name in column_names:
        table.add_column(column_name, no_wrap=True)

    health_data = lines[1].split()
    # Locate the status column by header name instead of the previous
    # hard-coded position 3, so the table still renders correctly if the
    # cat API adds, removes or reorders columns (the old code raised
    # IndexError on anything other than exactly 14 columns).
    status_index = column_names.index('status') if 'status' in column_names else -1
    table.add_row(*(prettyStatus(value) if index == status_index else value
                    for index, value in enumerate(health_data)))
    return table
def get_health_info() -> Table:
    """Wrap the cluster-health table in a standalone cyan panel."""
    body = Align.center(Align.center(get_health_table()), vertical="top")
    return Panel(
        body,
        box=box.ROUNDED,
        padding=(1, 2),
        title="[bold]Health Information[/]",
        border_style="bright_cyan",
    )
def prettyStatus(status):
    """Wrap a cluster status string in rich colour markup.

    'green' and 'yellow' keep their own colour; anything else (e.g. 'red')
    is rendered magenta.
    """
    colour = {'green': 'green', 'yellow': 'yellow'}.get(status, 'magenta')
    return f"[bold {colour}]{status}[/]"
| StarcoderdataPython |
11239090 | import re
import pywikibot
import requests
from api.importer import AdditionalDataImporter
from api.servicemanager.pgrest import DynamicBackend
from page_lister import get_pages_from_category
# Shared PostgREST backend client; its `.backend` attribute is the base URL
# used to build the REST queries issued by the importers below.
dyn_backend = DynamicBackend()
def use_wiktionary(language):
    """Class decorator factory: attach a pywikibot Site for the given
    Wiktionary language edition as the class attribute ``wiki``."""
    def decorator(cls):
        cls.wiki = pywikibot.Site(language, 'wiktionary')
        return cls
    return decorator
class WiktionaryAdditionalDataImporter(AdditionalDataImporter):
    """Importer that pulls additional word data out of Wiktionary pages.

    It only processes pages that already exist in the word database for the
    target language and that do not yet carry this importer's data type.
    """

    def fetch_additional_data_for_category(self, language, category_name):
        """Process every page of *category_name* that is present in the DB
        for *language* but not yet annotated with this importer's data type."""
        url = dyn_backend.backend + "/word_with_additional_data"
        params = {
            'language': f'eq.{language}',
            'select': 'word,additional_data',
        }
        words = requests.get(url, params=params).json()
        # Database entries containing the data_type already defined.
        already_defined_pages = {
            w['word'] for w in words
            if self.is_data_type_already_defined(w['additional_data'])
        }

        url = dyn_backend.backend + "/word"
        params = {
            'language': f'eq.{language}',
        }
        words = requests.get(url, params=params).json()
        pages_defined_in_database = {w['word'] for w in words}

        self.counter = 0
        category_pages = {
            k.title() for k in get_pages_from_category('en', category_name)
        }
        # Wiki pages that may not have been parsed yet: in the category AND
        # in the database, minus those already carrying the data type.
        titles = (category_pages & pages_defined_in_database) - already_defined_pages
        wikipages = {pywikibot.Page(self.wiktionary, page) for page in titles}
        for wikipage in wikipages:
            self.process_wikipage(wikipage, language)

    def process_wikipage(self, wikipage: pywikibot.Page, language: str):
        """Fetch a live page's wikitext and hand it to the text-based parser."""
        content = wikipage.get()
        title = wikipage.title()
        return self.process_non_wikipage(title, content, language)

    def run(self, root_category: str, wiktionary=None):
        """Walk the sub-categories of *root_category* and import data for
        every sub-category whose leading word names a known language.

        `wiktionary` defaults to the English Wiktionary.  The Site object is
        created lazily here instead of as a default argument, so that merely
        defining this class no longer instantiates a pywikibot Site
        (default arguments are evaluated once, at definition time).
        """
        if wiktionary is None:
            wiktionary = pywikibot.Site('en', 'wiktionary')
        self.wiktionary = wiktionary
        root = pywikibot.Category(wiktionary, root_category)
        # Renamed from `category` to avoid shadowing the loop variable.
        for category in root.subcategories():
            name = category.title().replace('Category:', '')
            language_name = name.split()[0]
            if language_name in self.languages:
                iso = self.languages[language_name]
                self.fetch_additional_data_for_category(iso, category.title())
class TemplateImporter(WiktionaryAdditionalDataImporter):
    """Importer that extracts the value argument of wikitext template calls
    of the form ``{{<template_title>|<language>|<value>}}``."""

    def get_data(self, template_title: str, wikipage: str, language: str) -> list:
        """Return the third positional argument of every matching template
        call found in *wikipage* (one candidate per line)."""
        marker = "{{" + template_title + "|" + language
        found = []
        for line in wikipage.split('\n'):
            start = line.find(marker)
            if start == -1:
                continue
            # Drop everything before the template call, then take the value
            # after the second '|' and strip any brace residue.
            tail = line[start:]
            value = tail.split('|')[2].replace('}}', '').replace('{{', '')
            found.append(value)
        return found
class SubsectionImporter(WiktionaryAdditionalDataImporter):
    """Importer that extracts the raw wikitext body of a named sub-section
    (e.g. ``===Etymology===``) inside the target language's section."""

    # Heading text to look for; set via set_whole_section_name().
    section_name = ''
    # True if the section contains a number e.g. Etymology 1, Etymology 2, etc.
    numbered = False
    # Wiki heading level: 3 matches '===Heading===' headings.
    level = 3

    def __init__(self, **params):
        super(SubsectionImporter, self).__init__(**params)

    def set_whole_section_name(self, section_name: str):
        """Define the sub-section heading this importer should extract."""
        self.section_name = section_name

    def get_data(self, template_title, wikipage: str, language: str) -> list:
        """Return the text of every matching sub-section found in the target
        language's section of *wikipage*.

        *template_title* is unused here; it is kept so the signature matches
        the template-based importers' ``get_data``.
        """
        def retrieve_subsection(wikipage_, regex):
            # Extract the body between the heading matched by `regex` and
            # the next blank line (or end of the language section).
            retrieved_ = []
            target_subsection_section = re.search(regex, wikipage_)
            if target_subsection_section is not None:
                section = target_subsection_section.group()
                pos1 = wikipage_.find(section) + len(section)
                # section end is 2 newlines
                pos2 = wikipage_.find('\n\n', pos1)
                if pos2 != -1:
                    wikipage_ = wikipage_[pos1:pos2]
                else:
                    wikipage_ = wikipage_[pos1:]
                # More often than we'd like to admit,
                # the section level for the given sub-section is one level deeper than expected.
                # As a consequence, a '=<newline>' can appear before the sub-section content.
                # That often happens for references, derived terms, synonyms, etymologies and part of speech.
                # We could throw an Exception,
                # but there are 6.5M pages and God knows how many more cases to handle;
                # so we don't: let's focus on the job while still keeping it simple.
                # Hence, the hack below can help the script fall back on its feet while still doing its job
                # of fetching the subsection's content.
                # I didn't look for sub-sections that are actually 2 levels or more deeper than expected.
                # Should there be any of that, copy and adapt the condition.
                # I didn't do it here because -- I.M.H.O -- Y.A.G.N.I right now.
                # My most sincere apologies to perfectionists.
                if wikipage_.startswith('=\n'):
                    wikipage_ = wikipage_[2:]
                retrieved_.append(wikipage_.lstrip('\n'))
            return retrieved_

        retrieved = []
        # Retrieving and narrowing to target section
        if self.numbered:
            number_rgx = ' [1-9]+'
        else:
            number_rgx = ''
        # Locate the '==<Language>==' section for the requested language.
        target_language_section = re.search(
            '==[ ]?' + self.iso_codes[language] + '[ ]?==', wikipage)
        if target_language_section is not None:
            section_begin = wikipage.find(target_language_section.group())
            # '----' is the horizontal rule separating language sections.
            section_end = wikipage.find('----', section_begin)
            if section_end != -1:
                lang_section_wikipage = wikipage = wikipage[section_begin:section_end]
            else:
                lang_section_wikipage = wikipage = wikipage[section_begin:]
        else:
            return []
        # One extraction pass per matching heading; `wikipage` is reset to the
        # full language section after each pass because retrieve_subsection
        # narrows its argument.
        for regex_match in re.findall('=' * self.level + '[ ]?' + self.section_name + number_rgx + '=' * self.level,
                                      wikipage):
            retrieved += retrieve_subsection(wikipage, regex_match)
            wikipage = lang_section_wikipage
        returned_subsections = [s for s in retrieved if s]
        return returned_subsections
| StarcoderdataPython |
1652101 | from src.database_access import database_access
class Message:
    """Thin data-access layer for the `messages` table.

    All methods are static and take an open `database_access` instance; no
    connection state is held on the class.
    """

    @staticmethod
    def send_message(sender: str, receiver: str, body: str, db: database_access):
        """Insert a new message row."""
        # the status is either sent or read
        sql_post_messages_string = '''
        INSERT INTO messages (sender, receiver, body) VALUES (?, ?, ?)
        '''
        db.execute(sql_post_messages_string, [sender, receiver, body])

    @staticmethod
    def get_my_messages(receiver: str, db: database_access):
        """Return every message addressed to *receiver*, oldest first."""
        sql_get_messages = '''
        SELECT * FROM messages WHERE receiver = ? ORDER BY time_sent
        '''
        return db.execute(sql_get_messages, [receiver])

    @staticmethod
    def delete_message(message_id: int, db: database_access):
        """Delete a message and return True if it is gone afterwards."""
        sql_delete_message = '''
        DELETE FROM messages WHERE message_id = ?
        '''
        db.execute(sql_delete_message, [message_id])
        # checking if the delete was successful
        check = 'SELECT COUNT(*) FROM messages WHERE message_id = ?'
        res = db.execute(check, [message_id])
        return res[0][0] == 0
200809 | <reponame>utkuyaman/csv_pile_2_xlsx<filename>main.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Nov 21, 2015
@author: tuku
"""
import argparse, sys, os
import xlsxwriter
import csv
if __name__ == '__main__':
    # Merge every file found in --source into one .xlsx workbook, one
    # worksheet per file.
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--source", help="source folder that contains csv files", type=str)
    parser.add_argument("-d", "--destination", help="final file name", type=str)
    parser.add_argument("-f", "--force", help="force final name override", action="store_true")
    parser.add_argument("-v", "--verbose", help="toggle logging output", action="store_true")
    args = parser.parse_args()
    verbose = args.verbose
    if not args.source or not args.destination:
        parser.print_help()
        sys.exit(-1)
    # Source must be an existing directory.
    if not os.path.exists(args.source) or not os.path.isdir(args.source):
        print('Source must be a folder')
        parser.print_help()
        sys.exit(-1)
    # Refuse to clobber an existing destination unless -f was given.
    if os.path.exists(args.destination) and not args.force:
        print('Destination file already exists, try using -f argument or choosing another file name')
        parser.print_help()
        sys.exit(-1)
    # Sort for a deterministic worksheet order (os.listdir order is arbitrary).
    csv_files = sorted(f for f in os.listdir(args.source)
                       if os.path.isfile(os.path.join(args.source, f)))
    if verbose:
        print('found {} files'.format(len(csv_files)))
    # Create a new Excel file with one worksheet per input file.
    with xlsxwriter.Workbook(args.destination) as workbook:
        for file in csv_files:
            if verbose:
                print('merging file "{}"'.format(file))
            # Excel limits sheet names to 31 characters.
            # NOTE(review): two file names sharing the same first 31 chars
            # would collide here and raise -- assumed not to occur.
            worksheet = workbook.add_worksheet(file[:31])
            # newline='' is required by the csv module so quoted fields with
            # embedded newlines are parsed correctly.
            with open(os.path.join(args.source, file), 'r', newline='') as csvfile:
                reader = csv.reader(csvfile, delimiter='\t')
                row_count = 0
                for row_index, row in enumerate(reader):
                    worksheet.write_row(row_index, 0, tuple(row))
                    row_count = row_index + 1
                if verbose:
                    print('merged {} rows'.format(row_count))
    if verbose:
        print('saving file')
| StarcoderdataPython |
6470685 | <filename>Main.py
import subprocess
import Func
# Simple interactive menu driving the Func helpers and transcribe.py.
print("Hi This is Speech To Text/Text To Speech Converter")
print("What Do you Want ?")
print("1-Speech To Text")
print("2-Text To Speech ")
x = input()
if x == "1":
    print("What Do You Want ?")
    print("1-Record audio")
    print("2-Choose File from device ")
    y = input()
    if y == "1":
        # z is the number of seconds the user wants the recording to run.
        print("Please Enter How many seconds do You want To Record")
        z = input()
        # SECURITY FIX: pass the user's input as a discrete argument instead
        # of interpolating it into a shell command string.  The previous
        # shell=True form allowed command injection (e.g. entering "5; rm -rf .").
        subprocess.call(["python", "transcribe.py", "-t", z])
    elif y == "2":
        Func.AudioFromDevice()
    else:
        print("not a valid input please Try again")
elif x == "2":
    print("What Do You Want ?")
    print("1-Write text")
    print("2-Choose File from device ")
    y = input()
    if y == "1":
        print('\nPlease Enter The Text That you want :')
        Text = input()
        Func.TxtToSpeech(Text)
    elif y == "2":
        Func.TxtFileFromDevice()
    else:
        print("not a valid input please Try again")
else:
    print("not a valid input please Try again")
| StarcoderdataPython |
def test_rpush(judge_command):
    """RPUSH/LPUSH parse into the command, the key and the value list."""
    cases = [
        (
            "RPUSH list1 foo bar hello world",
            {
                "command_key_values": "RPUSH",
                "key": "list1",
                "values": "foo bar hello world",
            },
        ),
        (
            "LPUSH list1 foo",
            {"command_key_values": "LPUSH", "key": "list1", "values": "foo"},
        ),
    ]
    for command_text, expected in cases:
        judge_command(command_text, expected)
def test_lindex(judge_command):
    """LINDEX accepts positive and negative integer positions only."""
    cases = [
        (
            "LINDEX list1 10",
            {"command_key_position": "LINDEX", "key": "list1", "position": "10"},
        ),
        (
            "LINDEX list1 -10",
            {"command_key_position": "LINDEX", "key": "list1", "position": "-10"},
        ),
        # A float position must fail to parse.
        ("LINDEX list1 1.1", None),
    ]
    for command_text, expected in cases:
        judge_command(command_text, expected)
def test_lset(judge_command):
    """LSET takes a key, an integer position (possibly negative) and a value."""
    for position in ("10", "-1"):
        judge_command(
            "LSET list1 %s newbie" % position,
            {
                "command_key_position_value": "LSET",
                "key": "list1",
                "position": position,
                "value": "newbie",
            },
        )
def test_brpoplpush(judge_command):
    """BRPOPLPUSH needs a source key, a destination key and a timeout >= 0."""
    for timeout in ("10", "0"):
        judge_command(
            "BRPOPLPUSH list1 list2 %s" % timeout,
            {
                "command_key_newkey_timeout": "BRPOPLPUSH",
                "key": "list1",
                "newkey": "list2",
                "timeout": timeout,
            },
        )
    # Negative timeouts are rejected.
    judge_command("BRPOPLPUSH list1 list2 -1", None)
def test_linsert(judge_command):
    """LINSERT's BEFORE/AFTER keyword is matched case-insensitively."""
    for position_choice in ("BEFORE", "after"):
        judge_command(
            'LINSERT mylist %s "World" "There"' % position_choice,
            {
                "command_key_positionchoice_pivot_value": "LINSERT",
                "key": "mylist",
                "position_choice": position_choice,
                "value": ['"World"', '"There"'],
            },
        )
| StarcoderdataPython |
1604742 | from heapq import heappush, heappop
def main():
    """Entry point: read the input numbers and stream them through `heaps`."""
    numbers = readfile()
    heaps(numbers)
def heaps(arr):
    """Stream `arr` through two heaps, accumulating a running median sum.

    `max_heap` holds the lower half of the numbers seen so far (stored
    negated, because heapq is a min-heap) and `min_heap` holds the upper
    half.  After each insertion the heaps are rebalanced so their sizes
    differ by at most one, then `count_out` adds the current median to
    the global `out` and prints it.  The final total is printed mod 10000.
    """
    max_heap = [] # lowest numbers
    min_heap = [] # highest numbers
    for i in arr:
        # initial case
        if(len(max_heap) == 0):
            heappush(max_heap, i * -1)
            count_out(max_heap, min_heap)
            continue
        # determine if to push onto max or min heap
        # Peek at the max of the lower half (pop, then push it right back).
        med = heappop(max_heap)
        heappush(max_heap, med)
        if(abs(med) > i):
            heappush(max_heap, i * -1)
        else:
            heappush(min_heap, i)
        #normalize size
        ln = len(max_heap) # lowest numbers
        hn = len(min_heap) # highest numbers
        if abs(ln - hn) <= 1:
            count_out(max_heap, min_heap)
            continue;
        # Sizes differ by 2: move one root across to rebalance.
        if ln > hn:
            a = heappop(max_heap)
            heappush(min_heap, a * -1)
        else:
            a = heappop(min_heap)
            heappush(max_heap, a * -1)
        count_out(max_heap, min_heap)
    print(out % 10000)
out = 0
def count_out(max_heap, min_heap):
    """Add the current median element to the global `out` and print it.

    Reads the root of whichever heap holds the median (the larger heap,
    or `max_heap` on a tie), restores the heap, and accumulates the
    absolute value into `out`.  Note: for an even element count this
    takes the lower-half root rather than averaging the two middles.
    """
    global out
    ln = len(max_heap) # lowest numbers
    hn = len(min_heap) # highest numbers
    if ln == hn or ln > hn:
        a = abs(heappop(max_heap))
        out +=a
        # Push back negated: max_heap stores negatives to simulate a max-heap.
        heappush(max_heap, a * -1)
    else:
        a = abs(heappop(min_heap))
        out +=a
        heappush(min_heap, a)
    print(out)
def readfile():
    """Read './heap.txt' and return its lines as a list of ints."""
    with open('./heap.txt') as source:
        # int() tolerates surrounding whitespace, so no explicit strip needed.
        return [int(line) for line in source]
if __name__ == "__main__":
    # Run only when executed as a script, not on import.
    main()
| StarcoderdataPython |
11339404 | from configs.ToRURAL import SOURCE_DATA_CONFIG,TARGET_DATA_CONFIG, EVAL_DATA_CONFIG, TARGET_SET, source_dir
from albumentations import HorizontalFlip, VerticalFlip, RandomRotate90, Normalize, RandomCrop, RandomScale
from albumentations import OneOf, Compose
import ever as er
# Backbone / experiment setup.
MODEL = 'ResNet'
IGNORE_LABEL = -1  # presumably the label value excluded from the loss -- TODO confirm
MOMENTUM = 0.9
NUM_CLASSES = 7
SAVE_PRED_EVERY = 2000  # checkpoint/save interval, in steps
SNAPSHOT_DIR = './log/pycda/back+/2rural'
#Hyper Paramters
WEIGHT_DECAY = 0.0005
LEARNING_RATE = 1e-2
NUM_STEPS = 15000
NUM_STEPS_STOP = 10000 # Use damping instead of early stopping
PREHEAT_STEPS = int(NUM_STEPS / 20)  # warm-up steps (1/20 of the schedule)
POWER = 0.9  # presumably the poly-LR decay exponent -- TODO confirm
EVAL_EVERY=2000
# Loss
LAMBDA_TRADE_OFF=1
CONF_THRESHOLD=0.7  # presumably min confidence for pseudo-labels -- TODO confirm
BOX_SIZE = [2, 4, 8]
MERGE_1X1 = True
LAMBDA_BALANCE = 1
LAMBDA_PSEUDO = 0.5
# Re-export the dataset configs imported above so downstream code can read
# everything from this single module.
TARGET_SET = TARGET_SET
SOURCE_DATA_CONFIG = SOURCE_DATA_CONFIG
TARGET_DATA_CONFIG=TARGET_DATA_CONFIG
EVAL_DATA_CONFIG=EVAL_DATA_CONFIG
| StarcoderdataPython |
1894288 | from django.contrib import admin
from . import models
# Register your models here.
class ApplicationAdmin(admin.ModelAdmin):
    """Changelist configuration for the Application model."""
    # Columns shown in the admin list view.
    list_display = ['applicant', 'selection_status', 'review_status', 'term_accepted', 'comments']
    # Sidebar filters for narrowing the list.
    list_filter = ['selection_status', 'term_accepted']
    # NOTE(review): this searches a 'status' field while the columns above use
    # 'selection_status'/'review_status' -- confirm 'status' exists on the model.
    search_fields = ['applicant', 'status', 'comments']
# Register the review-workflow models with the admin site.
# (Fix: stray dataset artifact text appended to the last line made it a
# syntax error; removed.)
admin.site.register(models.Application, ApplicationAdmin)
admin.site.register(models.Review)
admin.site.register(models.Feedback)
1988655 | # -*- coding: utf-8 -*-
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A service for dispatching and managing work based on load.
This module provides the `WorkQueueService`, which implements a simple
RPC service. Calls to the service are executed in strict FIFO order.
Workload is metered, so that the amount of work currently underway stays
below a target threshold. When workload goes above the threshold, new
calls block on the server side until capacity is available.
"""
from __future__ import print_function
import collections
import errno
import os
import pickle
import shutil
import time
from chromite.lib import cros_logging as logging
from chromite.lib import metrics
class WorkQueueTimeout(Exception):
  """Raised by `WorkQueueService.Wait()` when a request exceeds its timeout."""

  def __init__(self, request_id, timeout):
    message = 'Request "{:s}" timed out after {:d} seconds'.format(
        request_id, timeout)
    super(WorkQueueTimeout, self).__init__(message)
class _BaseWorkQueue(object):
  """State and helpers shared by the work-queue client and server.

  Requests live in a spool directory with one subdirectory per state.
  In the usual flow a request moves through four states in order --
  "requested", "pending", "running", "complete" -- and is represented by
  a file (named after its id) in the subdirectory for its current state.
  A fifth directory, "aborting", holds abort markers created by clients;
  an aborted request never reports a result, and if already complete its
  result is removed.  Aborts for unknown requests are ignored.

  Consistency is guaranteed by a simple ownership protocol:
    * Only clients create files in the "requested" and "aborting"
      directories; request ids are globally unique within the spool.
    * Only the (single-threaded) server moves request files from one
      state to the next, so every transition is atomic.
    * Normally only the client removes files from "complete"; an abort
      request transfers that responsibility to the server.

  Requests are identified by the timestamp assigned when they were
  enqueued and are handled in strict FIFO order of those timestamps.
  """

  # State subdirectory names.  The numeric prefixes make a lexical sort
  # of the names match processing order.
  _REQUESTED = '1-requested'
  _PENDING = '2-pending'
  _RUNNING = '3-running'
  _COMPLETE = '4-complete'
  _ABORTING = '5-aborting'
  _STATES = [_REQUESTED, _PENDING, _RUNNING, _COMPLETE, _ABORTING]

  # Format used to turn the arrival timestamp into a request id.  The id
  # must sort lexically in arrival order, be usable as a file name, and
  # keep enough fractional digits that collisions (between clients, or
  # between retry-loop iterations) are unlikely.  Named so unit tests can
  # patch in a collision-prone alternative.
  _REQUEST_ID_FORMAT = '{:.6f}'

  def __init__(self, spool_dir):
    self._spool_state_dirs = {}
    for state in self._STATES:
      self._spool_state_dirs[state] = os.path.join(spool_dir, state)

  def _GetRequestPathname(self, request_id, state):
    """Return the spool path for `request_id` when in state `state`."""
    return os.path.join(self._spool_state_dirs[state], request_id)

  def _RequestInState(self, request_id, state):
    """Return whether the spool file for `request_id` exists in `state`."""
    return os.path.exists(self._GetRequestPathname(request_id, state))

  def _MakeRequestId(self):
    """Create a new timestamp-based request id."""
    return self._REQUEST_ID_FORMAT.format(time.time())
class WorkQueueClient(_BaseWorkQueue):
  """Client-side API for the load-based task dispatching service.

  Typical usage:
      workqueue = service.WorkQueueClient(SPOOL_DIR)
      request_id = workqueue.EnqueueRequest(request_arguments)
      result = workqueue.Wait(request_id, timeout_value)

  `SPOOL_DIR` is the path to the work queue's spool directory,
  `request_arguments` is any object acceptable to the server-side call,
  and `timeout_value` is how long (in seconds) the client will wait for
  the result.

  A client that gives up on a request can call `AbortRequest()`; aborted
  requests are removed from the spool directory and never return a
  result.
  """

  # Seconds between successive completion checks inside `Wait()`.
  _WAIT_POLL_INTERVAL = 1.0

  def _IsComplete(self, request_id):
    """Return whether `request_id` has reached "completed" state."""
    return self._RequestInState(request_id, self._COMPLETE)

  def EnqueueRequest(self, request):
    """Submit `request` for processing and return its request id.

    Args:
      request: An object encapsulating the work to be done.

    Returns:
      The `request_id` to pass to `Wait()` or `AbortRequest()`.
    """
    while True:
      request_id = self._MakeRequestId()
      request_path = self._GetRequestPathname(request_id, self._REQUESTED)
      try:
        # O_EXCL makes creation atomic: if another client created this
        # request file first, the open fails with EEXIST and we retry
        # with a fresh timestamp id.
        fd = os.open(request_path, os.O_EXCL | os.O_CREAT | os.O_WRONLY)
      except OSError as oserr:
        if oserr.errno != errno.EEXIST:
          raise
        continue
      break
    with os.fdopen(fd, 'w') as request_file:
      pickle.dump(request, request_file)
    return request_id

  def AbortRequest(self, request_id):
    """Ask the server to drop `request_id`.

    The request is removed from the spool directory; if running, its
    result will be discarded and never returned.  Intended to be called
    only by the client that created the request.  Clients still waiting
    on an aborted request eventually time out.

    Args:
      request_id: The id of the request to be aborted.
    """
    abort_path = self._GetRequestPathname(request_id, self._ABORTING)
    open(abort_path, 'w').close()

  def Wait(self, request_id, timeout):
    """Block until `request_id` completes, then return its result.

    If the server recorded an exception as the result, it is re-raised
    here.  If no result appears within `timeout` seconds the request is
    aborted and `WorkQueueTimeout` is raised.

    Args:
      request_id: Id of the request to wait for.
      timeout: How long to wait before timing out.

    Returns:
      The result object reported for the task.

    Raises:
      WorkQueueTimeout: raised when the wait time exceeds `timeout`.
      Exception: any subclass of `Exception` may be raised if returned
          by the server.
    """
    deadline = time.time() + timeout
    while not self._IsComplete(request_id):
      if time.time() > deadline:
        self.AbortRequest(request_id)
        raise WorkQueueTimeout(request_id, timeout)
      time.sleep(self._WAIT_POLL_INTERVAL)
    result_path = self._GetRequestPathname(request_id, self._COMPLETE)
    with open(result_path, 'r') as result_file:
      result = pickle.load(result_file)
    os.remove(result_path)
    if isinstance(result, Exception):
      raise result
    assert not isinstance(result, BaseException)
    return result
class WorkQueueServer(_BaseWorkQueue):
  """A service for dispatching tasks based on system load.

  Typical usage:
      workqueue = service.WorkQueueService(SPOOL_DIR)
      workqueue.ProcessRequests(task_manager)

  Explanation of the usage:
    * `SPOOL_DIR` is the path to the work queue's spool directory.
    * `task_manager` is an instance of a concrete subclass of
      `tasks.TaskManager`.

  The server code in this class is independent of the details of the
  tasks being scheduled; the `ProcessRequests()` delegates management of
  tasks in the `_RUNNING` state to its `task_manager` parameter. The
  task manager object is responsible for these actions:
    * Starting new tasks.
    * Reporting results from completed tasks.
    * Aborting running tasks when requested.
    * Indicating whether capacity is available to start new tasks.
  """

  # _HEARTBEAT_INTERVAL -
  #     `ProcessRequests()` periodically logs a message at the start of
  #     tick, just so you can see it's alive. This value determines the
  #     approximate time in seconds in between messages.
  _HEARTBEAT_INTERVAL = 10 * 60

  # Metrics-related constants. These are the names of the various
  # metrics we report during `ProcessRequests()`.
  #
  # _METRIC_PREFIX - initial path to all workqueue server metrics
  #     names.
  #
  # 'ticks' - Counter incremented once for each time through the polling
  #     loop in `ProcessRequests()`.
  #
  # 'time_waiting' - Distribution of the amount of time that requests
  #     spend waiting in the queue.
  # 'time_running' - Distribution of the amount of time that requests
  #     spend actively running.
  # 'time_to_abort' - Distribution of the amount of time that an aborted
  #     request spent in its final state (_not_ the time spent from
  #     creation until termination).
  #
  # 'task_count' - Gauge for the number of requests currently in the
  #     queue. A 'state' field distinguishes waiting from running
  #     requests.
  #
  # 'total_received' - Counter for the number of requests seen by
  #     `_GetNewRequests()`.
  # 'total_completed' - Counter for the number of requests that have
  #     completed and been removed from the queue. A 'status'
  #     field distinguishes whether the request finished normally or was
  #     aborted.
  _METRIC_PREFIX = 'chromeos/provision_workqueue/server/'

  # Because of crbug.com/755415, the metrics have to be constructed
  # at run time, after calling ts_mon_config.SetupTsMonGlobalState().
  # So, just record what we'll construct, and leave the actual
  # construction till later.
  _METRICS_CONSTRUCTORS = [
      ('ticks', metrics.Counter),
      ('time_waiting',
       (lambda name: metrics.CumulativeSecondsDistribution(name, scale=0.01))),
      ('time_running',
       (lambda name: metrics.CumulativeSecondsDistribution(name, scale=0.01))),
      ('time_to_abort',
       (lambda name: metrics.CumulativeSecondsDistribution(name, scale=0.01))),
      ('task_count', metrics.Gauge),
      ('total_received', metrics.Counter),
      ('total_completed', metrics.Counter),
  ]
  # Named tuple holding one constructed metric per name above.
  _MetricsSet = collections.namedtuple(
      '_MetricsSet', [name for name, _ in _METRICS_CONSTRUCTORS])

  def _CreateSpool(self):
    """Create and populate the spool directory in the file system."""
    spool_dir = os.path.dirname(self._spool_state_dirs[self._REQUESTED])
    if os.path.exists(spool_dir):
      # Start from a clean slate: discard state left by a previous run.
      for old_path in os.listdir(spool_dir):
        shutil.rmtree(os.path.join(spool_dir, old_path))
    else:
      os.mkdir(spool_dir)
    # NOTE(review): `itervalues()` is Python 2 only; `.values()` would
    # work on both 2 and 3 -- confirm before any py3 migration.
    for state_dir in self._spool_state_dirs.itervalues():
      os.mkdir(state_dir)

  def _TransitionRequest(self, request_id, oldstate, newstate):
    """Move a request from one state to another."""
    logging.info('Transition %s from %s to %s',
                 request_id, oldstate, newstate)
    oldpath = self._GetRequestPathname(request_id, oldstate)
    newpath = self._GetRequestPathname(request_id, newstate)
    # rename() makes the state transition atomic.
    os.rename(oldpath, newpath)

  def _ClearRequest(self, request_id, state):
    """Remove a request given its state."""
    os.remove(self._GetRequestPathname(request_id, state))

  def _CompleteRequest(self, request_id, result):
    """Move a task that has finished running into "completed" state."""
    logging.info('Reaped %s, result = %r', request_id, result)
    completion_path = self._GetRequestPathname(request_id, self._COMPLETE)
    with open(completion_path, 'w') as f:
      pickle.dump(result, f)
    self._ClearRequest(request_id, self._RUNNING)

  def _GetRequestsByState(self, state):
    """Return all requests in a given state, sorted (i.e. FIFO) order."""
    requests_dir = self._spool_state_dirs[state]
    return sorted(os.listdir(requests_dir))

  def _GetNewRequests(self):
    """Move all tasks in `requested` state to `pending` state."""
    new_requests = self._GetRequestsByState(self._REQUESTED)
    if new_requests:
      # Spin until the clock produces an id strictly later than the
      # newest request seen, so no future request can collide with (or
      # sort before) one we are about to claim.
      while self._MakeRequestId() == new_requests[-1]:
        pass
    for request_id in new_requests:
      self._TransitionRequest(request_id, self._REQUESTED, self._PENDING)
    return new_requests

  def _GetAbortRequests(self):
    """Collect and clear all pending abort markers; return their ids."""
    new_requests = self._GetRequestsByState(self._ABORTING)
    for request_id in new_requests:
      logging.info('Abort requested for %s', request_id)
      self._ClearRequest(request_id, self._ABORTING)
    return new_requests

  def _StartRequest(self, request_id, manager):
    """Start execution of a given request."""
    pending_path = self._GetRequestPathname(request_id, self._PENDING)
    with open(pending_path, 'r') as f:
      request_object = pickle.load(f)
    manager.StartTask(request_id, request_object)
    self._TransitionRequest(request_id, self._PENDING, self._RUNNING)

  def _ProcessAbort(self, request_id, pending_requests, manager):
    """Actually remove a given request that is being aborted."""
    state = None
    if self._RequestInState(request_id, self._PENDING):
      pending_requests.remove(request_id)
      state = self._PENDING
    elif self._RequestInState(request_id, self._RUNNING):
      manager.TerminateTask(request_id)
      state = self._RUNNING
    elif self._RequestInState(request_id, self._COMPLETE):
      state = self._COMPLETE
    # No check for "requested" state; our caller guarantees it's not
    # needed.
    #
    # By design, we don't fail if the aborted request is already gone.
    if state is not None:
      logging.info('Abort is removing %s from state %s',
                   request_id, state)
      try:
        self._ClearRequest(request_id, state)
      except OSError:
        logging.exception('Request %s was not removed from %s.',
                          request_id, state)
    else:
      logging.info('Abort for non-existent request %s', request_id)

  def ProcessRequests(self, manager):
    """Main processing loop for the server-side daemon.

    The method runs indefinitely; it terminates only if an exception is
    raised during its execution.

    Args:
      manager: An instance of `tasks.TaskManager`. This object is
          responsible for starting and tracking running tasks.
    """
    self._CreateSpool()
    metrics_set = self._MetricsSet(
        *(constructor(self._METRIC_PREFIX + name)
          for name, constructor in self._METRICS_CONSTRUCTORS))
    pending_requests = []
    # Maps request_id -> time it entered its current stage; used for the
    # time_waiting / time_running / time_to_abort distributions.
    timestamps = {}
    tick_count = 0
    next_heartbeat = time.time()
    while True:
      tick_count += 1
      if time.time() >= next_heartbeat:
        next_heartbeat = time.time() + self._HEARTBEAT_INTERVAL
        logging.debug('Starting tick number %d', tick_count)
      manager.StartTick()
      # Phase 1: reap finished tasks and publish their results.
      num_completed = 0
      for request_id, result in manager.Reap():
        num_completed += 1
        metrics_set.total_completed.increment(fields={'status': 'normal'})
        time_running = time.time() - timestamps.pop(request_id)
        metrics_set.time_running.add(time_running)
        self._CompleteRequest(request_id, result)
      # Phase 2: accept newly enqueued requests.
      num_added = 0
      for request_id in self._GetNewRequests():
        num_added += 1
        metrics_set.total_received.increment()
        timestamps[request_id] = time.time()
        pending_requests.append(request_id)
      # Phase 3: process abort markers.
      num_aborted = 0
      for abort_id in self._GetAbortRequests():
        num_aborted += 1
        metrics_set.total_completed.increment(fields={'status': 'abort'})
        if abort_id in timestamps:
          time_to_abort = time.time() - timestamps.pop(abort_id)
          metrics_set.time_to_abort.add(time_to_abort)
        self._ProcessAbort(abort_id, pending_requests, manager)
      # Phase 4: start pending requests (FIFO) while capacity allows.
      num_started = 0
      while pending_requests and manager.HasCapacity():
        num_started += 1
        request_id = pending_requests.pop(0)
        time_now = time.time()
        time_waiting = time_now - timestamps[request_id]
        metrics_set.time_waiting.add(time_waiting)
        timestamps[request_id] = time_now
        self._StartRequest(request_id, manager)
      if num_completed or num_added or num_aborted or num_started:
        logging.info('new: %d, started: %d, aborted: %d, completed: %d',
                     num_added, num_started, num_aborted, num_completed)
        num_pending = len(pending_requests)
        num_running = len(manager)
        logging.info('pending: %d, running: %d', num_pending, num_running)
        metrics_set.task_count.set(num_pending,
                                   fields={'state': 'pending'})
        metrics_set.task_count.set(num_running,
                                   fields={'state': 'running'})
      metrics_set.ticks.increment()
      time.sleep(manager.sample_interval)
| StarcoderdataPython |
5157190 | <reponame>MATTHEWFRAZER/pygmy
#!/usr/bin/python
import subprocess
# Fall back to a manually opened null device on Python 2, where
# subprocess.DEVNULL does not exist.
try:
    from subprocess import DEVNULL # py3k
except ImportError:
    import os
    DEVNULL = open(os.devnull, 'wb')


def _tag_exists(candidate):
    """Return True when `candidate` is already a revision known to git."""
    return subprocess.call(["git", "rev-parse", "--verify", candidate],
                           stdout=DEVNULL, stderr=DEVNULL) == 0


version_path = "version.txt"
build_type = "rc"
with open(version_path, "r") as f:
    version = f.readline().strip()

# Probe successive build numbers until we find a tag git does not know
# about yet, then print it.  Because this scans existing tags, the order
# in which the build process invokes this script matters.
build_number = 0
tag = "{0}-{1}.{2}".format(version, build_type, build_number)
while _tag_exists(tag):
    build_number += 1
    tag = "{0}-{1}.{2}".format(version, build_type, build_number)
print(tag)
3372380 | <gh_stars>1-10
#!/usr/bin/env python3
import humanify
import datetime

# Known-good sample: Tuesday 2018-09-04 00:40:20.
test_date = datetime.datetime(2018, 9, 4, 0, 40, 20)
# Idiom fix: `!=` instead of `not ... == ...`; behavior is identical.
if humanify.datetime(test_date) != "Tuesday, 4th of September 2018 00:40:20":
    raise Exception("Unit test gave incorrect results")
| StarcoderdataPython |
9778776 | <reponame>sevas/vispy
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from __future__ import division
import numpy as np
from .widget import Widget
from ...ext.cassowary import (SimplexSolver, expression,
Variable, WEAK, REQUIRED,
STRONG, RequiredFailure)
class Grid(Widget):
"""
Widget that automatically sets the position and size of child Widgets to
proportionally divide its internal area into a grid.
Parameters
----------
spacing : int
Spacing between widgets.
**kwargs : dict
Keyword arguments to pass to `Widget`.
"""
    def __init__(self, spacing=6, **kwargs):
        from .viewbox import ViewBox
        self._next_cell = [0, 0]  # row, col
        self._cells = {}
        # Maps insertion index -> (row, col, row_span, col_span, widget).
        self._grid_widgets = {}
        self.spacing = spacing
        self._n_added = 0
        self._default_class = ViewBox  # what to add when __getitem__ is used
        self._solver = None
        # Flipped whenever the layout changes so the solver is lazily rebuilt.
        self._need_solver_recreate = True
        # width and height of the Rect used to place child widgets
        self._var_w = Variable("w_rect")
        self._var_h = Variable("h_rect")
        # Per-cell solver variables; populated when the solver is created.
        self._width_grid = None
        self._height_grid = None
        self._height_stay = None
        self._width_stay = None
        Widget.__init__(self, **kwargs)
    def __getitem__(self, idxs):
        """Return an item or create it if the location is available

        `idxs` is a (row, col) pair of ints or slices; a single index is
        treated as the row with all columns.  If the addressed region is
        empty, a new `self._default_class` widget is created there.
        """
        if not isinstance(idxs, tuple):
            idxs = (idxs,)
        if len(idxs) == 1:
            idxs = idxs + (slice(None),)
        elif len(idxs) != 2:
            raise ValueError('Incorrect index: %s' % (idxs,))
        # Normalize each index into [start, end) limits per axis.
        lims = np.empty((2, 2), int)
        for ii, idx in enumerate(idxs):
            if isinstance(idx, int):
                idx = slice(idx, idx + 1, None)
            if not isinstance(idx, slice):
                raise ValueError('indices must be slices or integers, not %s'
                                 % (type(idx),))
            if idx.step is not None and idx.step != 1:
                raise ValueError('step must be one or None, not %s' % idx.step)
            start = 0 if idx.start is None else idx.start
            end = self.grid_size[ii] if idx.stop is None else idx.stop
            lims[ii] = [start, end]
        layout = self.layout_array
        # +1 so "empty" (-1) becomes 0 and any occupied cell is truthy.
        existing = layout[lims[0, 0]:lims[0, 1], lims[1, 0]:lims[1, 1]] + 1
        if existing.any():
            existing = set(list(existing.ravel()))
            ii = list(existing)[0] - 1
            # The region must be exactly one widget's full footprint.
            if len(existing) != 1 or ((layout == ii).sum() !=
                                      np.prod(np.diff(lims))):
                raise ValueError('Cannot add widget (collision)')
            return self._grid_widgets[ii][-1]
        spans = np.diff(lims)[:, 0]
        item = self.add_widget(self._default_class(),
                               row=lims[0, 0], col=lims[1, 0],
                               row_span=spans[0], col_span=spans[1])
        return item
    def add_widget(self, widget=None, row=None, col=None, row_span=1,
                   col_span=1, **kwargs):
        """
        Add a new widget to this grid. This will cause other widgets in the
        grid to be resized to make room for the new widget. Can be used
        to replace a widget as well

        Parameters
        ----------
        widget : Widget | None
            The Widget to add. New widget is constructed if widget is None.
        row : int
            The row in which to add the widget (0 is the topmost row)
        col : int
            The column in which to add the widget (0 is the leftmost column)
        row_span : int
            The number of rows to be occupied by this widget. Default is 1.
        col_span : int
            The number of columns to be occupied by this widget. Default is 1.
        **kwargs : dict
            parameters sent to the new Widget that is constructed if
            widget is None

        Returns
        -------
        widget : Widget
            The widget that was added (or constructed).

        Notes
        -----
        The widget's parent is automatically set to this grid, and all other
        parent(s) are removed.
        """
        # Default to the current insertion cursor.
        if row is None:
            row = self._next_cell[0]
        if col is None:
            col = self._next_cell[1]
        if widget is None:
            widget = Widget(**kwargs)
        else:
            if kwargs:
                raise ValueError("cannot send kwargs if widget is given")
        _row = self._cells.setdefault(row, {})
        _row[col] = widget
        self._grid_widgets[self._n_added] = (row, col, row_span, col_span,
                                             widget)
        self._n_added += 1
        widget.parent = self
        # Advance the insertion cursor to just right of this widget.
        self._next_cell = [row, col+col_span]
        widget._var_w = Variable("w-(row: %s | col: %s)" % (row, col))
        widget._var_h = Variable("h-(row: %s | col: %s)" % (row, col))
        # update stretch based on colspan/rowspan
        # usually, if you make something consume more grids or columns,
        # you also want it to actually *take it up*, ratio wise.
        # otherwise, it will never *use* the extra rows and columns,
        # thereby collapsing the extras to 0.
        stretch = list(widget.stretch)
        stretch[0] = col_span if stretch[0] is None else stretch[0]
        stretch[1] = row_span if stretch[1] is None else stretch[1]
        widget.stretch = stretch
        self._need_solver_recreate = True
        return widget
def remove_widget(self, widget):
"""Remove a widget from this grid
Parameters
----------
widget : Widget
The Widget to remove
"""
self._grid_widgets = dict((key, val)
for (key, val) in self._grid_widgets.items()
if val[-1] != widget)
self._need_solver_recreate = True
def resize_widget(self, widget, row_span, col_span):
"""Resize a widget in the grid to new dimensions.
Parameters
----------
widget : Widget
The widget to resize
row_span : int
The number of rows to be occupied by this widget.
col_span : int
The number of columns to be occupied by this widget.
"""
row = None
col = None
for (r, c, rspan, cspan, w) in self._grid_widgets.values():
if w == widget:
row = r
col = c
break
if row is None or col is None:
raise ValueError("%s not found in grid" % widget)
self.remove_widget(widget)
self.add_widget(widget, row, col, row_span, col_span)
self._need_solver_recreate = True
    def _prepare_draw(self, view):
        # Recompute child widget geometry (if needed) before drawing.
        self._update_child_widget_dim()
def add_grid(self, row=None, col=None, row_span=1, col_span=1,
**kwargs):
"""
Create a new Grid and add it as a child widget.
Parameters
----------
row : int
The row in which to add the widget (0 is the topmost row)
col : int
The column in which to add the widget (0 is the leftmost column)
row_span : int
The number of rows to be occupied by this widget. Default is 1.
col_span : int
The number of columns to be occupied by this widget. Default is 1.
**kwargs : dict
Keyword arguments to pass to the new `Grid`.
"""
from .grid import Grid
grid = Grid(**kwargs)
return self.add_widget(grid, row, col, row_span, col_span)
def add_view(self, row=None, col=None, row_span=1, col_span=1,
**kwargs):
"""
Create a new ViewBox and add it as a child widget.
Parameters
----------
row : int
The row in which to add the widget (0 is the topmost row)
col : int
The column in which to add the widget (0 is the leftmost column)
row_span : int
The number of rows to be occupied by this widget. Default is 1.
col_span : int
The number of columns to be occupied by this widget. Default is 1.
**kwargs : dict
Keyword arguments to pass to `ViewBox`.
"""
from .viewbox import ViewBox
view = ViewBox(**kwargs)
return self.add_widget(view, row, col, row_span, col_span)
def next_row(self):
self._next_cell = [self._next_cell[0] + 1, 0]
@property
def grid_size(self):
    """(n_rows, n_cols) needed to contain all widgets: the maximum of
    position + span over every widget, or (0, 0) for an empty grid."""
    entries = self._grid_widgets.values()
    row_extents = [e[0] + e[2] for e in entries]
    col_extents = [e[1] + e[3] for e in entries]
    return max(row_extents + [0]), max(col_extents + [0])
@property
def layout_array(self):
    """2-D int array mapping each grid cell to the key of the widget that
    occupies it; empty cells hold -1."""
    occupancy = -1 * np.ones(self.grid_size, int)
    for key in self._grid_widgets.keys():
        row, col, row_span, col_span = self._grid_widgets[key][:4]
        occupancy[row:row + row_span, col:col + col_span] = key
    return occupancy
def __repr__(self):
    """Show the grid's identity plus a 1-based map of cell occupancy
    (0 means an empty cell)."""
    header = '<Grid at %s:\n' % hex(id(self))
    body = str(self.layout_array + 1)
    return header + body + '>'
@staticmethod
def _add_total_width_constraints(solver, width_grid, _var_w):
    # For each row of cell-width variables, require that the widths sum
    # exactly to the grid's total-width variable.
    for ws in width_grid:
        width_expr = expression.Expression()
        for w in ws:
            width_expr = width_expr + w
        solver.add_constraint(width_expr == _var_w, strength=REQUIRED)
@staticmethod
def _add_total_height_constraints(solver, height_grid, _var_h):
    # For each column of cell-height variables, require that the heights
    # sum exactly to the grid's total-height variable.
    for hs in height_grid:
        height_expr = expression.Expression()
        for h in hs:
            height_expr += h
        solver.add_constraint(height_expr == _var_h, strength=REQUIRED)
@staticmethod
def _add_gridding_width_constraints(solver, width_grid):
    # access widths of one "y", different x
    # Force each column of cells to share a single width: every width in
    # the transposed row must equal the first one.
    for ws in width_grid.T:
        for w in ws[1:]:
            solver.add_constraint(ws[0] == w, strength=REQUIRED)
@staticmethod
def _add_gridding_height_constraints(solver, height_grid):
    # access heights of one "y"
    # Force each row of cells to share a single height, mirroring the
    # width constraints above.
    for hs in height_grid.T:
        for h in hs[1:]:
            solver.add_constraint(hs[0] == h, strength=REQUIRED)
@staticmethod
def _add_stretch_constraints(solver, width_grid, height_grid,
                             grid_widgets, widget_grid):
    """Add WEAK constraints so cell sizes honour widget stretch factors.

    For every row/column a bucket of (size expression, stretch factor)
    pairs is collected; the solver is then asked (weakly) to keep
    size / stretch equal across each bucket, so widgets with a larger
    stretch receive proportionally more space.
    """
    xmax = len(height_grid)
    ymax = len(width_grid)
    # One bucket per row (widths) and per column (heights).
    stretch_widths = [[] for _ in range(0, ymax)]
    stretch_heights = [[] for _ in range(0, xmax)]
    for (y, x, ys, xs, widget) in grid_widgets.values():
        # Total width spanned by this widget in each row it occupies.
        for ws in width_grid[y:y+ys]:
            total_w = np.sum(ws[x:x+xs])
            for sw in stretch_widths[y:y+ys]:
                sw.append((total_w, widget.stretch[0]))
        # Total height spanned by this widget in each column it occupies.
        for hs in height_grid[x:x+xs]:
            total_h = np.sum(hs[y:y+ys])
            for sh in stretch_heights[x:x+xs]:
                sh.append((total_h, widget.stretch[1]))
    # Empty cells participate with a neutral stretch factor of 1.
    for (x, xs) in enumerate(widget_grid):
        for(y, widget) in enumerate(xs):
            if widget is None:
                stretch_widths[y].append((width_grid[y][x], 1))
                stretch_heights[x].append((height_grid[x][y], 1))
    # Tie each bucket together: size/stretch of every entry should match
    # the first entry's ratio. WEAK, so REQUIRED constraints always win.
    for sws in stretch_widths:
        if len(sws) <= 1:
            continue
        comparator = sws[0][0] / sws[0][1]
        for (stretch_term, stretch_val) in sws[1:]:
            solver.add_constraint(comparator == stretch_term / stretch_val,
                                  strength=WEAK)
    for sws in stretch_heights:
        if len(sws) <= 1:
            continue
        comparator = sws[0][0] / sws[0][1]
        for (stretch_term, stretch_val) in sws[1:]:
            solver.add_constraint(comparator == stretch_term / stretch_val,
                                  strength=WEAK)
@staticmethod
def _add_widget_dim_constraints(solver, width_grid, height_grid,
                                total_var_w, total_var_h, grid_widgets):
    """Constrain cell sizes to every widget's min/max width and height."""
    assert(total_var_w is not None)
    assert(total_var_h is not None)
    # No cell may end up with a negative size.
    for ws in width_grid:
        for w in ws:
            solver.add_constraint(w >= 0, strength=REQUIRED)
    for hs in height_grid:
        for h in hs:
            solver.add_constraint(h >= 0, strength=REQUIRED)
    for (_, val) in grid_widgets.items():
        (y, x, ys, xs, widget) = val
        for ws in width_grid[y:y+ys]:
            # Width this widget spans within the row.
            total_w = np.sum(ws[x:x+xs])
            # assert(total_w is not None)
            solver.add_constraint(total_w >= widget.width_min,
                                  strength=REQUIRED)
            if widget.width_max is not None:
                solver.add_constraint(total_w <= widget.width_max,
                                      strength=REQUIRED)
            else:
                # Unbounded widgets may grow up to the whole grid width.
                # NOTE(review): no explicit strength here (solver default),
                # unlike the REQUIRED constraints above — confirm intended.
                solver.add_constraint(total_w <= total_var_w)
        for hs in height_grid[x:x+xs]:
            total_h = np.sum(hs[y:y+ys])
            solver.add_constraint(total_h >= widget.height_min,
                                  strength=REQUIRED)
            if widget.height_max is not None:
                solver.add_constraint(total_h <= widget.height_max,
                                      strength=REQUIRED)
            else:
                solver.add_constraint(total_h <= total_var_h)
def _recreate_solver(self):
    """Rebuild the cassowary solver and all layout constraints from
    scratch for the current set of grid widgets."""
    self._solver = SimplexSolver()
    rect = self.rect.padded(self.padding + self.margin)
    ymax, xmax = self.grid_size

    # Total-size variables, seeded with the current rect dimensions.
    self._var_w = Variable(rect.width)
    self._var_h = Variable(rect.height)
    self._solver.add_constraint(self._var_w >= 0)
    self._solver.add_constraint(self._var_h >= 0)

    # Stay constraints are (re)attached later in _update_child_widget_dim.
    self._height_stay = None
    self._width_stay = None

    # One width variable per cell, grouped by row...
    self._width_grid = np.array([[Variable("width(x: %s, y: %s)" % (x, y))
                                  for x in range(0, xmax)]
                                 for y in range(0, ymax)])
    # ...and one height variable per cell, grouped by column.
    # Bug fix: the debug label was missing its closing parenthesis
    # ("height(x: %s, y: %s" instead of "height(x: %s, y: %s)").
    self._height_grid = np.array([[Variable("height(x: %s, y: %s)" % (x, y))
                                   for y in range(0, ymax)]
                                  for x in range(0, xmax)])

    # (Removed dead code: the original built a `stretch_grid` array here
    # that was never read; stretch factors are taken directly from each
    # widget inside _add_stretch_constraints.)

    # even though these are REQUIRED, these should never fail
    # since they're added first, and thus the slack will "simply work".
    Grid._add_total_width_constraints(self._solver,
                                      self._width_grid, self._var_w)
    Grid._add_total_height_constraints(self._solver,
                                       self._height_grid, self._var_h)
    try:
        # these are REQUIRED constraints for width and height.
        # These are the constraints which can fail if
        # the corresponding dimension of the widget cannot be fit in the
        # grid.
        Grid._add_gridding_width_constraints(self._solver,
                                             self._width_grid)
        Grid._add_gridding_height_constraints(self._solver,
                                              self._height_grid)
    except RequiredFailure:
        # Could not satisfy the layout; schedule another rebuild.
        self._need_solver_recreate = True

    # these are WEAK constraints, so these constraints will never fail
    # with a RequiredFailure.
    Grid._add_stretch_constraints(self._solver,
                                  self._width_grid,
                                  self._height_grid,
                                  self._grid_widgets,
                                  self._widget_grid)
    Grid._add_widget_dim_constraints(self._solver,
                                     self._width_grid,
                                     self._height_grid,
                                     self._var_w,
                                     self._var_h,
                                     self._grid_widgets)
def _update_child_widget_dim(self):
    """Solve the layout and push the resulting size/pos to every child."""
    # think in terms of (x, y). (row, col) makes code harder to read
    ymax, xmax = self.grid_size
    if ymax <= 0 or xmax <= 0:
        return
    rect = self.rect  # .padded(self.padding + self.margin)
    if rect.width <= 0 or rect.height <= 0:
        return
    if self._need_solver_recreate:
        self._need_solver_recreate = False
        self._recreate_solver()
    # we only need to remove and add the height and width constraints of
    # the solver if they are not the same as the current value
    if rect.height != self._var_h.value:
        if self._height_stay:
            self._solver.remove_constraint(self._height_stay)
        self._var_h.value = rect.height
        self._height_stay = self._solver.add_stay(self._var_h,
                                                  strength=STRONG)
    if rect.width != self._var_w.value:
        if self._width_stay:
            self._solver.remove_constraint(self._width_stay)
        self._var_w.value = rect.width
        self._width_stay = self._solver.add_stay(self._var_w,
                                                 strength=STRONG)
    # Extract the solved numeric value from each solver variable.
    value_vectorized = np.vectorize(lambda x: x.value)
    for (_, val) in self._grid_widgets.items():
        (row, col, rspan, cspan, widget) = val
        # Widget size = sum of the solved cell sizes it spans.
        width = np.sum(value_vectorized(
            self._width_grid[row][col:col+cspan]))
        height = np.sum(value_vectorized(
            self._height_grid[col][row:row+rspan]))
        # Widget position = sum of all cell sizes before its first cell.
        if col == 0:
            x = 0
        else:
            x = np.sum(value_vectorized(self._width_grid[row][0:col]))
        if row == 0:
            y = 0
        else:
            y = np.sum(value_vectorized(self._height_grid[col][0:row]))
        widget.size = (width, height)
        widget.pos = (x, y)
@property
def _widget_grid(self):
    # Object array of shape (n_cols, n_rows): for each (x, y) cell, the
    # widget occupying it, or None for an empty cell. Consumed by
    # _add_stretch_constraints to give empty cells a neutral stretch.
    ymax, xmax = self.grid_size
    widget_grid = np.array([[None for _ in range(0, ymax)]
                            for _ in range(0, xmax)])
    for (_, val) in self._grid_widgets.items():
        (y, x, ys, xs, widget) = val
        widget_grid[x:x+xs, y:y+ys] = widget
    return widget_grid
| StarcoderdataPython |
1864007 | <gh_stars>0
# We want to classify iris flowers using sepal length, sepal width, petal length
# and petal width as features. The classifier is built with the SVM algorithm.
from sklearn import datasets, svm, metrics
from sklearn.model_selection import train_test_split

# Load the bundled iris data set: 150 samples, 4 features, 3 classes.
iris_dataset = datasets.load_iris()
X = iris_dataset.data
y = iris_dataset.target
classes = ['setosa', 'versicolor', 'virginica']

# Hold out 20% of the samples for evaluation (the split is random per run).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

# Fit a support-vector classifier on the training portion.
model = svm.SVC()
model.fit(X_train, y_train)

# Evaluate on the held-out portion and report the results.
predictions = model.predict(X_test)
accuracy_score = metrics.accuracy_score(y_test, predictions)
print('predictions: ', predictions)
print('actual: ', y_test)
print('accuracy score: ', accuracy_score)
print('predictions showing names:')
for label in predictions:
    print(classes[label])
| StarcoderdataPython |
1958595 | from configparser import ConfigParser
from pathlib import Path
from tkinter import Button, Entry, Tk, Menubutton, Label, TOP, BOTTOM, RAISED, Menu, IntVar
from typing import Union
from cotoha_api.cotoha_api import CotohaApi
from logger.logger import LoggerUtils
class Application:
    """Tkinter GUI front end for sending sentences to the Cotoha NLP API."""

    def __init__(self):
        # Widgets are created lazily by create_window()/create_*_form().
        self.__master: Union[None, Tk] = None
        self.__logger = LoggerUtils.get_instance()
        self.__sentence_entry: Union[None, Entry] = None
        self.__result_entry: Union[None, Entry] = None
        self.__submit_button: Union[None, Button] = None
        self.__menu_button: Union[None, Menubutton] = None
        # Default window geometry (pixels) and title.
        self.__vertical_px = 1080
        self.__horizontal_px = 1920
        self.__window_title = 'Cotoha API'
        self.__setting = ConfigParser()
        self.__cotoha_api = CotohaApi()

    @property
    def master(self):
        """
        Master of Tk().
        :return:
        """
        return self.__master

    @property
    def vertical_px(self):
        """Window height in pixels."""
        return self.__vertical_px

    @property
    def horizontal_px(self):
        """Window width in pixels."""
        return self.__horizontal_px

    @property
    def window_title(self):
        """Title shown in the window's title bar."""
        return self.__window_title

    def create_window(self) -> None:
        """Create the top-level window with the configured title and size."""
        self.__master = Tk()
        self.__master.title(self.window_title)
        self.__master.geometry(f'{self.horizontal_px}x{self.vertical_px}')

    def create_sentence_form(self) -> None:
        """Create the labelled text-entry form for the input sentence."""
        label = Label(self.__master, text='Sentence')
        label.pack(side=TOP)
        self.__sentence_entry = Entry(self.__master, bd=2)
        self.__sentence_entry.pack(side=BOTTOM)

    def create_pull_down_menu(self):
        """Create the (still incomplete) API-selection pull-down menu."""
        mb = Menubutton(self.master, text='Select API.', relief=RAISED)
        mb.grid()
        mb.menu = Menu(mb, tearoff=0)
        mb['menu'] = mb.menu
        # NOTE(review): the menu has a single, unwired checkbutton
        # placeholder; three unused IntVars from the original were removed.
        mb.menu.add_checkbutton(label="")

    def read_config(self):
        """Load settings from src/setting/setting.conf.

        Raises:
            FileNotFoundError: if the settings file does not exist.
        """
        path = Path('src/setting/setting.conf')
        # Bug fix: the original raised FileExistsError when the file DID
        # exist, which made it impossible to ever read the configuration.
        if not path.exists():
            raise FileNotFoundError(path)
        self.__setting.read(path)
| StarcoderdataPython |
27356 | <filename>infer.py
import os
import time
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from tqdm import tqdm
from datasets.inferDataSet import infer_DataSet
from models.model import U2NET
from segConfig import getConfig
def infer(model, test_loader, device, n_classes, save_seg):
    """Run U2-Net inference and save one uint8 mask per input image.

    Parameters:
        model: trained U2NET returning 7 side outputs (d0..d6).
        test_loader: DataLoader yielding (image batch, image names);
            batch size is expected to be 1 (imgs_name[0] is used below).
        device: torch device to move input batches to.
        n_classes: number of segmentation classes (class 0 = background).
        save_seg: directory where predicted masks are saved as .npy files.
    """
    model.eval()
    with torch.no_grad():
        for idx, (imgs, imgs_name) in tqdm(enumerate(test_loader), desc='infer', total=len(test_loader)):
            imgs = imgs.to(device)
            # Seven side outputs from different decoder depths.
            d0, d1, d2, d3, d4, d5, d6 = model(imgs)
            # Convert each output's logits to per-class probabilities.
            d0, d1, d2, d3, d4, d5, d6 = nn.Softmax(dim=1)(d0),\
                nn.Softmax(dim=1)(d1), nn.Softmax(dim=1)(d2),\
                nn.Softmax(dim=1)(d3), nn.Softmax(dim=1)(d4),\
                nn.Softmax(dim=1)(d5), nn.Softmax(dim=1)(d6)
            # d0, d1, d2, d3, d4, d5, d6 = d0[:, 1:n_classes, :, :]*1.01,\
            #     d1[:, 1:n_classes, :, :]*1.01, d2[:, 1:n_classes, :, :]*1.01,\
            #     d3[:, 1:n_classes, :, :]*1.01, d4[:, 1:n_classes, :, :]*1.01,\
            #     d5[:, 1:n_classes, :, :]*1.01, d6[:, 1:n_classes, :, :]*1.01
            # Hard-assign each pixel to its argmax class as a one-hot map.
            d0_tmp = F.one_hot(d0.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            d1_tmp = F.one_hot(d1.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            d2_tmp = F.one_hot(d2.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            d3_tmp = F.one_hot(d3.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            d4_tmp = F.one_hot(d4.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            d5_tmp = F.one_hot(d5.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            d6_tmp = F.one_hot(d6.clone().argmax(
                dim=1), n_classes).permute(0, 3, 1, 2)
            # Weighted vote across the side outputs; the two fused/shallow
            # outputs (d0, d1) count the most.
            d = torch.Tensor([3.5, 2.5, 1, 1, 1, 1, 1])
            # Negative bias applied to the non-background classes below
            # (tuned constant — raises the bar for predicting a lesion).
            add_lesion = -4.1
            tmp = d0_tmp*d[0]+d1_tmp*d[1]+d2_tmp*d[2]+d3_tmp*d[3]\
                + d4_tmp*d[4]+d5_tmp*d[5]+d6_tmp*d[6]
            tmp[:, 1:n_classes, :, :] = tmp[:, 1:n_classes, :, :]+add_lesion
            out_mask = tmp.argmax(dim=1).squeeze()
            # Save the predicted mask under the image's name (batch size 1).
            np.save(save_seg+'/'+imgs_name[0],
                    out_mask.clone().detach().cpu().numpy().astype(np.uint8).squeeze())
            torch.cuda.empty_cache()
def main(args):
    """Load the U2NET checkpoint and run inference over each data dir.

    Expects args to carry: device, num_classes, pth (checkpoint path) and
    infer_data_dirs (a directory or list of directories, each containing
    an imgs/ folder; predicted masks are written to a sibling masks/).
    """
    device, num_classes, pth, infer_data_dirs = \
        args.device, args.num_classes, args.pth, args.infer_data_dirs
    if device == 'cuda':
        torch.cuda.set_device(0)
        # Fall back to CPU when CUDA was requested but is unavailable.
        if not torch.cuda.is_available():
            print('Cuda is not available, use CPU to train.')
            device = 'cpu'
    device = torch.device(device)
    print('===>device:', device)
    torch.cuda.manual_seed_all(0)
    # Load data
    print('===>Setup Model')
    model = U2NET(in_channels=1, out_channels=num_classes).to(device)
    print('===>Loaded Weight')
    checkpoint = torch.load(pth)
    model.load_state_dict(checkpoint['model_weights'])
    SegDataSet = infer_DataSet
    print('===>check infer_data_dirs')
    # Accept a single directory string as well as a list of directories.
    if isinstance(infer_data_dirs, str):
        infer_data_dirs = [infer_data_dirs]
    total_infer_begin = time.time()
    for idx, infer_data_dir in enumerate(infer_data_dirs):
        imgs_dir = infer_data_dir+'/imgs/'
        masks_save_dir = infer_data_dir+'/masks/'
        if not os.path.exists(masks_save_dir):
            os.makedirs(masks_save_dir)
        print('===>Loading dataset')
        test_data_loader = DataLoader(
            dataset=SegDataSet(imgs_dir), batch_size=1,
            num_workers=8, shuffle=False, drop_last=False)
        print('='*30)
        print('===>Infering %d' % (idx+1))
        print('===>Start infer '+imgs_dir)
        print('===>Save to '+masks_save_dir)
        infer_start_time = time.time()
        infer(model=model, test_loader=test_data_loader, device=device,
              n_classes=num_classes, save_seg=masks_save_dir)
        infer_end_time = time.time()
        print('Infer cost %.2fs' % (infer_end_time-infer_start_time))
        del test_data_loader
    total_infer_end = time.time()
    print('Total Infer cost %.2fs' % (total_infer_end-total_infer_begin))
if __name__ == '__main__':
    # Inference entry point. (The note below says: inference only, no
    # performance evaluation, only images are loaded; the code is almost
    # identical to segTest — see segTest's comments for anything unclear.)
    '''
    推断,没有做性能评估,只需加载imgs,代码几乎与segTest一致,如有不懂
    请看segTest注释
    '''
    args = getConfig('infer')
    main(args)
| StarcoderdataPython |
55840 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Grove Base Hat for the Raspberry Pi, used to connect grove sensors.
# Copyright (C) 2018 Seeed Technology Co.,Ltd.
'''
This is the code for
- `Grove - Sound Sensor <https://www.seeedstudio.com/Grove-Sound-Sensor-p-752.html>`_
Examples:
.. code-block:: python
import time
from grove.grove_sound_sensor import GroveSoundSensor
# connect to alalog pin 2(slot A2)
PIN = 2
sensor = GroveSoundSensor(PIN)
print('Detecting sound...')
while True:
print('Sound value: {0}'.format(sensor.sound))
time.sleep(.3)
'''
import math
import time
from grove.adc import ADC
__all__ = ['GroveSoundSensor']
class GroveSoundSensor(object):
    '''
    Grove Sound Sensor class

    Args:
        pin(int): number of the analog pin/channel the sensor is
            connected to.
    '''
    def __init__(self, channel):
        # Remember the channel and open the shared ADC interface.
        self.channel = channel
        self.adc = ADC()

    @property
    def sound(self):
        '''
        Get the sound strength value

        Returns:
            (int): ratio, 0(0.0%) - 1000(100.0%)
        '''
        return self.adc.read(self.channel)

# Generic alias used by the Grove base-hat helper scripts.
Grove = GroveSoundSensor
def main():
    """Demo: continuously print the sound level read from the sensor."""
    from grove.helper import SlotHelper
    slot = SlotHelper(SlotHelper.ADC)
    sensor = GroveSoundSensor(slot.argv2pin())

    print('Detecting sound...')
    while True:
        print('Sound value: {0}'.format(sensor.sound))
        time.sleep(.3)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
11342868 | <gh_stars>0
#celery does not support the periodic task decorator anymore, so imporovised
import datetime
from django.core.mail import send_mail
from tasks.models import *
from datetime import timedelta, datetime, timezone
from celery import Celery
from config.celery_app import app
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    """Register periodic jobs once the Celery app is configured.

    Celery dropped the @periodic_task decorator, so the hourly report job
    is registered explicitly from this signal handler.
    """
    print('setting tasks')
    # Fix: add_periodic_task expects a task *signature*, not the task
    # object itself — schedule send_reports.s() every 3600 seconds.
    sender.add_periodic_task(3600.0, send_reports.s(), name='Send Reports')
@app.task
def send_reports():
    """Email a daily task summary to every user whose report is overdue.

    A report is overdue when it was last sent more than one day ago.
    Returns the list of usernames that were emailed.
    """
    print('Checking for reports to send')
    # Reports that were not sent within the last day.
    get_unsent_reports = Report.objects.filter(
        last_report__lte=(datetime.now(timezone.utc) - timedelta(days=1)))
    completed = []
    # (label, status value) pairs; per-user counts are computed below.
    stat_choices = [
        ["Pending", "PENDING"],
        ["In Progress", "IN_PROGRESS"],
        ["Completed", "COMPLETED"],
        ["Cancelled", "CANCELLED"],
    ]
    for report in get_unsent_reports:
        base_qs = Task.objects.filter(user=report.user, deleted=False).order_by('priority')
        email_content = f'Hey there {report.user.username}\nHere is your daily task summary:\n\n'
        # Bug fix: the original appended each count onto the shared
        # `stat_choices` lists, so every report after the first one built
        # its subject line from the FIRST user's stale counts. Counts are
        # now kept in a per-report dict instead.
        counts = {}
        for stat_name, stat_id in stat_choices:
            stat_qs = base_qs.filter(status=stat_id)
            stat_count = stat_qs.count()
            counts[stat_id] = stat_count
            email_content += f"{stat_count} {stat_name} Tasks:\n"
            for q in stat_qs:
                email_content += f" -> {q.title} ({q.priority}): \n | {q.description} \n | Created on {q.created_date} \n \n"
        send_mail(
            f"You have {counts['PENDING']} pending and {counts['IN_PROGRESS']} in progress tasks",
            email_content,
            "<EMAIL>",  # TODO: redacted sender address — replace with a real one
            [report.user.email],
        )
        completed.append(report.user.username)
        # Record the send time, pinned to the user's preferred hour.
        report.last_report = datetime.now(timezone.utc).replace(hour=report.timing)
        report.save()
        print(f"Completed Processing User {report.user.id}")
    return completed
9767248 | <filename>helpers/record_gestures.py<gh_stars>1-10
import cv2
import os.path
from gesture_app.model.clean_image import CleanImage
import time
image_cleaner = CleanImage()
def live_video(gesture_name):
    """Capture webcam frames and record cleaned gesture images.

    Keys: 'r' re-captures the background for ~5s, 'c' toggles saving of
    frames into images/<gesture_name>/, 'q' quits.
    """
    t1 = time.time()
    video_capture = cv2.VideoCapture(0)
    # While True, frames are used only to (re)build the background model.
    capture_background_flag = True
    capture_flag = False
    while True:
        t2 = time.time()
        ret, frame = video_capture.read()
        # Stop background capture 5 seconds after it was (re)started.
        if t2 - t1 > 5:
            capture_background_flag = False
        # 'r' restarts the background-capture window.
        if cv2.waitKey(1) & 0xFF == ord('r'):
            t1 = time.time()
            capture_background_flag = True
        if capture_background_flag:
            image_cleaner.subtract_background = image_cleaner.refresh_background()
            continue
        # Run background subtraction / cleanup on the current frame.
        image_cleaner.frame = frame
        image_cleaner.process()
        frame = image_cleaner.frame
        if not os.path.isdir("images/" + gesture_name):
            os.mkdir("images/" + gesture_name)
        # 'c' toggles continuous saving of cleaned frames.
        if cv2.waitKey(1) & 0xFF == ord('c'):
            capture_flag = not capture_flag
        if capture_flag:
            # File names are keyed by whole-second timestamps, so at most
            # one frame per second survives per gesture.
            cv2.imwrite("images/" + gesture_name + "/" + gesture_name + "_" + str(int(t2)) + ".jpg", frame)
            cv2.putText(frame, f"Gesture Captured !", (0, 60),
                        image_cleaner.font, image_cleaner.fontScale, (255, 255, 255))
            pass
        cv2.putText(frame, f"Press Q to quit | Press R to refresh", (0, 20),
                    image_cleaner.font, image_cleaner.fontScale, (255, 255, 255))
        cv2.imshow('frame', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    video_capture.release()
    cv2.destroyAllWindows()
if __name__ == '__main__':
    # Standalone capture session; "null" is the placeholder gesture name
    # used for ad-hoc testing (frames are written to images/null/).
    live_video("null")
| StarcoderdataPython |
4911067 | <filename>kinematics/time.py
import numpy as np
from typing import List
from geometry import Path
"""
Functions to do time-rescaling on a set of paths
"""
def time_rescale(paths: List[Path]) -> List[Path]:
    """
    Rescales all paths in a list to have the same
    number of frames to the one with the smallest number of frames
    and sets time to go in 0-1 range in that interval.
    """
    shortest = np.min([len(path) for path in paths])
    return [path.downsample_in_time(shortest) for path in paths]
def average_xy_trajectory(paths: List[Path], rescale: bool = False) -> Path:
    """
    Computes the average XY trajectory from a set of paths,
    rescaling them in time if necessary
    """
    if rescale:
        paths = time_rescale(paths)
    mean_x = np.vstack([path.x for path in paths]).mean(axis=0)
    mean_y = np.vstack([path.y for path in paths]).mean(axis=0)
    return Path(mean_x, mean_y)
| StarcoderdataPython |
9754870 | <filename>data_structures/binary_search_tree.py
class TreeNode:
    """A single node of a binary search tree."""

    def __init__(self, data, left=None, right=None):
        self.data = data
        self.left = left
        self.right = right

    @property
    def has_left(self):
        """True when the node has a left child."""
        return self.left is not None

    @property
    def has_right(self):
        """True when the node has a right child."""
        return self.right is not None

    def __str__(self):
        return str(self.data)


class BinarySearchTree:
    """Binary search tree; duplicates are stored in the left subtree."""

    def __init__(self):
        self.root = None

    def insert(self, value):
        """Insert `value`: values <= parent go left, greater go right."""
        new_node = TreeNode(value)
        if self.root is None:
            self.root = new_node
        else:
            self._insert(self.root, new_node)

    def _insert(self, current_node, new_node):
        # Recursive descent to the first free slot on the correct side.
        if new_node.data <= current_node.data:
            if not current_node.has_left:
                current_node.left = new_node
            else:
                self._insert(current_node.left, new_node)
        else:
            if not current_node.has_right:
                current_node.right = new_node
            else:
                self._insert(current_node.right, new_node)

    def height(self):
        """Number of nodes on the longest root-to-leaf path (0 if empty)."""
        return self._height(self.root)

    def _height(self, node):
        if node is None:
            return 0
        return max(self._height(node.left), self._height(node.right)) + 1

    def inorder(self):
        """Return all values in sorted order ([] for an empty tree)."""
        output = []
        # Bug fix: the original recursed straight into a None root and
        # crashed with AttributeError on an empty tree (height() already
        # guarded against None; the traversals now do too).
        if self.root is not None:
            self._inorder(self.root, output)
        return output

    def _inorder(self, current_node, output):
        if current_node.has_left:
            self._inorder(current_node.left, output)
        output.append(current_node.data)
        if current_node.has_right:
            self._inorder(current_node.right, output)

    def preorder(self):
        """Return values in root-left-right order ([] for an empty tree)."""
        output = []
        if self.root is not None:  # bug fix: see inorder()
            self._preorder(self.root, output)
        return output

    def _preorder(self, current_node, output):
        output.append(current_node.data)
        if current_node.has_left:
            self._preorder(current_node.left, output)
        if current_node.has_right:
            self._preorder(current_node.right, output)

    def postorder(self):
        """Return values in left-right-root order ([] for an empty tree)."""
        output = []
        if self.root is not None:  # bug fix: see inorder()
            self._postorder(self.root, output)
        return output

    def _postorder(self, current_node, output):
        if current_node.has_left:
            self._postorder(current_node.left, output)
        if current_node.has_right:
            self._postorder(current_node.right, output)
        output.append(current_node.data)
8079956 | <filename>Lab_5/Q4.py
# Q4: What is the time complexity of
import random
# n is a random problem size in [0, 99].
n = int(random.random() * 100)
# Doubly nested loop over n: the empty body executes n*n times, so the
# snippet is O(n^2), as the worked derivation below explains.
for i in range(n):
    for j in range(n):
        pass
""""
for i in range(n): (n*1)*n
for j in range(n): n*1
pass 1
(n*1)*n = O(n^2)
"""
| StarcoderdataPython |
1926938 | """pws-delete.py deletes the setup created by pws-create.py
The settings created by pws-create.py will expire based on the Expiry
value in the header (see pws-create.py comments for an explanation), so this
script isn't necessary unless you want to back out of the pws-create.py
settings in order to change how pws-create.py works.
Copyright (c) 2018 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import requests
import json
from lxml import etree
if __name__ == '__main__':
    # Read the server host plus the application-user and end-user
    # credentials from their JSON settings files.
    with open('serverparams.json') as json_file:
        data = json.load(json_file)
    for p in data['params']:
        SERVER = p['SERVER']
    with open('appuser.json') as json_file:
        data = json.load(json_file)
    for p in data['params']:
        AUSERNAME = p['USERNAME']
        # Bug fix: the key was '<PASSWORD>' (a redaction placeholder) and
        # could never match the settings file; the real key is 'PASSWORD',
        # mirroring the 'USERNAME' key above.
        APASSWORD = p['PASSWORD']
    with open('enduser.json') as json_file:
        data = json.load(json_file)
    for p in data['params']:
        EUSERNAME = p['USERNAME']

    # Log in as the application user to get the application user session
    # key. This uses the repeatable-session login method; see
    # https://developer.cisco.com/site/im-and-presence/ for the
    # differences between the two login methods.
    # NOTE(security): verify=False disables TLS certificate checking on
    # every request below — acceptable only against lab servers.
    passwordxml = '<session><password>'+APASSWORD+'</password></session>'
    root = etree.fromstring(passwordxml)
    xml = etree.tostring(root)
    headers = {'Content-Type': 'text/xml'}
    response = requests.post('https://'+SERVER+':8083/presence-service/users/'+AUSERNAME+'/sessions',
                             headers=headers, data=xml, verify=False)
    root = etree.fromstring(response.content)
    for element in root.iter():
        if element.tag == "sessionKey":
            asessionKey = element.text
    print('\n\n')
    print('App User Session Key = '+asessionKey)
    print('\n\n')

    # Log in as the end user (authorised by the application user session)
    # to get the end user session key.
    headers = {'Presence-Session-Key': asessionKey}
    response = requests.post('https://'+SERVER+':8083/presence-service/users/'+EUSERNAME+'/sessions',
                             headers=headers, verify=False)
    root = etree.fromstring(response.content)
    for element in root.iter():
        if element.tag == "sessionKey":
            esessionKey = element.text

    # Unsubscribe from presence notifications. Only subscription 1 should
    # exist, but repeated runs of pws-create.py may have created more, so
    # subscriptions 1-4 are all removed.
    headers = {'Presence-Session-Key': esessionKey}
    for sub_id in range(1, 5):
        requests.delete('https://'+SERVER+':8083/presence-service/users/'+EUSERNAME+'/subscriptions/'+str(sub_id),
                        headers=headers, verify=False)

    # Delete the endpoint definitions (1-4, for the same reason) using the
    # application user session key.
    headers = {'Presence-Session-Key': asessionKey}
    for endpoint_id in range(1, 5):
        requests.delete('https://'+SERVER+':8083/presence-service/endpoints/'+str(endpoint_id),
                        headers=headers, verify=False)
| StarcoderdataPython |
309897 | from django.http import HttpResponse, JsonResponse
from django.db.models import Sum, Count, Case, When, Value, CharField
from django.db.models.functions import ExtractYear
import operator, json
from .routes import routes
from .national import national
from .service_availability import availability
def getYearlyTotal(stops):
    """Per-year ridership aggregates for weekday/Saturday/Sunday service.

    service_key containing W/S/U selects weekday/Saturday/Sunday rows;
    the per-day sums are divided by the number of distinct census dates
    in the year (i.e. averaged per census). The Saturday, Sunday and
    annual rows are merged into the weekday rows in place, matched by
    year; years with no Saturday/Sunday census simply lack those keys
    (getTotals() fills the gaps). Returns the merged weekday rows.
    """
    # Number of distinct census periods per year.
    annuals = stops.values(year=ExtractYear("summary_begin_date")).annotate(
        num_of_yearly_census=Count('summary_begin_date', distinct=True)).order_by("year")
    # NOTE(review): *_total_stops is Count('ons', distinct=True) — a count
    # of distinct boarding values, not of stops; confirm intended.
    weekly = stops.filter(service_key__icontains="W").values(year=ExtractYear("summary_begin_date")).annotate(
        weekday_sum_ons=Sum('ons')/Count('summary_begin_date', distinct=True),
        weekday_sum_offs=Sum('offs')/Count('summary_begin_date', distinct=True),
        weekday_total_stops=Count('ons', distinct=True)
    ).order_by("year")
    saturday = stops.filter(service_key__icontains="S").values(year=ExtractYear("summary_begin_date")).annotate(
        saturday_sum_ons=Sum('ons')/Count('summary_begin_date', distinct=True),
        saturday_sum_offs=Sum('offs')/Count('summary_begin_date', distinct=True),
        saturday_total_stops=Count('ons', distinct=True)
    ).order_by("year")
    sunday = stops.filter(service_key__icontains="U").values(year=ExtractYear("summary_begin_date")).annotate(
        sunday_sum_ons=Sum('ons')/Count('summary_begin_date', distinct=True),
        sunday_sum_offs=Sum('offs')/Count('summary_begin_date', distinct=True),
        sunday_total_stops=Count('ons', distinct=True)
    ).order_by("year")
    # Merge the per-day rows into the weekday row dicts, pairing rows by
    # ascending year. NOTE(review): zip truncates when the lists have
    # different lengths, so a year missing a Saturday/Sunday census can
    # mis-align subsequent pairs — confirm this is acceptable.
    sorting_key = operator.itemgetter("year")
    for i, j in zip(sorted(weekly, key=sorting_key), sorted(saturday, key=sorting_key)):i.update(j)
    for i, j in zip(sorted(weekly, key=sorting_key), sorted(sunday, key=sorting_key)):i.update(j)
    for i, j in zip(sorted(weekly, key=sorting_key), sorted(annuals, key=sorting_key)):i.update(j)
    return weekly
def getTotals(stops):
    """Yearly totals with Saturday/Sunday gaps zero-filled and flagged."""
    weekly = getYearlyTotal(stops)
    for week in weekly:
        # A missing <day>_sum_ons key means no census ran that day/year.
        week["sunday_census"] = "sunday_sum_ons" in week
        week["saturday_census"] = "saturday_sum_ons" in week
        if not week["saturday_census"]:
            week["saturday_sum_ons"] = 0
            week["saturday_sum_offs"] = 0
            week["saturday_total_stops"] = 0
        if not week["sunday_census"]:
            week["sunday_sum_ons"] = 0
            week["sunday_sum_offs"] = 0
            week["sunday_total_stops"] = 0
        # Grand totals across the three service days.
        for metric in ("sum_ons", "sum_offs", "total_stops"):
            week["total_" + metric] = (week["weekday_" + metric]
                                       + week["saturday_" + metric]
                                       + week["sunday_" + metric])
    return weekly
def getCensusTotals(census):
    """Per-census-date ridership aggregates, one merged row per date.

    Like getYearlyTotal() but grouped by exact census date and without
    dividing by census counts; Saturday and Sunday rows are merged into
    the weekday rows in place, matched by summary_begin_date.
    """
    # NOTE(review): *_total_stops is Sum('ons') here (not a count of
    # stops, and not the distinct Count used in getYearlyTotal) — confirm
    # intended.
    weekly = census.filter(service_key__icontains="W").values("summary_begin_date").distinct().annotate(
        weekday_sum_ons=Sum('ons'),
        weekday_sum_offs=Sum('offs'),
        weekday_total_routes=Count('route_number', distinct=True),
        weekday_total_stops=Sum('ons')
    ).order_by("summary_begin_date")
    saturday = census.filter(service_key__icontains="S").values("summary_begin_date").distinct().annotate(
        saturday_sum_ons=Sum('ons'),
        saturday_sum_offs=Sum('offs'),
        saturday_total_routes=Count('route_number', distinct=True),
        saturday_total_stops=Sum('ons')
    ).order_by("summary_begin_date")
    sunday = census.filter(service_key__icontains="U").values("summary_begin_date").distinct().annotate(
        sunday_sum_ons=Sum('ons'),
        sunday_sum_offs=Sum('offs'),
        sunday_total_routes=Count('route_number', distinct=True),
        sunday_total_stops=Sum('ons')
    ).order_by("summary_begin_date")
    # Merge the per-day rows into the weekday rows, pairing by date.
    # NOTE(review): zip truncates on length mismatch (see getYearlyTotal).
    sorting_key = operator.itemgetter("summary_begin_date")
    for i, j in zip(sorted(weekly, key=sorting_key), sorted(saturday, key=sorting_key)):i.update(j)
    for i, j in zip(sorted(weekly, key=sorting_key), sorted(sunday, key=sorting_key)):i.update(j)
    for week in weekly:
        week["total_sum_ons"] = week["weekday_sum_ons"] + week["saturday_sum_ons"] + week["sunday_sum_ons"]
        week["total_sum_offs"] = week["weekday_sum_offs"] + week["saturday_sum_offs"] + week["sunday_sum_offs"]
        week["total_total_stops"] = week["weekday_total_stops"] + week["saturday_total_stops"] + week["sunday_total_stops"]
    return weekly
def routeDetailLookup(pk):
    """Return the route dict from the module-level `routes` JSON whose
    route_id matches `pk` (string compare); IndexError when absent."""
    parsed = json.loads(routes)
    matches = [route for route in parsed if str(route['route_id']) == pk]
    return matches[0]
def nationalDetailLookup(pk):
    """Return the record from the module-level `national` JSON whose
    year matches `pk` (string compare); IndexError when absent."""
    parsed = json.loads(national)
    matches = [rec for rec in parsed if str(rec['year']) == pk]
    return matches[0]
| StarcoderdataPython |
9777051 | """Project: NetCDF Flattener
Copyright (c) 2020 EUMETSAT
License: Apache License 2.0
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import subprocess
from pathlib import Path
from netCDF4 import Dataset
from netcdf_flattener import flatten
from base_test import BaseTest
def walktree(top):
    """Yield the `.groups` values view of `top` and of every descendant
    group, one view per visited node, depth-first pre-order."""
    yield top.groups.values()
    for child in top.groups.values():
        yield from walktree(child)
def assert_expected_data(self, _copy_data, variable_in, variable_out):
    """Check that variable data was (or deliberately was not) copied.

    With ``_copy_data`` True the two variables must have identical shape and
    element-wise equal data; with ``_copy_data`` False, identically-shaped
    variables must NOT hold the same data.
    """
    if not _copy_data:
        if variable_in.shape == variable_out.shape:
            self.assertFalse(
                (variable_in[...].data == variable_out[...].data).all()
            )
        return
    self.assertEqual(variable_in.shape, variable_out.shape)
    self.assertTrue(
        (variable_in[...].data == variable_out[...].data).all()
    )
class Test(BaseTest):
    """Integration tests for the NetCDF flattener (CDL input -> flattened NetCDF output)."""

    def test_flatten(self):
        """Global test of most functionalities.
        Flatten input file 'input1.cdl' and compare to reference 'reference1.cdl'.
        """
        # Inputs
        input_name = "input1.cdl"
        reference_name = "reference1.cdl"
        output_name = "output1.nc"
        self.flatten_and_compare(input_name, output_name, reference_name)

    def test_flatten_copy_data(self):
        """Test of _copy_data functionality.
        Flatten input file 'input1.cdl' with _copy_data True/False,
        checking that the data is copied/not copied.
        """
        input_name = "input1.cdl"
        output_name = "output1.nc"
        # Compose full file names
        test_data_path = Path(Path(__file__)).parent / self.test_data_folder
        input_cdl = test_data_path / input_name
        input_nc = test_data_path / "{}.nc".format(input_name)
        output_nc = test_data_path / output_name
        # Generate NetCDF from input CDL (requires the `ncgen` tool on PATH).
        print("Generate NetCDF file '{}' from input CDL '{}'".format(input_nc, input_cdl))
        subprocess.call(["ncgen", "-o", input_nc, input_cdl])
        # Run flattening script
        print("Flatten '{}' in new file '{}'".format(input_nc, output_nc))
        input_ds = Dataset(input_nc, 'r')
        # Run flattener with _copy_data True/False
        for _copy_data in (True, False):
            output_ds = Dataset(output_nc, 'w', format='NETCDF4')
            flatten(input_ds, output_ds, _copy_data=_copy_data)
            # Root-group variables keep their names after flattening.
            for key, variable_in in input_ds.variables.items():
                variable_out = output_ds.variables[key]
                assert_expected_data(self, _copy_data, variable_in,
                                     variable_out)
            # Nested-group variables are looked up under their flattened
            # name '<group>__...__<name>' built from the group path.
            for children in walktree(input_ds):
                for child in children:
                    for key, variable_in in child.variables.items():
                        path = variable_in.group().path
                        flat_key = '__'.join(path.split('/')[1:] + [key])
                        variable_out = output_ds.variables[flat_key]
                        assert_expected_data(self, _copy_data,
                                             variable_in, variable_out)
            output_ds.close()
        input_ds.close()
| StarcoderdataPython |
3527128 | from .spline_interp_Cwrapper import interpolate
| StarcoderdataPython |
177110 | from rest_framework import serializers
from .models import Entry, MeasurementDevice
class EntrySerializer(serializers.ModelSerializer):
    """Serializes Entry measurement records (time, temperature, humidity, device)."""
    class Meta:
        model = Entry
        fields = ("time", "temperature", "humidity", "device")
class MeasurementDeviceSerializer(serializers.ModelSerializer):
    """Serializes MeasurementDevice records (id, name)."""
    class Meta:
        model = MeasurementDevice
        fields = ("id", "name")
class RoomSerializer(serializers.ModelSerializer):
    """Serializer apparently intended for rooms.

    NOTE(review): `model` is set to MeasurementDevice, identical to
    MeasurementDeviceSerializer — this looks like a copy-paste leftover;
    confirm whether a dedicated Room model was intended.
    """
    class Meta:
        model = MeasurementDevice
        fields = ("id", "name")
| StarcoderdataPython |
6563840 | # -*- coding: utf-8 -*-
from ..base.simple_downloader import SimpleDownloader
class GamefrontCom(SimpleDownloader):
    """pyLoad downloader plugin for gamefront.com file links."""

    __name__ = "GamefrontCom"
    __type__ = "downloader"
    __version__ = "0.13"
    __status__ = "testing"

    __pyload_version__ = "0.5"

    # Matches download-page URLs and captures the numeric file ID.
    __pattern__ = r"http://(?:www\.)?gamefront\.com/files/(?P<ID>\d+)"
    __config__ = [
        ("enabled", "bool", "Activated", True),
        ("use_premium", "bool", "Use premium account if available", True),
        ("fallback", "bool", "Fallback to free download if premium fails", True),
        ("chk_filesize", "bool", "Check file size", True),
        ("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10),
    ]

    __description__ = """Gamefront.com downloader plugin"""
    __license__ = "GPLv3"
    __authors__ = [("<NAME>", "<EMAIL>")]

    # Scraping patterns — presumably consumed by the SimpleDownloader base
    # class to extract file name, size, offline state and free link; confirm
    # against the base class contract.
    NAME_PATTERN = r"<title>(?P<N>.+?) \| Game Front</title>"
    SIZE_PATTERN = r">File Size:</dt>\s*<dd>(?P<S>[\d.,]+) (?P<U>[\w^_]+)"
    OFFLINE_PATTERN = r"<p>File not found"
    LINK_FREE_PATTERN = r"downloadUrl = '(.+?)'"

    def setup(self):
        # Allow resuming partial downloads and running parallel downloads.
        self.resume_download = True
        self.multi_dl = True

    def handle_free(self, pyfile):
        # Fetch the "thank you" page for this file ID, then let the parent
        # class extract the direct link (via LINK_FREE_PATTERN).
        self.data = self.load(
            "http://www.gamefront.com/files/service/thankyou",
            get={"id": self.info["pattern"]["ID"]},
        )
        return super().handle_free(pyfile)
| StarcoderdataPython |
1935130 | <filename>addons/website_sale/tests/test_website_sale_pricelist.py
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from odoo.tests.common import HttpCase, TransactionCase
from odoo.tools import DotDict
''' /!\/!\
Calling `get_pricelist_available` after setting `property_product_pricelist` on
a partner will not work as expected. That field will change the output of
`get_pricelist_available` but modifying it will not invalidate the cache.
Thus, tests should not do:
self.env.user.partner_id.property_product_pricelist = my_pricelist
pls = self.get_pricelist_available()
self.assertEqual(...)
self.env.user.partner_id.property_product_pricelist = another_pricelist
pls = self.get_pricelist_available()
self.assertEqual(...)
as `_get_pl_partner_order` cache won't be invalidate between the calls, output
won't be the one expected and tests will actually not test anything.
Try to keep one call to `get_pricelist_available` by test method.
'''
class TestWebsitePriceList(TransactionCase):
    """Tests for website pricelist resolution (`_get_pl_partner_order`).

    Uses `assertEqual` instead of the deprecated `assertEquals` alias
    (removed in Python 3.12).
    """

    # Mock needed because request.session doesn't exist during test
    def _get_pricelist_available(self, show_visible=False):
        return self.get_pl(self.args.get('show'), self.args.get('current_pl'), self.args.get('country'))

    def setUp(self):
        super(TestWebsitePriceList, self).setUp()
        self.env.user.partner_id.country_id = False  # Remove country to avoid property pricelist computed.
        self.website = self.env['website'].browse(1)
        self.website.user_id = self.env.user
        # Detach every pricelist from any website, then attach a known set to website 1.
        self.env['product.pricelist'].search([]).write({'website_id': False})
        website_pls = ('list_benelux', 'list_christmas', 'list_europe')
        for pl in website_pls:
            self.env.ref('website_sale.' + pl).website_id = self.website.id
        self.env.ref('product.list0').website_id = self.website.id
        self.env.ref('website_sale.list_benelux').selectable = True
        self.website.pricelist_id = self.ref('product.list0')
        # Country-restricted pricelist used by the CA test cases.
        ca_group = self.env['res.country.group'].create({
            'name': 'Canada',
            'country_ids': [(6, 0, [self.ref('base.ca')])]
        })
        self.env['product.pricelist'].create({
            'name': 'Canada',
            'selectable': True,
            'website_id': self.website.id,
            'country_group_ids': [(6, 0, [ca_group.id])],
            'sequence': 10
        })
        self.args = {
            'show': False,
            'current_pl': False,
        }
        # Route `get_pricelist_available` through the test helper above.
        patcher = patch('odoo.addons.website_sale.models.website.Website.get_pricelist_available', wraps=self._get_pricelist_available)
        patcher.start()
        self.addCleanup(patcher.stop)

    def get_pl(self, show, current_pl, country):
        """Resolve the pricelists available for (show, current_pl, country)."""
        pl_ids = self.website._get_pl_partner_order(
            country,
            show,
            self.website.pricelist_id.id,
            current_pl,
            self.website.pricelist_ids
        )
        return self.env['product.pricelist'].browse(pl_ids)

    def test_get_pricelist_available_show(self):
        """Visible pricelists expected per GeoIP country (show=True)."""
        show = True
        current_pl = False
        country_list = {
            False: ['Public Pricelist', 'EUR', 'Benelux', 'Canada'],
            'BE': ['EUR', 'Benelux'],
            'IT': ['EUR'],
            'CA': ['Canada'],
            'US': ['Public Pricelist', 'EUR', 'Benelux', 'Canada']
        }
        for country, result in country_list.items():
            pls = self.get_pl(show, current_pl, country)
            self.assertEqual(len(set(pls.mapped('name')) & set(result)), len(pls), 'Test failed for %s (%s %s vs %s %s)'
                             % (country, len(pls), pls.mapped('name'), len(result), result))

    def test_get_pricelist_available_not_show(self):
        """All pricelists (including non-selectable) expected per country (show=False)."""
        show = False
        current_pl = False
        country_list = {
            False: ['Public Pricelist', 'EUR', 'Benelux', 'Christmas', 'Canada'],
            'BE': ['EUR', 'Benelux', 'Christmas'],
            'IT': ['EUR', 'Christmas'],
            'US': ['Public Pricelist', 'EUR', 'Benelux', 'Christmas', 'Canada'],
            'CA': ['Canada']
        }
        for country, result in country_list.items():
            pls = self.get_pl(show, current_pl, country)
            self.assertEqual(len(set(pls.mapped('name')) & set(result)), len(pls), 'Test failed for %s (%s %s vs %s %s)'
                             % (country, len(pls), pls.mapped('name'), len(result), result))

    def test_get_pricelist_available_promocode(self):
        """Christmas promo pricelist availability per country."""
        christmas_pl = self.ref('website_sale.list_christmas')
        country_list = {
            False: True,
            'BE': True,
            'IT': True,
            'US': True,
            'CA': False
        }
        for country, result in country_list.items():
            self.args['country'] = country
            # mock patch method could not pass env context
            available = self.website.is_pricelist_available(christmas_pl)
            if result:
                self.assertTrue(available, 'AssertTrue failed for %s' % country)
            else:
                self.assertFalse(available, 'AssertFalse failed for %s' % country)

    def test_get_pricelist_available_show_with_auto_property(self):
        """Same as the show=True test but with an automatic partner pricelist (EUR via BE country)."""
        show = True
        self.env.user.partner_id.country_id = self.env.ref('base.be')  # Add EUR pricelist auto
        current_pl = False
        country_list = {
            False: ['Public Pricelist', 'EUR', 'Benelux', 'Canada'],
            'BE': ['EUR', 'Benelux'],
            'IT': ['EUR'],
            'CA': ['EUR', 'Canada'],
            'US': ['Public Pricelist', 'EUR', 'Benelux', 'Canada']
        }
        for country, result in country_list.items():
            pls = self.get_pl(show, current_pl, country)
            self.assertEqual(len(set(pls.mapped('name')) & set(result)), len(pls), 'Test failed for %s (%s %s vs %s %s)'
                             % (country, len(pls), pls.mapped('name'), len(result), result))
def simulate_frontend_context(self, website_id=1):
    """Patch request-website resolution so frontend-context code under test
    resolves to website *website_id*. Mocking this single method is enough to
    simulate the frontend context in most code paths."""
    def _fake_request_website():
        return self.env['website'].browse(website_id)

    patcher = patch('odoo.addons.website.models.ir_http.get_request_website', wraps=_fake_request_website)
    patcher.start()
    self.addCleanup(patcher.stop)
class TestWebsitePriceListAvailable(TransactionCase):
    """Tests `_get_pricelist_available` across websites, codes and selectable flags."""

    # This is enough to avoid a mock (request.session/website do not exist during test)
    def get_pricelist_available(self, show_visible=False, website_id=1, country_code=None, website_sale_current_pl=None):
        # Build a minimal fake `request` (website + geoip session data) and
        # delegate to the model-level helper under test.
        request = DotDict({
            'website': self.env['website'].browse(website_id),
            'session': {
                'geoip': {
                    'country_code': country_code,
                },
                'website_sale_current_pl': website_sale_current_pl,
            },
        })
        return self.env['website']._get_pricelist_available(request, show_visible)

    def setUp(self):
        super(TestWebsitePriceListAvailable, self).setUp()
        Pricelist = self.env['product.pricelist']
        Website = self.env['website']

        # Set up 2 websites
        self.website = Website.browse(1)
        self.website2 = Website.create({'name': 'Website 2'})

        # Remove existing pricelists and create new ones covering every
        # combination of (selectable, website, code) used by the tests.
        Pricelist.search([]).write({'active': False})
        self.backend_pl = Pricelist.create({
            'name': 'Backend Pricelist',
            'website_id': False,
        })
        self.generic_pl_select = Pricelist.create({
            'name': 'Generic Selectable Pricelist',
            'selectable': True,
            'website_id': False,
        })
        self.generic_pl_code = Pricelist.create({
            'name': 'Generic Code Pricelist',
            'code': 'GENERICCODE',
            'website_id': False,
        })
        self.generic_pl_code_select = Pricelist.create({
            'name': 'Generic Code Selectable Pricelist',
            'code': 'GENERICCODESELECT',
            'selectable': True,
            'website_id': False,
        })
        self.w1_pl = Pricelist.create({
            'name': 'Website 1 Pricelist',
            'website_id': self.website.id,
        })
        self.w1_pl_select = Pricelist.create({
            'name': 'Website 1 Pricelist Selectable',
            'website_id': self.website.id,
            'selectable': True,
        })
        self.w1_pl_code = Pricelist.create({
            'name': 'Website 1 Pricelist Code',
            'website_id': self.website.id,
            'code': 'W1CODE',
        })
        self.w1_pl_code_select = Pricelist.create({
            'name': 'Website 1 Pricelist Code Selectable',
            'website_id': self.website.id,
            'code': 'W1CODESELECT',
            'selectable': True,
        })
        self.w2_pl = Pricelist.create({
            'name': 'Website 2 Pricelist',
            'website_id': self.website2.id,
        })

        simulate_frontend_context(self)

    def test_get_pricelist_available(self):
        # all_pl = self.backend_pl + self.generic_pl_select + self.generic_pl_code + self.generic_pl_code_select + self.w1_pl + self.w1_pl_select + self.w1_pl_code + self.w1_pl_code_select + self.w2_pl

        # Test get all available pricelists
        pls_to_return = self.generic_pl_select + self.generic_pl_code + self.generic_pl_code_select + self.w1_pl + self.w1_pl_select + self.w1_pl_code + self.w1_pl_code_select
        pls = self.get_pricelist_available()
        self.assertEqual(pls, pls_to_return, "Every pricelist having the correct website_id set or (no website_id but a code or selectable) should be returned")

        # Test get all available and visible pricelists
        pls_to_return = self.generic_pl_select + self.generic_pl_code_select + self.w1_pl_select + self.w1_pl_code_select
        pls = self.get_pricelist_available(show_visible=True)
        self.assertEqual(pls, pls_to_return, "Only selectable pricelists website compliant (website_id False or current website) should be returned")

    def test_property_product_pricelist_for_inactive_partner(self):
        # `_get_partner_pricelist_multi` should consider inactive users when searching for pricelists.
        # Real case if for public user. His `property_product_pricelist` need to be set as it is passed
        # through `_get_pl_partner_order` as the `website_pl` when searching for available pricelists
        # for active users.
        public_partner = self.env.ref('base.public_partner')
        self.assertFalse(public_partner.active, "Ensure public partner is inactive (purpose of this test)")
        pl = public_partner.property_product_pricelist
        self.assertEqual(len(pl), 1, "Inactive partner should still get a `property_product_pricelist`")
class TestWebsitePriceListAvailableGeoIP(TestWebsitePriceListAvailable):
    """Extends the availability tests with GeoIP country-group filtering."""

    def setUp(self):
        super(TestWebsitePriceListAvailableGeoIP, self).setUp()
        # clean `property_product_pricelist` for partner for this test (clean setup)
        self.env['ir.property'].search([('res_id', '=', 'res.partner,%s' % self.env.user.partner_id.id)]).unlink()

        # set different country groups on pricelists
        c_EUR = self.env.ref('base.europe')
        c_BENELUX = self.env.ref('website_sale.benelux')
        self.BE = self.env.ref('base.be')
        NL = self.env.ref('base.nl')
        c_BE = self.env['res.country.group'].create({'name': 'Belgium', 'country_ids': [(6, 0, [self.BE.id])]})
        c_NL = self.env['res.country.group'].create({'name': 'Netherlands', 'country_ids': [(6, 0, [NL.id])]})
        (self.backend_pl + self.generic_pl_select + self.generic_pl_code + self.w1_pl_select).write({'country_group_ids': [(6, 0, [c_BE.id])]})
        (self.generic_pl_code_select + self.w1_pl + self.w2_pl).write({'country_group_ids': [(6, 0, [c_BENELUX.id])]})
        (self.w1_pl_code).write({'country_group_ids': [(6, 0, [c_EUR.id])]})
        (self.w1_pl_code_select).write({'country_group_ids': [(6, 0, [c_NL.id])]})

        # Resulting fixture matrix:
        # pricelist               | selectable | website | code | country group |
        # ----------------------------------------------------------------------|
        # backend_pl              |            |         |      | BE            |
        # generic_pl_select       | V          |         |      | BE            |
        # generic_pl_code         |            |         | V    | BE            |
        # generic_pl_code_select  | V          |         | V    | BENELUX       |
        # w1_pl                   |            | 1       |      | BENELUX       |
        # w1_pl_select            | V          | 1       |      | BE            |
        # w1_pl_code              |            | 1       | V    | EUR           |
        # w1_pl_code_select       | V          | 1       | V    | NL            |
        # w2_pl                   |            | 2       |      | BENELUX       |

        # available pl for website 1 for GeoIP BE (anything except website 2, backend and NL)
        self.website1_be_pl = self.generic_pl_select + self.generic_pl_code + self.w1_pl_select + self.generic_pl_code_select + self.w1_pl + self.w1_pl_code

    def test_get_pricelist_available_geoip(self):
        # Test get all available pricelists with geoip and no partner pricelist (ir.property)
        # property_product_pricelist will also be returned in the available pricelists
        self.website1_be_pl += self.env.user.partner_id.property_product_pricelist
        pls = self.get_pricelist_available(country_code=self.BE.code)
        self.assertEqual(pls, self.website1_be_pl, "Only pricelists for BE and accessible on website should be returned, and the partner pl")

    def test_get_pricelist_available_geoip2(self):
        # Test get all available pricelists with geoip and a partner pricelist (ir.property) not website compliant
        self.env.user.partner_id.property_product_pricelist = self.backend_pl
        pls = self.get_pricelist_available(country_code=self.BE.code)
        self.assertEqual(pls, self.website1_be_pl, "Only pricelists for BE and accessible on website should be returned as partner pl is not website compliant")

    def test_get_pricelist_available_geoip3(self):
        # Test get all available pricelists with geoip and a partner pricelist (ir.property) website compliant (but not geoip compliant)
        self.env.user.partner_id.property_product_pricelist = self.w1_pl_code_select
        self.website1_be_pl += self.env.user.partner_id.property_product_pricelist
        pls = self.get_pricelist_available(country_code=self.BE.code)
        self.assertEqual(pls, self.website1_be_pl, "Only pricelists for BE and accessible on website should be returned, plus the partner pricelist as it is website compliant")

    def test_get_pricelist_available_geoip4(self):
        # Test get all available with geoip and visible pricelists + promo pl
        pls_to_return = self.generic_pl_select + self.w1_pl_select + self.generic_pl_code_select
        # property_product_pricelist will also be returned in the available pricelists
        pls_to_return += self.env.user.partner_id.property_product_pricelist
        current_pl = self.w1_pl_code
        pls = self.get_pricelist_available(country_code=self.BE.code, show_visible=True, website_sale_current_pl=current_pl.id)
        self.assertEqual(pls, pls_to_return + current_pl, "Only pricelists for BE, accessible en website and selectable should be returned. It should also return the applied promo pl")
class TestWebsitePriceListHttp(HttpCase):
    """HTTP-level test guarding against cross-company pricelist leakage."""

    def test_get_pricelist_available_multi_company(self):
        ''' Test that the `property_product_pricelist` of `res.partner` is not
            computed as SUPERUSER_ID.
            Indeed, `property_product_pricelist` is a _compute that ends up
            doing a search on `product.pricelist` that woule bypass the
            pricelist multi-company `ir.rule`. Then it would return pricelists
            from another company and the code would raise an access error when
            reading that `property_product_pricelist`.
        '''
        # Pricelist restricted to a company the portal user does not belong to.
        test_company = self.env['res.company'].create({'name': 'Test Company'})
        self.env['product.pricelist'].create({
            'name': 'Backend Pricelist For "Test Company"',
            'website_id': False,
            'company_id': test_company.id,
            'sequence': 1,
        })

        self.authenticate('portal', 'portal')
        r = self.url_open('/shop')
        self.assertEqual(r.status_code, 200, "The page should not raise an access error because of reading pricelists from other companies")
class TestWebsitePriceListMultiCompany(TransactionCase):
    """Tests that partner pricelists resolve per website company, not per user company."""

    def setUp(self):
        ''' Create a basic multi-company pricelist environment:
        - Set up 2 companies with their own company-restricted pricelist each.
        - Add demo user in those 2 companies
        - For each company, add that company pricelist to the demo user partner.
        - Set website's company to company 2
        - Demo user will still be in company 1
        '''
        super(TestWebsitePriceListMultiCompany, self).setUp()

        self.demo_user = self.env.ref('base.user_demo')

        # Create and add demo user to 2 companies
        self.company1 = self.demo_user.company_id
        self.company2 = self.env['res.company'].create({'name': 'Test Company'})
        self.demo_user.company_ids += self.company2

        # Set company2 as current company for demo user
        self.website = self.env['website'].browse(1)
        self.website.company_id = self.company2

        # Create a company pricelist for each company and set it to demo user
        self.c1_pl = self.env['product.pricelist'].create({
            'name': 'Company 1 Pricelist',
            'company_id': self.company1.id,
        })
        self.c2_pl = self.env['product.pricelist'].create({
            'name': 'Company 2 Pricelist',
            'company_id': self.company2.id,
            'website_id': False,
        })
        self.demo_user.partner_id.property_product_pricelist = self.c1_pl

        # Switch env.user company to create ir.property in company2
        self.env.user.company_id = self.company2
        self.demo_user.partner_id.property_product_pricelist = self.c2_pl

        # Ensure everything was done correctly: one company-specific
        # ir.property per company pointing at that company's pricelist.
        self.assertEqual(self.demo_user.partner_id.with_context(force_company=self.company1.id).property_product_pricelist, self.c1_pl)
        self.assertEqual(self.demo_user.partner_id.with_context(force_company=self.company2.id).property_product_pricelist, self.c2_pl)
        irp1 = self.env['ir.property'].search([
            ('name', '=', 'property_product_pricelist'),
            ('company_id', '=', self.company1.id),
            ('res_id', '=', 'res.partner,%s' % self.demo_user.partner_id.id),
            ('value_reference', '=', 'product.pricelist,%s' % self.c1_pl.id),
        ])
        irp2 = self.env['ir.property'].search([
            ('name', '=', 'property_product_pricelist'),
            ('company_id', '=', self.company2.id),
            ('res_id', '=', 'res.partner,%s' % self.demo_user.partner_id.id),
            ('value_reference', '=', 'product.pricelist,%s' % self.c2_pl.id),
        ])
        self.assertEqual(len(irp1 + irp2), 2, "Ensure there is an `ir.property` for demo partner for every company, and that the pricelist is the company specific one.")
        simulate_frontend_context(self)

        # ---------------------------------- IR.PROPERTY -------------------------------------
        # id |            name              |     res_id    | company_id |   value_reference
        # ------------------------------------------------------------------------------------
        # 1  | 'property_product_pricelist' |               |      1     | product.pricelist,1
        # 2  | 'property_product_pricelist' |               |      2     | product.pricelist,2
        # 3  | 'property_product_pricelist' | res.partner,8 |      1     | product.pricelist,10
        # 4  | 'property_product_pricelist' | res.partner,8 |      2     | product.pricelist,11

    def test_property_product_pricelist_multi_company(self):
        ''' Test that the `property_product_pricelist` of `res.partner` is read
            for the company of the website and not the current user company.
            This is the case if the user visit a website for which the company
            is not the same as its user's company.

            Here, as demo user (company1), we will visit website1 (company2).
            It should return the ir.property for demo user for company2 and not
            for the company1 as we should get the website's company pricelist
            and not the demo user's current company pricelist.
        '''
        # First check: It should return ir.property,4 as company_id is
        # website.company_id and not env.user.company_id
        company_id = self.website.company_id.id
        partner = self.demo_user.partner_id.with_context(force_company=company_id)
        demo_pl = partner.property_product_pricelist
        self.assertEqual(demo_pl, self.c2_pl)

        # Second thing to check: It should not error in read right access error
        # Indeed, the ir.rule for pricelists rights about company should allow to
        # also read a pricelist from another company if that company is the one
        # from the currently visited website.
        self.env(user=self.env.ref('base.user_demo'))['product.pricelist'].browse(demo_pl.id).name
| StarcoderdataPython |
9797596 | #!/usr/bin/env python3
import sys
import os
import re
import argparse
from argparse import RawTextHelpFormatter
from pathlib import Path
import calendar
from datetime import datetime
from glob import glob
import tempfile
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.bwriter import BibTexWriter
from bibtexparser.bibdatabase import BibDatabase
from bibtexparser.customization import convert_to_unicode
import shutil
import collections
from collections import defaultdict
from pprint import pprint
# Map BibTeX to Academic publication types.
# Values are the numeric publication_types codes written into the Markdown
# front matter; unknown entry types fall back to 0 via PUB_TYPES.get(..., 0).
PUB_TYPES = {
    'article': 2,
    'book': 5,
    'inbook': 6,
    'incollection': 6,
    'inproceedings': 1,
    'manual': 4,
    'mastersthesis': 7,
    'misc': 0,
    'phdthesis': 7,
    'proceedings': 0,
    'techreport': 4,
    'unpublished': 3,
    'patent': 8
}
def slugify(s, lower=True):
    """Turn *s* into a hyphen-delimited slug.

    Replaces '.', '_', ':' with '-', delimits letter/digit boundaries and
    camelCase transitions, strips every other non-alphanumeric character,
    collapses hyphen runs, and lowercases unless *lower* is False.

    Fixes: the original replacement templates used the bogus escape ``\\-``,
    which `re.sub` emits as a literal backslash (only removed later by the
    alphanumeric filter), and the non-raw pattern ``'\\-+'`` triggers an
    invalid-escape DeprecationWarning. Raw/plain forms below are
    behavior-identical and warning-free.
    """
    bad_symbols = ('.', '_', ':')  # Symbols to replace with hyphen delimiter.
    delimiter = '-'
    good_symbols = (delimiter,)  # Symbols to keep.
    for r in bad_symbols:
        s = s.replace(r, delimiter)
    s = re.sub(r'(\D+)(\d+)', r'\1-\2', s)  # Delimit non-number, number.
    s = re.sub(r'(\d+)(\D+)', r'\1-\2', s)  # Delimit number, non-number.
    s = re.sub(r'((?<=[a-z])[A-Z]|(?<!\A)[A-Z](?=[a-z]))', r'-\1', s)  # Delimit camelcase.
    s = ''.join(c for c in s if c.isalnum() or c in good_symbols).strip()  # Strip non-alphanumeric and non-hyphen.
    s = re.sub(r'-+', '-', s)  # Remove consecutive hyphens.
    if lower:
        s = s.lower()
    return s
def clean_bibtex_authors(author_str):
    """Convert author names to `firstname(s) lastname` format.

    *author_str* is a list of raw name strings (already split on ' and ').
    Returns a list of double-quoted `"First Last"` strings ready for TOML.
    """
    authors = []
    for s in author_str:
        s = s.strip()
        if len(s) < 1:
            continue
        if ',' in s:
            # "Last, First M." form.
            split_names = s.split(',', 1)
            last_name = split_names[0].strip()
            first_names = [i.strip() for i in split_names[1].split()]
        else:
            # "First M. Last" form: last token is the surname.
            split_names = s.split()
            last_name = split_names.pop()
            first_names = [i.replace('.', '. ').strip() for i in split_names]
        if last_name in ['jnr', 'jr', 'junior']:
            # Suffix was captured as surname; the real surname is the previous token.
            last_name = first_names.pop()
        for item in first_names:
            if item in ['ben', 'van', 'der', 'de', 'la', 'le']:
                # NOTE(review): pops the *last* first-name while iterating the
                # same list — works for a single particle but mutates during
                # iteration; verify behaviour for multi-particle surnames
                # (e.g. "van der Berg").
                last_name = first_names.pop() + ' ' + last_name
        authors.append(f'"{" ".join(first_names)} {last_name}"')
    return authors
def clean_bibtex_str(s):
    """Clean a BibTeX string and escape TOML special characters.

    Order matters: backslashes are dropped first, then double quotes are
    escaped, braces removed, and whitespace characters normalised.
    """
    replacements = (
        ('\\', ''),
        ('"', '\\"'),
        ('{', ''),
        ('}', ''),
        ('\t', ' '),
        ('\n', ' '),
        ('\r', ''),
    )
    for old, new in replacements:
        s = s.replace(old, new)
    return s
def clean_bibtex_tags(s, normalize=False):
    """Clean BibTeX keywords and convert them to a comma-separated TOML tag list.

    Each tag is cleaned via clean_bibtex_str, stripped and double-quoted;
    with *normalize* the quoted tags are lower-cased then capitalised.
    """
    quoted = [f'"{tag.strip()}"' for tag in clean_bibtex_str(s).split(',')]
    if normalize:
        quoted = [tag.lower().capitalize() for tag in quoted]
    return ', '.join(quoted)
def month2number(month):
    """Convert a BibTeX month field to a zero-padded two-digit string.

    Handles plain month names ('jan', 'December') via their first three
    letters; falls back to extracting the month token from ranges such as
    '02-03 Jun'.
    """
    months = list(calendar.month_abbr)
    abbr = month.strip()[:3].title()
    try:
        index = months.index(abbr)
    except ValueError:
        # Range-style value, e.g. '02-03 Jun': take the alphabetic token.
        token = re.search(r'[0-9]{2}-[0-9]{2}\s([a-zA-Z]+)', month).group(1)
        index = months.index(token)
    return str(index).zfill(2)
def check_duplicates(bib_dict):
    """Group keys of *bib_dict* that share the same value.

    Returns a defaultdict(list) mapping each duplicated value to the list of
    keys (in insertion order) that carry it; unique values are omitted.
    """
    counts = collections.Counter(bib_dict.values())
    grouped = defaultdict(list)
    for key, value in bib_dict.items():
        if counts[value] > 1:
            grouped[value].append(key)
    return grouped
def import_bibtex(bibtex, pub_dir='publication', featured=False, overwrite=False, normalize=False):
    """Import publications from BibTeX file"""
    # Check BibTeX file exists.
    if not Path(bibtex).is_file():
        print('Please check the path to your BibTeX file and re-run.')
        return

    # Load BibTeX file for parsing.
    with open(bibtex, 'r', encoding='utf-8') as bibtex_file:
        parser = BibTexParser(common_strings=True)
        parser.customization = convert_to_unicode
        bib_database = bibtexparser.load(bibtex_file, parser=parser)

    ## Remove duplicates with same title
    # Map bibkey -> title so entries sharing a title can be detected.
    bib_dict_full = bib_database.entries_dict
    bib_dict = dict(map(lambda kv : (kv[0], kv[1]['title']), bib_dict_full.items()))
    duplicate_dict = check_duplicates(bib_dict)
    print('Found %d duplicates.' % len(duplicate_dict))
    # pprint(dict(duplicate_dict.items()))
    print('Removing all preprints if proceedings/journals are available.')
    # Drop preprint entries whose bibkey contains 'corr' (arXiv/CoRR) when a
    # duplicate-title entry exists.
    for title, bibkeys in duplicate_dict.items():
        for bibkey in bibkeys:
            if 'corr' in bibkey:
                del bib_dict_full[bibkey]
                del bib_dict[bibkey]
    # Any duplicates still left must be resolved manually.
    duplicate_dict = check_duplicates(bib_dict)
    print('Found %d duplicates.' % len(duplicate_dict))
    pprint(dict(duplicate_dict.items()))
    print('Please, resolve these conflits by hand.')
    for key, entry in bib_dict_full.items():
        entry['ID'] = key
        parse_bibtex_entry(entry, pub_dir=pub_dir, featured=featured, overwrite=overwrite, normalize=normalize)
def parse_bibtex_entry(entry, pub_dir='publication', featured=False, overwrite=False, normalize=False):
    """Parse a bibtex entry and generate corresponding publication bundle.

    Creates `content/<pub_dir>/<slug>/` containing `cite.bib` (the raw entry)
    and `index.md` (YAML front matter). Existing bundles are skipped unless
    *overwrite* is set.
    """
    verbose = False
    print(f"Parsing entry {entry['ID']}") if verbose else None

    bundle_path = f"content/{pub_dir}/{slugify(entry['ID'])}"
    markdown_path = os.path.join(bundle_path, 'index.md')
    # cite_path = os.path.join(bundle_path, f"{slugify(entry['ID'])}.bib")
    cite_path = os.path.join(bundle_path, 'cite.bib')
    date = datetime.utcnow()
    timestamp = date.isoformat('T') + 'Z'  # RFC 3339 timestamp.

    # Do not overwrite publication bundle if it already exists.
    if not overwrite and os.path.isdir(bundle_path):
        print(f'Skipping creation of {bundle_path} as it already exists. To overwrite, add the `--overwrite` argument.')
        return

    # Create bundle dir.
    print(f'Creating folder {bundle_path}') if verbose else None
    Path(bundle_path).mkdir(parents=True, exist_ok=True)

    # Save citation file.
    print(f'Saving citation to {cite_path}') if verbose else None
    db = BibDatabase()
    db.entries = [entry]
    writer = BibTexWriter()
    with open(cite_path, 'w', encoding='utf-8') as f:
        f.write(writer.write(db))

    # Prepare YAML front matter for Markdown file.
    frontmatter = ['---']
    frontmatter.append(f'title: "{clean_bibtex_str(entry["title"])}"')
    # Publication date: first of the month when a month is given, else Jan 1.
    if 'month' in entry:
        frontmatter.append(f"date: {entry['year']}-{month2number(entry['month'])}-01")
    else:
        frontmatter.append(f"date: {entry['year']}-01-01")
    # frontmatter.append(f"publishDate: {timestamp}")

    # Authors: prefer 'author', fall back to 'editor'.
    authors = None
    if 'author' in entry:
        authors = entry['author']
    elif 'editor' in entry:
        authors = entry['editor']
    if authors:
        authors = clean_bibtex_authors([i.strip() for i in authors.replace('\n', ' ').split(' and ')])
        frontmatter.append(f"authors: [{', '.join(authors)}]")

    frontmatter.append(f'publication_types: ["{PUB_TYPES.get(entry["ENTRYTYPE"], 0)}"]')

    # if 'abstract' in entry:
    #     frontmatter.append(f'abstract: "{clean_bibtex_str(entry["abstract"])}"')
    # else:
    frontmatter.append('abstract: ""')

    frontmatter.append(f'featured: {str(featured).lower()}')

    # Publication name.
    if 'booktitle' in entry:
        frontmatter.append(f'publication: "*{clean_bibtex_str(entry["booktitle"])}*"')
    elif 'journal' in entry:
        frontmatter.append(f'publication: "*{clean_bibtex_str(entry["journal"])}*"')
    else:
        frontmatter.append('publication: ""')

    if 'keywords' in entry:
        frontmatter.append(f'tags: [{clean_bibtex_tags(entry["keywords"], normalize)}]')

    if 'url' in entry:
        frontmatter.append(f'url_pdf: "{clean_bibtex_str(entry["url"])}"')

    if 'doi' in entry:
        frontmatter.append(f'doi: "{entry["doi"]}"')

    frontmatter.append('---\n\n')

    # Save Markdown file.
    try:
        print(f"Saving Markdown to '{markdown_path}'") if verbose else None
        with open(markdown_path, 'w', encoding='utf-8') as f:
            f.write("\n".join(frontmatter))
    except IOError:
        print('ERROR: could not save file.')
if __name__ == "__main__":
    # CLI entry point: --path may be a single .bib file or a directory tree
    # of .bib files (merged before import).
    parser = argparse.ArgumentParser(
        description=f'Generate publications for academic website',
        formatter_class=RawTextHelpFormatter)
    parser.add_argument('--path', type=str, required=True, default='bib/')
    parser.add_argument("--overwrite", action='store_true', help='Overwrite existing publications')
    args = parser.parse_args()
    if os.path.isfile(args.path):
        import_bibtex(args.path, pub_dir='publication', featured=False, overwrite=args.overwrite, normalize=False)
    elif os.path.isdir(args.path):
        bibs = glob(args.path + '**/*.bib', recursive=True)
        # Merge all bib files in one
        # NOTE(review): the merged file is always written to 'bib/summary.bib'
        # regardless of --path, which assumes a 'bib/' directory exists and
        # that no input file has that name — confirm.
        with open('bib/summary.bib','wb') as wfd:
            for f in bibs:
                print(f)
                with open(f,'rb') as fd:
                    shutil.copyfileobj(fd, wfd)
        import_bibtex('bib/summary.bib', pub_dir='publication', featured=False, overwrite=args.overwrite, normalize=False)
        os.remove('bib/summary.bib')
    else:
        print('Error: Invalid path')
        quit(-1)
| StarcoderdataPython |
48321 | <filename>kairon/shared/account/processor.py
from datetime import datetime
from typing import Dict, Text
from loguru import logger as logging
from mongoengine.errors import DoesNotExist
from mongoengine.errors import ValidationError
from pydantic import SecretStr
from validators import ValidationFailure
from validators import email as mail_check
from kairon.exceptions import AppException
from kairon.shared.account.data_objects import Account, User, Bot, UserEmailConfirmation, Feedback, UiConfig, \
MailTemplates, SystemProperties, BotAccess
from kairon.shared.actions.data_objects import FormValidationAction, SlotSetAction, EmailActionConfig
from kairon.shared.data.constant import ACCESS_ROLES, ACTIVITY_STATUS
from kairon.shared.data.data_objects import BotSettings, ChatClientConfig, SlotMapping
from kairon.shared.utils import Utility
# Module-import side effect: load the email configuration once before any
# AccountProcessor method runs.
Utility.load_email_configuration()
class AccountProcessor:
@staticmethod
def add_account(name: str, user: str):
"""
adds a new account
:param name: account name
:param user: user id
:return: account id
"""
if Utility.check_empty_string(name):
raise AppException("Account Name cannot be empty or blank spaces")
Utility.is_exist(
Account,
exp_message="Account name already exists!",
name__iexact=name,
status=True,
)
license = {"bots": 2, "intents": 3, "examples": 20, "training": 3, "augmentation": 5}
return Account(name=name.strip(), user=user, license=license).save().to_mongo().to_dict()
@staticmethod
def get_account(account: int):
"""
fetch account object
:param account: account id
:return: account details
"""
try:
account = Account.objects().get(id=account).to_mongo().to_dict()
return account
except:
raise DoesNotExist("Account does not exists")
@staticmethod
def add_bot(name: str, account: int, user: str, is_new_account: bool = False):
"""
add a bot to account
:param name: bot name
:param account: account id
:param user: user id
:param is_new_account: True if it is a new account
:return: bot id
"""
from kairon.shared.data.processor import MongoProcessor
from kairon.shared.data.data_objects import BotSettings
if Utility.check_empty_string(name):
raise AppException("Bot Name cannot be empty or blank spaces")
if Utility.check_empty_string(user):
raise AppException("user cannot be empty or blank spaces")
Utility.is_exist(
Bot,
exp_message="Bot already exists!",
name__iexact=name,
account=account,
status=True,
)
bot = Bot(name=name, account=account, user=user).save().to_mongo().to_dict()
bot_id = bot['_id'].__str__()
if not is_new_account:
AccountProcessor.allow_access_to_bot(bot_id, user, user, account, ACCESS_ROLES.ADMIN.value, ACTIVITY_STATUS.ACTIVE.value)
BotSettings(bot=bot_id, user=user).save()
processor = MongoProcessor()
config = processor.load_config(bot_id)
processor.add_or_overwrite_config(config, bot_id, user)
processor.add_default_fallback_data(bot_id, user, True, True)
return bot
@staticmethod
def list_bots(account_id: int):
for bot in Bot.objects(account=account_id, status=True):
bot = bot.to_mongo().to_dict()
bot.pop('status')
bot['_id'] = bot['_id'].__str__()
yield bot
@staticmethod
def update_bot(name: Text, bot: Text):
if Utility.check_empty_string(name):
raise AppException('Name cannot be empty')
try:
bot_info = Bot.objects(id=bot, status=True).get()
bot_info.name = name
bot_info.save()
except DoesNotExist:
raise AppException('Bot not found')
@staticmethod
def delete_bot(bot: Text, user: Text):
from kairon.shared.data.data_objects import Intents, Responses, Stories, Configs, Endpoints, Entities, \
EntitySynonyms, Forms, LookupTables, ModelDeployment, ModelTraining, RegexFeatures, Rules, SessionConfigs, \
Slots, TrainingDataGenerator, TrainingExamples
from kairon.shared.test.data_objects import ModelTestingLogs
from kairon.shared.importer.data_objects import ValidationLogs
from kairon.shared.actions.data_objects import HttpActionConfig, ActionServerLogs, Actions
try:
bot_info = Bot.objects(id=bot, status=True).get()
bot_info.status = False
bot_info.save()
Utility.hard_delete_document([
Actions, BotAccess, BotSettings, Configs, ChatClientConfig, Endpoints, Entities, EmailActionConfig,
EntitySynonyms, Forms, FormValidationAction, HttpActionConfig, Intents, LookupTables, RegexFeatures,
Responses, Rules, SlotMapping, SlotSetAction, SessionConfigs, Slots, Stories, TrainingDataGenerator,
TrainingExamples, ActionServerLogs, ModelTraining, ModelTestingLogs, ModelDeployment, ValidationLogs
], bot, user=user)
AccountProcessor.remove_bot_access(bot)
except DoesNotExist:
raise AppException('Bot not found')
@staticmethod
def fetch_role_for_user(email: Text, bot: Text):
try:
return BotAccess.objects(accessor_email=email, bot=bot,
status=ACTIVITY_STATUS.ACTIVE.value).get().to_mongo().to_dict()
except DoesNotExist as e:
logging.error(e)
raise AppException('Access to bot is denied')
@staticmethod
def get_accessible_bot_details(account_id: int, email: Text):
shared_bots = []
account_bots = list(AccountProcessor.list_bots(account_id))
for bot in BotAccess.objects(accessor_email=email, bot_account__ne=account_id,
status=ACTIVITY_STATUS.ACTIVE.value):
bot_details = AccountProcessor.get_bot(bot['bot'])
bot_details.pop('status')
bot_details['_id'] = bot_details['_id'].__str__()
shared_bots.append(bot_details)
return {
'account_owned': account_bots,
'shared': shared_bots
}
@staticmethod
def allow_bot_and_generate_invite_url(bot: Text, email: Text, user: Text, bot_account: int,
role: ACCESS_ROLES = ACCESS_ROLES.TESTER.value):
bot_details = AccountProcessor.allow_access_to_bot(bot, email, user, bot_account, role)
if Utility.email_conf["email"]["enable"]:
token = Utility.generate_token(email)
link = f'{Utility.email_conf["app"]["url"]}/{bot}/invite/accept/{token}'
return bot_details['name'], link
@staticmethod
def allow_access_to_bot(bot: Text, accessor_email: Text, user: Text,
bot_account: int, role: ACCESS_ROLES = ACCESS_ROLES.TESTER.value,
activity_status: ACTIVITY_STATUS = ACTIVITY_STATUS.INVITE_NOT_ACCEPTED.value):
"""
Adds bot to a user account.
:param bot: bot id
:param accessor_email: email id of the new member
:param user: user adding the new member
:param bot_account: account where bot exists
:param activity_status: can be one of active, inactive or deleted.
:param role: can be one of admin, designer or tester.
"""
bot_details = AccountProcessor.get_bot(bot)
Utility.is_exist(BotAccess, 'User is already a collaborator', accessor_email=accessor_email, bot=bot,
status__ne=ACTIVITY_STATUS.DELETED.value)
BotAccess(
accessor_email=accessor_email,
bot=bot,
role=role,
user=user,
bot_account=bot_account,
status=activity_status
).save()
return bot_details
@staticmethod
def update_bot_access(bot: Text, accessor_email: Text, user: Text,
role: ACCESS_ROLES = ACCESS_ROLES.TESTER.value,
status: ACTIVITY_STATUS = ACTIVITY_STATUS.ACTIVE.value):
"""
Adds bot to a user account.
:param bot: bot id
:param accessor_email: email id of the new member
:param user: user adding the new member
:param role: can be one of admin, designer or tester.
:param status: can be one of active, inactive or deleted.
"""
AccountProcessor.get_bot(bot)
try:
bot_access = BotAccess.objects(accessor_email=accessor_email, bot=bot).get()
if Utility.email_conf["email"]["enable"]:
if status != ACTIVITY_STATUS.DELETED.value and bot_access.status == ACTIVITY_STATUS.INVITE_NOT_ACCEPTED.value:
raise AppException('User is yet to accept the invite')
bot_access.role = role
bot_access.user = user
bot_access.status = status
bot_access.timestamp = datetime.utcnow()
bot_access.save()
except DoesNotExist:
raise AppException('User not yet invited to collaborate')
@staticmethod
def accept_bot_access_invite(token: Text, bot: Text):
"""
Activate user's access to bot.
:param token: token sent in the link
:param bot: bot id
"""
bot_details = AccountProcessor.get_bot(bot)
accessor_email = Utility.verify_token(token)
AccountProcessor.get_user_details(accessor_email)
try:
bot_access = BotAccess.objects(accessor_email=accessor_email, bot=bot,
status=ACTIVITY_STATUS.INVITE_NOT_ACCEPTED.value).get()
bot_access.status = ACTIVITY_STATUS.ACTIVE.value
bot_access.accept_timestamp = datetime.utcnow()
bot_access.save()
return bot_access.user, bot_details['name'], bot_access.accessor_email, bot_access.role
except DoesNotExist:
raise AppException('No pending invite found for this bot and user')
@staticmethod
def remove_bot_access(bot: Text, **kwargs):
"""
Removes bot from either for all users or only for user supplied.
:param bot: bot id
:param kwargs: can be either account or email.
"""
if kwargs:
if not Utility.is_exist(BotAccess, None, False, **kwargs, bot=bot, status__ne=ACTIVITY_STATUS.DELETED.value):
raise AppException('User not a collaborator to this bot')
active_bot_access = BotAccess.objects(**kwargs, bot=bot, status__ne=ACTIVITY_STATUS.DELETED.value)
else:
active_bot_access = BotAccess.objects(bot=bot, status__ne=ACTIVITY_STATUS.DELETED.value)
active_bot_access.update(set__status=ACTIVITY_STATUS.DELETED.value)
@staticmethod
def list_bot_accessors(bot: Text):
"""
List users who have access to bot.
:param bot: bot id
"""
for accessor in BotAccess.objects(bot=bot, status__ne=ACTIVITY_STATUS.DELETED.value):
accessor = accessor.to_mongo().to_dict()
accessor['_id'] = accessor['_id'].__str__()
yield accessor
@staticmethod
def get_bot(id: str):
"""
fetches bot details
:param id: bot id
:return: bot details
"""
try:
return Bot.objects().get(id=id).to_mongo().to_dict()
except:
raise DoesNotExist("Bot does not exists!")
@staticmethod
def add_user(
email: str,
password: str,
first_name: str,
last_name: str,
account: int,
user: str,
is_integration_user=False
):
"""
adds new user to the account
:param email: user login id
:param password: <PASSWORD>
:param first_name: user firstname
:param last_name: user lastname
:param account: account id
:param user: user id
:param is_integration_user: is this
:return: user details
"""
if (
Utility.check_empty_string(email)
or Utility.check_empty_string(last_name)
or Utility.check_empty_string(first_name)
or Utility.check_empty_string(password)
):
raise AppException(
"Email, FirstName, LastName and password cannot be empty or blank spaces"
)
Utility.is_exist(
User,
exp_message="User already exists! try with different email address.",
email__iexact=email.strip(),
status=True,
)
return (
User(
email=email.strip(),
password=Utility.get_password_hash(password.strip()),
first_name=first_name.strip(),
last_name=last_name.strip(),
account=account,
user=user.strip(),
is_integration_user=is_integration_user,
)
.save()
.to_mongo()
.to_dict()
)
@staticmethod
def get_user(email: str):
"""
fetch user details
:param email: user login id
:return: user details
"""
try:
return User.objects().get(email=email).to_mongo().to_dict()
except Exception as e:
logging.error(e)
raise DoesNotExist("User does not exist!")
@staticmethod
def get_user_details(email: str):
"""
fetches complete user details, checks for whether it is inactive
:param email: login id
:return: dict
"""
user = AccountProcessor.get_user(email)
if not user["is_integration_user"]:
AccountProcessor.check_email_confirmation(user["email"])
if not user["status"]:
raise ValidationError("Inactive User please contact admin!")
account = AccountProcessor.get_account(user["account"])
if not account["status"]:
raise ValidationError("Inactive Account Please contact system admin!")
return user
@staticmethod
def get_complete_user_details(email: str):
"""
fetches complete user details including account and bot
:param email: login id
:return: dict
"""
user = AccountProcessor.get_user(email)
account = AccountProcessor.get_account(user["account"])
bots = AccountProcessor.get_accessible_bot_details(user["account"], email)
user["account_name"] = account["name"]
user['bots'] = bots
user["_id"] = user["_id"].__str__()
user.pop('password')
return user
@staticmethod
def get_integration_user(bot: str, account: int):
"""
creates integration user if it does not exist
:param bot: bot id
:param account: account id
:return: dict
"""
email = f"{<EMAIL>"
if not Utility.is_exist(
User, raise_error=False, email=email, is_integration_user=True, status=True
):
password = <PASSWORD>()
user_details = AccountProcessor.add_user(
email=email,
password=password,
first_name=bot,
last_name=bot,
account=account,
user="auto_gen",
is_integration_user=True,
)
AccountProcessor.allow_access_to_bot(bot, email.strip(), "auto_gen", account,
ACCESS_ROLES.ADMIN.value, ACTIVITY_STATUS.ACTIVE.value)
return user_details
else:
return (
User.objects(email=email).get(is_integration_user=True).to_mongo().to_dict()
)
@staticmethod
async def account_setup(account_setup: Dict, user: Text):
"""
create new account
:param account_setup: dict of account details
:param user: user id
:return: dict user details, user email id, confirmation mail subject, mail body
"""
from kairon.shared.data.processor import MongoProcessor
account = None
bot = None
mail_to = None
email_enabled = Utility.email_conf["email"]["enable"]
link = None
try:
account = AccountProcessor.add_account(account_setup.get("account"), user)
bot = AccountProcessor.add_bot('Hi-Hello', account["_id"], user, True)
user_details = AccountProcessor.add_user(
email=account_setup.get("email"),
first_name=account_setup.get("first_name"),
last_name=account_setup.get("last_name"),
password=<PASSWORD>_setup.get("password").get_secret_value(),
account=account["_id"].__str__(),
user=user
)
AccountProcessor.allow_access_to_bot(bot["_id"].__str__(), account_setup.get("email"),
account_setup.get("email"), account['_id'],
ACCESS_ROLES.ADMIN.value, ACTIVITY_STATUS.ACTIVE.value)
await MongoProcessor().save_from_path(
"template/use-cases/Hi-Hello", bot["_id"].__str__(), user="sysadmin"
)
if email_enabled:
token = Utility.generate_token(account_setup.get("email"))
link = Utility.email_conf["app"]["url"] + '/verify/' + token
mail_to = account_setup.get("email")
except Exception as e:
if account and "_id" in account:
Account.objects().get(id=account["_id"]).delete()
if bot and "_id" in bot:
Bot.objects().get(id=bot["_id"]).delete()
raise e
return user_details, mail_to, link
@staticmethod
async def default_account_setup():
"""
default account for testing/demo purposes
:return: user details
:raises: if account already exist
"""
account = {
"account": "DemoAccount",
"bot": "Demo",
"email": "<EMAIL>",
"first_name": "Test_First",
"last_name": "Test_Last",
"password": SecretStr("<PASSWORD>"),
}
try:
user, mail, link = await AccountProcessor.account_setup(account, user="sysadmin")
return user, mail, link
except Exception as e:
logging.info(str(e))
@staticmethod
def load_system_properties():
try:
system_properties = SystemProperties.objects().get().to_mongo().to_dict()
except DoesNotExist:
mail_templates = MailTemplates(
password_reset=open('template/emails/passwordReset.html', 'r').read(),
password_reset_confirmation=open('template/emails/passwordResetConfirmation.html', 'r').read(),
verification=open('template/emails/verification.html', 'r').read(),
verification_confirmation=open('template/emails/verificationConfirmation.html', 'r').read(),
add_member_invitation=open('template/emails/memberAddAccept.html', 'r').read(),
add_member_confirmation=open('template/emails/memberAddConfirmation.html', 'r').read(),
password_generated=open('template/emails/passwordGenerated.html', 'r').read(),
)
system_properties = SystemProperties(mail_templates=mail_templates).save().to_mongo().to_dict()
Utility.email_conf['email']['templates']['verification'] = system_properties['mail_templates']['verification']
Utility.email_conf['email']['templates']['verification_confirmation'] = system_properties['mail_templates']['verification_confirmation']
Utility.email_conf['email']['templates']['password_reset'] = system_properties['mail_templates']['password_reset']
Utility.email_conf['email']['templates']['password_reset_confirmation'] = system_properties['mail_templates']['password_reset_confirmation']
Utility.email_conf['email']['templates']['add_member_invitation'] = system_properties['mail_templates']['add_member_invitation']
Utility.email_conf['email']['templates']['add_member_confirmation'] = system_properties['mail_templates']['add_member_confirmation']
Utility.email_conf['email']['templates']['password_generated'] = system_properties['mail_templates']['password_generated']
@staticmethod
async def confirm_email(token: str):
"""
Confirms the user through link and updates the database
:param token: the token from link
:return: mail id, subject of mail, body of mail
"""
email_confirm = Utility.verify_token(token)
Utility.is_exist(
UserEmailConfirmation,
exp_message="Email already confirmed!",
email__iexact=email_confirm.strip(),
)
confirm = UserEmailConfirmation()
confirm.email = email_confirm
confirm.save()
user = AccountProcessor.get_user(email_confirm)
return email_confirm, user['first_name']
@staticmethod
def is_user_confirmed(email: str):
"""
Checks if user is verified and raises an Exception if not
:param email: mail id of user
:return: None
"""
if not Utility.is_exist(UserEmailConfirmation, email__iexact=email.strip(), raise_error=False):
raise AppException("Please verify your mail")
@staticmethod
def check_email_confirmation(email: str):
"""
Checks if the account is verified through mail
:param email: email of the user
:return: None
"""
email_enabled = Utility.email_conf["email"]["enable"]
if email_enabled:
AccountProcessor.is_user_confirmed(email)
@staticmethod
async def send_reset_link(mail: str):
"""
Sends a password reset link to the mail id
:param mail: email id of the user
:return: mail id, mail subject, mail body
"""
email_enabled = Utility.email_conf["email"]["enable"]
if email_enabled:
if isinstance(mail_check(mail), ValidationFailure):
raise AppException("Please enter valid email id")
if not Utility.is_exist(User, email__iexact=mail.strip(), raise_error=False):
raise AppException("Error! There is no user with the following mail id")
if not Utility.is_exist(UserEmailConfirmation, email__iexact=mail.strip(), raise_error=False):
raise AppException("Error! The following user's mail is not verified")
token = Utility.generate_token(mail)
user = AccountProcessor.get_user(mail)
link = Utility.email_conf["app"]["url"] + '/reset_password/' + token
return mail, user['first_name'], link
else:
raise AppException("Error! Email verification is not enabled")
@staticmethod
async def overwrite_password(token: str, password: str):
"""
Changes the user's password
:param token: unique token from the password reset page
:param password: <PASSWORD>
:return: mail id, mail subject and mail body
"""
if Utility.check_empty_string(password):
raise AppException("password cannot be empty or blank")
email = Utility.verify_token(token)
user = User.objects().get(email=email)
user.password = Utility.get_password_hash(password.strip())
user.user = email
user.password_changed = datetime.utcnow
user.save()
return email, user.first_name
@staticmethod
async def send_confirmation_link(mail: str):
"""
Sends a link to the user's mail id for account verification
:param mail: the mail id of the user
:return: mail id, mail subject and mail body
"""
email_enabled = Utility.email_conf["email"]["enable"]
if email_enabled:
if isinstance(mail_check(mail), ValidationFailure):
raise AppException("Please enter valid email id")
Utility.is_exist(UserEmailConfirmation, exp_message="Email already confirmed!", email__iexact=mail.strip())
if not Utility.is_exist(User, email__iexact=mail.strip(), raise_error=False):
raise AppException("Error! There is no user with the following mail id")
user = AccountProcessor.get_user(mail)
token = Utility.generate_token(mail)
link = Utility.email_conf["app"]["url"] + '/verify/' + token
return mail, user['first_name'], link
else:
raise AppException("Error! Email verification is not enabled")
@staticmethod
def add_feedback(rating: float, user: str, scale: float = 5.0, feedback: str = None):
"""
Add user feedback.
@param rating: user given rating.
@param user: Kairon username.
@param scale: Scale on which rating is given. %.0 is the default value.
@param feedback: feedback if any.
@return:
"""
Feedback(rating=rating, scale=scale, feedback=feedback, user=user).save()
@staticmethod
def update_ui_config(config: dict, user: str):
"""
Adds UI configuration such as themes, layout type, flags for stepper
to render UI components based on it.
@param config: UI configuration to save.
@param user: username
"""
try:
ui_config = UiConfig.objects(user=user).get()
except DoesNotExist:
ui_config = UiConfig(user=user)
ui_config.config = config
ui_config.save()
@staticmethod
def get_ui_config(user: str):
"""
Retrieves UI configuration such as themes, layout type, flags for stepper
to render UI components based on it.
@param user: username
"""
try:
ui_config = UiConfig.objects(user=user).get()
config = ui_config.config
except DoesNotExist:
config = {}
AccountProcessor.update_ui_config(config, user)
return config
| StarcoderdataPython |
9779960 | import meep as mp
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import animation
# --- 1-D FDTD simulation parameters ---------------------------------------
resolution = 16          # grid points per unit length
frequency = 2.0          # continuous-source frequency
length = 5.0             # cell length along z
endTime = 5.0            # total simulated time
courantFactor = 0.5      # Courant factor used to derive the time step
timestepDuration = courantFactor / resolution
numberTimesteps = int(endTime / timestepDuration)
# 1-D computational cell along the z axis.
cellSize = mp.Vector3(0, 0, length)
# A single continuous Ex point source at the origin.
sources = [mp.Source(
    mp.ContinuousSource(frequency=frequency),
    component=mp.Ex,
    center=mp.Vector3(0, 0, 0))]
simulation = mp.Simulation(
    cell_size=cellSize,
    sources=sources,
    resolution=resolution)
# Step once, then snapshot the Ex field across the whole cell.
simulation.run(until=timestepDuration)
field_Ex = simulation.get_array(center=mp.Vector3(0, 0, 0), size=cellSize, component=mp.Ex)
fieldData = np.array(field_Ex)
# Advance the remaining timesteps, stacking one field snapshot per row.
# NOTE(review): vstack inside the loop is quadratic; collecting rows in a
# list and stacking once afterwards would be linear.
for i in range(numberTimesteps-1):
    simulation.run(until=timestepDuration)
    fieldEx = simulation.get_array(center=mp.Vector3(0, 0, 0), size=cellSize, component=mp.Ex)
    fieldData = np.vstack((fieldData, fieldEx))
# Matplotlib figure for the animation: x spans the cell, field shown in [-1, 1].
fig = plt.figure()
ax = plt.axes(xlim=(-length/2,length/2),ylim=(-1,1))
line, = ax.plot([], [], lw=2)
# x coordinates matching each sampled field point.
xData = np.linspace(-length/2, length/2, fieldData.shape[1])
def init():
    """Reset the line to empty data; required by the blitting animation."""
    line.set_data([], [])
    return (line,)
def animate(i):
    """Draw frame *i*: plot the i-th field snapshot against the x grid."""
    line.set_data(xData, fieldData[i])
    return (line,)
# Build the animation and export it as an H.264 mp4 before showing it.
fieldAnimation = animation.FuncAnimation(fig, animate, init_func=init,
                                         frames=numberTimesteps, interval=20, blit=True)
fieldAnimation.save('basic_animation.mp4', fps=30, extra_args=['-vcodec', 'libx264'])
plt.show()
| StarcoderdataPython |
208543 | <reponame>hoechenberger/astunparse
# coding: utf-8
from __future__ import absolute_import
from six.moves import cStringIO
from .unparser import Unparser
from .printer import Printer
# Package version string.
__version__ = '1.6.1'
def unparse(tree):
    """Return Python source text reconstructed from the AST *tree*."""
    buffer = cStringIO()
    Unparser(tree, file=buffer)
    return buffer.getvalue()
def dump(tree):
    """Return a pretty-printed textual dump of the AST *tree*."""
    buffer = cStringIO()
    Printer(file=buffer).visit(tree)
    return buffer.getvalue()
| StarcoderdataPython |
1738807 | # Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
import fixtures
import mock
from oslo_config import cfg
from oslo_log import log
import oslo_messaging
from oslotest import base
import pecan
import testscenarios
from magnum.common import context as magnum_context
from magnum.common import keystone as magnum_keystone
from magnum.objects import base as objects_base
from magnum.tests import conf_fixture
from magnum.tests import fake_notifier
from magnum.tests import output_fixture
from magnum.tests import policy_fixture
CONF = cfg.CONF
# Register oslo.log options exactly once; a repeated registration (e.g. on
# module re-import) raises ArgsAlreadyParsedError, which we ignore.
try:
    log.register_options(CONF)
except cfg.ArgsAlreadyParsedError:
    pass
# Keep test log output off stderr.
CONF.set_override('use_stderr', False)
class BaseTestCase(testscenarios.WithScenarios, base.BaseTestCase):
    """Test base class."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Undo any config overrides a test applied.
        self.addCleanup(cfg.CONF.reset)
class TestCase(base.BaseTestCase):
    """Test case base class for all unit tests."""

    def setUp(self):
        """Build a fake request context and patch global collaborators."""
        super(TestCase, self).setUp()
        # Minimal keystone-style token payload used to fabricate contexts.
        token_info = {
            'token': {
                'project': {
                    'id': 'fake_project'
                },
                'user': {
                    'id': 'fake_user'
                }
            }
        }
        trustee_domain_id = '12345678-9012-3456-7890-123456789abc'
        self.context = magnum_context.RequestContext(
            auth_token_info=token_info,
            project_id='fake_project',
            user_id='fake_user',
            is_admin=False)
        # Patchers that tests can toggle via start_global()/stop_global().
        self.global_mocks = {}
        self.keystone_client = magnum_keystone.KeystoneClientV3(self.context)
        self.policy = self.useFixture(policy_fixture.PolicyFixture())
        self.output = self.useFixture(output_fixture.OutputStreamCapture())
        # Capture notifications with a fake instead of real oslo.messaging.
        self.useFixture(fixtures.MockPatchObject(
            oslo_messaging, 'Notifier',
            fake_notifier.FakeNotifier))
        self.addCleanup(fake_notifier.reset)

        def make_context(*args, **kwargs):
            # If context hasn't been constructed with token_info
            if not kwargs.get('auth_token_info'):
                kwargs['auth_token_info'] = copy.deepcopy(token_info)
            if not kwargs.get('project_id'):
                kwargs['project_id'] = 'fake_project'
            if not kwargs.get('user_id'):
                kwargs['user_id'] = 'fake_user'
            if not kwargs.get('is_admin'):
                kwargs['is_admin'] = False
            context = magnum_context.RequestContext(*args, **kwargs)
            return magnum_context.RequestContext.from_dict(context.to_dict())

        # Any context created during a test gets the fake defaults above.
        p = mock.patch.object(magnum_context, 'make_context',
                              side_effect=make_context)
        self.global_mocks['magnum.common.context.make_context'] = p
        q = mock.patch.object(magnum_keystone.KeystoneClientV3,
                              'trustee_domain_id',
                              return_value=trustee_domain_id)
        self.global_mocks[
            'magnum.common.keystone.KeystoneClientV3.trustee_domain_id'] = q
        self.mock_make_context = p.start()
        self.addCleanup(p.stop)
        self.mock_make_trustee_domain_id = q.start()
        self.addCleanup(q.stop)
        self.useFixture(conf_fixture.ConfFixture())
        self.useFixture(fixtures.NestedTempfile())
        # Snapshot the object registry so tests can register fakes safely.
        self._base_test_obj_backup = copy.copy(
            objects_base.MagnumObjectRegistry._registry._obj_classes)
        self.addCleanup(self._restore_obj_registry)

        def reset_pecan():
            pecan.set_config({}, overwrite=True)

        self.addCleanup(reset_pecan)

    def start_global(self, name):
        """Start the named global patcher registered in setUp()."""
        self.global_mocks[name].start()

    def stop_global(self, name):
        """Stop the named global patcher registered in setUp()."""
        self.global_mocks[name].stop()

    def _restore_obj_registry(self):
        # Restore the registry snapshot taken in setUp().
        objects_base.MagnumObjectRegistry._registry._obj_classes \
            = self._base_test_obj_backup

    def config(self, **kw):
        """Override config options for a test."""
        group = kw.pop('group', None)
        for k, v in kw.items():
            CONF.set_override(k, v, group)

    def get_path(self, project_file=None):
        """Get the absolute path to a file. Used for testing the API.

        :param project_file: File whose path to return. Default: None.
        :returns: path to the specified file, or path to project root.
        """
        root = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                            '..',
                                            '..',
                                            )
                               )
        if project_file:
            return os.path.join(root, project_file)
        else:
            return root
| StarcoderdataPython |
4956911 | <filename>Hash Table/217_Contains-Duplicate/217. Contains Duplicate.py
"""
* Title:
* 217. Contains Duplicate
* 217. 存在重复元素
* Address:
* https://leetcode-cn.com/problems/contains-duplicate/
"""
# Approach 1: hash set
class Solution:
    def containsDuplicate(self, nums: List[int]) -> bool:
        """Return True if any value appears more than once in nums."""
        seen = set()
        for value in nums:
            if value in seen:
                return True
            seen.add(value)
        return False
# Approach 2: sort in place, then look for equal neighbours
class Solution_2:
    def containsDuplicate(self, nums: List[int]) -> bool:
        """Sort nums in place and report whether any adjacent pair is equal."""
        nums.sort()
        return any(a == b for a, b in zip(nums, nums[1:]))
6539929 | <gh_stars>0
#!/usr/bin/env python
# Author: <NAME>
# Date: 7/11/2005
#
# See the associated manual page for an explanation.
#
from direct.showbase.ShowBase import ShowBase
from panda3d.core import FrameBufferProperties, WindowProperties
from panda3d.core import GraphicsPipe, GraphicsOutput
from panda3d.core import Filename, Texture, Shader
from panda3d.core import RenderState, CardMaker
from panda3d.core import PandaNode, TextNode, NodePath
from panda3d.core import RenderAttrib, AlphaTestAttrib, ColorBlendAttrib
from panda3d.core import CullFaceAttrib, DepthTestAttrib, DepthWriteAttrib
from panda3d.core import LPoint3, LVector3, BitMask32
from direct.gui.OnscreenText import OnscreenText
from direct.showbase.DirectObject import DirectObject
from direct.interval.MetaInterval import Sequence
from direct.task.Task import Task
from direct.actor.Actor import Actor
import sys
import os
import random
# Function to put instructions on the screen.
def addInstructions(pos, msg):
return OnscreenText(text=msg, style=1, fg=(1, 1, 1, 1), shadow=(0, 0, 0, 1),
parent=base.a2dTopLeft, align=TextNode.ALeft,
pos=(0.08, -pos - 0.04), scale=.05)
# Function to put title on the screen.
def addTitle(text):
return OnscreenText(text=text, style=1, pos=(-0.1, 0.09), scale=.08,
parent=base.a2dBottomRight, align=TextNode.ARight,
fg=(1, 1, 1, 1), shadow=(0, 0, 0, 1))
class FireflyDemo(ShowBase):
    def __init__(self):
        """Set up the deferred-shading pipeline: buffers, cameras and scene."""
        # Initialize the ShowBase class from which we inherit, which will
        # create a window and set up everything we need for rendering into it.
        ShowBase.__init__(self)
        self.setBackgroundColor((0, 0, 0, 0))

        # Preliminary capabilities check.
        if not self.win.getGsg().getSupportsBasicShaders():
            self.t = addTitle("Firefly Demo: Video driver reports that Cg "
                              "shaders are not supported.")
            return
        if not self.win.getGsg().getSupportsDepthTexture():
            self.t = addTitle("Firefly Demo: Video driver reports that depth "
                              "textures are not supported.")
            return

        # This algorithm uses two offscreen buffers, one of which has
        # an auxiliary bitplane, and the offscreen buffers share a single
        # depth buffer.  This is a heck of a complicated buffer setup.
        self.modelbuffer = self.makeFBO("model buffer", 1)
        self.lightbuffer = self.makeFBO("light buffer", 0)

        # Creation of a high-powered buffer can fail, if the graphics card
        # doesn't support the necessary OpenGL extensions.
        if self.modelbuffer is None or self.lightbuffer is None:
            self.t = addTitle("Toon Shader: Video driver does not support "
                              "multiple render targets")
            return

        # Create four render textures: depth, normal, albedo, and final.
        # attach them to the various bitplanes of the offscreen buffers.
        self.texDepth = Texture()
        self.texDepth.setFormat(Texture.FDepthStencil)
        self.texAlbedo = Texture()
        self.texNormal = Texture()
        self.texFinal = Texture()
        self.modelbuffer.addRenderTexture(self.texDepth,
            GraphicsOutput.RTMBindOrCopy, GraphicsOutput.RTPDepthStencil)
        self.modelbuffer.addRenderTexture(self.texAlbedo,
            GraphicsOutput.RTMBindOrCopy, GraphicsOutput.RTPColor)
        self.modelbuffer.addRenderTexture(self.texNormal,
            GraphicsOutput.RTMBindOrCopy, GraphicsOutput.RTPAuxRgba0)
        self.lightbuffer.addRenderTexture(self.texFinal,
            GraphicsOutput.RTMBindOrCopy, GraphicsOutput.RTPColor)

        # Set the near and far clipping planes.
        self.cam.node().getLens().setNear(50.0)
        self.cam.node().getLens().setFar(500.0)
        lens = self.cam.node().getLens()

        # This algorithm uses three cameras: one to render the models into the
        # model buffer, one to render the lights into the light buffer, and
        # one to render "plain" stuff (non-deferred shaded) stuff into the
        # light buffer.  Each camera has a bitmask to identify it.
        self.modelMask = 1
        self.lightMask = 2
        self.plainMask = 4
        self.modelcam = self.makeCamera(self.modelbuffer,
            lens=lens, scene=render, mask=self.modelMask)
        self.lightcam = self.makeCamera(self.lightbuffer,
            lens=lens, scene=render, mask=self.lightMask)
        self.plaincam = self.makeCamera(self.lightbuffer,
            lens=lens, scene=render, mask=self.plainMask)

        # Panda's main camera is not used.
        self.cam.node().setActive(0)

        # Take explicit control over the order in which the three
        # buffers are rendered.
        self.modelbuffer.setSort(1)
        self.lightbuffer.setSort(2)
        self.win.setSort(3)

        # Within the light buffer, control the order of the two cams.
        self.lightcam.node().getDisplayRegion(0).setSort(1)
        self.plaincam.node().getDisplayRegion(0).setSort(2)

        # By default, panda usually clears the screen before every
        # camera and before every window.  Tell it not to do that.
        # Then, tell it specifically when to clear and what to clear.
        self.modelcam.node().getDisplayRegion(0).disableClears()
        self.lightcam.node().getDisplayRegion(0).disableClears()
        self.plaincam.node().getDisplayRegion(0).disableClears()
        self.cam.node().getDisplayRegion(0).disableClears()
        self.cam2d.node().getDisplayRegion(0).disableClears()
        self.modelbuffer.disableClears()
        self.win.disableClears()
        self.modelbuffer.setClearColorActive(1)
        self.modelbuffer.setClearDepthActive(1)
        self.lightbuffer.setClearColorActive(1)
        self.lightbuffer.setClearColor((0, 0, 0, 1))

        # Miscellaneous stuff.
        self.disableMouse()
        self.camera.setPos(-9.112, -211.077, 46.951)
        self.camera.setHpr(0, -7.5, 2.4)
        random.seed()

        # Calculate the projection parameters for the final shader.
        # The math here is too complex to explain in an inline comment,
        # I've put in a full explanation into the HTML intro.
        proj = self.cam.node().getLens().getProjectionMat()
        proj_x = 0.5 * proj.getCell(3, 2) / proj.getCell(0, 0)
        proj_y = 0.5 * proj.getCell(3, 2)
        proj_z = 0.5 * proj.getCell(3, 2) / proj.getCell(2, 1)
        proj_w = -0.5 - 0.5 * proj.getCell(1, 2)

        # Configure the render state of the model camera.
        tempnode = NodePath(PandaNode("temp node"))
        tempnode.setAttrib(
            AlphaTestAttrib.make(RenderAttrib.MGreaterEqual, 0.5))
        tempnode.setShader(loader.loadShader("model.sha"))
        tempnode.setAttrib(DepthTestAttrib.make(RenderAttrib.MLessEqual))
        self.modelcam.node().setInitialState(tempnode.getState())

        # Configure the render state of the light camera.
        tempnode = NodePath(PandaNode("temp node"))
        tempnode.setShader(loader.loadShader("light.sha"))
        tempnode.setShaderInput("texnormal", self.texNormal)
        tempnode.setShaderInput("texalbedo", self.texAlbedo)
        tempnode.setShaderInput("texdepth", self.texDepth)
        tempnode.setShaderInput("proj", (proj_x, proj_y, proj_z, proj_w))
        tempnode.setAttrib(ColorBlendAttrib.make(ColorBlendAttrib.MAdd,
            ColorBlendAttrib.OOne, ColorBlendAttrib.OOne))
        tempnode.setAttrib(
            CullFaceAttrib.make(CullFaceAttrib.MCullCounterClockwise))
        # The next line causes problems on Linux.
        # tempnode.setAttrib(DepthTestAttrib.make(RenderAttrib.MGreaterEqual))
        tempnode.setAttrib(DepthWriteAttrib.make(DepthWriteAttrib.MOff))
        self.lightcam.node().setInitialState(tempnode.getState())

        # Configure the render state of the plain camera.
        rs = RenderState.makeEmpty()
        self.plaincam.node().setInitialState(rs)

        # Clear any render attribs on the root node. This is necessary
        # because by default, panda assigns some attribs to the root
        # node. These default attribs will override the
        # carefully-configured render attribs that we just attached
        # to the cameras. The simplest solution is to just clear
        # them all out.
        render.setState(RenderState.makeEmpty())

        # My artist created a model in which some of the polygons
        # don't have textures. This confuses the shader I wrote.
        # This little hack guarantees that everything has a texture.
        white = loader.loadTexture("models/white.jpg")
        render.setTexture(white, 0)

        # Create two subroots, to help speed cull traversal.
        self.lightroot = NodePath(PandaNode("lightroot"))
        self.lightroot.reparentTo(render)
        self.modelroot = NodePath(PandaNode("modelroot"))
        self.modelroot.reparentTo(render)
        self.lightroot.hide(BitMask32(self.modelMask))
        self.modelroot.hide(BitMask32(self.lightMask))
        self.modelroot.hide(BitMask32(self.plainMask))

        # Load the model of a forest. Make it visible to the model camera.
        # This is a big model, so we load it asynchronously while showing a
        # load text. We do this by passing in a callback function.
        self.loading = addTitle("Loading models...")
        self.forest = NodePath(PandaNode("Forest Root"))
        self.forest.reparentTo(render)
        self.forest.hide(BitMask32(self.lightMask | self.plainMask))
        loader.loadModel([
            "models/background",
            "models/foliage01",
            "models/foliage02",
            "models/foliage03",
            "models/foliage04",
            "models/foliage05",
            "models/foliage06",
            "models/foliage07",
            "models/foliage08",
            "models/foliage09"],
            callback=self.finishLoading)

        # Cause the final results to be rendered into the main window on a
        # card.
        self.card = self.lightbuffer.getTextureCard()
        self.card.setTexture(self.texFinal)
        self.card.reparentTo(render2d)

        # Panda contains a built-in viewer that lets you view the results of
        # your render-to-texture operations. This code configures the viewer.
        self.bufferViewer.setPosition("llcorner")
        self.bufferViewer.setCardSize(0, 0.40)
        self.bufferViewer.setLayout("vline")
        self.toggleCards()
        self.toggleCards()

        # Firefly parameters
        self.fireflies = []
        self.sequences = []
        self.scaleseqs = []
        self.glowspheres = []
        self.fireflysize = 1.0
        self.spheremodel = loader.loadModel("misc/sphere")

        # Create the firefly model, a fuzzy dot
        dotSize = 1.0
        cm = CardMaker("firefly")
        cm.setFrame(-dotSize, dotSize, -dotSize, dotSize)
        self.firefly = NodePath(cm.generate())
        self.firefly.setTexture(loader.loadTexture("models/firefly.png"))
        self.firefly.setAttrib(ColorBlendAttrib.make(ColorBlendAttrib.M_add,
            ColorBlendAttrib.O_incoming_alpha, ColorBlendAttrib.O_one))

        # these allow you to change parameters in realtime
        self.accept("escape", sys.exit, [0])
        self.accept("arrow_up", self.incFireflyCount, [1.1111111])
        self.accept("arrow_down", self.decFireflyCount, [0.9000000])
        self.accept("arrow_right", self.setFireflySize, [1.1111111])
        self.accept("arrow_left", self.setFireflySize, [0.9000000])
        self.accept("v", self.toggleCards)
        self.accept("V", self.toggleCards)
def finishLoading(self, models):
    # Callback passed to loader.loadModel; invoked once all of the forest
    # models have finished loading asynchronously.
    # Attach the models to the scene graph.
    for model in models:
        model.reparentTo(self.forest)
    # Replace the "Loading..." text with the title and key-binding help.
    self.loading.destroy()
    self.title = addTitle("Panda3D: Tutorial - Fireflies using Deferred Shading")
    self.inst1 = addInstructions(0.06, "ESC: Quit")
    self.inst2 = addInstructions(0.12, "Up/Down: More / Fewer Fireflies (Count: unknown)")
    self.inst3 = addInstructions(0.18, "Right/Left: Bigger / Smaller Fireflies (Radius: unknown)")
    self.inst4 = addInstructions(0.24, "V: View the render-to-texture results")
    # Scale the firefly size up from its 1.0 default, seed the scene with a
    # handful of fireflies, then start the task that gradually adds more.
    self.setFireflySize(25.0)
    while len(self.fireflies) < 5:
        self.addFirefly()
    self.updateReadout()
    self.nextadd = 0
    taskMgr.add(self.spawnTask, "spawner")
def makeFBO(self, name, auxrgba):
    # This routine creates an offscreen buffer. All the complicated
    # parameters are basically demanding capabilities from the offscreen
    # buffer - we demand that it be able to render to texture on every
    # bitplane, that it can support aux bitplanes, that it track
    # the size of the host window, that it can render to texture
    # cumulatively, and so forth.
    # NOTE(review): the *name* parameter is unused; the buffer is always
    # created under the label "model buffer".
    winprops = WindowProperties()
    props = FrameBufferProperties()
    props.setRgbColor(True)
    props.setRgbaBits(8, 8, 8, 8)
    props.setDepthBits(1)
    props.setAuxRgba(auxrgba)
    return self.graphicsEngine.makeOutput(
        self.pipe, "model buffer", -2,
        props, winprops,
        GraphicsPipe.BFSizeTrackHost | GraphicsPipe.BFCanBindEvery |
        GraphicsPipe.BFRttCumulative | GraphicsPipe.BFRefuseWindow,
        self.win.getGsg(), self.win)
def addFirefly(self):
    # Spawn one firefly: a glow sphere (the light) plus a visible dot
    # sprite, drifting back and forth between two random points while its
    # scale pulses.
    pos1 = LPoint3(random.uniform(-50, 50), random.uniform(-100, 150), random.uniform(-10, 80))
    dir = LVector3(random.uniform(-1, 1), random.uniform(-1, 1), random.uniform(-1, 1))
    dir.normalize()
    pos2 = pos1 + (dir * 20)
    fly = self.lightroot.attachNewNode(PandaNode("fly"))
    glow = fly.attachNewNode(PandaNode("glow"))
    dot = fly.attachNewNode(PandaNode("dot"))
    # Warm randomized tint; blue never exceeds green.
    color_r = 1.0
    color_g = random.uniform(0.8, 1.0)
    color_b = min(color_g, random.uniform(0.5, 1.0))
    fly.setColor(color_r, color_g, color_b, 1.0)
    fly.setShaderInput("lightcolor", (color_r, color_g, color_b, 1.0))
    # Ping-pong position animation between pos1 and pos2.
    int1 = fly.posInterval(random.uniform(7, 12), pos1, pos2)
    int2 = fly.posInterval(random.uniform(7, 12), pos2, pos1)
    # Scale pulse: hold small, shrink from full, grow back to full.
    # NOTE(review): random.uniform(1.5, 0.8) has reversed bounds; per the
    # Python docs it still yields a value in [0.8, 1.5], but it was
    # probably intended as (0.8, 1.5).
    si1 = fly.scaleInterval(random.uniform(0.8, 1.5),
                            LPoint3(0.2, 0.2, 0.2), LPoint3(0.2, 0.2, 0.2))
    si2 = fly.scaleInterval(random.uniform(1.5, 0.8),
                            LPoint3(1.0, 1.0, 1.0), LPoint3(0.2, 0.2, 0.2))
    si3 = fly.scaleInterval(random.uniform(1.0, 2.0),
                            LPoint3(0.2, 0.2, 0.2), LPoint3(1.0, 1.0, 1.0))
    siseq = Sequence(si1, si2, si3)
    siseq.loop()
    siseq.setT(random.uniform(0, 1000))  # random phase so pulses desynchronize
    seq = Sequence(int1, int2)
    seq.loop()
    self.spheremodel.instanceTo(glow)
    self.firefly.instanceTo(dot)
    glow.setScale(self.fireflysize * 1.1)
    # Camera masks: the glow sphere is hidden from the model/plain passes,
    # the dot from the model/light passes.
    glow.hide(BitMask32(self.modelMask | self.plainMask))
    dot.hide(BitMask32(self.modelMask | self.lightMask))
    dot.setColor(color_r, color_g, color_b, 1.0)
    self.fireflies.append(fly)
    self.sequences.append(seq)
    self.glowspheres.append(glow)
    self.scaleseqs.append(siseq)
def updateReadout(self):
    # Refresh the on-screen firefly count and radius by destroying and
    # recreating the two instruction lines.
    self.inst2.destroy()
    self.inst2 = addInstructions(0.12,
        "Up/Down: More / Fewer Fireflies (Currently: %d)" % len(self.fireflies))
    self.inst3.destroy()
    self.inst3 = addInstructions(0.18,
        "Right/Left: Bigger / Smaller Fireflies (Radius: %d ft)" % self.fireflysize)
def toggleCards(self):
    """Toggle the buffer viewer and keep the model buffer's color clear in sync."""
    self.bufferViewer.toggleEnable()
    # The color clear is only needed while the viewer cards are shown; the
    # depth clear alone is sufficient for the algorithm itself.
    self.modelbuffer.setClearColorActive(self.bufferViewer.isEnabled())
def incFireflyCount(self, scale):
    """Grow the firefly population toward count * scale (always at least one more)."""
    desired = int(len(self.fireflies) * scale + 1)
    while len(self.fireflies) < desired:
        self.addFirefly()
    self.updateReadout()
def decFireflyCount(self, scale):
    """Shrink the firefly population toward count * scale (never below one)."""
    desired = max(int(len(self.fireflies) * scale), 1)
    while len(self.fireflies) > desired:
        # Tear down every piece of the most recently added firefly.
        self.glowspheres.pop()
        self.sequences.pop().finish()
        self.scaleseqs.pop().finish()
        self.fireflies.pop().removeNode()
    self.updateReadout()
def setFireflySize(self, n):
    """Scale the current firefly size by *n* and resize all glow spheres to match."""
    self.fireflysize *= n
    for sphere in self.glowspheres:
        sphere.setScale(self.fireflysize * 1.1)
    self.updateReadout()
def spawnTask(self, task):
    """Per-frame task: roughly once a second, grow the swarm (capped at 300)."""
    if task.time <= self.nextadd:
        return Task.cont
    self.nextadd = task.time + 1.0
    if len(self.fireflies) < 300:
        self.incFireflyCount(1.03)
    return Task.cont
# Script entry point: build the demo and hand control to Panda3D's main loop.
demo = FireflyDemo()
demo.run()
| StarcoderdataPython |
9650748 | import logging
from tests.unit.chroma_core.lib.storage_plugin.resource_manager.test_resource_manager import ResourceManagerTestCase
class TestAlerts(ResourceManagerTestCase):
    """Exercises raising, propagating and clearing storage alerts via the
    resource manager session API, using the 'alert_plugin' test plugin."""

    def setUp(self):
        super(TestAlerts, self).setUp('alert_plugin')

    def _update_alerts(self, resource_manager, scannable_pk, resource, alert_klass):
        # Evaluate every alert condition of type *alert_klass* on *resource*,
        # notify the resource manager of each result, and return the raw
        # (name, attribute, active) tuples for assertions.
        result = []
        for ac in resource._meta.alert_conditions:
            if isinstance(ac, alert_klass):
                alert_list = ac.test(resource)
                for name, attribute, active, severity in alert_list:
                    resource_manager.session_notify_alert(scannable_pk, resource._handle, active, severity, name, attribute)
                    result.append((name, attribute, active))
        return result

    def _update_alerts_anytrue(self, resource_manager, *args, **kwargs):
        # Convenience wrapper: True if any evaluated condition is active.
        alerts = self._update_alerts(resource_manager, *args, **kwargs)
        for alert in alerts:
            if alert[2]:
                return True
        return False

    def test_multiple_alerts(self):
        """Test multiple AlertConditions acting on the same attribute"""
        resource_record, controller_resource = self._make_global_resource('alert_plugin', 'Controller', {'address': 'foo', 'temperature': 40, 'status': 'OK', 'multi_status': 'OK'})
        lun_resource = self._make_local_resource('alert_plugin', 'Lun', lun_id="foo", size = 1024 * 1024 * 650, parents = [controller_resource])
        # Open session
        self.resource_manager.session_open(self.plugin, resource_record.pk, [controller_resource, lun_resource], 60)
        from chroma_core.lib.storage_plugin.api.alert_conditions import ValueCondition
        # Go into failed state and send notification
        controller_resource.multi_status = 'FAIL1'
        alerts = self._update_alerts(self.resource_manager, resource_record.pk, controller_resource, ValueCondition)
        # Both conditions watching multi_status should fire.
        n = 0
        for alert in alerts:
            if alert[2]:
                n += 1
        self.assertEqual(n, 2, alerts)
        # Check that the alert is now set on couplet
        from chroma_core.models import StorageResourceAlert
        self.assertEqual(StorageResourceAlert.objects.filter(active = True).count(), 2)

    def test_raise_alert(self):
        # Round-trips a single ValueCondition through failed -> OK -> badly
        # failed, checking alert state, propagation and severity each time.
        resource_record, controller_resource = self._make_global_resource('alert_plugin', 'Controller', {'address': 'foo', 'temperature': 40, 'status': 'OK', 'multi_status': 'OK'})
        lun_resource = self._make_local_resource('alert_plugin', 'Lun', lun_id="foo", size = 1024 * 1024 * 650, parents = [controller_resource])
        # Open session
        self.resource_manager.session_open(self.plugin, resource_record.pk, [controller_resource, lun_resource], 60)
        from chroma_core.lib.storage_plugin.api.alert_conditions import ValueCondition
        # Go into failed state and send notification
        controller_resource.status = 'FAILED'
        self.assertEqual(True, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, ValueCondition))
        from chroma_core.models import StorageResourceAlert, StorageAlertPropagated
        # Check that the alert is now set on couplet
        self.assertEqual(StorageResourceAlert.objects.filter(active=True).count(), 1)
        self.assertEqual(StorageResourceAlert.objects.get().severity, logging.WARNING)
        # FIXME: make this string more sensible
        self.assertEqual(StorageResourceAlert.objects.get().message(), "Controller failure (Controller Controller foo)")
        # Check that the alert is now set on controller (propagation)
        self.assertEqual(StorageAlertPropagated.objects.filter().count(), 1)
        # Leave failed state and send notification
        controller_resource.status = 'OK'
        self.assertEqual(False, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, ValueCondition))
        # Check that the alert is now unset on couplet
        self.assertEqual(StorageResourceAlert.objects.filter(active = True).count(), 0)
        # Check that the alert is now unset on controller (propagation)
        self.assertEqual(StorageAlertPropagated.objects.filter().count(), 0)
        # Now try setting something which should have a different severity (respect difference between
        # warn_states and error_states on AlertCondition)
        controller_resource.status = 'BADLY_FAILED'
        self.assertEqual(True, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, ValueCondition))
        self.assertEqual(StorageResourceAlert.objects.filter(active=True).count(), 1)
        self.assertEqual(StorageResourceAlert.objects.get(active=True).severity, logging.ERROR)

    def test_alert_deletion(self):
        # Deleting the scannable resource should deactivate its alerts while
        # preserving the alert message for the historical record.
        resource_record, controller_resource = self._make_global_resource('alert_plugin', 'Controller', {'address': 'foo', 'temperature': 40, 'status': 'OK', 'multi_status': 'OK'})
        lun_resource = self._make_local_resource('alert_plugin', 'Lun', lun_id="foo", size = 1024 * 1024 * 650, parents = [controller_resource])
        # Open session
        self.resource_manager.session_open(self.plugin, resource_record.pk, [controller_resource, lun_resource], 60)
        from chroma_core.lib.storage_plugin.api.alert_conditions import ValueCondition
        # Go into failed state and send notification
        controller_resource.status = 'FAILED'
        self.assertEqual(True, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, ValueCondition))
        from chroma_core.models import StorageResourceAlert, StorageAlertPropagated
        # Check that the alert is now set on couplet
        self.assertEqual(StorageResourceAlert.objects.filter(active = None).count(), 0)
        self.assertEqual(StorageResourceAlert.objects.filter(active = True).count(), 1)
        # Check that the alert is now set on controller (propagation)
        self.assertEqual(StorageAlertPropagated.objects.filter().count(), 1)
        alert_message_before_delete = StorageResourceAlert.objects.filter(active = True)[0].message()
        self.resource_manager.global_remove_resource(resource_record.pk)
        self.assertEqual(alert_message_before_delete,
                         StorageResourceAlert.objects.filter(active = None)[0].message())
        # Check that the alert is now unset on couplet
        self.assertEqual(StorageResourceAlert.objects.filter(active = None).count(), 1)
        self.assertEqual(StorageResourceAlert.objects.filter(active = True).count(), 0)
        # Check that the alert is now unset on controller (propagation)
        self.assertEqual(StorageAlertPropagated.objects.filter().count(), 0)

    def test_bound_alert(self):
        # Upper/LowerBoundCondition should fire exactly when temperature
        # leaves the configured band.
        resource_record, controller_resource = self._make_global_resource('alert_plugin', 'Controller', {'address': 'foo', 'temperature': 40, 'status': 'OK', 'multi_status': 'OK'})
        lun_resource = self._make_local_resource('alert_plugin', 'Lun', lun_id="foo", size = 1024 * 1024 * 650, parents = [controller_resource])
        from chroma_core.lib.storage_plugin.api.alert_conditions import UpperBoundCondition, LowerBoundCondition
        # Open session
        self.resource_manager.session_open(self.plugin, resource_record.pk, [controller_resource, lun_resource], 60)
        controller_resource.temperature = 86
        self.assertEqual(True, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, UpperBoundCondition))
        controller_resource.temperature = 84
        self.assertEqual(False, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, UpperBoundCondition))
        controller_resource.temperature = -1
        self.assertEqual(True, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, LowerBoundCondition))
        controller_resource.temperature = 1
        self.assertEqual(False, self._update_alerts_anytrue(self.resource_manager, resource_record.pk, controller_resource, LowerBoundCondition))
| StarcoderdataPython |
5144990 | <filename>tests/functional/model_permissions.py
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functional tests for models/review.py."""
__author__ = [
'<EMAIL> (<NAME>)',
]
from common import utils as common_utils
from common import schema_fields
from models import permissions
from models import custom_modules
from models import models
from models import roles
from modules.courses import constants
from tests.functional import actions
class PermissionsTests(actions.TestBase):
    """Tests for schema-scoped permission checks (models/permissions.py).

    A throwaway module registers a test permission that makes fields 'a' and
    'b' readable but only 'a' editable; a role grants that permission to
    IN_ROLE_EMAIL.  Admins bypass permission checks entirely.
    """
    ADMIN_EMAIL = '<EMAIL>'
    IN_ROLE_EMAIL = '<EMAIL>'
    NON_ROLE_EMAIL = '<EMAIL>'
    COURSE_NAME = 'permissions_tests'
    NAMESPACE = 'ns_%s' % COURSE_NAME
    MODULE_NAME = 'permissions_tests'
    PERMISSION_NAME = 'test_permission'
    PERMISSION = roles.Permission(PERMISSION_NAME, 'Fake perm. for testing')
    PERMISSION_SCOPE = 'test_scope'
    ROLE_NAME = 'test_user_role'
    custom_module = None

    @classmethod
    def setUpClass(cls):
        # Register and enable a test-only module; its enable hook installs
        # the permission and schema-permission registrations below.
        super(PermissionsTests, cls).setUpClass()
        cls.custom_module = custom_modules.Module(
            cls.MODULE_NAME, 'Permissions Tests', [], [],
            notify_module_enabled=cls.notify_module_enabled)
        cls.custom_module.enable()

    @classmethod
    def tearDownClass(cls):
        # Undo the registrations made by notify_module_enabled.
        roles.Roles.unregister_permissions(cls.custom_module)
        permissions.SchemaPermissionRegistry.remove(
            cls.PERMISSION_SCOPE, cls.PERMISSION_NAME)
        super(PermissionsTests, cls).tearDownClass()

    @classmethod
    def notify_module_enabled(cls):
        roles.Roles.register_permissions(
            cls.custom_module, cls.permissions_callback)
        # Holders of the test permission may read 'a' and 'b' but edit only 'a'.
        permissions.SchemaPermissionRegistry.add(
            cls.PERMISSION_SCOPE,
            permissions.SimpleSchemaPermission(
                cls.custom_module, cls.PERMISSION_NAME,
                readable_list=['a', 'b'],
                editable_list=['a']))
        permissions.SchemaPermissionRegistry.add(
            cls.PERMISSION_SCOPE,
            permissions.CourseAdminSchemaPermission())

    @classmethod
    def permissions_callback(cls, app_context):
        # All permissions provided by this module for any course context.
        return [cls.PERMISSION]

    def setUp(self):
        # Build a course, a three-field schema ('a', 'b', 'c'), and a role
        # granting the test permission to IN_ROLE_EMAIL only.
        super(PermissionsTests, self).setUp()
        self.app_context = actions.simple_add_course(
            self.COURSE_NAME, self.ADMIN_EMAIL, 'Permissions Tests')
        self.schema = schema_fields.FieldRegistry('title')
        self.schema.add_property(schema_fields.SchemaField('a', 'A', 'string'))
        self.schema.add_property(schema_fields.SchemaField('b', 'B', 'string'))
        self.schema.add_property(schema_fields.SchemaField('c', 'C', 'string'))
        self.entity = {'a': 1, 'b': 2, 'c': 3}
        with common_utils.Namespace(self.NAMESPACE):
            role_dto = models.RoleDTO(the_id=None, the_dict={
                'name': self.ROLE_NAME,
                'permissions': {self.MODULE_NAME: [self.PERMISSION_NAME]},
                'description': 'Role allowing limited schema access.',
                'users': [self.IN_ROLE_EMAIL]})
            roles.RoleDAO.save(role_dto)

    def test_admin_has_permissions_with_no_configuration_needed(self):
        actions.login(self.ADMIN_EMAIL, is_admin=True)
        self.assertTrue(permissions.can_view(
            self.app_context, constants.SCOPE_COURSE_SETTINGS))
        self.assertTrue(permissions.can_edit(
            self.app_context, constants.SCOPE_COURSE_SETTINGS))
        self.assertTrue(permissions.can_view_property(
            self.app_context, constants.SCOPE_COURSE_SETTINGS,
            'absolutely/anything'))
        self.assertTrue(permissions.can_edit_property(
            self.app_context, constants.SCOPE_COURSE_SETTINGS,
            'absolutely/anything'))

    def test_non_admin_has_no_permissions_with_no_configuration_needed(self):
        actions.login(self.IN_ROLE_EMAIL)
        self.assertFalse(permissions.can_view(
            self.app_context, constants.SCOPE_COURSE_SETTINGS))
        self.assertFalse(permissions.can_edit(
            self.app_context, constants.SCOPE_COURSE_SETTINGS))
        self.assertFalse(permissions.can_view_property(
            self.app_context, constants.SCOPE_COURSE_SETTINGS,
            'absolutely/anything'))
        self.assertFalse(permissions.can_edit_property(
            self.app_context, constants.SCOPE_COURSE_SETTINGS,
            'absolutely/anything'))

    def test_role_permissions(self):
        with common_utils.Namespace(self.NAMESPACE):
            # Admin and assistant can read 'a', but student cannot.
            checker = permissions.SchemaPermissionRegistry.build_view_checker(
                self.PERMISSION_SCOPE, ['a'])
            actions.login(self.ADMIN_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.IN_ROLE_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.NON_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            # Admin and assistant can read 'b', but student cannot.
            checker = permissions.SchemaPermissionRegistry.build_view_checker(
                self.PERMISSION_SCOPE, ['b'])
            actions.login(self.ADMIN_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.IN_ROLE_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.NON_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            # Admin can read 'c', but neither assistant nor student may.
            checker = permissions.SchemaPermissionRegistry.build_view_checker(
                self.PERMISSION_SCOPE, ['c'])
            actions.login(self.ADMIN_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.IN_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            actions.login(self.NON_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            # Admin and assistant can write 'a', but student cannot.
            checker = permissions.SchemaPermissionRegistry.build_edit_checker(
                self.PERMISSION_SCOPE, ['a'])
            actions.login(self.ADMIN_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.IN_ROLE_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.NON_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            # Admin can write 'b', but neither assistant nor student may.
            checker = permissions.SchemaPermissionRegistry.build_edit_checker(
                self.PERMISSION_SCOPE, ['b'])
            actions.login(self.ADMIN_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.IN_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            actions.login(self.NON_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            # Admin can write 'c', but neither assistant nor student may.
            checker = permissions.SchemaPermissionRegistry.build_edit_checker(
                self.PERMISSION_SCOPE, ['c'])
            actions.login(self.ADMIN_EMAIL)
            self.assertTrue(checker(self.app_context))
            actions.login(self.IN_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))
            actions.login(self.NON_ROLE_EMAIL)
            self.assertFalse(checker(self.app_context))

    def test_schema_redaction(self):
        # Verify that fields a user cannot edit are disabled and fields a
        # user cannot read are removed from the schema entirely.
        reg = permissions.SchemaPermissionRegistry
        with common_utils.Namespace(self.NAMESPACE):
            # All properties readable/writable for admin
            actions.login(self.ADMIN_EMAIL)
            ret = reg.redact_schema_to_permitted_fields(
                self.app_context, self.PERMISSION_SCOPE, self.schema)
            a = ret.get_property('a')
            self.assertNotIn('disabled', a._extra_schema_dict_values)
            self.assertFalse(a.hidden)
            b = ret.get_property('b')
            self.assertNotIn('disabled', b._extra_schema_dict_values)
            self.assertFalse(b.hidden)
            c = ret.get_property('c')
            self.assertNotIn('disabled', c._extra_schema_dict_values)
            self.assertFalse(c.hidden)
            # 'a', 'b' readable, 'a' writable, and 'c' removed for assistant.
            actions.login(self.IN_ROLE_EMAIL)
            ret = reg.redact_schema_to_permitted_fields(
                self.app_context, self.PERMISSION_SCOPE, self.schema)
            a = ret.get_property('a')
            self.assertNotIn('disabled', a._extra_schema_dict_values)
            self.assertFalse(a.hidden)
            b = ret.get_property('b')
            self.assertTrue(b._extra_schema_dict_values.get('disabled'))
            self.assertFalse(b.hidden)
            self.assertIsNone(ret.get_property('c'))
            # All properties removed for account w/ no access.
            actions.login(self.NON_ROLE_EMAIL)
            ret = reg.redact_schema_to_permitted_fields(
                self.app_context, self.PERMISSION_SCOPE, self.schema)
            self.assertIsNone(ret.get_property('a'))
            self.assertIsNone(ret.get_property('b'))
            self.assertIsNone(ret.get_property('c'))
class SimpleSchemaPermissionTests(actions.TestBase):
    """Unit tests for SimpleSchemaPermission's readable/editable field logic."""

    def test_no_args_equals_no_permissions(self):
        p = permissions.SimpleSchemaPermission(None, None)
        self.assertFalse(p.can_view('a'))
        self.assertFalse(p.can_edit('a'))

    def test_read_some_write_none(self):
        p = permissions.SimpleSchemaPermission(None, None, readable_list=['a'])
        self.assertTrue(p.can_view('a'))
        self.assertFalse(p.can_edit('a'))
        self.assertFalse(p.can_view('b'))
        self.assertFalse(p.can_edit('b'))

    def test_read_write_some(self):
        p = permissions.SimpleSchemaPermission(None, None,
                                               readable_list=['a'],
                                               editable_list=['a'])
        self.assertTrue(p.can_view('a'))
        self.assertTrue(p.can_edit('a'))
        self.assertFalse(p.can_view('b'))
        self.assertFalse(p.can_edit('b'))

    def test_writability_implies_readability(self):
        # An editable field is implicitly viewable.
        p = permissions.SimpleSchemaPermission(None, None, editable_list=['a'])
        self.assertTrue(p.can_view('a'))
        self.assertTrue(p.can_edit('a'))
        self.assertFalse(p.can_view('b'))
        self.assertFalse(p.can_edit('b'))

    def test_some_readable_some_writable(self):
        p = permissions.SimpleSchemaPermission(None, None,
                                               readable_list=['b'],
                                               editable_list=['a'])
        self.assertTrue(p.can_view('a'))
        self.assertTrue(p.can_edit('a'))
        self.assertTrue(p.can_view('b'))
        self.assertFalse(p.can_edit('b'))
        self.assertFalse(p.can_view('c'))
        self.assertFalse(p.can_edit('c'))

    def test_read_any(self):
        p = permissions.SimpleSchemaPermission(None, None, all_readable=True)
        self.assertTrue(p.can_view('a'))
        self.assertFalse(p.can_edit('a'))
        self.assertTrue(p.can_view('b'))
        self.assertFalse(p.can_edit('b'))

    def test_write_any(self):
        p = permissions.SimpleSchemaPermission(None, None, all_writable=True)
        self.assertTrue(p.can_view('a'))
        self.assertTrue(p.can_edit('a'))
        self.assertTrue(p.can_view('b'))
        self.assertTrue(p.can_edit('b'))

    # The no-argument can_view()/can_edit() forms below ask "can this user
    # see/edit at least one field at all?".

    def test_read_even_one_with_no_readable(self):
        p = permissions.SimpleSchemaPermission(None, None)
        self.assertFalse(p.can_view())

    def test_read_even_one_with_one_readable(self):
        p = permissions.SimpleSchemaPermission(None, None, readable_list=['a'])
        self.assertTrue(p.can_view())

    def test_read_even_one_with_all_readable(self):
        p = permissions.SimpleSchemaPermission(None, None, all_readable=True)
        self.assertTrue(p.can_view())

    def test_read_even_one_with_one_writable(self):
        p = permissions.SimpleSchemaPermission(None, None, editable_list=['a'])
        self.assertTrue(p.can_view())

    def test_read_even_one_with_all_writable(self):
        p = permissions.SimpleSchemaPermission(None, None, all_writable=True)
        self.assertTrue(p.can_view())

    def test_write_even_one_with_no_writable(self):
        p = permissions.SimpleSchemaPermission(None, None, all_readable=True)
        self.assertFalse(p.can_edit())

    def test_write_even_one_with_one_writable(self):
        p = permissions.SimpleSchemaPermission(None, None, editable_list=['a'])
        self.assertTrue(p.can_edit())

    def test_write_even_one_with_all_writable(self):
        p = permissions.SimpleSchemaPermission(None, None, all_writable=True)
        self.assertTrue(p.can_edit())

    def test_containing_schema(self):
        # Permission on a nested field implies view/edit on its ancestors.
        p = permissions.SimpleSchemaPermission(
            None, None, readable_list=['a/b/c'], editable_list=['d/e/f'])
        self.assertTrue(p.can_view('a'))
        self.assertTrue(p.can_view('a/b'))
        self.assertTrue(p.can_view('a/b/c'))
        self.assertFalse(p.can_edit('a'))
        self.assertFalse(p.can_edit('a/b'))
        self.assertFalse(p.can_edit('a/b/c'))
        self.assertTrue(p.can_view('d'))
        self.assertTrue(p.can_view('d/e'))
        self.assertTrue(p.can_view('d/e/f'))
        self.assertTrue(p.can_edit('d'))
        self.assertTrue(p.can_edit('d/e'))
        self.assertTrue(p.can_edit('d/e/f'))
        self.assertFalse(p.can_view('g'))
        self.assertFalse(p.can_view('g/h'))
        self.assertFalse(p.can_view('g/h/i'))
        self.assertFalse(p.can_edit('g'))
        self.assertFalse(p.can_edit('g/h'))
        self.assertFalse(p.can_edit('g/h/i'))
| StarcoderdataPython |
8171156 | <reponame>HrOrange/PACMAN-with-GUNS-
import constants
import random
import math
import pygame as game
import os
from ghost import ghost, blinky, pinky, inky, clyde
from pacman import pacman
from UI_elements import TEXT
import threading
def past_point(s):
    """Return True once *s* has overshot its current target node along its
    direction of travel; False for any other (or unknown) direction."""
    if s.direction == "right":
        return s.pos[0] > s.target.pos[0]
    if s.direction == "left":
        return s.pos[0] < s.target.pos[0]
    if s.direction == "down":
        return s.pos[1] > s.target.pos[1]
    if s.direction == "up":
        return s.pos[1] < s.target.pos[1]
    return False
def neighbor_in_direction(pos, direction, point):
    """Return the first neighbor of *point* that lies in *direction* relative
    to *pos*, or None when no neighbor qualifies."""
    for candidate in point.neighbors:
        if direction == "right" and candidate.pos[0] > pos[0]:
            return candidate
        if direction == "left" and candidate.pos[0] < pos[0]:
            return candidate
        if direction == "up" and candidate.pos[1] < pos[1]:
            return candidate
        if direction == "down" and candidate.pos[1] > pos[1]:
            return candidate
    return None
_DIRECTIONS = ("right", "left", "down", "up")


def _is_reversal(s, node):
    # True when *node* lies behind the mover relative to its travel direction
    # (compared against the mover's own position, as in the original code).
    if s.direction == "right":
        return node.pos[0] < s.pos[0]
    if s.direction == "left":
        return node.pos[0] > s.pos[0]
    if s.direction == "up":
        return node.pos[1] > s.pos[1]
    if s.direction == "down":
        return node.pos[1] < s.pos[1]
    return False


def _straight_neighbor(point, direction):
    # First neighbor of *point* strictly in *direction* on the same row/column.
    for x in point.neighbors:
        if direction == "right" and x.pos[0] > point.pos[0] and x.pos[1] == point.pos[1]:
            return x
        if direction == "left" and x.pos[0] < point.pos[0] and x.pos[1] == point.pos[1]:
            return x
        if direction == "down" and x.pos[0] == point.pos[0] and x.pos[1] > point.pos[1]:
            return x
        if direction == "up" and x.pos[0] == point.pos[0] and x.pos[1] < point.pos[1]:
            return x
    return None


def new_target(s, pick_random=False):
    """Choose the next waypoint for the mover *s* once it reaches its target.

    s           -- mover with .pos, .direction, .future_moves and .target
                   (a node with .pos and .neighbors).
    pick_random -- when True (ghost behavior), pick a weighted-random exit:
                   reversing direction gets probability 0.1, the remaining
                   0.9 is split evenly over the other exits.

    Returns (direction, node); node is None when no move is possible.
    (The original code had four byte-identical branches for the queued
    future_moves directions; they are collapsed here.)
    """
    r = None
    if pick_random:
        neighbors = [f for f in s.target.neighbors]
        chances = [0.1 if _is_reversal(s, x) else 0.9 / (len(neighbors) - 1)
                   for x in neighbors]
        r = give_random_with_chances(neighbors, chances)
    elif s.future_moves == "":
        # No queued turn: keep going straight through the current target.
        r = _straight_neighbor(s.target, s.direction)
    elif s.future_moves in _DIRECTIONS:
        # A turn is queued: prefer an exit in the queued direction, else fall
        # back to the current direction, else stop and clear the queue.
        r = neighbor_in_direction(s.pos, s.future_moves, s.target)
        if r is None:
            r = neighbor_in_direction(s.pos, s.direction, s.target)
            if r is None:
                s.future_moves = ""
                return s.direction, None
            # Queued move is deliberately kept so the turn can still happen
            # at a later node (matches the original behavior).
        else:
            s.future_moves = ""
    # Translate the chosen node into a travel direction relative to s.pos.
    if r is None:
        return s.direction, None
    elif r.pos[0] == s.pos[0] and r.pos[1] > s.pos[1]:
        return "down", r
    elif r.pos[0] == s.pos[0] and r.pos[1] < s.pos[1]:
        return "up", r
    elif r.pos[1] == s.pos[1] and r.pos[0] > s.pos[0]:
        return "right", r
    elif r.pos[1] == s.pos[1] and r.pos[0] < s.pos[0]:
        return "left", r
    # Diagonal/coincident nodes fall through and implicitly return None,
    # preserving the original implementation's behavior.
def give_random_with_chances(data, chances):
    """Pick one element of *data*, weighted by the parallel *chances* list,
    using a single cumulative random roll."""
    roll = random.random()
    cumulative = 0
    for idx, weight in enumerate(chances):
        cumulative += weight
        if roll <= cumulative:
            return data[idx]
    # Floating-point drift can leave the roll above the final cumulative
    # sum; fall back to the last entry.
    return data[-1]
def detect_collision(ob1, ob2):
    """Return True when the axis-aligned hitboxes of *ob1* and *ob2* overlap.

    Hitboxes are (left, top, right, bottom) tuples; touching edges count as
    a collision.  (Replaces the if/else-return-True/False anti-pattern with
    a direct boolean return.)
    """
    return (ob1.hitbox[2] >= ob2.hitbox[0] and
            ob1.hitbox[3] >= ob2.hitbox[1] and
            ob1.hitbox[0] <= ob2.hitbox[2] and
            ob1.hitbox[1] <= ob2.hitbox[3])
def detect_collision_weapon(ob_with_weapon, ob2):
    """Return True when *ob_with_weapon*'s weapon hitbox overlaps *ob2*'s
    hitbox.

    Hitboxes are (left, top, right, bottom) tuples; touching edges count as
    a collision.  (Replaces the if/else-return-True/False anti-pattern with
    a direct boolean return.)
    """
    return (ob_with_weapon.weapon_hitbox[2] >= ob2.hitbox[0] and
            ob_with_weapon.weapon_hitbox[3] >= ob2.hitbox[1] and
            ob_with_weapon.weapon_hitbox[0] <= ob2.hitbox[2] and
            ob_with_weapon.weapon_hitbox[1] <= ob2.hitbox[3])
def AStar(s, target):
    # TODO: unimplemented pathfinding stub -- currently a no-op placeholder.
    pass
def Start_Players_and_Ghosts():
    """Unfreeze every ghost and player so movement begins, then start the
    looping background music and ambience channels.

    (Fix: the loop variables previously shadowed the imported ``ghost`` and
    ``pacman`` classes.)
    """
    for g in constants.ghosts:
        g.freeze = False
    for player in constants.players:
        player.freeze = False
    # NOTE(review): backslash-joined path is Windows-specific; confirm
    # whether cross-platform support (os.path.join) is needed.
    game.mixer.music.load(os.getcwd() + "\\music\\" + "waw_waw_waw.wav")
    game.mixer.music.set_volume(0.3)
    game.mixer.music.play(-1)  # -1 == loop forever
    constants.ChannelA.play(constants.effects["waki_2"], loops = -1)
def reset_game():
    # Restart a round in place: re-arm pellets and ammo pickups, reset the
    # timers/mode state, and put the player and all ghosts back at spawn.
    constants.scene = "Game"
    # mega_food and ammo pickups become collectible again
    for mega_food in constants.mega_foods:
        mega_food.used = 0
    for ammo in constants.ammos:
        ammo.used = 0
    # timers and ghost-mode schedule
    constants.timer = 0
    constants.mode_index = 0
    constants.mode_countdown = constants.mode_durations[constants.level][0] * constants.FPS
    constants.Frightened_timer = 0
    # player spawn
    # NOTE(review): (216, 360) appears to be the spawn tile in original-map
    # pixels, rescaled to the current window -- confirm against the map data.
    constants.players[0].pos = [math.floor((216 / constants.original_map_size[0]) * (constants.screen_size[0] - constants.game_offset[0])) + constants.game_offset[0],
                                math.floor((360 / constants.original_map_size[1]) * (constants.screen_size[1] - constants.game_offset[1])) + constants.game_offset[1]]
    constants.players[0].target = constants.spots[29]
    constants.players[0].previous_target = constants.spots[37]
    constants.players[0].direction = "left"
    constants.players[0].future_moves = ""
    constants.players[0].dead = False
    constants.players[0].freeze = False
    constants.players[0].animation_spot = 0
    constants.players[0].ammo = constants.weapons_ammo[constants.players[0].weapon]
    # ghosts all restart stacked on spot 27, heading left toward spot 18
    for x in range(len(constants.ghosts)):
        constants.ghosts[x].pos = [constants.spots[27].pos[0], constants.spots[27].pos[1]]
        constants.ghosts[x].direction = "left"
        constants.ghosts[x].target = constants.spots[18]
        constants.ghosts[x].previous_target = constants.spots[27]
        constants.ghosts[x].freeze = False
        constants.ghosts[x].reborn = False
def start_game():
    """Initialize a brand-new game: intro music, ghosts, the player and the
    HUD; everyone stays frozen until the intro jingle finishes.

    (Refactor: the four copy-pasted ghost-setup stanzas are collapsed into a
    loop, and the O(n) pop(0) HUD clearing is replaced with clear().)
    """
    constants.scene = "Game"
    # Play the intro; unfreeze everybody when it ends.
    constants.effects["begin_game"].play()
    t = threading.Timer(constants.effects["begin_game"].get_length(),
                        Start_Players_and_Ghosts)
    t.start()
    constants.timer = 0
    # All four ghosts start stacked on spot 27, heading left toward spot 18.
    home = constants.spots[27]
    first_target = constants.spots[18]
    for ghost_cls in (blinky, pinky, inky, clyde):
        g = ghost_cls([home.pos[0], home.pos[1]])
        g.direction = "left"
        g.target = first_target
        g.previous_target = home
        g.freeze = True
        constants.ghosts.append(g)
    # Player spawns at the classic start tile (216, 360 in original-map
    # pixels), rescaled to the current window.
    constants.players.append(pacman([math.floor((216 / constants.original_map_size[0]) * (constants.screen_size[0] - constants.game_offset[0])) + constants.game_offset[0],
                                     math.floor((360 / constants.original_map_size[1]) * (constants.screen_size[1] - constants.game_offset[1])) + constants.game_offset[1]]))
    constants.players[0].target = constants.spots[29]
    constants.players[0].previous_target = constants.spots[37]
    constants.players[0].direction = "left"
    constants.players[0].freeze = True
    # Rebuild the HUD text elements from scratch.
    constants.ui_elements.clear()
    constants.ui_elements.append(TEXT(pos = [constants.screen_size[0] / 4, 15]))
    constants.ui_elements.append(TEXT(pos = [constants.screen_size[0] / 5 * 3, 15]))
    constants.ui_elements.append(TEXT(pos = [constants.screen_size[0] / 5 * 4, 15]))
def switch_to_from_settings():
    """Toggle between the in-game view and the settings view.

    Flips the global ``constants.InGame`` flag read by the main loop.
    """
    # Idiom fix: a boolean toggle replaces the four-line if/else assignment.
    constants.InGame = not constants.InGame
def Quit():
    """Signal the main game loop to exit by clearing the global run flag."""
    constants.run = False
def Distance(a, b):
    """Return the Euclidean distance between two objects with a ``pos`` [x, y] attribute."""
    # math.hypot is the idiomatic form and avoids overflow/underflow of the
    # intermediate squares compared with sqrt(dx**2 + dy**2).
    return math.hypot(b.pos[0] - a.pos[0], b.pos[1] - a.pos[1])
| StarcoderdataPython |
9784778 | #!/usr/bin/env python3
# Fix all random sources for reproducible training runs.
seed_value= 42
import os
import sys
import math
import datetime
import random
import tensorflow as tf
import numpy as np
# NOTE(review): PYTHONHASHSEED only affects hash randomization when set before
# the interpreter starts; setting it here does not change the current process.
os.environ['PYTHONHASHSEED'] = str(seed_value)
random.seed(seed_value)
np.random.seed(seed_value)
tf.random.set_seed(seed_value)
from . import utils
from .models import Deepredmt
class WeightCheckpoint(tf.keras.callbacks.Callback):
    """Keras callback that keeps an in-memory snapshot of the model weights
    from the epoch with the lowest ``val_loss``.

    ``fit`` reads ``self.weights`` after training to restore the best model.
    """
    def __init__(self):
        super(WeightCheckpoint, self).__init__()
        self.best = np.inf        # lowest val_loss seen so far (np.inf: np.Inf was removed in numpy 2)
        # Bug fix: on_epoch_end stored into `self.weights`, but __init__ only
        # defined `best_weights`, so reading `.weights` before the first
        # improvement raised AttributeError.  Define both and keep them in sync.
        self.best_weights = None
        self.weights = None
    def on_epoch_end(self, epoch, logs=None):
        current = (logs or {}).get('val_loss')
        if current is None:
            # No validation loss this epoch (e.g. no validation data) — nothing to track.
            return
        if current < self.best:
            self.best = current
            self.weights = self.model.get_weights()
            self.best_weights = self.weights
            print('\nCheckpointing weights val_loss= {0:6.4f}'.format(self.best))
        else:
            print('\nBest val_loss {0:6.4f}'.format(self.best))
class MyEarlyStopping(tf.keras.callbacks.Callback):
    """Stop training once the optimizer's learning rate falls below a cutoff.

    Intended to pair with ReduceLROnPlateau: when the plateau scheduler has
    shrunk the learning rate past ``cutoff``, further epochs are pointless.
    """
    def __init__(self, cutoff):
        super().__init__()
        self.cutoff = cutoff
    def on_epoch_end(self, epoch, logs=None):
        # Guard-clause style: keep training while the rate is still useful.
        if self.model.optimizer.learning_rate >= self.cutoff:
            return
        self.model.stop_training = True
        print('\nStop learning as learning rate is below the threshold\n')
def scheduler(epoch, lr):
    """Learning-rate schedule: linear warm-up over the first 10 epochs.

    For epochs 0-9 the rate is scaled by (epoch + 1) / 10; afterwards the
    incoming rate is returned unchanged.
    """
    if epoch >= 10:
        return lr
    return (lr * (epoch + 1)) / 10
def get_callbacks(datetime_tag):
    """Assemble the training callbacks.

    TensorBoard logs go to ./logs/deepredmt/<datetime_tag>; the learning rate
    is reduced on val_loss plateaus, the best weights are checkpointed in
    memory, and training stops once the rate drops below 1e-15.
    """
    tensorboard = tf.keras.callbacks.TensorBoard(
        log_dir="./logs/deepredmt/" + datetime_tag,
        histogram_freq=1)
    reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(
        monitor='val_loss',
        factor=0.1,
        patience=3,
        verbose=1)
    return [tensorboard, reduce_lr, WeightCheckpoint(), MyEarlyStopping(1e-15)]
def fit(fin,
        augmentation=True,
        num_hidden_units=5,
        batch_size=16,
        epochs=100,
        training_set_size=.8,
        modelname=None):
    """Train a Deepredmt model on the data in `fin` and save the best weights.

    Returns True once the best model has been written to
    ./models/deepredmt/<modelname>.tf (modelname defaults to a timestamp).
    """
    # Build the training / validation generators from the input file.
    train_gen, valid_gen = utils.prepare_dataset(
        fin,
        augmentation=augmentation,
        label_smoothing=True,
        training_set_size=training_set_size,
        occlude_target=False,
        batch_size=batch_size)
    # Window shape expected by Deepredmt.build — presumably a 41-position
    # sequence window with 4 one-hot channels; confirm against the model code.
    win_shape = (41, 4)
    model = Deepredmt.build(win_shape, num_hidden_units)
    model.get_layer('encoder').summary()
    model.summary()
    tag = datetime.datetime.now().strftime("%y%m%d-%H%M")
    callbacks = get_callbacks(tag)
    model.fit(train_gen,
              epochs=epochs,
              validation_data=valid_gen,
              callbacks=callbacks,
              workers=16)
    # Restore the best weights tracked by the WeightCheckpoint callback.
    checkpoint = next(c for c in callbacks if type(c).__name__ == "WeightCheckpoint")
    model.set_weights(checkpoint.weights)
    if modelname is None:
        modelname = tag
    model.save("./models/" + 'deepredmt/' + modelname + ".tf")
    print('Best model saved.')
    return True
| StarcoderdataPython |
6615658 | """
3377 / 3377 test cases passed.
Runtime: 44 ms
Memory Usage: 15 MB
"""
class Solution:
    def lastRemaining(self, n: int) -> int:
        """Elimination game (LeetCode 390).

        Repeatedly remove every other number from 1..n, alternating a
        left-to-right and a right-to-left pass; return the last survivor.
        Tracks only the first remaining number, the gap between survivors,
        and the current direction — O(log n) time, O(1) space.
        """
        first, gap, from_left = 1, 1, True
        while n > 1:
            # The leading survivor moves on a left pass, and on a right pass
            # whenever the current count is odd.
            if from_left or n % 2 == 1:
                first += gap
            gap *= 2
            n //= 2
            from_left = not from_left
        return first
| StarcoderdataPython |
102345 | # Generated by Django 4.0 on 2021-12-26 07:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine ``Player.hash_redeemable`` as a
    BooleanField(default=False) with verbose_name 'May be redeemed by a user'.
    """
    dependencies = [
        ('stats', '0025_alter_player_created_by'),
    ]
    operations = [
        migrations.AlterField(
            model_name='player',
            name='hash_redeemable',
            field=models.BooleanField(default=False, verbose_name='May be redeemed by a user'),
        ),
    ]
| StarcoderdataPython |
11243810 | <reponame>walkacross/simonsc
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pandas as pd  # NOTE(review): imported but unused in this script
import datetime
from simonsc import auth
from simonsc import history_bars
# Authenticate against the data service.
# NOTE(review): credentials are hard-coded — move to config/env before sharing.
auth("quantresearch","quantresearch")
dt = datetime.datetime(2020,4,20)
fields=["datetime","open","high","low","close"]
# Fetch 20 daily OHLC bars for instrument 000001.XSHE ending at `dt`.
data = history_bars(order_book_id="000001.XSHE", dt=dt, bar_count=20, frequency="1d", fields=fields)
print(data)
| StarcoderdataPython |
3237651 | #!/usr/bin/env python3
""" Threads that waste CPU cycles """
import os
import threading
# a simple function that wastes CPU cycles forever
def cpu_waster():
    while True:
        pass
# display information about this process (before spawning the wasters)
print('\n  Process ID: ', os.getpid())
print('Thread Count: ', threading.active_count())
for thread in threading.enumerate():
    print(thread)
print('\nStarting 12 CPU Wasters...')
# NOTE(review): the message above says 12 but only 6 threads are started below —
# confirm the intended count and make message and loop agree.  The threads are
# non-daemon busy loops, so the process never exits cleanly.
for i in range(6):
    threading.Thread(target=cpu_waster).start()
# display information about this process (after spawning — thread count grows)
print('\n  Process ID: ', os.getpid())
print('Thread Count: ', threading.active_count())
for thread in threading.enumerate():
    print(thread)
| StarcoderdataPython |
5062337 | <reponame>Yangruipis/simple_ml<filename>examples/feature_select_eaxmple.py
# -*- coding:utf-8 -*-
from simple_ml.feature_select import Filter, Embedded
from simple_ml.base.base_enum import FilterType, EmbeddedType
import numpy as np
from simple_ml.classify_data import get_wine
def wine_example():
    """Demonstrate Filter- and Embedded-based feature selection on wine data."""
    x, y = get_wine()
    # Restrict to classes 0 and 1 so the problem is binary.
    binary = (y == 0) | (y == 1)
    x = x[binary]
    y = y[binary]
    # Filter selection by correlation, then by variance (3 features each).
    corr_filter = Filter(FilterType.corr, 3)
    print(corr_filter.fit_transform(x, y).shape)
    var_filter = Filter(FilterType.var, 3)
    print(var_filter.fit_transform(x, y).shape)
    # Embedded selection via Lasso; sparsity can leave fewer than 3 non-zero
    # coefficients, so fewer features may come back.
    lasso = Embedded(3, EmbeddedType.Lasso)
    print(lasso.fit_transform(x, y).shape)
    # GBDT-based embedded selection currently supports discrete features only,
    # so use a random binary design matrix.
    gbdt = Embedded(3, EmbeddedType.GBDT)
    x_bin = np.random.choice([0, 1], 50).reshape(10, 5)
    y_rand = np.random.rand(10)
    print(gbdt.fit_transform(x_bin, y_rand), y_rand)
if __name__ == '__main__':
    # Run the demo only when executed as a script.
    wine_example()
| StarcoderdataPython |
5009108 | from watson_developer_cloud import SpeechToTextV1
from watson_developer_cloud.websocket import RecognizeCallback, AudioSource
from os.path import join, dirname
import json
# Watson Speech-to-Text client.
# NOTE(review): both values are placeholders ('ユーザー名' is Japanese for
# "username") — supply real credentials via configuration, not source.
speech_to_text = SpeechToTextV1(
    username='ユーザー名',
    password='<PASSWORD>')
class MyRecognizeCallback(RecognizeCallback):
    """Watson STT websocket callback that logs lifecycle events to stdout."""

    def __init__(self):
        super().__init__()

    def on_connected(self):
        print('Connection was successful')

    def on_data(self, data):
        # Pretty-print each (interim or final) recognition result.
        print(json.dumps(data, indent=2))

    def on_error(self, error):
        print('Error received: {}'.format(error))

    def on_inactivity_timeout(self, error):
        print('Inactivity timeout: {}'.format(error))
myRecognizeCallback = MyRecognizeCallback()
# Stream the WAV file to Watson over a websocket; the callback above prints
# connection status, interim/final results and errors as they arrive.
with open('out1.wav','rb') as audio_file:
    audio_source = AudioSource(audio_file)
    speech_to_text.recognize_using_websocket(
        audio=audio_source,
        content_type='audio/wav',
        recognize_callback=myRecognizeCallback,
        interim_results=True,
        model='en-US_BroadbandModel',)
| StarcoderdataPython |
81056 | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from ..layers.LayerSandPileReservoir import LayerSandPileReservoir
from ..layers.LayerLinearRegression import LayerLinearRegression
from .LayeredModel import LayeredModel
class SandPileModel(LayeredModel):
    """Two-layer model: a sand-pile reservoir feeding a linear-regression readout."""

    def __init__(self, input_size, output_size, reservoir_size,
                 spectral_scale=0.2, thresh_scale=3.0,
                 input_weight_scale=0.01, regulariser=1e-6):
        """
        input_size : input dimension of the data
        output_size : output dimension of the data
        reservoir_size : size of the reservoir
        spectral_scale : how much to scale the reservoir weights
        thresh_scale : scaling applied to the unit thresholds
        input_weight_scale : how much to scale the input weights by
        regulariser : regularisation parameter for the linear regression output
        """
        # Handles used by live plotting (populated lazily elsewhere).
        self.live_im = None
        self.live_fig = None

        reservoir = LayerSandPileReservoir(input_size, reservoir_size)
        reservoir.initialize_input_weights(scale=input_weight_scale, strategy="uniform",
                                           offset=0.0, sparsity=0.1)
        reservoir.initialize_threshold(reservoir.threshold_unit, thresh_scale=thresh_scale)
        reservoir.initialize_reservoir(strategy='uniform', spectral_scale=spectral_scale)

        # The readout sees the reservoir state concatenated with the raw input.
        readout = LayerLinearRegression(reservoir_size + input_size, output_size,
                                        regulariser=regulariser)

        self.layers = [reservoir, readout]
        super(SandPileModel, self).__init__(self.layers)

    def plot_reservoir(self):
        """Render the recorded reservoir signals as a 2-D heatmap."""
        recorded = self.layers[0].signals
        flattened = np.reshape(recorded, (np.shape(recorded)[0], -1))
        sns.heatmap(flattened)
        plt.plot()
5000078 | # Copyright 2016 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# group_func.py
# Created by <NAME> on 2016-04-03.
# Email: <EMAIL>
import warnings
warnings.simplefilter("ignore")
import pickle
from ndmg.stats.qa_mri import qa_mri as mqa
import numpy as np
import ndmg.utils as mgu
import os
import plotly.offline as pyo
from plotly.tools import FigureFactory as ff
from ndmg.utils import loadGraphs
from ndmg.stats.qa_graphs import compute_metrics
from ndmg.stats.qa_graphs_plotting import make_panel_plot
import networkx as nx
from ndmg.stats.plotly_helper import *
class group_func(object):
    """Group-level quality control over an ndmg-formatted functional output tree:
    loads per-subject QA pickles and connectomes, then writes group registration
    and motion-correction summary plots to ``outdir``.
    """
    def __init__(self, basedir, outdir, atlas=None, dataset=None):
        """
        A class for group level quality control.
        **Positional Arguments:**
            - basedir:
                - the ndmg-formatted functional outputs.
                  should have a qa/ folder contained within it.
            - outdir:
                - the directory to place all group level quality control.
            - atlas:
                - optional atlas label (only echoed / stored here).
            - dataset:
                - an optional parameter for the name of the dataset
                  to be present in the quality control output filenames.
        """
        # NOTE(review): debug print left in — consider logging instead.
        print(atlas)
        self.ndmgdir = basedir
        self.qadir = "{}/qa".format(self.ndmgdir)
        self.outdir = outdir
        self.conn_dir = "{}/connectomes".format(self.ndmgdir)
        self.dataset = dataset
        self.atlas = atlas
        # Eagerly load everything and run the analysis at construction time.
        (self.qa_files, self.subs) = self.get_qa_files()
        self.connectomes = self.get_connectomes()
        self.qa_objects = self.load_qa()
        self.group_level_analysis()
        pass
    def get_qa_files(self):
        """
        A function to load the relevant quality assessment files,
        for all the subjects we have in our study, given a properly-formatted
        ndmg functional directory.

        Returns a (qa_file_paths, subject_ids) pair; subjects without a
        ``<sub>_stats.pkl`` are silently skipped.
        """
        qa_files = []
        subs = []
        for sub in os.listdir(self.qadir):
            sub_qa = "{}/{}/{}_stats.pkl".format(self.qadir, sub, sub)
            # if the files exists, add it to our qa_files
            if os.path.isfile(sub_qa):
                qa_files.append(sub_qa)
                subs.append(sub)
        return (qa_files, subs)
    def get_connectomes(self):
        """
        A function to load the relevant connectomes for all of the subjects
        for each parcellation we have.

        Returns a dict mapping parcellation label -> list of connectome paths.
        """
        connectomes = {}
        for label in os.listdir(self.conn_dir):
            print(label)
            this_label = []
            label_dir = "{}/{}".format(self.conn_dir, label)
            for connectome in os.listdir(label_dir):
                conn_path = "{}/{}".format(label_dir, connectome)
                if os.path.isfile(conn_path):
                    this_label.append(conn_path)
            connectomes[label] = this_label
        return connectomes
    def load_qa(self):
        """
        A function to load the quality control objects.
        """
        qa_objects = []
        for qa_file in self.qa_files:
            # load the qa objects as qa_mri objects
            qa_objects.append(mqa.load(qa_file))
        return qa_objects
    def group_level_analysis(self):
        """
        A function to perform group level analysis after loading the
        functional qa objects properly.
        """
        self.group_reg()
        self.group_motion()
    def group_reg(self):
        """
        A function that performs group level registration quality control.
        Writes <outdir>/reg/[<dataset>_]registration_qa.html.
        """
        regdir = "{}/{}".format(self.outdir, "reg")
        cmd = "mkdir -p {}".format(regdir)
        mgu.execute_cmd(cmd)
        self_reg_sc = []
        temp_reg_sc = []
        cnr = []
        snr = []
        # Gather per-subject registration/SNR metrics for the KDE panels.
        for sub in self.qa_objects:
            self_reg_sc.append(sub.self_reg_sc)
            temp_reg_sc.append(sub.temp_reg_sc)
            cnr.append(sub.cnr)
            snr.append(sub.snr)
        fig_cnr = plot_rugdensity(cnr)
        fig_snr = plot_rugdensity(snr)
        fig_sreg = plot_rugdensity(self_reg_sc)
        fig_treg = plot_rugdensity(temp_reg_sc)
        figs = [fig_cnr, fig_snr, fig_sreg, fig_treg]
        names = [
            "temporal Contrast to Noise Ratio",
            "temporal Signal to Noise Ratio",
            "Self Registration Score",
            "Template Registration Score",
        ]
        ylab = ["Density", "Density", "Density", "Density"]
        xlab = ["Ratio", "Ratio", "Score", "Score"]
        traces = [fig_to_trace(fig) for fig in figs]
        fname_multi = "registration_qa.html"
        # if a dataset name is provided, add it to the name
        if self.dataset is not None:
            fname_multi = "{}_{}".format(self.dataset, fname_multi)
        fname_multi = "{}/{}".format(regdir, fname_multi)
        multi = traces_to_panels(traces, names=names, ylabs=ylab, xlabs=xlab)
        pyo.plot(multi, validate=False, filename=fname_multi)
        pass
    def group_motion(self):
        """
        A function that performs group level motion corrective quality control.
        Writes <outdir>/mc/[<dataset>_]motion_correction.html.
        """
        mcdir = "{}/{}".format(self.outdir, "mc")
        cmd = "mkdir -p {}".format(mcdir)
        mgu.execute_cmd(cmd)
        # NOTE(review): the four trans_* arrays below are allocated but never
        # used — dead code left over from an earlier version; candidate for removal.
        trans_abs = np.zeros((len(self.qa_objects)))
        trans_rel = np.zeros((len(self.qa_objects)))
        trans_abs_gt = np.zeros((len(self.qa_objects)))
        trans_rel_gt = np.zeros((len(self.qa_objects)))
        FD_mean = [sub.fd_mean for sub in self.qa_objects]
        FD_max = [sub.fd_max for sub in self.qa_objects]
        FD_gt_100um = [sub.fd_gt_100um for sub in self.qa_objects]
        FD_gt_200um = [sub.fd_gt_200um for sub in self.qa_objects]
        fig_mean = plot_rugdensity(FD_mean)
        fig_max = plot_rugdensity(FD_max)
        fig_gt_100um = plot_rugdensity(FD_gt_100um)
        fig_gt_200um = plot_rugdensity(FD_gt_200um)
        figs = [fig_mean, fig_max, fig_gt_100um, fig_gt_200um]
        names = [
            "Average FD KDE",
            "Max FD KDE",
            "Number of FD > 0.1 mm KDE",
            "Number of FD > 0.2 mm KDE",
        ]
        ylab = ["Density", "Density", "Density", "Density"]
        xlab = [
            "Average FD (mm)",
            "Average Motion (mm)",
            "Number of Volumes",
            "Number of Volumes",
        ]
        traces = [fig_to_trace(fig) for fig in figs]
        fname_multi = "motion_correction.html"
        # if a dataset name is provided, add it to the name
        if self.dataset is not None:
            fname_multi = "{}_{}".format(self.dataset, fname_multi)
        fname_multi = "{}/{}".format(mcdir, fname_multi)
        multi = traces_to_panels(traces, names=names, ylabs=ylab, xlabs=xlab)
        pyo.plot(multi, validate=False, filename=fname_multi)
        pass
| StarcoderdataPython |
6443367 | <gh_stars>0
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
def get_model_test():
    """Smoke-test ``h2o.get_model``: for each model category (Regression,
    Binomial, Clustering, Multinomial) check that a model fetched back by id
    predicts identically to the estimator it was fetched from.
    """
    prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
    # Random 70/30 train/test split.
    r = prostate[0].runif()
    train = prostate[r < 0.70]
    test = prostate[r >= 0.70]
    # Regression
    # NOTE(review): the h2o.gbm / h2o.kmeans / h2o.deeplearning shortcut
    # functions are the legacy h2o-py API — confirm the pinned h2o version.
    regression_gbm1 = h2o.gbm(y=train[1], x=train[2:9], distribution="gaussian")
    predictions1 = regression_gbm1.predict(test)
    regression_gbm2 = h2o.get_model(regression_gbm1._id)
    assert regression_gbm2._model_json['output']['model_category'] == "Regression"
    predictions2 = regression_gbm2.predict(test)
    for r in range(predictions1.nrow):
        p1 = predictions1[r,0]
        p2 = predictions2[r,0]
        assert p1 == p2, "expected regression predictions to be the same for row {}, but got {} and {}".format(r, p1, p2)
    # Binomial
    train[1] = train[1].asfactor()
    bernoulli_gbm1 = h2o.gbm(y=train[1], x=train[2:], distribution="bernoulli")
    predictions1 = bernoulli_gbm1.predict(test)
    bernoulli_gbm2 = h2o.get_model(bernoulli_gbm1._id)
    assert bernoulli_gbm2._model_json['output']['model_category'] == "Binomial"
    predictions2 = bernoulli_gbm2.predict(test)
    for r in range(predictions1.nrow):
        p1 = predictions1[r,0]
        p2 = predictions2[r,0]
        assert p1 == p2, "expected binomial predictions to be the same for row {}, but got {} and {}".format(r, p1, p2)
    # Clustering
    benign_h2o = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/benign.csv"))
    km_h2o = h2o.kmeans(x=benign_h2o, k=3)
    benign_km = h2o.get_model(km_h2o._id)
    assert benign_km._model_json['output']['model_category'] == "Clustering"
    # Multinomial
    train[4] = train[4].asfactor()
    multinomial_dl1 = h2o.deeplearning(x=train[0:2], y=train[4], loss='CrossEntropy')
    predictions1 = multinomial_dl1.predict(test)
    multinomial_dl2 = h2o.get_model(multinomial_dl1._id)
    assert multinomial_dl2._model_json['output']['model_category'] == "Multinomial"
    predictions2 = multinomial_dl2.predict(test)
    for r in range(predictions1.nrow):
        p1 = predictions1[r,0]
        p2 = predictions2[r,0]
        assert p1 == p2, "expected multinomial predictions to be the same for row {0}, but got {1} and {2}" \
                         "".format(r, p1, p2)
if __name__ == "__main__":
    # Run under the pyunit harness when standalone, directly when imported.
    pyunit_utils.standalone_test(get_model_test)
else:
    get_model_test()
| StarcoderdataPython |
3433727 | # Strategy described in <NAME>'s "The Evolution of Trust"
# https://ncase.me/trust/
#
# SIMPLETON: Hi! I try to start by cooperating. If you cooperate
# back, I do the same thing as my last move, even if it was a mistake.
# If you defect back, I do the opposite thing as my last move, even
# if it was a mistake.
# Reminder: For the history array, "cooperate" = 1, "defect" = 0
def strategy(history, memory):
    """SIMPLETON strategy for the iterated prisoner's dilemma.

    history is a (2, t) array — row 0 my moves, row 1 the opponent's;
    1 = cooperate, 0 = defect.  Cooperate on the first turn; afterwards
    repeat my last move if the opponent cooperated, otherwise flip it.
    Returns (choice, memory) with memory unused.
    """
    if history.shape[1] == 0:
        # First turn: lead with cooperation.
        return "cooperate", None
    my_last = history[0, -1]
    opponent_last = history[1, -1]
    if opponent_last == 0:
        # Opponent defected: do the opposite of my last move.
        choice = "cooperate" if my_last == 0 else "defect"
    else:
        # Opponent cooperated: repeat my last move.
        choice = "defect" if my_last == 0 else "cooperate"
    return choice, None
| StarcoderdataPython |
6649621 | <gh_stars>10-100
import os
from pint import UnitRegistry
# Shared unit registry, extended with the project's custom unit definitions.
ureg = UnitRegistry()
# Resolve unit_defs.txt relative to this module so imports work from any CWD.
dir_path = os.path.dirname(os.path.realpath(__file__))
path_to_unit_defs_file = os.path.join(dir_path, 'unit_defs.txt')
ureg.load_definitions(path_to_unit_defs_file)
| StarcoderdataPython |
225494 | <gh_stars>1-10
from os.path import isfile
class BuildToCPP:
    """Translate a token stream of the source language into a Go program.

    NOTE(review): despite the class name, the emitted code is Go
    (``package main``, ``fmt.Println``, ``:=``), not C++.  The name is kept
    to preserve the public interface.
    Tokens are sequences whose element 0 is the token text, e.g. ("DISPLAY",).
    """
    def __init__(self, tokens, filename):
        self.tokens = tokens        # token stream to translate
        self.filename = filename    # source path; output is filename minus last 7 chars + ".go"
        self.imports = []           # Go packages still to render
        self.imported = []          # Go packages already registered (dedup)
        self.import_code = ""       # rendered import block
        self.final_code = ""        # full rendered program
        self.go_code = "func main() {\n\t"

    def do_display(self, text):
        """Emit fmt.Println(...) for a VAR/NUM/EXPR/STRING display payload."""
        if "fmt" not in self.imported:
            self.imported.append("fmt")
            self.imports.append("fmt")
        if text[0:3] == "VAR":
            self.go_code += "fmt.Println({})\n\t".format(text[4:])
        elif text[0:3] == "NUM":
            self.go_code += "fmt.Println({})\n\t".format(text[4:])
        elif text[0:4] == "EXPR":
            self.go_code += "fmt.Println({})\n\t".format(text[5:])
        elif text[0:6] == "STRING":
            self.go_code += "fmt.Println({})\n\t".format(text[7:])

    def do_var(self, value, varname):
        """Emit a Go short variable declaration `varname := value`."""
        if value[0:3] == "NUM":
            self.go_code += "{} := {}\n\t".format(varname, value[4:])
        elif value[0:4] == "EXPR":
            self.go_code += "{} := {}\n\t".format(varname, value[5:])
        elif value[0:6] == "STRING":
            self.go_code += "{} := {}\n\t".format(varname, value[7:])

    def do_input(self, text, variable_name, type):
        """Emit a prompt plus fmt.Scanf into variable_name (%d for INT, %s for STR)."""
        if "fmt" not in self.imported:
            self.imported.append("fmt")
            self.imports.append("fmt")
        print(type)  # NOTE(review): debug output left in on purpose? consider removing
        if type == "INT":
            self.go_code += "fmt.Printf({})\n\tfmt.Scanf(\"%d\", &{})\n\t".format(text, variable_name)
        elif type == "STR":
            self.go_code += "fmt.Printf({})\n\tfmt.Scanf(\"%s\", &{})\n\t".format(text, variable_name)

    def find_if_toks(self, all_toks):
        """Collect the tokens of an if-body up to (excluding) BRACKETr.

        Returns (body_tokens, consumed) where `consumed` counts the closing
        BRACKETr so the caller can advance past the whole construct.
        """
        if_toks = []
        i = 0
        print(all_toks)
        for i in range(len(all_toks)):
            if_toks.append(all_toks[i])
            i += 1
            # BUG FIX: the original compared the *list* if_toks to the string
            # "BRACKETr" (never true), so the if-body swallowed every remaining
            # token.  Compare the text of the token just appended instead.
            if if_toks[-1][0] == "BRACKETr":
                break
        return if_toks[:-1], i

    def if_write(self, tokens):
        """Translate the statements inside an if-body and close the brace."""
        i = 0
        while i < len(tokens):
            if "{} {}".format(tokens[i][0], tokens[i + 1][0]) == "DISPLAY COLON":
                self.do_display(tokens[i + 2][0])
                i += 3
            elif "{} {}".format(tokens[i][0][0:3], tokens[i + 1][0][0:6]) == "VAR EQUALS":
                self.do_var(tokens[i + 2][0], tokens[i][0][4:])
                i += 3
            elif "{} {} {}".format(tokens[i][0], tokens[i + 1][0], tokens[i + 3][0]) == "INPUT COLON COMMA":
                self.do_input(tokens[i + 2][0][7:], tokens[i + 4][0][4:], tokens[i + 6][0])
                i += 7
        self.go_code += "}\n\t"

    def build(self):
        """Translate the whole token stream and write the .go file to disk."""
        if not isfile(self.filename):
            file = open(self.filename[0:-7] + ".go", "w+")
        else:
            file = open(self.filename[0:-7] + ".go", "w")
        i = 0
        while i < len(self.tokens):
            if "{} {}".format(self.tokens[i][0], self.tokens[i + 1][0]) == "DISPLAY COLON":
                self.do_display(self.tokens[i + 2][0])
                i += 3
            elif "{} {}".format(self.tokens[i][0][0:3], self.tokens[i + 1][0][0:6]) == "VAR EQUALS":
                self.do_var(self.tokens[i + 2][0], self.tokens[i][0][4:])
                i += 3
            elif "{} {} {} {}".format(self.tokens[i][0][0:2], self.tokens[i + 1][0][0:5], self.tokens[i + 2][0][0:9], self.tokens[i + 3][0][0:8]) == "IF COLON CONDITION BRACKETl":
                self.go_code += "if {} {{\n\t\t".format(self.tokens[i + 2][0][10:])
                if_contents, iter = self.find_if_toks(self.tokens[i + 4:])
                self.if_write(if_contents)
                i += 4 + iter
            elif "{} {} {}".format(self.tokens[i][0], self.tokens[i + 1][0], self.tokens[i + 3][0]) == "INPUT COLON COMMA":
                self.do_input(self.tokens[i + 2][0][7:], self.tokens[i + 4][0][4:], self.tokens[i + 6][0])
                i += 7
        for i in range(len(self.imports)):
            self.import_code += "import \"{}\"\n".format(self.imports[i])
        self.go_code += "\n}"
        self.final_code = "package main\n\n{}\n{}".format(self.import_code, self.go_code)
        file.write(self.final_code)
| StarcoderdataPython |
1878220 | import time
import os
import logging
from threading import Thread
from urllib.parse import urljoin
import speech_recognition as sr
import requests
import json_config
from speech_recognition import Recognizer, Microphone
from requests.exceptions import ConnectionError
import queue
import susi_python as susi
from .lights import lights
from .internet_test import internet_on
from ..scheduler import ActionScheduler
from ..player import player
from ..config import susi_config
from ..speech import TTS
logger = logging.getLogger(__name__)
try:
    import RPi.GPIO as GPIO
except Exception:
    # RPi.GPIO raises ImportError off-device and can raise RuntimeError on
    # unsupported boards; the original bare `except:` also swallowed
    # SystemExit/KeyboardInterrupt, so it is narrowed to Exception.
    logger.warning("This device doesn't have GPIO port")
    GPIO = None
class Components:
    """Common components accessible by each state of the SUSI state machine:
    recognizer, microphone, hotword detector, wake button, scheduler and the
    shared configuration.
    """

    def __init__(self, renderer=None):
        # Configure the two status LEDs / GPIO pins when running on a Pi.
        try:
            import RPi.GPIO as GPIO
            GPIO.setmode(GPIO.BCM)
            GPIO.setup(27, GPIO.OUT)
            GPIO.setup(22, GPIO.OUT)
        except ImportError:
            logger.warning("This device doesn't have GPIO port")
        except RuntimeError as e:
            logger.error(e)
            pass
        # Poll for a local SUSI server in the background (daemon thread).
        thread1 = Thread(target=self.server_checker, name="Thread1")
        thread1.daemon = True
        thread1.start()
        recognizer = Recognizer()
        # Fixed energy threshold: dynamic adjustment is deliberately disabled.
        recognizer.dynamic_energy_threshold = False
        recognizer.energy_threshold = 1000
        self.recognizer = recognizer
        self.microphone = Microphone()
        self.susi = susi
        self.renderer = renderer
        # NOTE(review): https on 127.0.0.1:4000 assumes the local server
        # terminates TLS — confirm, otherwise this should be http.
        self.server_url = "https://127.0.0.1:4000"
        self.action_schduler = ActionScheduler()  # (sic) attribute name kept: referenced by callers
        self.action_schduler.start()
        try:
            # Geolocate via the public IP so answers can be location-aware.
            res = requests.get('http://ip-api.com/json').json()
            self.susi.update_location(
                longitude=res['lon'], latitude=res['lat'],
                country_name=res['country'], country_code=res['countryCode'])
        except ConnectionError as e:
            logger.error(e)
        self.config = json_config.connect('config.json')
        if self.config['usage_mode'] == 'authenticated':
            try:
                susi.sign_in(email=self.config['login_credentials']['email'],
                             password=self.config['login_credentials']['password'])
            except Exception as e:
                logger.error('Some error occurred in login. Check you login details in config.json.\n%s', e)
        if self.config['hotword_engine'] == 'Snowboy':
            from ..hotword_engine.snowboy_detector import SnowboyDetector
            self.hotword_detector = SnowboyDetector()
        else:
            from ..hotword_engine.sphinx_detector import PocketSphinxDetector
            self.hotword_detector = PocketSphinxDetector()
        if self.config['WakeButton'] == 'enabled':
            logger.info("Susi has the wake button enabled")
            if self.config['Device'] == 'RaspberryPi':
                logger.info("Susi runs on a RaspberryPi")
                from ..hardware_components import RaspberryPiWakeButton
                self.wake_button = RaspberryPiWakeButton()
            else:
                logger.warning("Susi is not running on a RaspberryPi")
                self.wake_button = None
        else:
            logger.warning("Susi has the wake button disabled")
            self.wake_button = None

    def server_checker(self):
        """Poll the local SUSI server until it answers a test query, then
        switch the API endpoint to it.  Runs forever on a daemon thread."""
        response_one = None
        test_params = {
            'q': 'Hello',
            'timezoneOffset': int(time.timezone / 60)
        }
        while response_one is None:
            try:
                logger.debug("checking for local server")
                url = urljoin(self.server_url, '/susi/chat.json')
                # BUG FIX: requests.Response has no .result() (that API belongs
                # to requests-futures), so the original call always raised
                # AttributeError and the local server was never detected.
                response_one = requests.get(url, test_params)
                api_endpoint = self.server_url
                susi.use_api_endpoint(api_endpoint)
            except AttributeError:
                time.sleep(10)
                continue
            except ConnectionError:
                time.sleep(10)
                continue
class SusiStateMachine():
"""Actually not a state machine, but we keep the name for now"""
    def __init__(self, renderer=None):
        """Wire up the shared Components and subscribe every event source
        (hotword detector, wake button, renderer, scheduler) to this machine."""
        self.components = Components(renderer)
        # Planned (scheduled) actions are queued and drained by start().
        self.event_queue = queue.Queue()
        if self.components.hotword_detector is not None:
            self.components.hotword_detector.subject.subscribe(
                on_next=lambda x: self.hotword_detected_callback())
        if self.components.wake_button is not None:
            self.components.wake_button.subject.subscribe(
                on_next=lambda x: self.hotword_detected_callback())
        if self.components.renderer is not None:
            self.components.renderer.subject.subscribe(
                on_next=lambda x: self.hotword_detected_callback())
        # NOTE(review): 'schduler' is a typo inherited from Components; renaming
        # would touch both classes, so it is used as-is here.
        if self.components.action_schduler is not None:
            self.components.action_schduler.subject.subscribe(
                on_next=lambda x: self.queue_event(x))
    def queue_event(self,event):
        # Called from the scheduler's thread; queue.Queue is thread-safe.
        self.event_queue.put(event)
    def start(self):
        """Main loop: when idle, run the hotword detector (blocking); otherwise
        drain one scheduled action from the queue.  Never returns."""
        while True:
            logger.debug("starting detector")
            if self.event_queue.empty():
                self.start_detector()
            else:
                ev = self.event_queue.get()
                self.deal_with_answer(ev)
            logger.debug("after starting detector")
            # back from processing
            player.restore_softvolume()
            if GPIO:
                try:
                    # Turn both status LEDs off between interactions.
                    GPIO.output(27, False)
                    GPIO.output(22, False)
                except RuntimeError:
                    pass
    def notify_renderer(self, message, payload=None):
        """Forward a state-change message (and optional payload) to the UI renderer, if any."""
        if self.components.renderer is not None:
            self.components.renderer.receive_message(message, payload)
    def start_detector(self):
        """Start (blocking) hotword detection on the configured engine."""
        self.components.hotword_detector.start()
    def stop_detector(self):
        """Stop hotword detection while a voice command is being processed."""
        self.components.hotword_detector.stop()
    def hotword_detected_callback(self):
        """React to a hotword/wake event: beep, record a voice command from the
        microphone, run speech-to-text and dispatch the recognized text."""
        # beep
        player.beep(os.path.abspath(os.path.join(self.components.config['data_base_dir'],
                                                 self.components.config['detection_bell_sound'])))
        # stop hotword detection
        logger.debug("stopping hotword detector")
        self.stop_detector()
        if GPIO:
            # LED on pin 22 signals "listening".
            GPIO.output(22, True)
        audio = None
        logger.debug("notify renderer for listening")
        self.notify_renderer('listening')
        recognizer = self.components.recognizer
        with self.components.microphone as source:
            try:
                logger.debug("listening to voice command")
                # Wait up to 10 s for speech to start; cap the phrase at 5 s.
                audio = recognizer.listen(source, timeout=10.0, phrase_time_limit=5)
            except sr.WaitTimeoutError:
                logger.debug("timeout reached waiting for voice command")
                self.deal_with_error('ListenTimeout')
                return
        if GPIO:
            GPIO.output(22, False)
        lights.off()
        lights.think()
        try:
            logger.debug("Converting audio to text")
            value = self.recognize_audio(audio=audio, recognizer=recognizer)
            logger.debug("recognize_audio => %s", value)
            self.notify_renderer('recognized', value)
            if self.deal_with_answer(value):
                pass
            else:
                logger.error("Error dealing with answer")
        except sr.UnknownValueError as e:
            logger.error("UnknownValueError from SpeechRecognition: %s", e)
            self.deal_with_error('RecognitionError')
            return
def __speak(self, text):
"""Method to set the default TTS for the Speaker
"""
if self.components.config['default_tts'] == 'google':
TTS.speak_google_tts(text)
if self.components.config['default_tts'] == 'flite':
logger.info("Using flite for TTS") # indication for using an offline music player
TTS.speak_flite_tts(text)
elif self.components.config['default_tts'] == 'watson':
TTS.speak_watson_tts(text)
    def recognize_audio(self, recognizer, audio):
        """Run speech-to-text on `audio` using the engine selected by
        config['default_stt'] and return the transcribed string.
        Raises speech_recognition errors (e.g. UnknownValueError) to the caller."""
        logger.info("Trying to recognize audio with %s in language: %s", self.components.config['default_stt'], susi_config["language"])
        if self.components.config['default_stt'] == 'google':
            return recognizer.recognize_google(audio, language=susi_config["language"])
        elif self.components.config['default_stt'] == 'watson':
            username = self.components.config['watson_stt_config']['username']
            password = self.components.config['watson_stt_config']['password']
            return recognizer.recognize_ibm(
                username=username,
                password=password,
                language=susi_config["language"],
                audio_data=audio)
        elif self.components.config['default_stt'] == 'pocket_sphinx':
            # NOTE(review): only some branches normalize "lang_COUNTRY" to
            # "lang-COUNTRY" — confirm which form each recognizer expects.
            lang = susi_config["language"].replace("_", "-")
            if internet_on():
                # NOTE(review): side effect — permanently switches the configured
                # STT engine back to google once the network is available.
                self.components.config['default_stt'] = 'google'
                return recognizer.recognize_google(audio, language=lang)
            else:
                return recognizer.recognize_sphinx(audio, language=lang)
        elif self.components.config['default_stt'] == 'bing':
            api_key = self.components.config['bing_speech_api_key']
            return recognizer.recognize_bing(audio_data=audio, key=api_key, language=susi_config["language"])
        elif self.components.config['default_stt'] == 'deepspeech-local':
            lang = susi_config["language"].replace("_", "-")
            return recognizer.recognize_deepspeech(audio, language=lang)
def deal_with_error(self, payload=None):
if payload == 'RecognitionError':
logger.debug("ErrorState Recognition Error")
self.notify_renderer('error', 'recognition')
lights.speak()
player.say(os.path.abspath(os.path.join(self.components.config['data_base_dir'],
self.components.config['recognition_error_sound'])))
lights.off()
elif payload == 'ConnectionError':
self.notify_renderer('error', 'connection')
config['default_tts'] = 'flite'
config['default_stt'] = 'pocket_sphinx'
print("Internet Connection not available")
lights.speak()
lights.off()
logger.info("Changed to offline providers")
elif payload == 'ListenTimeout':
self.notify_renderer('error', 'timeout')
# TODO make a Tada sound here
lights.speak()
lights.off()
else:
print("Error: {} \n".format(payload))
self.notify_renderer('error')
lights.speak()
player.say(os.path.abspath(os.path.join(self.components.config['data_base_dir'],
self.components.config['problem_sound'])))
lights.off()
def deal_with_answer(self, payload=None):
    """Resolve *payload* to a SUSI reply and act on every field of it.

    Parameters
    ----------
    payload : str or dict, optional
        Either a raw query string (sent to the SUSI server) or an
        already-resolved planned-action response dict.

    Returns
    -------
    bool
        True on success, False on unexpected errors. Connection
        problems are delegated to the error state via ``to_error``.
    """
    try:
        # Tracks whether a reply field already produced user feedback,
        # so we do not also speak "I don't have an answer to this".
        no_answer_needed = False
        if isinstance(payload, str):
            logger.debug("Sending payload to susi server: %s", payload)
            reply = self.components.susi.ask(payload)
        else:
            # BUGFIX: the message had no %s placeholder, so the extra
            # argument made the logging module raise an internal
            # "not all arguments converted" formatting error.
            logger.debug("Executing planned action response %s", payload)
            reply = payload
        if GPIO:
            GPIO.output(27, True)
        if self.components.renderer is not None:
            self.notify_renderer('speaking', payload={'susi_reply': reply})
        if 'planned_actions' in reply.keys():
            logger.debug("planning action: ")
            for plan in reply['planned_actions']:
                logger.debug("plan = " + str(plan))
                # TODO TODO
                # plan_delay is wrong, it is 0, we need to use
                # plan = {'language': 'en', 'answer': 'ALARM', 'plan_delay': 0, 'plan_date': '2019-12-30T13:36:05.458Z'}
                # plan_date !!!!!
                self.components.action_schduler.add_event(int(plan['plan_delay']) / 1000,
                                                          plan)
        # first responses WITHOUT answer key!
        # {'answer': 'Audio volume is now 10 percent.', 'volume': '10'}
        if 'volume' in reply.keys():
            no_answer_needed = True
            player.volume(reply['volume'])
            player.say(os.path.abspath(os.path.join(self.components.config['data_base_dir'],
                                                    self.components.config['detection_bell_sound'])))
        if 'media_action' in reply.keys():
            action = reply['media_action']
            if action == 'pause':
                no_answer_needed = True
                player.pause()
                lights.off()
                lights.wakeup()
            elif action == 'resume':
                no_answer_needed = True
                player.resume()
            elif action == 'restart':
                no_answer_needed = True
                player.restart()
            elif action == 'next':
                no_answer_needed = True
                player.next()
            elif action == 'previous':
                no_answer_needed = True
                player.previous()
            elif action == 'shuffle':
                no_answer_needed = True
                player.shuffle()
            else:
                logger.error('Unknown media action: %s', action)
        # {'stop': <susi_python.models.StopAction object at 0x7f4641598d30>}
        if 'stop' in reply.keys():
            no_answer_needed = True
            player.stop()
        if 'answer' in reply.keys():
            logger.info('Susi: %s', reply['answer'])
            lights.off()
            lights.speak()
            self.__speak(reply['answer'])
            lights.off()
        else:
            # 'identifier' replies speak their own "Playing ..." answer later.
            if not no_answer_needed and 'identifier' not in reply.keys():
                lights.off()
                lights.speak()
                self.__speak("I don't have an answer to this")
                lights.off()
        if 'language' in reply.keys():
            answer_lang = reply['language']
            if answer_lang != susi_config["language"]:
                logger.info("Switching language to: %s", answer_lang)
                # switch language
                susi_config["language"] = answer_lang
        # answer to "play ..."
        # {'identifier': 'ytd-04854XqcfCY', 'answer': 'Playing Queen - We Are The Champions (Official Video)'}
        if 'identifier' in reply.keys():
            url = reply['identifier']
            logger.debug("Playing " + url)
            # 'ytd-<id>' identifiers are YouTube videos; anything else is a URL.
            if url[:3] == 'ytd':
                player.playytb(url[4:])
            else:
                player.play(url)
        if 'table' in reply.keys():
            # Print and speak the header plus the first four data rows.
            table = reply['table']
            for h in table.head:
                print('%s\t' % h, end='')
                self.__speak(h)
            print()
            for datum in table.data[0:4]:
                for value in datum:
                    print('%s\t' % value, end='')
                    self.__speak(value)
                print()
        if 'rss' in reply.keys():
            rss = reply['rss']
            entities = rss['entities']
            count = rss['count']
            for entity in entities[0:count]:
                logger.debug(entity.title)
                self.__speak(entity.title)
    except ConnectionError:
        return self.to_error('ConnectionError')
    except Exception as e:
        logger.error('Got error: %s', e)
        return False
    return True
| StarcoderdataPython |
8174301 | <gh_stars>100-1000
"""
================
Simple Axisline4
================
"""
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import host_subplot
import numpy as np
# Plot one period of sin(x) on the host axes.
ax = host_subplot(111)
xx = np.arange(0, 2*np.pi, 0.01)
ax.plot(xx, np.sin(xx))

ax2 = ax.twin()  # ax2 is responsible for "top" axis and "right" axis
# Label the top axis in multiples of pi.
ax2.set_xticks([0., .5*np.pi, np.pi, 1.5*np.pi, 2*np.pi])
ax2.set_xticklabels(["$0$", r"$\frac{1}{2}\pi$",
                     r"$\pi$", r"$\frac{3}{2}\pi$", r"$2\pi$"])

# Show only the top tick labels of the twin; the right ones would
# duplicate the host's y labels.
ax2.axis["right"].major_ticklabels.set_visible(False)
ax2.axis["top"].major_ticklabels.set_visible(True)

plt.show()
| StarcoderdataPython |
4955361 | <gh_stars>0
# -*- coding: utf-8 -*-
from PyQt5.QtCore import Qt, pyqtSignal
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import QMessageBox, QWidget
from .....Classes.Winding import Winding
from .....Classes.WindingCW1L import WindingCW1L
from .....Classes.WindingCW2LR import WindingCW2LR
from .....Classes.WindingCW2LT import WindingCW2LT
from .....Classes.WindingDW1L import WindingDW1L
from .....Classes.WindingDW2L import WindingDW2L
from .....Functions.Winding.comp_wind_sym import comp_wind_sym
from .....GUI.Dialog.DMachineSetup.SWindPat.Gen_SWindPat import Gen_SWindPat
from .....GUI.Resources import pixmap_dict
from .....Methods.Machine.Winding import WindingError
# For the Pattern combobox
TYPE_INDEX = [WindingCW2LT, WindingCW1L, WindingDW2L, WindingDW1L, WindingCW2LR]
class SWindPat(Gen_SWindPat, QWidget):
    """Step to define the winding pattern"""

    # Signal to DMachineSetup to know that the save popup is needed
    saveNeeded = pyqtSignal()
    # Information for DMachineSetup nav
    step_name = "Winding Pattern"

    def __init__(self, machine, matlib, is_stator=False):
        """Initialize the GUI according to machine

        Parameters
        ----------
        self : SWindPat
            A SWindPat widget
        machine : Machine
            current machine to edit
        matlib : MatLib
            Material Library
        is_stator : bool
            To adapt the GUI to set either the stator or the rotor
        """
        # Build the interface according to the .ui file
        QWidget.__init__(self)
        self.setupUi(self)

        # Set Help URL
        self.b_help.url = "https://eomys.com/produits/manatee/howtos/article/"
        self.b_help.url += "how-to-set-up-the-winding"

        # Saving arguments
        self.machine = machine
        self.matlib = matlib
        self.is_stator = is_stator

        # Fill the fields with the machine values (if they're filled)
        if self.is_stator:
            self.obj = machine.stator
        else:
            self.obj = machine.rotor
        if machine.type_machine == 9 and not self.is_stator:
            # Enforce tooth winding for WRSM rotor
            # BUGFIX: guard against winding being None before calling
            # as_dict() on it (previously raised AttributeError).
            if self.obj.winding is None:
                self.obj.winding = WindingCW2LT()
            else:
                self.obj.winding = WindingCW2LT(init_dict=self.obj.winding.as_dict())
            self.obj.winding.qs = 1
            self.b_preview.setEnabled(False)
            self.si_qs.setEnabled(False)
            self.c_wind_type.setEnabled(False)
            self.c_wind_type.setCurrentIndex(0)
            self.c_wind_type.setItemText(0, "DC wound winding for salient pole")
        else:
            self.b_preview.setEnabled(True)
            self.si_qs.setEnabled(True)
            self.c_wind_type.setEnabled(True)
            self.c_wind_type.setItemText(0, "Double Layer Concentrated Orthoradial")

        # Set the current Winding pattern
        if self.obj.winding is None:
            # BUGFIX: no winding yet => create the default pattern directly;
            # the previous code called None.as_dict() here.
            # The default type_winding is WindingCW2LT
            self.obj.winding = WindingCW2LT()
            self.c_wind_type.setCurrentIndex(0)
        elif type(self.obj.winding) is Winding:
            # Abstract Winding: convert to the default concrete pattern while
            # keeping the properties already set
            self.obj.winding = WindingCW2LT(init_dict=self.obj.winding.as_dict())
            self.c_wind_type.setCurrentIndex(0)
        else:
            self.c_wind_type.setCurrentIndex(TYPE_INDEX.index(type(self.obj.winding)))
        self.update_image()

        if type(self.obj.winding) is WindingDW2L:
            if self.obj.winding.coil_pitch is None:
                self.obj.winding.coil_pitch = 0
            self.si_coil_pitch.setValue(self.obj.winding.coil_pitch)

        if self.obj.winding.Nslot_shift_wind is not None:
            self.si_Nslot.setValue(self.obj.winding.Nslot_shift_wind)
        else:
            self.si_Nslot.setValue(0)
            self.obj.winding.Nslot_shift_wind = 0

        if self.obj.winding.qs is None:  # default value
            self.obj.winding.qs = 3
        self.si_qs.setValue(self.obj.winding.qs)

        if self.obj.winding.Ntcoil is None:
            self.obj.winding.Ntcoil = 1  # Default value for preview

        if self.obj.winding.is_reverse_wind is not None:
            if self.obj.winding.is_reverse_wind:
                self.is_reverse.setCheckState(Qt.Checked)
            else:
                self.is_reverse.setCheckState(Qt.Unchecked)
        else:
            self.obj.winding.is_reverse_wind = False

        # Display shape of wind_mat
        self.set_output()
        self.hide_coil_pitch()

        # Connect the signal/slot
        self.c_wind_type.currentIndexChanged.connect(self.set_type)
        self.si_qs.editingFinished.connect(self.set_qs)
        self.si_coil_pitch.editingFinished.connect(self.set_coil_pitch)
        self.si_Nslot.editingFinished.connect(self.set_Nslot)
        self.is_reverse.stateChanged.connect(self.set_is_reverse_wind)
        self.b_preview.clicked.connect(self.s_plot)

    def set_type(self, index):
        """Signal to update the winding type

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        index : int
            Index of selected type
        """
        # Convert the current winding to the newly selected concrete type,
        # keeping the common (base Winding) properties.
        w_dict = Winding.as_dict(self.obj.winding)
        self.obj.winding = TYPE_INDEX[index](init_dict=w_dict)

        # Update out_shape
        self.set_output()
        self.hide_coil_pitch()
        # Update image
        self.update_image()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def update_image(self):
        """Update the schematics to the current winding pattern

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        """
        self.img_wind_pat.setPixmap(
            QPixmap(pixmap_dict[type(self.obj.winding).__name__])
        )

    def set_qs(self):
        """Signal to update the value of qs according to the spinbox

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        """
        self.obj.winding.qs = self.si_qs.value()
        self.set_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_coil_pitch(self):
        """Signal to update the value of coil_pitch according to the spinbox

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        """
        self.obj.winding.coil_pitch = self.si_coil_pitch.value()
        self.set_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_Nslot(self):
        """Signal to update the value of Nslot_shift_wind according to the
        spinbox

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        """
        self.obj.winding.Nslot_shift_wind = self.si_Nslot.value()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def set_is_reverse_wind(self, value):
        """Signal to update the value of is_reverse_wind according to the
        widget

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        value :
            New value of is_reverse_wind
        """
        # The stateChanged signal passes a Qt.CheckState int; re-read the
        # checkbox to get a clean bool instead of using `value` directly.
        value = self.is_reverse.isChecked()
        self.obj.winding.is_reverse_wind = value
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()

    def hide_coil_pitch(self):
        """Show coil_pitch only if type(winding) is WindingDW2L

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        """
        # Block signals while toggling visibility so no spurious
        # editingFinished events are processed.
        self.si_coil_pitch.blockSignals(True)
        if type(self.obj.winding) is WindingDW2L:
            self.si_coil_pitch.show()
            self.in_coil_pitch.show()
        else:
            self.si_coil_pitch.hide()
            self.in_coil_pitch.hide()
        self.si_coil_pitch.blockSignals(False)

    def set_output(self):
        """Update the shape and period Label to match the current winding setup

        Parameters
        ----------
        self : SWindPat
            a SWindPat object
        """
        wind = self.obj.winding  # For readability

        # Wind_matrix is a matrix of dimension [Nlay_rad, Nlay_tan, Zs, qs]
        # Nlay_rad and Nlay_tan depend of the winding type
        (Nrad, Ntan) = wind.get_dim_wind()
        Nlay = str(Nrad) + ", " + str(Ntan) + ", "

        # Zs should be set, but to be sure:
        if self.obj.slot.Zs is None:
            Zs = "?, "
        else:
            Zs = str(self.obj.slot.Zs) + ", "
        if wind.qs is None:
            qs = "?]"
        else:
            qs = str(wind.qs) + "]"
        self.out_shape.setText(self.tr("Winding Matrix shape: [") + Nlay + Zs + qs)

        # Slots per pole per phase (ms); "?" if anything is unset.
        try:
            ms = str(self.obj.slot.Zs / (wind.p * wind.qs * 2.0))
        except TypeError:  # One of the value is None
            ms = "?"
        if self.obj.is_stator:
            self.out_ms.setText(self.tr("ms = Zs / (2*p*qs) = ") + ms)
        else:
            self.out_ms.setText(self.tr("ms = Zr / (2*p*qr) = ") + ms)

        # Winding periodicity; "?" when the connection matrix can't be built.
        try:
            wind_mat = wind.comp_connection_mat(self.obj.slot.Zs)
            Nperw = str(comp_wind_sym(wind_mat)[0])
        except Exception:  # Unable to compution the connection matrix
            Nperw = "?"
        self.out_Nperw.setText(self.tr("Nperw: ") + Nperw)

    def s_plot(self):
        """Plot a preview of the winding in a popup

        Parameters
        ----------
        self : SWindPat
            A SWindPat object
        """
        try:
            self.obj.plot_winding()
        except (AssertionError, WindingError) as e:
            QMessageBox().critical(self, self.tr("Error"), str(e))

    @staticmethod
    def check(lamination):
        """Check that the lamination have all the needed field set

        Parameters
        ----------
        lamination : Lamination
            Lamination to check

        Returns
        -------
        error: str
            Error message (return None if no error)
        """
        try:
            # Check that everything is set
            if lamination.winding.qs is None:
                return "You must set qs !"
        except Exception as e:
            return str(e)
| StarcoderdataPython |
6404157 | <filename>old/server/ts.py
#!/usr/bin/env python2.7
# time that the bluetooth takes to get going?
EXTRA_WAKEUP = -3
FETCH_TRIES = 3
# avoid turning off the bluetooth etc.
TESTING = False
import sys
# for wrt
sys.path.append('/root/python')
import httplib
import time
import traceback
import binascii
import hmac
import zlib
import urllib
import urllib2
import logging
import socket
L = logging.info
W = logging.warning
E = logging.error
import config
from utils import monotonic_time, retry, readline, crc16
import utils
import bluetooth
def get_socket(addr):
    """Open an RFCOMM bluetooth connection to addr (channel 1), 1s timeout."""
    sock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
    L("connecting")
    sock.connect((addr, 1))
    sock.setblocking(False)
    sock.settimeout(1)
    return sock
def flush(sock):
    """Drain and return all pending lines from the socket."""
    pending = []
    line = readline(sock)
    while line:
        pending.append(line)
        line = readline(sock)
    return pending
def encode_extra(extra_lines):
    """Turn leftover serial lines into 'extraN=<stripped line>' strings."""
    encoded = []
    for idx, line in enumerate(extra_lines):
        encoded.append('extra%d=%s' % (idx, line.strip()))
    return encoded
@retry()
def fetch(sock):
    """Request the stored measurements from the device.

    Protocol: send "fetch\\n"; expect "START\\n", data lines, "END\\n",
    then a trailing "CRC=<n>" line. The CRC-16 is accumulated over the
    START line and every following line including END.

    Returns the list of data lines (plus any pre-existing lines encoded
    as extraN=...), or None on timeout / CRC failure so @retry can
    attempt again.
    """
    # Lines already buffered on the link are kept and reported as extras.
    extra_lines = flush(sock)
    sock.send("fetch\n")
    crc = 0
    lines = []
    l = readline(sock)
    if not l:
        # No response at all: let @retry try again.
        return None
    if l != 'START\n':
        W("Bad expected START line '%s'\n" % l.rstrip('\n'))
        extra_lines.append(l)
        return encode_extra(extra_lines)
    crc = crc16(l, crc)
    while True:
        l = readline(sock)
        # The END line is included in the CRC but not in the data.
        crc = crc16(l, crc)
        if l == 'END\n':
            break
        lines.append(l.rstrip('\n'))
    lines += encode_extra(extra_lines)
    for d in lines:
        L("Received: %s" % d)
    # Final line must be "CRC=<int in 0..0xffff>".
    l = readline(sock)
    recv_crc = None
    try:
        k, v = l.rstrip('\n').split('=')
        if k == 'CRC':
            recv_crc = int(v)
            if recv_crc < 0 or recv_crc > 0xffff:
                recv_crc = None
    except ValueError:
        pass
    if recv_crc is None:
        W("Bad expected CRC line '%s'\n" % l.rstrip('\n'))
        return None
    if recv_crc != crc:
        W("Bad CRC: calculated 0x%x vs received 0x%x\n" % (crc, recv_crc))
        return None
    return lines
@retry()
def turn_off(sock):
    """Tell the device to power down its bluetooth.

    Parses the "next_wake=..,rem=..,tick_secs=..,tick_wake=.." reply and
    returns the number of seconds until the device wakes again, or None
    on a bad reply (so @retry can attempt again). Returns a dummy value
    when TESTING to avoid actually switching bluetooth off.
    """
    if TESTING:
        return 99
    L("Sending btoff")
    flush(sock)
    sock.send("btoff\n");
    # read newline
    l = readline(sock)
    if not l:
        W("Bad response to btoff")
        return None
    if not l.startswith('next_wake'):
        W("Bad response to btoff '%s'" % l)
        return None
    L("Next wake line %s" % l)
    # Reply is a comma-separated list of key=value tokens.
    toks = dict(v.split('=') for v in l.split(','))
    rem = int(toks['rem'])
    tick_secs = int(toks['tick_secs'])
    # tick_wake is 0-based on the device; +1 converts to a tick count.
    tick_wake = int(toks['tick_wake']) + 1
    next_wake = int(toks['next_wake'])
    # Seconds already elapsed in the current wake period.
    rem_secs = float(rem) / tick_wake * tick_secs
    next_wake_secs = next_wake - rem_secs
    L("next_wake_secs %f\n", next_wake_secs)
    return next_wake_secs
@retry()
def clear_meas(sock):
    """Ask the device to clear stored measurements; True on 'cleared' ack."""
    flush(sock)
    sock.send("clear\n")
    reply = readline(sock)
    if reply:
        if reply.rstrip() == 'cleared':
            return True
    E("Bad response to clear '%s'" % str(reply))
    return False
def send_results(lines):
    """Upload measurement lines to the collection server.

    The lines are joined, zlib-compressed and base64-encoded, then
    authenticated with an HMAC over the encoded payload and POSTed to
    config.UPDATE_URL. Returns True iff the server answers 'OK'.
    """
    enc_lines = binascii.b2a_base64(zlib.compress('\n'.join(lines)))
    # HMAC (default MD5 digest in this py2 codebase) over the encoded body.
    mac = hmac.new(config.HMAC_KEY, enc_lines).hexdigest()
    url_data = urllib.urlencode( {'lines': enc_lines, 'hmac': mac} )
    con = urllib2.urlopen(config.UPDATE_URL, url_data)
    # Only a short response is expected; cap the read at 100 bytes.
    result = con.read(100)
    if result == 'OK':
        return True
    else:
        W("Bad result '%s'" % result)
        return False
def do_comms(sock):
    """One fetch/upload/clear cycle.

    Returns the number of seconds until the next wake, or None when any
    stage fails (the caller just retries later).
    """
    L("do_comms")
    # serial could be unreliable, try a few times
    data = fetch(sock)
    if not data:
        return
    if not send_results(data):
        return
    clear_meas(sock)
    # bluetooth power-off is currently disabled; use a fixed interval
    #next_wake = turn_off(sock)
    #sock.close()
    return 600
testcount = 0
def sleep_for(secs):
    """Sleep for secs seconds against the monotonic clock.

    Re-checks the deadline after each sleep so early wakeups (signals)
    do not cut the total wait short.
    """
    deadline = monotonic_time() + secs
    remaining = deadline - monotonic_time()
    while remaining > 0:
        time.sleep(remaining)
        remaining = deadline - monotonic_time()
def setup_logging():
    """Configure root logging: INFO level with timestamped messages."""
    logging.basicConfig(format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)
def get_net_socket(host, port):
    """Connect a TCP socket to (host, port) with a 1-second timeout."""
    conn = socket.create_connection((host, port))
    conn.setblocking(False)
    conn.settimeout(1)
    return conn
def main():
    """Main loop: connect to the serial bridge and run comms cycles forever.

    Outer loop re-establishes the TCP connection whenever it drops;
    inner loop runs do_comms + sleep until an error forces a reconnect.
    """
    setup_logging()
    L("Running templog rfcomm server")
    if '--daemon' in sys.argv:
        utils.cheap_daemon()
    # NOTE(review): next_wake_time appears unused below — confirm dead.
    next_wake_time = 0
    while True:
        sock = None
        try:
            sock = get_net_socket(config.SERIAL_HOST, config.SERIAL_PORT)
        except Exception, e:
            # Connection failures are expected; silently retry after a nap.
            #logging.exception("Error connecting")
            pass
        if not sock:
            sleep_for(config.SLEEP_TIME)
            continue
        while True:
            try:
                do_comms(sock)
                sleep_for(config.SLEEP_TIME)
            except Exception, e:
                # Any comms error: log it and reconnect from scratch.
                logging.exception("Error in do_comms")
                break

if __name__ == '__main__':
    main()
| StarcoderdataPython |
4898794 | <filename>virt/ansible-latest/lib/python2.7/site-packages/ansible/modules/cloud/vmware/vmware_guest_tools_upgrade.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, <NAME> <<EMAIL>>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_tools_upgrade
short_description: Module to upgrade VMTools
version_added: 2.8
description:
- This module upgrades the VMWare Tools on Windows and Linux guests.
requirements:
- "python >= 2.6"
- PyVmomi
notes:
- In order to upgrade VMTools, please power on virtual machine before hand - either 'manually' or using module M(vmware_guest_powerstate).
options:
name:
description:
- Name of the virtual machine to work with.
- This is required if C(UUID) is not supplied.
name_match:
description:
- If multiple virtual machines matching the name, use the first or last found.
default: 'first'
choices: ['first', 'last']
uuid:
description:
- UUID of the instance to manage if known, this is VMware's unique identifier.
- This is required if C(name) is not supplied.
folder:
description:
- Destination folder, absolute or relative path to find an existing guest.
- This is required, if C(name) is supplied.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
- ' folder: vm/folder2'
datacenter:
description:
- Destination datacenter where the virtual machine exists.
required: True
extends_documentation_fragment: vmware.documentation
author:
- <NAME> (@MikeKlebolt) <<EMAIL>>
'''
EXAMPLES = '''
- name: Upgrade VMWare Tools
vmware_guest_tools_upgrade:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ <PASSWORD> }}"
datacenter: "{{ datacenter_name }}"
uuid: 421e4592-c069-924d-ce20-7e7533fab926
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task
from ansible.module_utils._text import to_native
class PyVmomiHelper(PyVmomi):
    """Thin PyVmomi wrapper providing the VMware-Tools upgrade operation."""

    def __init__(self, module):
        """Initialize with the AnsibleModule carrying connection params."""
        super(PyVmomiHelper, self).__init__(module)

    def upgrade_tools(self, vm):
        """Upgrade VMware Tools on *vm* if needed.

        Returns an Ansible-style result dict with 'failed', 'changed'
        and 'msg' keys; never raises for expected precondition failures.
        """
        result = {'failed': False, 'changed': False, 'msg': ''}
        # Exit if VMware tools is already up to date
        if vm.guest.toolsStatus == "toolsOk":
            result.update(
                changed=False,
                msg="VMware tools is already up to date",
            )
            return result

        # Fail if VM is not powered on
        elif vm.summary.runtime.powerState != "poweredOn":
            result.update(
                failed=True,
                msg="VM must be powered on to upgrade tools",
            )
            return result

        # Fail if VMware tools is either not running or not installed
        elif vm.guest.toolsStatus in ["toolsNotRunning", "toolsNotInstalled"]:
            result.update(
                failed=True,
                msg="VMware tools is either not running or not installed",
            )
            return result

        # If vmware tools is out of date, check major OS family
        # Upgrade tools on Linux and Windows guests
        elif vm.guest.toolsStatus == "toolsOld":
            try:
                if vm.guest.guestFamily in ["linuxGuest", "windowsGuest"]:
                    task = vm.UpgradeTools()
                    changed, err_msg = wait_for_task(task)
                    result.update(changed=changed, msg=to_native(err_msg))
                else:
                    result.update(msg='Guest Operating System is other than Linux and Windows.')
                return result
            except Exception as exc:
                result.update(
                    failed=True,
                    msg='Error while upgrading VMware tools %s' % to_native(exc),
                )
                return result
        else:
            # Any other toolsStatus value is treated as not upgradeable.
            result.update(
                failed=True,
                msg="VMWare tools could not be upgraded",
            )
            return result
def main():
    """Ansible module entry point: locate the VM and upgrade its tools."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        name=dict(type='str'),
        name_match=dict(type='str', choices=['first', 'last'], default='first'),
        uuid=dict(type='str'),
        folder=dict(type='str'),
        datacenter=dict(type='str', required=True),
    )
    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=[['name', 'uuid']])
    if module.params['folder']:
        # FindByInventoryPath() does not require an absolute path
        # so we should leave the input folder path unmodified
        module.params['folder'] = module.params['folder'].rstrip('/')

    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    vm = pyv.get_vm()

    # VM already exists
    if vm:
        try:
            result = pyv.upgrade_tools(vm)
            if result['changed']:
                module.exit_json(changed=result['changed'])
            elif result['failed']:
                module.fail_json(msg=result['msg'])
            else:
                module.exit_json(msg=result['msg'], changed=result['changed'])
        except Exception as exc:
            module.fail_json(msg='Unknown error: %s' % to_native(exc))
    else:
        module.fail_json(msg='Unable to find VM %s' % (module.params.get('uuid') or module.params.get('name')))
| StarcoderdataPython |
5000364 | <reponame>jcgoble3/discord_bot.py<filename>distest-test-bot.py
#!/usr/bin/env python3.7
# Based on the example tester at
# https://github.com/JakeCover/distest/blob/develop/example_tester.py
#
# To run:
# ./distest-test-bot.py --run all --channel <channel ID> 821891815329890336 <tester token>
# (821891815329890336 is the user ID of the bot being tested)
#
# To run with coverage enabled, run these two commands:
# coverage run distest-test-bot.py --run all --channel <channel ID> 821891815329890336 <tester token>
# coverage report
import asyncio
import sys
from concurrent.futures import ThreadPoolExecutor
from distest import TestCollector, run_dtest_bot
from distest.patches import patch_target
from distest.exceptions import TestRequirementFailure
# Actual tests
test = TestCollector()
@test()
async def startup_delay(interface):
    """Initial pause so the tested bot is ready before real tests run."""
    # There is a race condition with the first test sometimes being
    # attempted before the tested bot is ready, resulting in random
    # failures. This delay helps to ensure that the bot is ready to
    # accept commands.
    await asyncio.sleep(10)
## @story{51} This test of a pre-existing command was used as a
# proof-of-concept for the use of distest to test commands.
@test()
async def test_reverse(interface):
    """!reverse echoes its argument reversed."""
    await interface.assert_reply_contains("!reverse this class sucks", "skcus ssalc siht")
## @story{8}
@test()
async def test_filter(interface):
    """A message containing a banned word triggers the bad-word filter."""
    await interface.assert_reply_contains("shit", "Bad word detected!")
## @story{11}
@test()
async def test_logging(interface):
    """A sent message is appended to LogFile.txt by the bot's logger."""
    # Snapshot the log so only newly appended content is inspected.
    with open("LogFile.txt", "r") as file:
        before = file.read()
    message = "abcdefghijklmnopqrstuvwxyz0123456789"
    await interface.send_message(message)
    # Give the bot a moment to flush the log entry.
    await asyncio.sleep(1)
    with open("LogFile.txt", "r") as file:
        diff = file.read()[len(before):]
    if message not in diff:
        raise TestRequirementFailure
## @story{10}
@test()
async def test_joke(interface):
    """!joke replies with one of the canned jokes."""
    # Random response is hard to test; ? is the only thing common to all
    # responses
    await interface.assert_reply_contains("!joke", "?")
## @story{6}
@test()
async def test_meme(interface):
    """!meme replies with an image attachment."""
    await interface.assert_reply_has_image("!meme")
## @story{58}
@test()
async def test_xkcd(interface):
    """!comic replies with a link to an xkcd comic."""
    await interface.assert_reply_contains("!comic", "https://xkcd.com/")
## @story{9}
@test()
async def test_trivia(interface):
    """Full trivia lifecycle: stop/status before start, start, duplicate
    start, status during game, wrong and right answers, stop, bad subcommand.
    Relies on run_tests() pre-seeding the question/answer pair."""
    await interface.assert_reply_contains("!trivia stop", "not in progress")
    await interface.assert_reply_contains("!trivia status", "not in progress")
    await interface.assert_reply_contains("!trivia start", "Starting")
    await interface.assert_reply_contains("!trivia start", "already in progress")
    await interface.assert_reply_contains("!trivia status", "game in progress")
    await interface.assert_reply_contains("!trivia answer wrongAnswer", "is incorrect")
    await interface.assert_reply_contains("!trivia answer testAnswer", "is correct")
    await interface.assert_reply_contains("!trivia stop", "Stopping")
    await interface.assert_reply_contains("!trivia oops", "arg1:")
## @story{83}
@test()
async def test_hello(interface):
    """!hello conversation: each mood reply gets the matching response,
    an unknown reply is rejected, and silence times out after ~12s."""
    await interface.assert_reply_contains("!hello", "my name is")
    await interface.assert_reply_contains("bad", "That's bad!")
    await interface.assert_reply_contains("!hello", "my name is")
    await interface.assert_reply_contains("good", "That's good!")
    await interface.assert_reply_contains("!hello", "my name is")
    await interface.assert_reply_contains("great", "That's great!")
    await interface.assert_reply_contains("!hello", "my name is")
    await interface.assert_reply_contains("meh", "don't understand")
    await interface.assert_reply_contains("!hello", "my name is")
    await interface.get_delayed_reply(12, interface.assert_message_contains, "too long to respond")
## @story{15}
@test()
async def test_poll(interface):
    """!poll echoes the question and both answer options."""
    message = await interface.wait_for_reply("!poll question answer_yes answer_no")
    await interface.assert_message_contains(message, "question")
    await interface.assert_message_contains(message, "answer_yes")
    await interface.assert_message_contains(message, "answer_no")
    # poll is largely self-testing due to programming by contract, so
    # just let the bot finish the reactions
    await asyncio.sleep(3)
## @story{15}
@test()
async def test_bad_poll(interface):
    """!poll with no arguments reports every missing field."""
    message = await interface.wait_for_reply("!poll")
    await interface.assert_message_contains(message, "Cannot create poll")
    await interface.assert_message_contains(message, "No question")
    await interface.assert_message_contains(message, "option #1")
    await interface.assert_message_contains(message, "option #2")
## @story{12}
@test()
async def test_guessing_game(interface):
    """!guess replies with a right/wrong message."""
    # Since the target number is a local variable, we can't reliably test
    # both branches, so we just test one for now. "correct number" appears
    # in both responses.
    await interface.assert_reply_contains("!guess 8", "correct number")
# Run the tests
async def run_tests():
    """Run the target bot and the distest tester bot concurrently.

    The tested bot runs on this thread's event loop; the distest runner
    (blocking) runs on a worker thread with its own loop. Signal-handler
    installation is stubbed out because add_signal_handler only works on
    the main thread.
    """
    from utils.bot import bot, config
    from utils.trivia import QuestionAnswer
    patch_target(bot)
    # Pre-seed a known trivia question so test_trivia is deterministic.
    bot.trivia.qAsked = QuestionAnswer("question", "testAnswer")

    def patch_signals(*args, **kwargs):
        # Force callers to fall back to non-signal shutdown handling.
        raise NotImplementedError

    loop = asyncio.get_event_loop()
    loop.add_signal_handler = patch_signals

    def set_loop():
        # Worker-thread initializer: give the thread its own loop,
        # with the same signal-handler stub.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.add_signal_handler = patch_signals

    with ThreadPoolExecutor(initializer=set_loop) as pool:
        await asyncio.gather(
            bot.start(config["token"]),
            loop.run_in_executor(pool, run_dtest_bot, sys.argv, test)
        )
    await bot.logout()

loop = asyncio.get_event_loop()
loop.run_until_complete(run_tests())
| StarcoderdataPython |
144066 | """python 3.7+
Run allele stage2_var_obj methods.
<NAME> 2019-2022
"""
import sys
import os
import exceptions
from run_scripts.tools import run_mash_screen, create_dataframe, \
apply_filters, create_csv, get_variant_ids
def sort_genes(gene, stage2_var_obj, allele_or_gene, session):
    """
    Main run script for allele/gene presence stage2_var_obj.
    calls run_allele function for mash screen and collates results
    :param gene: variants query object from stage2_var_obj.var_list
    :param stage2_var_obj: stage2_var_obj class object
    :param allele_or_gene : type of stage2_var_obj "gene" or "allele"
    :param session: active database session
    :return: updated analysis or stage2_var object
    """
    genename = gene.genes.gene_name
    if allele_or_gene == "allele":
        # run screens against appropriate allele FASTAs
        hit_genes = run_alleles(stage2_var_obj, genename)
    elif allele_or_gene == "gene_presence":
        # run screens against appropriate gene FASTAs
        hit_genes = run_genes(stage2_var_obj, genename)
    else:
        #exit if unrecognised (coding error)
        sys.stderr.write(f"Code error: Unrecognised stage2_var_obj type {allele_or_gene}")
        sys.exit(1)
    # append hit genes output to stage2_var_obj object
    # stage2_result holds an error string when a screen failed;
    # only merge the dict when no error string has replaced it.
    if type(stage2_var_obj.stage2_result) != str:
        stage2_var_obj.stage2_result.update(hit_genes)
    # use variant query to get Variant records for hit
    stage2_var = get_variant_ids(hit_genes, allele_or_gene, stage2_var_obj.grp_id, session)
    stage2_var_obj.stage2_varids.append(stage2_var)
    return stage2_var_obj
def run_alleles(stage2_var_obj, genename):
    """
    Run allele determinations using mash screen cut off 90%
    :param stage2_var_obj: stage2_var_obj object
    :param genename: genename of allele
    :return: dict mapping genename -> hit allele (0 when no hit)
    """
    hit_alleles = {}
    # Minimum percent identity for an allele call.
    allele_cut = 90
    try:
        # get ref sketch for genename from database folder
        ref_sketch = os.path.join(stage2_var_obj.database, stage2_var_obj.folder,
                                  f"{genename}.msh")
        # run mash screen on the files
        outfile = run_mash_screen(stage2_var_obj, ref_sketch, genename)
        #create dataframe from the TSV
        # An empty screen output means mash found no match at all.
        if os.path.getsize(outfile) == 0:
            stage2_var_obj.stage2_result = f" {genename} not found in isolate, possible variant"
            sys.stderr.write(f"ERROR: {genename} not found in isolate, possible variant. \n")
        else:
            df = create_dataframe(outfile, "Allele")
            #TODO these are using the universally input mash cut offs may need to change
            #Filter dataframe for cutoffs using the filtering function
            filtered_df, original = apply_filters(df, allele_cut,
                                                  stage2_var_obj.minmulti, False)
            original = original.sort_values(by=["percent", "identity"],
                                            ascending=False)
            #create csv of original mash output with added percent fields
            filename = f"{stage2_var_obj.sampleid}_{genename}_screen.csv"
            create_csv(original, stage2_var_obj.output_dir, filename)
            # find max percent hit and variant for result display
            max_percent = round(original['percent'].max(), 2)
            max_hit_var = original['Allele'][original['percent'] == original['percent'].max()].iloc[0]
            #analyse outputs
            if not filtered_df.empty:
                # collate hits
                # NOTE(review): multiple filtered rows overwrite each other,
                # keeping only the last — confirm a single hit is expected.
                for index, rows in filtered_df.iterrows():
                    result = rows.Allele
                    hit_alleles[genename] = result
                    stage2_var_obj.stage2_hits[genename] = [result, max_percent]
                sys.stdout.write(f"Completed {genename} allele analysis.\n")
            else:  # for samples with no hits
                hit_alleles[genename] = 0
                stage2_var_obj.stage2_hits[genename] = [f"{max_hit_var}: {max_percent}"]
                stage2_var_obj.rag_status = "RED"
                if max_percent < 20:
                    sys.stdout.write(f"Allele {genename} did not match references, hit <20% \n")
                # for samples failing on low median multiplicity only add result but flag amber
                elif max_percent > allele_cut:
                    stage2_var_obj.stage2_hits[genename] = [f"{max_hit_var}: {max_percent}"]
                    stage2_var_obj.rag_status = "AMBER"
                    sys.stdout.write(f"Allele {genename}- median multiplicity below {stage2_var_obj.minmulti} "
                                     f"check seq quality\n")
                    stage2_var_obj.stage2_result = stage2_var_obj.folder
                # for samples with intermediate %match - unusual alleles
                else:
                    sys.stdout.write(f"Allele {genename}- hit <90%, possible variant or seq quality issue\n")
        return hit_alleles
    except IOError:
        sys.stderr.write(f"CTV.db/file integrity error - {stage2_var_obj.sampleid}. Reference files not found")
        raise exceptions.CtvdbError()
def run_genes(stage2_var_obj, genename):
    """
    Run presence/absence determinations using mash screen
    Present if >80%, Absent if <20
    >50% - AMBER presence
    <50% - AMBER absence
    :param stage2_var_obj: object run -either analysis or stage2 mixsero object
    :return: dict mapping genename -> "detected"/"not_detected"/"No Match"
    """
    hit_genes = {}
    # Percent identity above which the gene is confidently present.
    hit_cut = 80
    try:
        # get ref sketch for genename from database folder
        ref_sketch = os.path.join(stage2_var_obj.database, stage2_var_obj.folder,
                                  f"{genename}.msh")
        # run mash screen on the files
        outfile = run_mash_screen(stage2_var_obj, ref_sketch, genename)
        # No hits at all on MASH stage2_var_obj
        if os.path.getsize(outfile) == 0:
            hit_genes[genename] = "not_detected"
            stage2_var_obj.stage2_hits[genename] = 0
            sys.stdout.write(f"Gene {genename} not detected\n")
        #create dataframe from the TSV
        else:
            df = create_dataframe(outfile, "Gene_presence")
            #Filter dataframe for cutoffs using the filtering function
            # use 80% initial hit cut off (Green RAG)
            filtered_df, original = apply_filters(df, hit_cut,
                                                  stage2_var_obj.minmulti, False)
            original = original.sort_values(by=["percent", "identity"],
                                            ascending=False)
            #create csv of original mash output with added percent fields
            filename = f"{stage2_var_obj.sampleid}_{genename}_screen.csv"
            create_csv(original, stage2_var_obj.output_dir, filename)
            # find max percent hit for result display
            max_percent = round(original['percent'].max(), 2)
            max_mm = round(original['median-multiplicity'].max(), 2)
            stage2_var_obj.stage2_hits[genename] = max_percent
            #analyse outputs
            if not filtered_df.empty:
                # if over upper cut off only one sequence so only 1 row possible
                hit_genes[genename] = "detected"
                sys.stdout.write(f"Gene {genename} detected at {max_percent}%\n")
            else:  # for samples with no hits
                if max_percent < 20:
                    hit_genes[genename] = "not_detected"
                    sys.stdout.write(f"Gene {genename} not detected\n")
                # for samples with intermediate %match - possible variants
                elif max_percent < 50:
                    # Update status but avoid overwriting previous RED or Amber status
                    if stage2_var_obj.rag_status == "GREEN":
                        stage2_var_obj.rag_status = "AMBER"
                    hit_genes[genename] = "not_detected"
                    sys.stdout.write(f"Gene {genename} not detected, however {max_percent} hit to gene, possible variant\n")
                elif max_percent < hit_cut:
                    if stage2_var_obj.rag_status == "GREEN":
                        stage2_var_obj.rag_status = "AMBER"
                    hit_genes[genename] = "detected"
                    sys.stdout.write(f"Gene {genename} detected, however {max_percent} hit to gene, possible variant \n")
                # catch samples with low median multiplicity
                elif max_mm < stage2_var_obj.minmulti and max_percent > hit_cut:
                    if stage2_var_obj.rag_status == "GREEN":
                        stage2_var_obj.rag_status = "AMBER"
                    hit_genes[genename] = "detected"
                    sys.stdout.write(
                        f"Gene {genename} detected, however median_multiplicity below {stage2_var_obj.minmulti}"
                        f" - check sequence depth\n")
                else:
                    # NB this shouldn't be triggered if all options covered above
                    hit_genes[genename] = "No Match"
                    sys.stdout.write(f"Gene {genename} unrecognised, match = {max_percent} percent.\n")
        return hit_genes
    except IOError:
        sys.stderr.write(f"CTV.db/file integrity error. Reference files not found")
        raise exceptions.CtvdbError()
| StarcoderdataPython |
362563 | import os
from office365.sharepoint.client_context import ClientContext
from tests import settings
# Self-signed certificate credentials used for app-only authentication.
cert_settings = {
    'client_id': '51d03106-4726-442c-86db-70b32fa7547f',
    'thumbprint': "6B36FBFC86FB1C019EB6496494B9195E6D179DDB",
    'certificate_path': '{0}/selfsigncert.pem'.format(os.path.dirname(__file__))
}

# Authenticate against the tenant with the certificate, then fetch the root web.
ctx = ClientContext(settings.get('default', 'url')).with_client_certificate(
    settings.get('default', 'tenant'),
    cert_settings['client_id'],
    cert_settings['thumbprint'],
    cert_settings['certificate_path'])
current_web = ctx.web.get().execute_query()
print("{0}".format(current_web.url))
| StarcoderdataPython |
4908434 | #!/usr/bin/env python3
import sys
sys.path.insert(0,'../')
from aoc_input import *
# Require exactly one argument: the puzzle input file.
if len(sys.argv) != 2:
    print('Usage:', sys.argv[0], '<input.txt>')
    sys.exit(1)

report = input_as_lines(sys.argv[1])
width = len(report[0])

# Xor mask with `width` low bits set, used to invert gamma into epsilon.
mask = int('1' * width, 2)

# Gamma rate: for every bit position, take the most common bit across all
# lines (ties and non-'1' characters count as '0').
gamma_bits = []
for pos in range(width):
    ones = sum(1 for line in report if line[pos] == '1')
    gamma_bits.append('1' if ones > len(report) - ones else '0')

gamma = int(''.join(gamma_bits), 2)
# Epsilon rate = bitwise inverse of the gamma rate within `width` bits.
epsilon = gamma ^ mask
print(gamma, epsilon, gamma * epsilon)
| StarcoderdataPython |
1835011 | # -*- coding=utf-8 -*-
'''
Created on 15 July 2019
@author: Dark
'''
from AnanasStepperSDK import SerialHelper
import logging
class SerialCon(object):
    '''
    Serial connection handler for the Ananas stepper serial interface,
    built on top of SerialHelper. Forwards connection-state changes and
    received data to user-registered callbacks, swallowing (and logging)
    any exception raised inside a callback.
    '''
    def __init__(self, Port="COM5"):
        # Open the port immediately at a fixed baud rate of 115200.
        self.com = Port
        self.myserial = SerialHelper.SerialHelper(Port=self.com, BaudRate="115200")
        # User callbacks; set via on_connected_changed() / myserial_data_received_callback().
        self.change_target_callback = None
        self.serial_receive_data_callback = None
        print("Open Com %s at"%(self.com) + str(self.myserial))
    def on_connected_changed(self, change_target_callback):
        """Register a callback(is_connected) and start watching the port state."""
        self.change_target_callback = change_target_callback
        self.myserial.on_connected_changed(self.myserial_on_connected_changed)
    def write(self, data):
        """Write raw data to the serial port (non-hex mode)."""
        self.myserial.write(data, False);
    def myserial_on_connected_changed(self, is_connected):
        """Internal: react to port state changes, then notify the user callback."""
        if is_connected:
            logging.debug("myserial_on_connected_changed Connected")
            # Only once connected do we hook up the data-received pipeline.
            self.myserial.connect()
            self.myserial.on_data_received(self.myserial_on_data_received)
        else:
            logging.debug("myserial_on_connected_changed DisConnected")
        # Callback may raise; never let a user callback kill the serial thread.
        try:
            if self.change_target_callback:
                logging.debug("charge target callback is registed call it now")
                self.change_target_callback(is_connected)
        except Exception as e:
            logging.error("error " +str(e))
    def myserial_data_received_callback(self,receive_callback):
        """Register a callback(data) invoked for every chunk received."""
        if receive_callback:
            logging.debug("register data receive callback")
            self.serial_receive_data_callback = receive_callback
    def myserial_on_data_received(self, data):
        """Internal: echo received data and forward it to the user callback."""
        print(data)
        # call back function may have exception
        try:
            if self.serial_receive_data_callback:
                self.serial_receive_data_callback(data)
        except Exception as e:
            logging.error("error " + str(e))
    def disconnect(self):
        """Close the underlying serial connection, if any."""
        if self.myserial:
            self.myserial.disconnect()
if __name__ == '__main__':
    # Smoke test: open COM5, log connection-state changes for 10 seconds.
    import time
    def seiarl_change(is_connect):
        print("connect state is " + str(is_connect))
    con = SerialCon("COM5")
    print(str(con))
    if con:
        con.on_connected_changed(seiarl_change)
    # Keep the process alive long enough to observe callbacks.
    time.sleep(10)
| StarcoderdataPython |
6581728 | <gh_stars>1-10
import sys
import pika
import json
sys.path.append("..")
from common.settings import cfg
import common.file_system_manager as fsm
import common.tree_tools as tt
import xml.dom.minidom as xml
from bson.objectid import ObjectId
from pymongo import MongoClient
# Score rebuilder should always re-index, and rely on the measure order instead of the n./label-attribute
def callback(ch, method, properties, body):
    """Consume one form-processing job from RabbitMQ.

    Looks up the task and its aggregated crowd result in MongoDB, derives a
    status for "verify" steps, and publishes a status-update message back to
    the task scheduler queue. Raises if the step type was not handled.
    """
    data = json.loads(body)
    # NOTE(review): sheet_name is read but never used below.
    sheet_name = data['name']
    task_id = data['task_id']
    # Obtain corresponding task and slice
    task = db[cfg.col_task].find_one({"_id" : ObjectId(task_id)})
    # Get aggregated XML
    aggregated_result = db[cfg.col_aggregated_result].find_one({
        "task_id" : task_id,
        "step" : task["step"]
    })
    status_update_msg = {
        "_id": task_id,
        "module": "form_processor"
    }
    form_output = json.loads(aggregated_result["result"])
    # General procedure for all verification steps
    if task["step"]=="verify":
        verification_passed = form_output["verify"][0]
        if verification_passed:
            status_update_msg["status"] = "verification-passed"
        else:
            status_update_msg["status"] = "verification-failed"
    # Fail loudly for any step type that did not set a status above.
    if "status" not in status_update_msg:
        raise Exception(f"Task of type {task['type']} did not receive a status, make sure it gets handled in this module!")
    global channel
    channel.queue_declare(queue=cfg.mq_task_scheduler_status)
    channel.basic_publish(exchange="", routing_key=cfg.mq_task_scheduler_status, body=json.dumps(status_update_msg))
# Wire up RabbitMQ: consume form-processing jobs with auto-acknowledgement.
connection = pika.BlockingConnection(pika.ConnectionParameters(*cfg.rabbitmq_address))
channel = connection.channel()
channel.queue_declare(queue=cfg.mq_form_processor)
channel.basic_consume(queue=cfg.mq_form_processor, on_message_callback=callback, auto_ack=True)
# MongoDB handles used by callback() above.
client = MongoClient(*cfg.mongodb_address)
db = client[cfg.db_name]
print('Form processor is listening...')
channel.start_consuming() | StarcoderdataPython |
1814947 | import scipy.stats
from .utils import *
from scipy.stats import mannwhitneyu, ttest_ind, betabinom
def calc_wilcoxon_fn(M, N, m, s, alpha = 0.05, n_sim = 10_000):
    """
    Estimate the false negative rate (1 - power) of a Mann-Whitney U test
    between two groups of simulated per-patient cell fractions.

    :param M: number of patients, as a list [M0, M1]
    :param N: number of cells, as a list [N0, N1]
    :param m: mean for both groups, as a list [m0, m1]
    :param s: std for both groups, as a list [s0, s1]
    :param alpha: significance level
    :param n_sim: simulation iterations
    :return: false negative rate, i.e., 1 - power
    """
    N0, N1 = N
    M0, M1 = M
    m0, m1 = m
    s0, s1 = s
    # Convert (mean, std) to Beta parameters, draw beta-binomial cell counts,
    # and normalise by each group's own cell count to get fractions in [0, 1].
    # Bug fix: the original divided by an undefined name `n` (NameError);
    # the number of trials per group is N0 / N1 respectively.
    a0, b0 = normal_to_beta(m0, s0)
    r0 = betabinom.rvs(N0, a0, b0, size=(M0, n_sim)) / N0
    a1, b1 = normal_to_beta(m1, s1)
    r1 = betabinom.rvs(N1, a1, b1, size=(M1, n_sim)) / N1
    # Fraction of simulations in which the test fails to reject at level alpha.
    return 1 - sum(mannwhitneyu(r0, r1).pvalue < alpha) / n_sim
def calc_fn_rate_beta(M, N, a, b, alpha=0.05, test_type="one-sided", offset=0, sign=0):
    """
    Calculate the false negative rate of a Welch-style t-test on
    beta-binomial fractions, analytically (no simulation).

    :param M: number of patients (scalar, or list [M0, M1])
    :param N: number of cells (scalar, or list [N0, N1])
    :param a: Beta(a, b) first shape parameter, as a list [a0, a1]
    :param b: Beta(a, b) second shape parameter, as a list [b0, b1]
    :param alpha: significance level
    :param test_type: "one-sided" or "two-sided"
    :param offset: shift added to group 0's expected fraction before comparing
    :param sign: 0 to use |effect| (direction-agnostic); otherwise multiplies
        the standardised effect by this sign
    :return: false negative rate, i.e., 1 - power
    :raises ValueError: if test_type is not recognised
    """
    # Scalars apply to both groups.
    if not is_iterable(M):
        M = [M, M]
    if not is_iterable(N):
        N = [N, N]
    # Expected fraction per group: the Beta mean a / (a + b).
    Ep = [a[0] / (a[0] + b[0]),
          a[1] / (a[1] + b[1])]
    # Vp = [a[0] * b[0] * (a[0] + b[0] + N[0]) / ((N[0] * (a[0] + b[0]) * (a[0] + b[0])) * (a[0] + b[0] + 1)),
    #      a[1] * b[1] * (a[1] + b[1] + N[1]) / ((N[1] * (a[1] + b[1]) * (a[1] + b[1])) * (a[1] + b[1] + 1))]
    # (Inline formula above replaced by the shared helper.)
    Vp = [var_betabinom_over_n(N[0], a[0], b[0]), var_betabinom_over_n(N[1], a[1], b[1])]
    # Standardised effect size of the group difference.
    Et = (Ep[1] - (Ep[0] + offset)) / (Vp[0] / M[0] + Vp[1] / M[1]) ** .5
    if sign == 0:
        Et = abs(Et)
    else:
        Et = sign * Et
    # Welch-Satterthwaite degrees of freedom.
    nu = (Vp[0] / M[0] + Vp[1] / M[1]) ** 2 / ((Vp[0] / M[0]) ** 2 / (M[0] - 1) + (Vp[1] / M[1]) ** 2 / (M[1] - 1))
    if test_type == "one-sided":
        t_star = scipy.stats.t.ppf(q=1 - alpha, df=nu)
    elif test_type == "two-sided":
        t_star = scipy.stats.t.ppf(q=1 - alpha / 2, df=nu)
    else:
        raise ValueError("test must be one-sided or two-sided")
    # P(T < t* - effect): probability of failing to reject under the alternative.
    return scipy.stats.t.cdf(t_star - Et, df=nu)
def calc_fn_rate(M, N, m, s, alpha, test_type, offset, sign):
    """
    Convert per-group (mean, std) pairs to Beta shape parameters and delegate
    the false-negative-rate computation to calc_fn_rate_beta.

    :param m: means for the two groups, as a list [m0, m1]
    :param s: std for both groups (scalar applies to both, or list [s0, s1])
    :return: false negative rate, or NaN when the Beta conversion is undefined
    """
    if not is_iterable(s):
        s = [s, s]
    try:
        a0, b0 = normal_to_beta(m[0], s[0])
        a1, b1 = normal_to_beta(m[1], s[1])
    except ZeroDivisionError:
        # Zero variance has no Beta representation.
        return float("nan")
    return calc_fn_rate_beta(M, N, [a0, a1], [b0, b1], alpha, test_type, offset, sign)
def calc_fn_rate_override(M, N, m, s, alpha, test_type, override_diff):
    """
    Same analytical false-negative-rate computation as calc_fn_rate_beta,
    except the group-mean difference in the effect size is replaced by the
    caller-supplied `override_diff` (always taken as an absolute effect).

    :param M: number of patients (scalar or list [M0, M1])
    :param N: number of cells (scalar or list [N0, N1])
    :param m: means for the two groups, as a list [m0, m1]
    :param s: std (scalar applies to both groups, or list [s0, s1])
    :param alpha: significance level
    :param test_type: "one-sided" or "two-sided"
    :param override_diff: overriden difference between the group means
    :return: false negative rate, or NaN when the Beta conversion is undefined
    :raises ValueError: if test_type is not recognised
    """
    if not is_iterable(s):
        s = [s, s]
    a = [None, None]
    b = [None, None]
    try:
        a[0], b[0] = normal_to_beta(m[0], s[0])
        a[1], b[1] = normal_to_beta(m[1], s[1])
    except ZeroDivisionError:
        return float("nan")
    if not is_iterable(M):
        M = [M, M]
    if not is_iterable(N):
        N = [N, N]
    # NOTE(review): Ep is computed but unused here (the mean difference is
    # overridden by override_diff below).
    Ep = [a[0] / (a[0] + b[0]),
          a[1] / (a[1] + b[1])]
    # Vp = [a[0] * b[0] * (a[0] + b[0] + N[0]) / ((N[0] * (a[0] + b[0]) * (a[0] + b[0])) * (a[0] + b[0] + 1)),
    #      a[1] * b[1] * (a[1] + b[1] + N[1]) / ((N[1] * (a[1] + b[1]) * (a[1] + b[1])) * (a[1] + b[1] + 1))]
    Vp = [var_betabinom_over_n(N[0], a[0], b[0]), var_betabinom_over_n(N[1], a[1], b[1])]
    # Standardised (absolute) effect from the overridden difference.
    Et = override_diff / (Vp[0] / M[0] + Vp[1] / M[1]) ** .5
    Et = abs(Et)
    # Welch-Satterthwaite degrees of freedom.
    nu = (Vp[0] / M[0] + Vp[1] / M[1]) ** 2 / ((Vp[0] / M[0]) ** 2 / (M[0] - 1) + (Vp[1] / M[1]) ** 2 / (M[1] - 1))
    if test_type == "one-sided":
        t_star = scipy.stats.t.ppf(q=1 - alpha, df=nu)
    elif test_type == "two-sided":
        t_star = scipy.stats.t.ppf(q=1 - alpha / 2, df=nu)
    else:
        raise ValueError("test must be one-sided or two-sided")
    return scipy.stats.t.cdf(t_star - Et, df=nu)
def calc_fn_rate_baseline(M, m, s, alpha, test_type, offset, sign):
    """
    Baseline false-negative rate using the raw normal moments directly
    (no beta-binomial sampling variance; i.e. ignoring the cell-count layer).

    :param M: number of patients (scalar or list [M0, M1])
    :param m: means for the two groups, as a list [m0, m1]
    :param s: std (scalar applies to both groups, or list [s0, s1])
    :param alpha: significance level
    :param test_type: "one-sided" or "two-sided"
    :param offset: shift added to group 0's mean before comparing
    :param sign: 0 to use |effect|; otherwise multiplies the effect by this sign
    :return: false negative rate, or NaN when the Beta conversion is undefined
    :raises ValueError: if test_type is not recognised
    """
    if not is_iterable(s):
        s = [s, s]
    # NOTE(review): a and b are only computed so that degenerate inputs
    # trigger the same ZeroDivisionError -> NaN path as the other calc_fn_*
    # functions; the values themselves are unused below.
    a = [None, None]
    b = [None, None]
    try:
        a[0], b[0] = normal_to_beta(m[0], s[0])
        a[1], b[1] = normal_to_beta(m[1], s[1])
    except ZeroDivisionError:
        return float("nan")
    if not is_iterable(M):
        M = [M, M]
    # Use the normal moments directly: mean and variance per group.
    Ep = m
    Vp = [s[0] ** 2, s[1] ** 2]
    Et = (Ep[1] - (Ep[0] + offset)) / (Vp[0] / M[0] + Vp[1] / M[1]) ** .5
    if sign == 0:
        Et = abs(Et)
    else:
        Et = sign * Et
    # Welch-Satterthwaite degrees of freedom.
    nu = (Vp[0] / M[0] + Vp[1] / M[1]) ** 2 / ((Vp[0] / M[0]) ** 2 / (M[0] - 1) + (Vp[1] / M[1]) ** 2 / (M[1] - 1))
    if test_type == "one-sided":
        t_star = scipy.stats.t.ppf(q=1 - alpha, df=nu)
    elif test_type == "two-sided":
        t_star = scipy.stats.t.ppf(q=1 - alpha / 2, df=nu)
    else:
        raise ValueError("test must be one-sided or two-sided")
return scipy.stats.t.cdf(t_star - Et, df=nu) | StarcoderdataPython |
8024507 | <reponame>trustidkid/myscrumy
from django import forms
from django.contrib.auth.models import User
from .models import ScrumyGoals
from django.forms import ModelForm
# Lab 19 starts here
"""
SignupForm and CreateGoalForm. The signup form contains fields from the User model (first_name, last_name, email, username, password), and CreateGoalForm contains the goal_name and user fields from the ScrumyGoals model. The user field lets an application user select the particular user the goal is being created for.
"""
class SignupForm(ModelForm):
    """Registration form exposing the built-in User fields needed to sign up."""
    class Meta:
        model = User
        fields = ['first_name', 'last_name',
                  'email', 'username', 'password']
class CreateGoalForm(ModelForm):
    """Goal-creation form; `user` selects whom the goal is created for."""
    class Meta:
        model = ScrumyGoals
        fields = ['goal_name', 'user']
class MoveGoal(ModelForm):
    """Form for moving a goal between scrum board columns (status only)."""
    class Meta:
        model = ScrumyGoals
        fields = ['goal_status']
| StarcoderdataPython |
1759780 | from datetime import datetime, timedelta
from fastapi import FastAPI
from jose import jwt
from starlette.authentication import AuthCredentials, requires
from starlette.requests import Request
from fastapi_auth_middleware import OAuth2Middleware, FastAPIUser
from tests.keys import PUBLIC_KEY, PRIVATE_KEY
def get_scopes(decoded_token: dict):
    """Extract OAuth2 scopes from a decoded JWT.

    Returns the space-separated "scope" claim as a list, or an empty list
    when the claim is missing or is not a string.
    """
    try:
        return decoded_token["scope"].split(" ")
    except (KeyError, AttributeError):
        # Bug fix: `except KeyError or AttributeError` evaluates the boolean
        # expression first (-> KeyError), so AttributeError was never caught;
        # a tuple catches both.
        return []
def get_user(decoded_token: dict):
    """Build a FastAPIUser from a decoded JWT.

    Splits the "name" claim into first name and remainder (last name); falls
    back to a placeholder user when the claim is missing or has no last name.
    """
    user_id = decoded_token.get("sub")
    try:
        # maxsplit=1 so multi-word names ("John Q Smith") no longer raise an
        # uncaught ValueError; the remainder becomes the last name.
        first_name, last_name = decoded_token.get("name").split(" ", 1)
        return FastAPIUser(first_name=first_name, last_name=last_name, user_id=user_id)
    except (AttributeError, ValueError):
        # AttributeError: "name" claim missing (None has no .split).
        # ValueError: single-word name cannot be unpacked into two parts.
        return FastAPIUser(first_name="did not specify", last_name="<NAME>", user_id=user_id)
def get_new_token(old_token: str):
    """Mint a fresh RS256-signed JWT valid for one hour.

    Usually you would perform some kind of exchange with a lookup for the
    refresh token here; this test stub simply issues a new token.
    """
    issued_at = datetime.utcnow()
    claims = {
        "sub": "1",
        "iat": issued_at,
        "exp": issued_at + timedelta(hours=1),  # Valid for 1 hour
        "aud": "tests",
        "iss": "tests",
        "name": "Code Specialist",
        "scope": "a b c"
    }
    return jwt.encode(claims, key=PRIVATE_KEY, algorithm='RS256')
app = FastAPI(title="OAuth2 FastAPI App")
# Validate bearer tokens on every request; expired tokens are transparently
# renewed via get_new_token. Audience verification is disabled for these tests.
app.add_middleware(OAuth2Middleware,
                   public_key=PUBLIC_KEY,
                   get_scopes=get_scopes,
                   get_user=get_user,
                   get_new_token=get_new_token,
                   decode_token_options={"verify_signature": True, "verify_aud": False}
                   )
@app.get("/")
def home():
    # Route with no scope requirement: any authenticated request succeeds.
    return 'Hello World'
@app.get("/scopes")
def scopes(request: Request):
    # Echo the scopes the middleware attached to this request's credentials.
    auth_credentials: AuthCredentials = request.auth
    return auth_credentials.scopes
@app.get("/a-scope")
@requires("a")
def a_scope_required(request: Request):
    # Requires the "a" scope; returns basic user identity info.
    user: FastAPIUser = request.user
    return f'{user.is_authenticated} {user.display_name} {user.identity}'
@app.get("/a-b-c-scope")
@requires(["a", "b", "c"])
def a_scope_required(request: Request):
    # Requires all three scopes "a", "b" and "c".
    # NOTE(review): this function name duplicates the /a-scope handler above;
    # FastAPI registers the route at decoration time so it still works, but the
    # module-level name is shadowed - consider renaming.
    user: FastAPIUser = request.user
    return f'{user.is_authenticated} {user.display_name} {user.identity}'
@app.get("/d-scope")
@requires("d")
def a_scope_required(request: Request):
    # Requires the "d" scope, which no test token carries (negative test).
    # NOTE(review): third reuse of the name a_scope_required; routes register
    # fine, but the previous handlers' names are shadowed - consider renaming.
    user: FastAPIUser = request.user
    return f'{user.is_authenticated} {user.display_name} {user.identity}'
| StarcoderdataPython |
8011427 | <reponame>anildoferreira/CursoPython-PyCharm<gh_stars>0
# Interactive exercise: collect integers until the user answers N/n, then
# report the count, the values in descending order, and whether 5 was entered.
rep = ' '
c = 0
l = list()
while rep not in 'Nn':
    c += 1
    l.append(int(input(f'Digite o {c}° número: ')))
    # Keep asking until a valid S/N answer is given.
    # NOTE(review): .strip()[0] raises IndexError if the user just presses
    # Enter - confirm whether empty input should be handled.
    while True:
        rep = str(input('Quer continuar? [S/N]: ')).strip()[0]
        if rep in 'NnSs':
            break
        print('Tente novamente!', end=' ')
print(f'Foram Digitados {c} números.')
# Sort in place, descending.
l.sort(reverse=True)
print(f'Os valores em ordem decrescente são {l}')
if 5 in l:
    print('Sim, contém o valor 5 na lista!')
else:
    print('Não contém o valor 5 na lista!')
| StarcoderdataPython |
109790 | <reponame>simota/zengin-py<filename>zengin_code/bank.py<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import # NOQA
from collections import OrderedDict
import six
class BankMeta(type):
    """Metaclass maintaining a shared registry of banks.

    Banks are indexed three ways: by bank code (insertion-ordered), by short
    name, and by full name. Classes using this metaclass support item access
    (``Cls[code]``), the ``all`` property, and ``by_name`` lookup.
    """

    banks = OrderedDict()
    names = {}
    fullnames = {}

    def __setitem__(cls, code, bank):
        """Register `bank` under its code, short name, and full name."""
        cls.banks[code] = bank
        cls.names[bank.name] = bank
        cls.fullnames[bank.fullname] = bank

    def __getitem__(cls, code):
        """Look up a bank by its code (raises KeyError if unknown)."""
        return cls.banks[code]

    @property
    def all(cls):
        """Every registered bank, keyed by code, in registration order."""
        return cls.banks

    def by_name(cls, name):
        """Find a bank by short name, falling back to full name; None if absent."""
        return cls.names.get(name) or cls.fullnames.get(name)
class Bank(six.with_metaclass(BankMeta)):
    """A bank with its code, name variants, and a registry of branches.

    Each instance self-registers in the BankMeta class-level registry on
    construction (see ``self.__class__[code] = self`` below).
    """
    def __init__(self, code, name, kana, hira, roma, fullname):
        self.code = code            # bank code (registry key)
        self.name = name            # short name
        self.kana = kana            # katakana reading
        self.hira = hira            # hiragana reading
        self.roma = roma            # romaji reading
        self.fullname = fullname    # full legal name
        self.branches = OrderedDict()   # branch code -> Branch
        self.named_branches = {}        # branch name -> Branch
        # Register this instance in the metaclass-wide bank registry.
        self.__class__[code] = self
    def branch_by_name(self, name):
        """Return the branch with the given name, or None."""
        return self.named_branches.get(name)
    def branch_by_code(self, code):
        """Return the branch with the given code, or None."""
        return self.branches.get(code)
    def add_branch(self, branch):
        """Index `branch` by both its code and its name."""
        self.branches[branch.code] = branch
        self.named_branches[branch.name] = branch
self.named_branches[branch.name] = branch
| StarcoderdataPython |
82488 | <filename>app/models/City/methods/__init__.py<gh_stars>1-10
from .delete import delete
from .find import find
from .update import update
from .create import create
from .find_many import find_many | StarcoderdataPython |
6520184 | import sys
import random
def main(argv=None):
    """Main entry point: demo merge_sort on a fixed list; returns exit code 0."""
    if argv is None:
        argv = sys.argv
    print(merge_sort([6,3,5,8,2,6,2,5,8,0,5,3,2]))
    # batch_test((selection_sort, insertion_sort), n=100, runs=10)
    return 0
def batch_test(algos, n, runs, max=None):
    """Fuzz each sorting algorithm against Python's sorted() as the oracle.

    Draws `runs` random samples of `n` distinct integers from [1, max)
    (max defaults to n * 10) and asserts every algorithm sorts them correctly.
    NOTE: the parameter name `max` shadows the builtin; kept for
    backward-compatible keyword use.
    """
    if max is None:
        max = n * 10
    for algo in algos:
        for _ in range(runs):
            print('Checking algo {} with {} integers'.format(algo, n))
            sample = random.sample(range(1, max), n)
            assert sorted(sample) == algo(sample)
def merge_sort(array):
    """Return `array` sorted ascending via recursive merge sort (emits debug prints)."""
    return sort(array)
def sort(array, verbose=False):
    """Recursively split `array` in half, sort each half, and merge them.

    NOTE: this function's own debug print is unconditional and `merge` is
    always called with verbose=True, so the `verbose` parameter is currently
    unused here (kept for interface compatibility).

    :param array: list to sort
    :return: a new sorted list
    """
    # Bug fix: the original base case was `len(array) == 1`, which recursed
    # forever on an empty list (an empty list splits into two empty halves).
    if len(array) <= 1:
        return array
    mid = int(len(array) / 2)
    print('mid = {}, left = {}, right = {}'.format(mid, array[:mid], array[mid:]))
    lista = sort(array[:mid])
    listb = sort(array[mid:])
    result = merge(lista, listb, verbose=True)
    return result
def merge(lista, listb, verbose=False):
    """Merge two sorted lists into one sorted list.

    Stable with respect to the inputs: on ties, elements of `lista` are
    emitted first. When `verbose` is set, a step-by-step trace is printed.
    """
    ptra, ptrb = 0, 0
    len_a, len_b = len(lista), len(listb)
    if verbose:
        print('\na = {}, b = {}'.format(lista, listb))
    output = []
    while ptra < len_a or ptrb < len_b:
        if ptra == len_a:
            # Left side exhausted: take the remainder of the right side.
            output.extend(listb[ptrb:])
            ptrb = len_b
        elif ptrb == len_b:
            # Right side exhausted: take the remainder of the left side.
            output.extend(lista[ptra:])
            ptra = len_a
        elif lista[ptra] <= listb[ptrb]:
            output.append(lista[ptra])
            ptra += 1
        elif lista[ptra] > listb[ptrb]:
            output.append(listb[ptrb])
            ptrb += 1
        if verbose:
            print('ptra = {}, lista = {}, ptrb = {}, listb = {}, output = {}'.format(ptra, lista, ptrb, listb, output))
    if verbose:
        print('output = {}'.format(output))
    return output
if __name__ == "__main__":
sys.exit(main(sys.argv)) | StarcoderdataPython |
11221362 | #!/usr/bin/python
import sys, re, os
# Shared mutable state: package-name prefixes to exclude, and surviving lines.
packages = []
lines = []
def readPackages(filename):
    # Load one package-name prefix per line of `filename` into the global
    # `packages` list.
    # NOTE(review): `global methodmap` is a leftover declaration; methodmap is
    # never defined or used anywhere in this script.
    global methodmap
    f = open(filename)
    for line in f:
        packages.append(line.rstrip())
    f.close()
def readFailedSinks(filename):
    # Append to the global `lines` every line of `filename` whose start does
    # not match any known package prefix (prefixes are treated as regexes).
    # NOTE(review): leftover unused `global methodmap`, as in readPackages.
    global methodmap
    f = open(filename)
    for line in f:
        #print line.rstrip()
        flag = True
        for package in packages:
            # re.compile inside the loop is cheap in practice (the re module
            # caches compiled patterns), but could be hoisted.
            pattern = re.compile(package)
            if pattern.match(line.rstrip()):
                flag = False
        if flag:
            lines.append(line.rstrip())
        #print flag
    f.close()
def main():
    # Filter the failed-sink list in argv[1] against prefixes from
    # packages.txt and print the surviving lines (Python 2 script).
    readPackages("packages.txt")
    readFailedSinks(sys.argv[1])
    for line in lines:
        print line
if __name__ == "__main__":
main() | StarcoderdataPython |
4881902 | <filename>tests/parameter_test.py<gh_stars>1-10
# stootr_test.py
# Author: <NAME> - MIT License 2019
import pytest
from ottr import OttrGenerator
from rdflib import Literal, URIRef
from rdflib.namespace import RDF, FOAF
failing_tests = [
# type related errors
("""
@prefix ex: <http://example.org#>.
ex:Person[ ottr:IRI ?uri ] :: {
o-rdf:Type (?uri, foaf:Person )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person("ex:Ann").
"""),
("""
@prefix ex: <http://example.org#>.
ex:Person[ xsd:integer ?age ] :: {
ottr:Triple (_:person, foaf:age, ?age )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person("12"^^xsd:number).
"""),
# optional errors
("""
@prefix ex: <http://example.org#>.
ex:Person[ ?uri ] :: {
o-rdf:Type (?uri, foaf:Person )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person(none).
"""),
# non blank errors
("""
@prefix ex: <http://example.org#>.
ex:Person[ ! ?uri ] :: {
o-rdf:Type (?uri, foaf:Person )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person(_:person).
"""),
]
correct_tests = [
("""
@prefix ex: <http://example.org#>.
ex:Person[ ottr:IRI ?uri ] :: {
o-rdf:Type (?uri, foaf:Person )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person (ex:Ann).
""", [(URIRef("http://example.org#Ann"), RDF.type, FOAF.Person)]),
("""
@prefix ex: <http://example.org#>.
ex:Person[ ottr:IRI ?uri, xsd:integer ?age ] :: {
ottr:Triple (?uri, foaf:age, ?age )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person(ex:Ann, "12"^^xsd:integer).
""", [(URIRef("http://example.org#Ann"), FOAF.age, Literal(12))]),
("""
@prefix ex: <http://example.org#>.
ex:Person[ ! ?uri ] :: {
o-rdf:Type (?uri, foaf:Person )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person(ex:Ann).
""", [(URIRef("http://example.org#Ann"), RDF.type, FOAF.Person)]),
("""
@prefix ex: <http://example.org#>.
ex:Person[ ?uri = ex:Ann ] :: {
o-rdf:Type (?uri, foaf:Person )
} .
""", """
@prefix ex: <http://example.org#>.
ex:Person(none).
""", [(URIRef("http://example.org#Ann"), RDF.type, FOAF.Person)])
]
@pytest.mark.parametrize("template,instance", failing_tests)
def test_invalid_parameter(template, instance):
    """Every invalid (template, instance) pair must raise during instantiation."""
    generator = OttrGenerator()
    generator.load_templates(template)
    with pytest.raises(Exception):
        generator.instanciate(instance)
@pytest.mark.parametrize("template,instance,expected", correct_tests)
def test_valid_parameter(template, instance, expected):
    """Each valid instance must produce exactly the expected triples.

    Works on a copy of `expected`: the original mutated the parametrized
    fixture list in place, which would corrupt it on any re-run of the test.
    """
    gen = OttrGenerator()
    gen.load_templates(template)
    remaining = list(expected)
    instances = gen.instanciate(instance)
    for triple in instances.execute(as_nt=False):
        assert triple in remaining
        remaining.remove(triple)
    assert len(remaining) == 0
| StarcoderdataPython |
8124041 | <gh_stars>0
from enum import Enum
class Priority:
    """Maps a task priority between its 0-based index and its string value."""

    # Single source of truth for the four priority values; the original
    # duplicated this literal inside both methods.
    PRIORITY_VALUES = ['0.1', '1', '1.5', '2']

    @staticmethod
    def priority_index_to_value(index):
        """Return the priority value string for a 0-based index (IndexError if out of range)."""
        return Priority.PRIORITY_VALUES[index]

    @staticmethod
    def priority_value_to_index(value):
        """Return the 0-based index of a priority value string (ValueError if unknown)."""
        return Priority.PRIORITY_VALUES.index(value)
class TaskType(Enum):
    """The four task categories; str() yields the raw API value."""
    HABIT = 'habit'
    DAILY = 'daily'
    TODO = 'todo'
    REWARD = 'reward'
    def __str__(self):
        return self.value
class Task:
    """Mutable holder for a task's fields, populated after construction."""
    def __init__(self):
        self.id = None   # unique task identifier (assigned externally)
        self.text = ''   # task title
        self.notes = ''  # free-form notes
self.priority = '' | StarcoderdataPython |
9746243 | """Event class
Simple event class to define what attributes an event has, and other helpful functions like euqlity-checking.
"""
import calendar
from . import utils
class Event(object):
    """A scraped event with identity, schedule, location, and provenance.

    Equality is by `id` only.
    NOTE(review): defining __eq__ without __hash__ makes instances unhashable
    (cannot be put in sets/dict keys) - confirm that is intended.
    """
    def __init__(self,
                 id='', # Unique ID for every event
                 name='', # Event name
                 description='', # Event description
                 url='', # URL where event was found
                 img='', # Image URL
                 date_start='', # Start-date of event
                 date_end='', # End-date of event
                 date_fuzzy='', # Free-text date, used when no hard date is given
                 time_start='', # Start time of event
                 time_end='', # End time of event
                 location='', # Event location
                 cost='', # Entry fee to event
                 status='', # Cancelled, Online, Postponed, ...
                 other='', # Additional information tag
                 visibility='', # Prefecture, University, ... used for visibility to channels
                 source='', # Source where event was scraped
                 date_added=None): # ONLY SET BY DATABASE: date when event was added to database
        self.id = id
        self.name = name
        self.description = description
        self.url = url
        self.img = img
        self.date_start = date_start
        self.date_end = date_end
        self.date_fuzzy = date_fuzzy
        self.time_start = time_start
        self.time_end = time_end
        self.location = location
        self.cost = cost
        self.status = status
        self.other = other
        self.visibility = visibility
        self.source = source
        self.date_added = date_added
    def __eq__(self, other):
        # Two events are the same iff their IDs match; any non-Event is unequal.
        if isinstance(other, Event):
            return self.id == other.id
        return False
    def __str__(self):
        # Multi-line human-readable dump of all fields.
        text = f"""***{self.name}*** [{self.id}]
        date: {self.getDateRange() if not self.date_fuzzy else self.date_fuzzy}
        time: {self.getTimeRange()}
        location: {self.location}
        cost: {self.cost}
        url: {self.url}
        image-url: {self.img}
        status: {self.status}
        visibility: {self.visibility}
        source: {self.source}
        other: {self.other}
        description: {self.description}"""
        return text
    def getDateRange(self) -> str:
        """Return the formatted date range, or the fuzzy date text if set.

        Single-day events render as just the start date (the trailing " - "
        is stripped).
        """
        if self.date_fuzzy:
            return self.date_fuzzy
        date_start = str(utils.custom_strftime('%b {S} ({DAY}), %Y', self.date_start))
        date_end = str(utils.custom_strftime('%b {S} ({DAY}), %Y', self.date_end)) if self.date_start != self.date_end else ''
        return f"{date_start} - {date_end}".strip(' - ')
    def getTimeRange(self) -> str:
        """Return "HH:MM - HH:MM", "HH:MM" if no end time, or '---' if unset."""
        if not self.time_start:
            return '---'
        time_start = self.time_start.strftime('%H:%M')
        time_end = self.time_end.strftime('%H:%M') if self.time_end else ''
        return f"{time_start} - {time_end}".strip(' - ')
def mergeDuplicateEvents(events, check_duplicate_func=None, merge_func=None, verbose=False):
    """
    Merge duplicate events in the given list, in place, and return the list.
    Duplicate events happen when e.g. the same event is held again next week.
    Optional arguments:
    * check_duplicate_func: function(eventA, eventB) -> bool deciding whether
      two events are duplicates. [Default: same ID and same start date]
    * merge_func: function(eventA, eventB) -> Event combining duplicates, or
      one of the strings 'mergeDate' / 'dontmerge'. [Default: dontmerge, i.e.
      eventB is simply discarded - note the original docstring incorrectly
      claimed dates were merged by default]
    * verbose: if True, print each merged pair
    """
    # Merging functions
    def sameIDDate(eventA:Event, eventB:Event):
        """Checks if two events are duplicate by their ID and start_date"""
        if not (eventA and eventB):
            utils.print_warning("One of the two events was `None`!")
            return False
        return eventA.id == eventB.id and eventA.date_start == eventB.date_start
    def mergeDate(eventA:Event, eventB:Event):
        """Merges two events by appending only their date.

        NOTE(review): Event defines date_start/date_end/date_fuzzy but no
        `date` attribute, so the += below would raise AttributeError if this
        strategy is ever selected - verify before enabling 'mergeDate'.
        """
        if not (eventA and eventB):
            utils.print_warning("One of the two events was `None`!")
            if eventA:
                return eventA
            return eventB
        eventA.date += ' & ' + eventB.date
        return eventA
    def dontmerge(eventA:Event, eventB:Event):
        """Simply discards eventB. Does not merge metadata."""
        return eventA
    # Process mergefunc: map a strategy name onto its function (unknown names
    # silently fall back to dontmerge).
    if type(merge_func) is str:
        merge_func = {'mergeDate':mergeDate,'dontmerge':dontmerge}.get(merge_func, dontmerge)
    # Fallback: if no check/merge functions given, use the defaults.
    if check_duplicate_func is None:
        check_duplicate_func = sameIDDate
    if merge_func is None:
        merge_func = dontmerge
    # O(n^2) pairwise scan; `del events[j]` plus the j adjustment keeps the
    # inner index valid while the list shrinks under iteration.
    i = 0
    while i < len(events):
        eventA = events[i]
        j = i + 1
        while j < len(events):
            eventB = events[j]
            if check_duplicate_func(eventA, eventB):
                eventA = merge_func(eventA, eventB)
                events[i] = eventA
                if verbose:
                    print("Merged events:\n\teventA: {}\n\teventB: {}".format(eventA.url,eventB.url))
                    #print("Merged event:\n{}".format(eventA))
                del events[j]
                j -= 1
            j += 1
        i += 1
    return events
| StarcoderdataPython |
11358897 | import glob
import io
import os
import pdb
import sys
import numpy as np
import fnmatch
import re
from sklearn.linear_model import SGDClassifier
import nltk
from nltk import sent_tokenize
from nltk import word_tokenize
from nltk import pos_tag
from nltk import ne_chunk
from commonregex import CommonRegex
from sklearn.feature_extraction import DictVectorizer
from sklearn import svm
import glob
import io
import os
import pdb
import sys
import re
import nltk
from nltk import sent_tokenize
from nltk import word_tokenize
from nltk import pos_tag
from nltk import ne_chunk
import random
import numpy as np
from sklearn.feature_extraction import DictVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import accuracy_score
from sklearn.linear_model import SGDClassifier
from sklearn import svm
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import ensemble
from sklearn.metrics.pairwise import cosine_similarity
import regex
def Read_files(text_files):
    """Read every file matching `text_files` (glob pattern joined to cwd).

    :return: (list of file contents, list of file base names)
    NOTE(review): filename.split('/') assumes POSIX path separators; on
    Windows the full path would be kept - confirm target platform.
    """
    # print(text_files)
    data = []
    filenames =[]
    for filename in glob.glob(os.path.join(os.getcwd(),text_files)):
        #print(filename)
        filenames.append(filename.split('/')[-1])
        print(filenames)
        with open(os.path.join(os.getcwd(), filename), "r", encoding='utf-8') as f:
            data1 = f.read()
            data.append(data1)
    return data, filenames
def read_files(path):
    """Read all .txt files found directly in `path`.

    :return: (list of file contents, list of file names)
    NOTE(review): os.chdir permanently changes the process working directory,
    affecting later relative-path operations elsewhere in this script.
    """
    os.chdir(path)
    data = []
    file_names = []
    for file in os.listdir():
        if file.endswith(".txt"):
            file_names.append(file)
            # Windows-style separator embedded in the f-string.
            f_path=f'{path}\{file}'
            with io.open(f_path, 'r', encoding='utf-8') as file1:
                text = file1.read()
                data.append(text)
    return data, file_names
def get_redacted_entity(data):
    """Extract PERSON named entities from `data` using NLTK's NE chunker.

    :return: unique person names, sorted in reverse alphabetical order (so
        longer/later names tend to be redacted first by the caller)
    """
    person_list=[]
    #person_list1=[]
    for sent in sent_tokenize(data):
        from nltk import word_tokenize
        x=word_tokenize(sent)
        for chunk in ne_chunk(pos_tag(x)):
            if hasattr(chunk, 'label') and chunk.label() == 'PERSON':
                # Re-join the chunk's tokens into a single space-separated name.
                a=""
                for c in chunk.leaves():
                    a=a+c[0]
                    a=a+" "
                person_list.append(a[:-1])
    # NOTE(review): `count` is computed but never used.
    count=len(person_list)
    personlist1=set(person_list)
    person_list1=list(personlist1)
    #print(persons)
    person_list1=sorted(person_list1, reverse= True)
    #print(person_list1)
    return person_list1
def retrieve_train_features(text, person_name_list):
    """Build one feature dict per person name found in `text`.

    Features are document-level statistics (identical for every name in the
    same document) plus the individual name's length.
    NOTE(review): byte-for-byte identical to retrieve_test_features below;
    the two could share one implementation.
    """
    features = []
    cc = len(text)                  # character count
    wc = len(text.split())          # word count
    sc = len(sent_tokenize(text))   # sentence count
    cs = 0                          # space count
    for i in text:
        if i == " ":
            cs+=1
    for i in range(0, len(person_name_list)):
        # NOTE(review): the local name `dict` shadows the builtin.
        dict = {}
        dict['sent_count'] = sc
        dict['word_count'] = wc
        dict['character_count'] = cc
        dict['space_count'] = cs
        dict['name_length'] = len(person_name_list[i])
        dict['total_names'] = len(person_name_list)
        features.append(dict)
    return features
def retrieve_test_features(text, redacted_names_in_block):
    """Build one feature dict per redacted span in `text`.

    Mirrors the training features: document-level statistics plus the length
    of each redacted placeholder.
    NOTE(review): byte-for-byte identical to retrieve_train_features above;
    the two could share one implementation.
    """
    features = []
    cc = len(text)                  # character count
    wc = len(text.split())          # word count
    sc = len(sent_tokenize(text))   # sentence count
    cs = 0                          # space count
    for i in text:
        if i == " ":
            cs+=1
    for i in range(0, len(redacted_names_in_block)):
        # NOTE(review): the local name `dict` shadows the builtin.
        dict = {}
        dict['sent_count'] = sc
        dict['word_count'] = wc
        dict['character_count'] = cc
        dict['space_count'] = cs
        dict['name_length'] = len(redacted_names_in_block[i])
        dict['total_names'] = len(redacted_names_in_block)
        features.append(dict)
    return features
def Fields_to_redact(person_list1):
    """Return a new list of the detected names to be redacted (shallow copy)."""
    return list(person_list1)
def Redact(replace, data):
    """Redact each name in `replace` from `data`.

    Replaces the first occurrence of each name with a same-length run of the
    full-block character (U+2588).

    :param replace: list of names to redact
    :param data: document text
    :return: redacted text
    """
    for name in replace:
        if name in data:
            # Bug fix: the name was previously passed to re.sub as a raw
            # pattern, so regex metacharacters (".", "(", etc.) either crashed
            # re.sub or matched the wrong text; re.escape makes it literal.
            data = re.sub(re.escape(name), len(name) * '\u2588', data, 1)
    return data
def Get_Unique_Names(names_list):
    """Return the distinct names as a list.

    Order is unspecified (a set round-trip, exactly as before).
    """
    return list(set(names_list))
def Save_to_output_redacted(redact_result, folder, file_name):
    """Write `redact_result` into `folder`, renaming *.txt to *.redacted.txt.

    Creates `folder` if it does not already exist.
    """
    new_file = file_name.replace(".txt", ".redacted.txt")
    # Bug fix: the original called os.makedirs(os.path.dirname(folder)), which
    # creates the *parent* of `folder` (raising for a bare name like
    # "redacted", or colliding with an existing parent), after which open()
    # still failed because `folder` itself was never created.
    os.makedirs(folder, exist_ok=True)
    with open(os.path.join(folder, new_file), "w+", encoding="utf-8") as f:
        f.write(redact_result)
def Save_to_output_predicted(redact_result, folder, file_name, data_list, redacted_names):
    """Write the redacted text plus per-redaction predicted names to
    `folder`/`file_name`, creating `folder` if needed.

    :param data_list: list of predicted-name lists, one per redacted span
    :param redacted_names: the redaction placeholders found in the text
    """
    # Bug fix: os.makedirs(os.path.dirname(folder)) created the parent of
    # `folder` (failing outright for a bare relative name like "predicted");
    # the output folder itself must be created.
    os.makedirs(folder, exist_ok=True)
    result = Get_predicted_output(redact_result, data_list, redacted_names)
    with open(os.path.join(folder, file_name), "w+", encoding="utf-8") as f:
        f.write(result)
def Get_predicted_output(redact_result, data_list, redacted_names):
    """Append one line per redacted span, listing its comma-joined predicted
    names, to the redacted text, and return the combined string."""
    result = redact_result
    for i in range(len(data_list)):
        joined = ",".join(data_list[i])
        result += "\n {} top 5` predicted names are {}".format(redacted_names[i], joined)
    return result
def Read_files2(text_files):
    """NOTE(review): appears to be an abandoned debug variant of Read_files.

    glob.glob(os.getcwd()) ignores `text_files` entirely and matches only the
    cwd directory itself, so open() below would fail on a directory;
    presumably this was meant to glob a pattern like Read_files does - verify
    before using (it is not called from this script's __main__ block).
    """
    # print(text_files)
    print("123")
    data = []
    filenames =[]
    for filename in glob.glob(os.getcwd()):
        print(filename)
        print(filename)
        filenames.append(filename.split('/')[-1])
        print(filenames)
        with open(os.path.join(os.getcwd(), filename), "r") as f:
            data1 = f.read()
            data.append(data1)
    return data, filenames
def retrieve_predicted_words(probabilities_all_classes, Names_Redacted):
    """For each redacted span, return the 5 class names with the highest
    predicted probabilities (in ascending probability order).

    NOTE(review): relies on the module-level global `names_unique`, which is
    only assigned inside the __main__ block below - this function cannot be
    used as a library import as written.
    """
    All_predicted_words_review = []
    for test_word in range(0, len(Names_Redacted)):
        test_word_probabilities = probabilities_all_classes[test_word]
        # Indices of the 5 largest probabilities, ascending.
        top_5_idx = np.argsort(test_word_probabilities)[-5:]
        #print(top_5_idx)
        predicted_words = []
        for i in range(0,5):
            index_range = top_5_idx[i]
            predicted_word = names_unique[index_range]
            predicted_words.append(predicted_word)
        #print(predicted_words)
        All_predicted_words_review.append(predicted_words)
    #print(All_predicted_words_review)
    return (All_predicted_words_review)
if __name__=='__main__':
    #train the model
    input_path = "input"
    output_path_redacted = "redacted"
    output_path_prediction = "predicted"
    train_data, file_names = Read_files(input_path)
    replace_result_list = []
    names_list = []
    redacted_data_list=[]
    redacted_data=[]
    full_list_training_features = []
    full_list_names = []
    redacted_result = []
    # Pass 1: detect person names per document, write redacted copies, and
    # accumulate the training features/labels.
    for itr in range(0, len(train_data)):
        person_list_result = get_redacted_entity(train_data[itr])
        #print(person_list_result)
        replace_result = Fields_to_redact(person_list_result)
        # NOTE(review): replace_result_list is never appended to, so this
        # inner loop never runs and names_list stays empty - confirm intent.
        for entry in replace_result_list:
            for names in entry:
                names_list.append(names)
        redact_result = Redact(replace_result,train_data[itr])
        Save_to_output_redacted(redact_result, output_path_redacted, file_names[itr])
        redacted_data_list.append(redact_result)
        list_names_dict_features = retrieve_train_features(train_data[itr], person_list_result)
        full_list_training_features.extend(list_names_dict_features)
        full_list_names.extend(person_list_result)
        #TODO: store the results in the file
        #for now I will store the redacted result in a list and pass it in the testing function
        #print(redact_result)
    # Train an SVM with probability outputs: features -> person name.
    v = DictVectorizer()
    X = v.fit_transform(full_list_training_features).toarray()
    full_list_names = np.array(full_list_names)
    model = svm.SVC(probability=True)
    #model = SGDClassifier()
    model.fit(X, full_list_names)
    names_unique = Get_Unique_Names(full_list_names)
    #redacted_data = redacted_data_list[:12]
    #read redacted data from path
    redacted_data, file_names = read_files(output_path_redacted)
    # Pass 2: for the first 12 redacted files, predict the top-5 candidate
    # names for every redacted span and write the prediction reports.
    for i in range(0, 12):
        redacted_names = re.findall(r'(\u2588+)', redacted_data[i])
        test_features = retrieve_test_features(redacted_data[i], redacted_names)
        if len(test_features) > 0:
            # NOTE(review): v.fit_transform on test features refits the
            # vectorizer; v.transform is presumably intended - confirm.
            X_test = v.fit_transform(test_features).toarray()
            probabilities_all_classes = model.predict_proba(X_test)
            All_predicted_words_review = retrieve_predicted_words(probabilities_all_classes, redacted_names)
| StarcoderdataPython |
1931531 | <gh_stars>0
# -*- encoding:utf-8 -*-
from .app import app
| StarcoderdataPython |
8032186 | # Generated by Django 2.2 on 2020-02-28 19:58
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the Course model.

    Courses form a doubly linked sequence via the self-referential
    ``next``/``prev`` foreign keys; deleting a neighbour nulls the link
    (on_delete=SET_NULL) rather than cascading.
    """

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Course',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=400)),
                ('description', models.CharField(max_length=400)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
                ('next', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nextOne', to='course.Course')),
                ('prev', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='previousOne', to='course.Course')),
            ],
        ),
    ]
| StarcoderdataPython |
3482887 | <filename>syntropycli/__main__.py
#!/usr/bin/env python
from collections import defaultdict
from datetime import datetime, timedelta
import click
import syntropy_sdk as sdk
from syntropycli.decorators import *
from syntropycli.utils import *
# Root click command group; every CLI subcommand below attaches to it.
@click.group()
def apis():
    """Syntropy Networks Command Line Interface."""
@apis.command()
@click.option("--skip", default=0, type=int, help="Skip N providers.")
@click.option("--take", default=128, type=int, help="Take N providers.")
@click.option(
    "--json",
    "-j",
    is_flag=True,
    default=False,
    help="Outputs a JSON instead of a table.",
)
@syntropy_api
def get_providers(skip, take, json, api):
    """Retrieve a list of endpoint providers."""
    agents_api = sdk.AgentsApi(api)
    paged_index = WithPagination(agents_api.platform_agent_provider_index)
    response = paged_index(skip=skip, take=take, _preload_content=False)
    # (header, field-key) pairs consumed by print_table.
    columns = [
        ("ID", "agent_provider_id"),
        ("Name", "agent_provider_name"),
    ]
    print_table(response["data"], columns, to_json=json)
@apis.command()
@click.option("--skip", default=0, type=int, help="Skip N API keys.")
@click.option("--take", default=128, type=int, help="Take N API keys.")
@click.option(
    "--json",
    "-j",
    is_flag=True,
    default=False,
    help="Outputs a JSON instead of a table.",
)
@syntropy_api
def get_api_keys(skip, take, json, api):
    """List all API keys.
    API keys are being used by the endpoint agent to connect to the syntropy platform.
    By default this command will retrieve up to 128 API keys. You can use --take parameter to get more keys.
    """
    api = sdk.APIKeysApi(api)
    # _preload_content=False presumably yields plain dicts rather than SDK
    # model objects -- the field keys below index by string. TODO confirm.
    keys = WithPagination(api.get_api_key)(
        skip=skip, take=take, _preload_content=False
    )["data"]
    # (header, key[, formatter]) triples consumed by print_table.
    fields = [
        ("ID", "api_key_id", lambda x: int(x)),
        ("Name", "api_key_name"),
        ("Is Suspended", "api_key_is_suspended", lambda x: x and "Yes" or "No"),
        ("Status", "api_key_status", lambda x: x and "Ok" or "Err"),
        ("Created At", "api_key_created_at"),
        ("Updated At", "api_key_updated_at"),
        ("Expires At", "api_key_valid_until"),
    ]
    print_table(keys, fields, to_json=json)
@apis.command()
@click.argument("name")
@click.argument(
    "expires",
    type=click.DateTime(formats=["%Y-%m-%d %H:%M:%S"]),
    # NOTE(review): this default is evaluated once at module import time,
    # not per invocation -- "now + 30 days" is frozen when the module loads.
    default=(datetime.now() + timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S"),
)
@syntropy_api
def create_api_key(name, expires, api):
    """Create a API key for endpoint agent.
    NOTE: Be sure to remember the API key as it will be only available as a result of this command.
    """
    body = {
        "api_key_name": name,
        "api_key_valid_until": expires,
    }
    api = sdk.APIKeysApi(api)
    result = api.create_api_key(body=body)
    # The secret is printed exactly once; it cannot be retrieved later.
    click.echo(result.data.api_key_secret)
def confirm_deletion(name, id):
    """Interactively ask whether the named API key should be deleted.

    Aborting the prompt (e.g. Ctrl-C) exits the CLI with status 1.
    """
    try:
        answer = click.confirm(f"Do you want to delete '{name}' (id={id})?")
    except click.Abort:
        raise SystemExit(1)
    return answer
@apis.command()
@click.option("--name", default=None, type=str)
@click.option("--id", default=None, type=int)
@click.option(
    "--yes",
    "-y",
    is_flag=True,
    default=False,
    help="Forces to delete all matching keys.",
)
@syntropy_api
def delete_api_key(name, id, yes, api):
    """Delete API key either by name or by id. If there are multiple names - please use id."""
    if name is None and id is None:
        click.secho("Either API key name or id must be specified.", err=True, fg="red")
        raise SystemExit(1)
    api = sdk.APIKeysApi(api)
    if id is None:
        # Name lookup may match several keys; confirm each one unless --yes.
        keys = api.get_api_key(filter=f"api_key_name:'{name}'").data
        for key in keys:
            if not yes and not confirm_deletion(key.api_key_name, key.api_key_id):
                continue
            api.delete_api_key(int(key.api_key_id))
            click.secho(
                f"Deleted API key: {key.api_key_name} (id={key.api_key_id}).",
                fg="green",
            )
    else:
        # Deleting by id skips the confirmation prompt entirely.
        api.delete_api_key(id)
        click.secho(f"Deleted API key: id={id}.", fg="green")
def _get_endpoints(
    name, id, tag, skip, take, show_services, online, offline, json, api
):
    """Shared implementation for listing endpoints (used by the CLI commands).

    Builds a server-side filter from name/id/tag, optionally filters by
    online state client-side, optionally attaches each endpoint's services,
    and prints the result as a table or JSON.
    """
    filters = []
    if name:
        filters.append(f"id|name:'{name}'")
    elif id:
        filters.append(f"ids[]:{id}")
    if tag:
        filters.append(f"tags_names[]:{tag}")
    agents = WithPagination(sdk.AgentsApi(api).platform_agent_index)(
        filter=",".join(filters) if filters else None,
        skip=skip,
        take=take,
        _preload_content=False,
    )["data"]
    if online or offline:
        # The API exposes no online filter, so keep fetching pages and
        # filter client-side until `take` matching endpoints are collected
        # or the server runs out of results.
        filtered_agents = []
        # NOTE(review): if both --online and --offline are passed, this
        # resolves to offline; the final batch can also push the result
        # slightly past `take`. TODO confirm both are acceptable.
        is_online = online and not offline
        while agents and len(filtered_agents) < take:
            filtered_agents += [
                agent for agent in agents if agent["agent_is_online"] == is_online
            ]
            if len(filtered_agents) < take:
                skip += take
                agents = WithPagination(sdk.AgentsApi(api).platform_agent_index)(
                    filter=",".join(filters) if filters else None,
                    skip=skip,
                    take=take,
                    _preload_content=False,
                )["data"]
        agents = filtered_agents
    # (header, key[, formatter]) triples consumed by print_table; a tuple
    # key drills into a nested dict.
    fields = [
        ("Agent ID", "agent_id"),
        ("Name", "agent_name"),
        ("Public IP", "agent_public_ipv4"),
        ("Provider", ("agent_provider", "agent_provider_name")),
        ("Location", "agent_location_city"),
        ("Online", "agent_is_online"),
        (
            "Tags",
            "agent_tags",
            lambda x: x and ", ".join(i["agent_tag_name"] for i in x) or "-",
        ),
    ]
    if show_services:
        # Fetch services for all endpoints in batched queries, then group
        # them by owning agent id before merging into each agent record.
        ids = [agent["agent_id"] for agent in agents]
        agents_services = BatchedRequestQuery(
            sdk.ServicesApi(api).platform_agent_service_index,
            max_query_size=MAX_QUERY_FIELD_SIZE,
        )(ids, _preload_content=False)["data"]
        agent_services = defaultdict(list)
        for agent in agents_services:
            agent_services[agent["agent_id"]].append(agent)
        agents = [
            {
                **agent,
                "agent_services": agent_services.get(agent["agent_id"], []),
            }
            for agent in agents
        ]
        fields.append(("Services", "agent_services", collect_endpoint_services))
    print_table(agents, fields, to_json=json)
@apis.command()
@click.option("--name", default=None, type=str, help="Filter endpoints by name.")
@click.option("--id", default=None, type=int, help="Filter endpoints by IDs.")
@click.option("--tag", default=None, type=str, help="Filter endpoints by tag.")
@click.option("--skip", default=0, type=int, help="Skip N endpoints.")
@click.option("--take", default=42, type=int, help="Take N endpoints.")
@click.option(
    "--show-services",
    is_flag=True,
    default=False,
    help="Retrieves services that are configured for each endpoint.",
)
@click.option(
    "--online", is_flag=True, default=False, help="List only online endpoints."
)
@click.option(
    "--offline", is_flag=True, default=False, help="List only offline endpoints."
)
@click.option(
    "--json",
    "-j",
    is_flag=True,
    default=False,
    help="Outputs a JSON instead of a table.",
)
@syntropy_api
def get_endpoints(name, id, tag, skip, take, show_services, online, offline, json, api):
    """List all endpoints.
    By default this command will retrieve up to 42 endpoints. You can use --take parameter to get more endpoints.
    Endpoint service status is added to the end of the service name with the following possible symbols:
    \b
    ^ - Enabled
    ! - Disabled
    ~ - Subnets partially enabled
    \b
    For example:
    `nginx^^` - the service is enabled as well as all subnets it exposes.
    `nginx^~` - the service is enabled, but only some subnets are enabled.
    `nginx!~` - the service is disabled, but some subnets are enabled.
    `nginx!!` - the service and subnets are disabled.
    """
    # Thin CLI wrapper; all the work happens in _get_endpoints.
    _get_endpoints(
        name,
        id,
        tag,
        skip,
        take,
        show_services,
        online,
        offline,
        json,
        api,
    )
@apis.command()
@click.argument("endpoint")
@click.option(
    "--name",
    "-n",
    is_flag=True,
    default=False,
    help="Use endpoint name instead of id.",
)
@click.option(
    "--json",
    "-j",
    is_flag=True,
    default=False,
    help="Outputs a JSON instead of a table.",
)
@click.option(
    "--set-provider",
    "-p",
    type=str,
    default=None,
    help="Set a provider to the endpoint.",
)
@click.option(
    "--set-tag",
    "-t",
    type=str,
    default=None,
    multiple=True,
    help="Set a tag to the endpoint(removes all other tags). Supports multiple options.",
)
@click.option(
    "--set-service",
    "-s",
    type=str,
    default=None,
    multiple=True,
    help="Enable a service for the endpoint(disables all other services). Supports multiple options.",
)
@click.option(
    "--add-tag",
    "-T",
    type=str,
    default=None,
    multiple=True,
    help="Add a tag to the endpoint(won't affect other tags). Supports multiple options.",
)
@click.option(
    "--enable-service",
    "-S",
    type=str,
    default=None,
    multiple=True,
    help="Enable a service for the endpoint(won't affect other services). Supports multiple options.",
)
@click.option(
    "--remove-tag",
    "-R",
    type=str,
    default=None,
    multiple=True,
    help="Remove a tag from the endpoint(won't affect other tags). Supports multiple options.",
)
@click.option("--clear-tags", is_flag=True, default=False, help="Removes all tags.")
@click.option(
    "--disable-service",
    "-D",
    type=str,
    default=None,
    multiple=True,
    help="Disable a service for the endpoint(won't affect other services). Supports multiple options.",
)
@click.option(
    "--disable-all-services", is_flag=True, default=False, help="Disable all services."
)
@click.option(
    "--enable-all-services", is_flag=True, default=False, help="Enable all services."
)
@click.option("--skip", default=0, type=int, help="Skip N endpoints.")
@click.option("--take", default=42, type=int, help="Take N endpoints.")
@syntropy_api
def configure_endpoints(
    api,
    endpoint,
    set_provider,
    set_tag,
    set_service,
    add_tag,
    enable_service,
    remove_tag,
    disable_service,
    clear_tags,
    disable_all_services,
    enable_all_services,
    name,
    take,
    skip,
    json,
):
    """Configures an endpoint with provided provider, tags. Also, allows to enable/disable services.
    Endpoint can be an ID or a name (use -n option). Multiple endpoints can be configured if names match partially with the provided name.
    It is possible to supply multiple --set-tag, --add-tag and --remove-tag options. The sequence of operations is set, add and then remove.
    So if you run this:
    syntropyctl configure-endpoints --set-tag tag1 --set-tag tag2 --add-tag tag3 --add-tag tag4 --remove-tag tag1 -n <endpoint-name>
    \b
    then syntropyctl will:
    1. clear all tags and add tag1 and tag2,
    2. add tag3 and tag4,
    3. remove tag1.
    The same applies to services.
    """
    filter_str = f"name:{endpoint}" if name else f"ids[]:{endpoint}"
    agents = sdk.utils.WithPagination(sdk.AgentsApi(api).platform_agent_index)(
        filter=filter_str,
        _preload_content=False,
    )["data"]
    if not agents:
        click.secho("Could not find any endpoints.", err=True, fg="red")
        raise SystemExit(1)
    else:
        click.secho(f"Found {len(agents)} endpoints.", fg="green")
    # Phase 1: tags/provider. Compute the new tag list per endpoint via
    # update_list (set -> add -> remove) and PATCH only actual changes.
    if set_provider or set_tag or add_tag or remove_tag or clear_tags:
        agents_tags = {
            agent["agent_id"]: [
                tag["agent_tag_name"] for tag in agent.get("agent_tags", [])
            ]
            for agent in agents
            if "agent_tags" in agent
        }
        for agent in agents:
            original_tags = agents_tags.get(agent["agent_id"], [])
            tags = update_list(original_tags, set_tag, add_tag, remove_tag, clear_tags)
            payload = {}
            current_provider = (
                agent.get("agent_provider") if agent.get("agent_provider") else {}
            )
            if set_provider and set_provider != current_provider.get(
                "agent_provider_name"
            ):
                payload["agent_provider_name"] = set_provider
            if (set_tag or add_tag or remove_tag or clear_tags) and set(
                original_tags
            ) != set(tags):
                payload["agent_tags"] = tags
            if payload:
                sdk.AgentsApi(api).platform_agent_update(payload, agent["agent_id"])
                click.secho("Tags and provider configured.", fg="green")
            else:
                click.secho(
                    "Nothing to do for tags and provider configuration.", fg="yellow"
                )
    show_services = False
    # Phase 2: services. A service counts as enabled only if it is active
    # AND every subnet it exposes is user-enabled; the final enabled set is
    # then pushed down to per-subnet isEnabled updates.
    if (
        set_service
        or enable_service
        or disable_service
        or enable_all_services
        or disable_all_services
    ):
        service_api = sdk.ServicesApi(api)
        show_services = True
        ids = [agent["agent_id"] for agent in agents]
        agents_services_all = sdk.utils.BatchedRequestQuery(
            sdk.ServicesApi(api).platform_agent_service_index,
            max_query_size=MAX_QUERY_FIELD_SIZE,
        )(ids, _preload_content=False)["data"]
        agents_services = defaultdict(list)
        for agent in agents_services_all:
            agents_services[agent["agent_id"]].append(agent)
        for agent in agents:
            services = {
                service["agent_service_name"]: service
                for service in agents_services[agent["agent_id"]]
            }
            enabled_services = [
                service["agent_service_name"]
                for service in agents_services[agent["agent_id"]]
                if (
                    (
                        all(
                            subnet["agent_service_subnet_is_user_enabled"]
                            for subnet in service["agent_service_subnets"]
                        )
                        and service["agent_service_is_active"]
                    )
                    or enable_all_services
                )
            ]
            enabled_services = update_list(
                enabled_services,
                set_service,
                enable_service,
                disable_service,
                disable_all_services,
                validate=False,
            )
            missing_services = [
                service for service in enabled_services if service not in services
            ]
            if missing_services:
                click.secho(
                    f"Warning: the following services were not found: {', '.join(missing_services)}",
                    err=True,
                    fg="yellow",
                )
            # NOTE(review): `name` below shadows the --name flag parameter;
            # inside this comprehension it holds the service name only.
            subnets = [
                {
                    "id": subnet["agent_service_subnet_id"],
                    "isEnabled": name in enabled_services,
                }
                for name, service in services.items()
                for subnet in service["agent_service_subnets"]
                if subnet["agent_service_subnet_is_user_enabled"]
                != (name in enabled_services)
            ]
            if subnets:
                payload = {"subnetsToUpdate": subnets}
                service_api.platform_agent_service_subnet_update(payload)
                click.secho("Service subnets updated.", fg="green")
            else:
                click.secho("Nothing to do for service configuration.", fg="yellow")
    # Finally, list the (possibly updated) endpoints back to the user.
    _get_endpoints(
        endpoint,
        None,
        None,
        skip,
        take,
        show_services,
        None,
        None,
        json,
        api,
    )
@apis.command()
@click.option("--id", default=None, type=int, help="Filter endpoints by ID.")
@click.option("--name", default=None, type=str, help="Filter endpoints by ID or name.")
@click.option("--skip", default=0, type=int, help="Skip N connections.")
@click.option("--take", default=42, type=int, help="Take N connections.")
@click.option(
    "--show-services",
    is_flag=True,
    default=False,
    help="Retrieves services that are configured for each endpoint.",
)
@click.option(
    "--json",
    "-j",
    is_flag=True,
    default=False,
    help="Outputs a JSON instead of a table.",
)
@syntropy_api
def get_connections(id, name, skip, take, show_services, json, api):
    """Retrieves connections.
    Connection service status is added to the end of the service name with the following possible symbols:
    \b
    ^ - Service is online.
    ! - There was an error exposing the service
    ~ - Service is in PENDING state
    ? - Unknown state
    By default this command will retrieve up to 42 connections. You can use --take parameter to get more connections.
    """
    filters = []
    if name:
        filters.append(f"id|name:{name}")
    if id:
        filters.append(f"agent_ids[]:{id}")
    connections = WithPagination(
        sdk.ConnectionsApi(api).platform_connection_groups_index
    )(
        filter=",".join(filters) if filters else None,
        skip=skip,
        take=take,
        _preload_content=False,
    )[
        "data"
    ]
    # (header, key[, formatter]) triples for print_table; tuple keys drill
    # into the nested agent_1/agent_2 dicts.
    fields = [
        ("ID", "agent_connection_group_id"),
        ("Endpoint 1", ("agent_1", "agent_name")),
        ("ID 1", ("agent_1", "agent_id")),
        ("IP 1", ("agent_1", "agent_public_ipv4")),
        ("Endpoint 2", ("agent_2", "agent_name")),
        ("ID 2", ("agent_2", "agent_id")),
        ("IP 2", ("agent_2", "agent_public_ipv4")),
        ("Status", "agent_connection_group_status"),
        ("Modified At", "agent_connection_group_updated_at"),
        ("Latency", "agent_connection_latency_ms"),
        ("Packet Loss", "agent_connection_packet_loss"),
    ]
    if show_services:
        # Fetch service status for all connection groups in batches and
        # merge it back into each connection record, keyed by group id.
        ids = [connection["agent_connection_group_id"] for connection in connections]
        connections_services = BatchedRequestQuery(
            sdk.ServicesApi(api).platform_connection_service_show,
            max_query_size=MAX_QUERY_FIELD_SIZE,
        )(ids, _preload_content=False)["data"]
        connection_services = {
            connection["agent_connection_group_id"]: connection
            for connection in connections_services
        }
        connections = [
            {
                **connection,
                "agent_connection_services": connection_services[
                    connection["agent_connection_group_id"]
                ],
            }
            for connection in connections
        ]
        fields.append(
            ("Services", "agent_connection_services", collect_connection_services)
        )
    print_table(connections, fields, to_json=json)
@apis.command()
@click.argument("agents", nargs=-1)
@click.option(
    "--use-names",
    is_flag=True,
    default=False,
    help="Use endpoint names instead of ids. Will not work with name duplicates.",
)
@click.option(
    "--json",
    "-j",
    is_flag=True,
    default=False,
    help="Outputs a JSON instead of a table.",
)
@syntropy_api
def create_connections(agents, use_names, json, api):
    """Create connections between endpoints. Number of endpoints must be even.
    \b
    Arguments:
        agents - a list of endpoint ids or names separated by spaces.
    In order to use endpoint names instead of ids provide --use-names option.
    Example:
        syntropyctl create-connections 1 2 3 4 5 6 7 8
    This command will create 4 connections from Endpoint 1 to Endpoint 2 like this:
    \b
        Endpoint 1 ID | Endpoint 2 ID
        1             | 2
        3             | 4
        5             | 6
        7             | 8
    """
    if use_names:
        # Resolve names to ids; find_by_name presumably reports unknown
        # names itself (None entries trigger the exit below). TODO confirm.
        all_agents = WithPagination(sdk.AgentsApi(api).platform_agent_index)(
            _preload_content=False
        )["data"]
        agents = find_by_name(all_agents, agents, "agent")
        if any(i is None for i in agents):
            raise SystemExit(1)
    else:
        try:
            agents = [int(i) for i in agents]
        except ValueError:
            click.secho("Invalid agent id", err=True, fg="red")
            raise SystemExit(1)
    if len(agents) == 0 or len(agents) % 2 != 0:
        click.secho("Number of agents must be even.", err=True, fg="red")
        raise SystemExit(1)
    # Pair consecutive ids: [1, 2, 3, 4] -> [(1, 2), (3, 4)].
    agents = list(zip(agents[:-1:2], agents[1::2]))
    body = {
        "agent_ids": [{"agent_1_id": a, "agent_2_id": b} for a, b in agents],
    }
    result = sdk.ConnectionsApi(api).platform_connection_create_p2p(body=body)
    if result and "errors" in result:
        for error in result["errors"]:
            click.secho(f"Error: {error.get('message')}", err=True, fg="red")
@apis.command()
@click.argument("endpoint-1", type=int)
@click.argument("endpoint-2", type=int)
@syntropy_api
def delete_connection(endpoint_1, endpoint_2, api):
    """Delete a connection."""
    payload = {
        "agent_1_id": endpoint_1,
        "agent_2_id": endpoint_2,
    }
    sdk.ConnectionsApi(api).platform_connections_destroy_deprecated(payload)
def main():
    """Console-script entry point: run the `apis` click group."""
    apis(prog_name="syntropyctl")
if __name__ == "__main__":
    main()
| StarcoderdataPython |
1886842 | '''
Tests for all sorts of locks.
'''
import etcd
import redis
import sherlock
import unittest
from mock import Mock
# Make reload available on both Python 2 (builtin) and Python 3
# (importlib.reload). The original fell back to "from implib import
# reload", but no such module exists -- and ModuleNotFoundError is itself
# absent on the interpreters where importlib.reload is missing, so that
# branch could never run successfully. importlib.reload exists on every
# supported Python 3, making a single fallback sufficient.
try:
    reload
except NameError:
    from importlib import reload
class TestBaseLock(unittest.TestCase):
    """Behavioural tests for sherlock.lock.BaseLock.

    BaseLock is abstract: the backend hooks (_acquire/_release/_locked)
    raise NotImplementedError, while the shared acquire/release logic
    (blocking, timeout, retry interval) is exercised here via mocks.
    """

    def test_init_uses_global_defaults(self):
        sherlock.configure(namespace='new_namespace')
        lock = sherlock.lock.BaseLock('lockname')
        self.assertEqual(lock.namespace, 'new_namespace')
    def test_init_does_not_use_global_default_for_client_obj(self):
        # The client object is backend-specific, so the global default
        # must not leak into the base class.
        client_obj = etcd.Client()
        sherlock.configure(client=client_obj)
        lock = sherlock.lock.BaseLock('lockname')
        self.assertNotEqual(lock.client, client_obj)
    def test__locked_raises_not_implemented_error(self):
        def _test(): sherlock.lock.BaseLock('')._locked
        self.assertRaises(NotImplementedError, _test)
    def test_locked_raises_not_implemented_error(self):
        self.assertRaises(NotImplementedError,
                          sherlock.lock.BaseLock('').locked)
    def test__acquire_raises_not_implemented_error(self):
        self.assertRaises(NotImplementedError,
                          sherlock.lock.BaseLock('')._acquire)
    def test_acquire_raises_not_implemented_error(self):
        self.assertRaises(NotImplementedError,
                          sherlock.lock.BaseLock('').acquire)
    def test__release_raises_not_implemented_error(self):
        self.assertRaises(NotImplementedError,
                          sherlock.lock.BaseLock('')._release)
    def test_release_raises_not_implemented_error(self):
        self.assertRaises(NotImplementedError,
                          sherlock.lock.BaseLock('').release)
    def test_acquire_acquires_blocking_lock(self):
        lock = sherlock.lock.BaseLock('')
        lock._acquire = Mock(return_value=True)
        self.assertTrue(lock.acquire())
    def test_acquire_acquires_non_blocking_lock(self):
        lock = sherlock.lock.BaseLock('123')
        lock._acquire = Mock(return_value=True)
        self.assertTrue(lock.acquire())
    def test_acquire_obeys_timeout(self):
        lock = sherlock.lock.BaseLock('123', timeout=1)
        lock._acquire = Mock(return_value=False)
        self.assertRaises(sherlock.LockTimeoutException, lock.acquire)
    def test_acquire_obeys_retry_interval(self):
        # 0.5s timeout / 0.1s interval -> initial attempt + 5 retries = 6.
        lock = sherlock.lock.BaseLock('123', timeout=0.5,
                                      retry_interval=0.1)
        lock._acquire = Mock(return_value=False)
        try:
            lock.acquire()
        except sherlock.LockTimeoutException:
            pass
        self.assertEqual(lock._acquire.call_count, 6)
    def test_deleting_lock_object_releases_the_lock(self):
        lock = sherlock.lock.BaseLock('123')
        release_func = Mock()
        lock.release = release_func
        del lock
        self.assertTrue(release_func.called)
class TestLock(unittest.TestCase):
    """Tests for the backend-dispatching Lock facade."""

    def setUp(self):
        # Reload to reset sherlock's module-level configuration per test.
        reload(sherlock)
        reload(sherlock.lock)
    def test_lock_does_not_accept_custom_client_object(self):
        self.assertRaises(TypeError, sherlock.lock.Lock, client=None)
    def test_lock_does_not_create_proxy_when_backend_is_not_set(self):
        sherlock._configuration._backend = None
        sherlock._configuration._client = None
        lock = sherlock.lock.Lock('')
        self.assertEqual(lock._lock_proxy, None)
        self.assertRaises(sherlock.lock.LockException, lock.acquire)
        self.assertRaises(sherlock.lock.LockException, lock.release)
        self.assertRaises(sherlock.lock.LockException, lock.locked)
    def test_lock_creates_proxy_when_backend_is_set(self):
        sherlock._configuration.backend = sherlock.backends.ETCD
        lock = sherlock.lock.Lock('')
        self.assertTrue(isinstance(lock._lock_proxy,
                                   sherlock.lock.EtcdLock))
    def test_lock_uses_proxys_methods(self):
        # Patch RedisLock's hooks so delegation can be observed.
        sherlock.lock.RedisLock._acquire = Mock(return_value=True)
        sherlock.lock.RedisLock._release = Mock()
        sherlock.lock.RedisLock.locked = Mock(return_value=False)
        sherlock._configuration.backend = sherlock.backends.REDIS
        lock = sherlock.lock.Lock('')
        lock.acquire()
        self.assertTrue(sherlock.lock.RedisLock._acquire.called)
        lock.release()
        self.assertTrue(sherlock.lock.RedisLock._release.called)
        lock.locked()
        self.assertTrue(sherlock.lock.RedisLock.locked.called)
    def test_lock_sets_client_object_on_lock_proxy_when_globally_configured(self):
        client = etcd.Client(host='8.8.8.8')
        sherlock.configure(client=client)
        lock = sherlock.lock.Lock('lock')
        self.assertEqual(lock._lock_proxy.client, client)
class TestRedisLock(unittest.TestCase):
    """Key-name generation tests for the Redis lock backend."""

    def setUp(self):
        # Start every test from a pristine module-level configuration.
        reload(sherlock)
        reload(sherlock.lock)

    def test_valid_key_names_are_generated_when_namespace_not_set(self):
        lock = sherlock.lock.RedisLock('lock')
        self.assertEqual('lock', lock._key_name)

    def test_valid_key_names_are_generated_when_namespace_is_set(self):
        # A per-instance namespace prefixes the key immediately.
        lock = sherlock.lock.RedisLock('lock', namespace='local_namespace')
        self.assertEqual('local_namespace_lock', lock._key_name)
        # A globally configured namespace applies to new instances.
        sherlock.configure(namespace='global_namespace')
        lock = sherlock.lock.RedisLock('lock')
        self.assertEqual('global_namespace_lock', lock._key_name)
class TestEtcdLock(unittest.TestCase):
    """Key-name generation tests for the etcd lock backend."""

    def setUp(self):
        # Start every test from a pristine module-level configuration.
        reload(sherlock)
        reload(sherlock.lock)

    def test_valid_key_names_are_generated_when_namespace_not_set(self):
        lock = sherlock.lock.EtcdLock('lock')
        self.assertEqual('/lock', lock._key_name)

    def test_valid_key_names_are_generated_when_namespace_is_set(self):
        # A per-instance namespace becomes a path segment in the key.
        lock = sherlock.lock.EtcdLock('lock', namespace='local_namespace')
        self.assertEqual('/local_namespace/lock', lock._key_name)
        # A globally configured namespace applies to new instances.
        sherlock.configure(namespace='global_namespace')
        lock = sherlock.lock.EtcdLock('lock')
        self.assertEqual('/global_namespace/lock', lock._key_name)
class TestMCLock(unittest.TestCase):
    """Key-name generation tests for the memcached lock backend."""

    def setUp(self):
        # Reload to reset sherlock's module-level configuration per test.
        reload(sherlock)
        reload(sherlock.lock)
    def test_valid_key_names_are_generated_when_namespace_not_set(self):
        name = 'lock'
        lock = sherlock.lock.MCLock(name)
        self.assertEqual(lock._key_name, name)
    def test_valid_key_names_are_generated_when_namespace_is_set(self):
        name = 'lock'
        lock = sherlock.lock.MCLock(name, namespace='local_namespace')
        self.assertEqual(lock._key_name, 'local_namespace_%s' % name)
        sherlock.configure(namespace='global_namespace')
        lock = sherlock.lock.MCLock(name)
        self.assertEqual(lock._key_name, 'global_namespace_%s' % name)
| StarcoderdataPython |
11288836 | <reponame>AllenInstitute/render-modules
import os
import pytest
import renderapi
import glob
import copy
import marshmallow as mm
from test_data import (
ROUGH_MONTAGE_TILESPECS_JSON,
ROUGH_MONTAGE_TRANSFORM_JSON,
ROUGH_POINT_MATCH_COLLECTION,
ROUGH_DS_TEST_TILESPECS_JSON,
ROUGH_MAPPED_PT_MATCH_COLLECTION,
render_params,
test_rough_parameters as solver_example,
apply_rough_alignment_example as ex1,
pool_size,
test_legacy_rough)
from asap.module.render_module import RenderModuleException
from asap.materialize.render_downsample_sections import (
RenderSectionAtScale)
from asap.dataimport.make_montage_scapes_stack import (
MakeMontageScapeSectionStack)
from asap.deprecated.rough_align.do_rough_alignment import (
SolveRoughAlignmentModule)
from asap.rough_align.apply_rough_alignment_to_montages import (
ApplyRoughAlignmentTransform)
# skip these tests if not explicitly asked for
# (legacy rough-alignment coverage is opt-in; test_data derives
# `test_legacy_rough` from the ASAP_TEST_LEGACY_ROUGH env var).
pytestmark = pytest.mark.skipif(not test_legacy_rough, reason=(
    "legacy code not being tested -- to test, "
    "set environment variable ASAP_TEST_LEGACY_ROUGH"))
@pytest.fixture(scope='module')
def render():
    """Module-scoped render client bound to the rough-align test project."""
    # Every test in this module shares one connection to the same project.
    render_params['project'] = 'rough_align_test'
    return renderapi.connect(**render_params)
@pytest.fixture(scope='module')
def resolvedtiles_from_json():
    """ResolvedTiles built from the canned montage tilespec/transform JSON."""
    specs = [
        renderapi.tilespec.TileSpec(json=entry)
        for entry in ROUGH_MONTAGE_TILESPECS_JSON
    ]
    tforms = [
        renderapi.transform.load_transform_json(entry)
        for entry in ROUGH_MONTAGE_TRANSFORM_JSON
    ]
    return renderapi.resolvedtiles.ResolvedTiles(specs, tforms)
@pytest.fixture(scope='module')
def tspecs():
    """TileSpec objects built from the downsample-test tilespec JSON."""
    return [
        renderapi.tilespec.TileSpec(json=entry)
        for entry in ROUGH_DS_TEST_TILESPECS_JSON
    ]
# A stack with multiple sections montaged
@pytest.fixture(scope='module')
def montage_stack(render, resolvedtiles_from_json):
    """Create the montaged input stack (zs 1020-1022) on the render server,
    yield its name, and delete it on teardown."""
    tspecs = resolvedtiles_from_json.tilespecs
    tforms = resolvedtiles_from_json.transforms
    test_montage_stack = 'input_montage_stack'
    renderapi.stack.create_stack(test_montage_stack, render=render)
    renderapi.client.import_tilespecs(test_montage_stack,
                                      tspecs, sharedTransforms=tforms,
                                      render=render)
    renderapi.stack.set_stack_state(test_montage_stack,
                                    'COMPLETE',
                                    render=render)
    # assure stack is built correctly
    # (empty symmetric difference => imported tileIds match the specs)
    assert len({tspec.tileId for tspec
                in tspecs}.symmetric_difference(set(
                    renderapi.stack.get_stack_tileIds(
                        test_montage_stack, render=render)))) == 0
    zvalues = render.run(renderapi.stack.get_z_values_for_stack,
                         test_montage_stack)
    zs = [1020, 1021, 1022]
    assert(set(zvalues) == set(zs))
    yield test_montage_stack
    # teardown: remove the stack from the server
    renderapi.stack.delete_stack(test_montage_stack, render=render)
@pytest.fixture(scope='module')
def downsample_sections_dir(montage_stack, tmpdir_factory):
    """Render downsampled section images twice -- first with stack bounds,
    then with per-section bounds -- and yield the image directory."""
    image_directory = str(tmpdir_factory.mktemp('rough_align'))
    print(render_params)
    ex = {
        "render": render_params,
        "input_stack": montage_stack,
        "image_directory": image_directory,
        "imgformat": "png",
        "scale": 0.1,
        "minZ": 1020,
        "maxZ": 1022,
    }
    ex['output_json'] = os.path.join(image_directory, 'output.json')
    # set bounds parameter to stack bounds
    ex['use_stack_bounds'] = "True"
    mod = RenderSectionAtScale(input_data=ex, args=[])
    mod.run()
    out_dir = os.path.join(
        image_directory, render_params['project'], montage_stack,
        'sections_at_0.1/001/0')
    assert(os.path.exists(out_dir))
    # one non-empty png per z in [minZ, maxZ]
    files = glob.glob(os.path.join(out_dir, '*.png'))
    assert(len(files) == len(range(ex['minZ'], ex['maxZ']+1)))
    for fil in files:
        img = os.path.join(out_dir, fil)
        assert(os.path.exists(img) and
               os.path.isfile(img) and
               os.path.getsize(img) > 0)
    # remove files from the previous run
    # NOTE(review): os.system("rm ...") is unix-only; glob + os.remove
    # would be portable.
    os.system("rm {}/*.png".format(out_dir))
    # set bounds to section bounds
    ex['use_stack_bounds'] = "False"
    mod = RenderSectionAtScale(input_data=ex, args=[])
    mod.run()
    out_dir = os.path.join(
        image_directory, render_params['project'], montage_stack,
        'sections_at_0.1/001/0')
    assert(os.path.exists(out_dir))
    files = glob.glob(os.path.join(out_dir, '*.png'))
    assert(len(files) == len(range(ex['minZ'], ex['maxZ']+1)))
    for fil in files:
        img = os.path.join(out_dir, fil)
        assert(os.path.exists(img) and
               os.path.isfile(img) and
               os.path.getsize(img) > 0)
    yield image_directory
@pytest.fixture(scope='module')
def rough_point_matches_from_json():
    """Shallow copy of the canned rough point-match collection."""
    # list() replaces the original's redundant identity comprehension
    # (ruff C416); copying still shields the module-level test data from
    # in-place mutation by consumers.
    return list(ROUGH_POINT_MATCH_COLLECTION)
@pytest.fixture(scope='module')
def rough_mapped_pt_matches_from_json():
    """Shallow copy of the canned z-mapped rough point-match collection."""
    # list() replaces the original's redundant identity comprehension
    # (ruff C416); copying still shields the module-level test data from
    # in-place mutation by consumers.
    return list(ROUGH_MAPPED_PT_MATCH_COLLECTION)
@pytest.fixture(scope='module')
def montage_scape_stack(render, montage_stack, downsample_sections_dir):
    """Build a downsampled montage-scape stack (zs 1020-1022) from the
    rendered section images; yield its name, delete it on teardown."""
    output_stack = '{}_DS'.format(montage_stack)
    params = {
        "render": render_params,
        "montage_stack": montage_stack,
        "output_stack": output_stack,
        "image_directory": downsample_sections_dir,
        "imgformat": "png",
        "scale": 0.1,
        "zstart": 1020,
        "zend": 1022
    }
    outjson = 'test_montage_scape_output.json'
    mod = MakeMontageScapeSectionStack(
        input_data=params, args=['--output_json', outjson])
    mod.run()
    zvalues = render.run(renderapi.stack.get_z_values_for_stack, output_stack)
    zs = [1020, 1021, 1022]
    assert(set(zvalues) == set(zs))
    yield output_stack
    renderapi.stack.delete_stack(output_stack, render=render)
@pytest.fixture(scope='module')
def montage_scape_stack_with_scale(render, montage_stack,
                                   downsample_sections_dir):
    """Same as montage_scape_stack but with apply_scale=True, exercising
    the scale-applied variant of the montage-scape build."""
    output_stack = '{}_DS_scale'.format(montage_stack)
    params = {
        "render": render_params,
        "montage_stack": montage_stack,
        "output_stack": output_stack,
        "image_directory": downsample_sections_dir,
        "imgformat": "png",
        "scale": 0.1,
        "apply_scale": "True",
        "zstart": 1020,
        "zend": 1022
    }
    outjson = 'test_montage_scape_output.json'
    mod = MakeMontageScapeSectionStack(
        input_data=params, args=['--output_json', outjson])
    mod.run()
    zvalues = render.run(renderapi.stack.get_z_values_for_stack, output_stack)
    zs = [1020, 1021, 1022]
    assert(set(zvalues) == set(zs))
    yield output_stack
    renderapi.stack.delete_stack(output_stack, render=render)
@pytest.fixture(scope='module')
def montage_z_mapped_stack(render, montage_stack, downsample_sections_dir):
    """Montage-scape stack exercising z remapping (set_new_z) and the
    overwrite_zlayer flag; yields the stack name with zs 251-253."""
    output_stack = '{}_mapped_DS'.format(montage_stack)
    # First run: map zs 1020-1021 onto 251-252.
    params = {
        "render": render_params,
        "montage_stack": montage_stack,
        "output_stack": output_stack,
        "image_directory": downsample_sections_dir,
        "imgformat": "png",
        "scale": 0.1,
        "zstart": 1020,
        "zend": 1021,
        "set_new_z": True,
        "new_z_start": 251
    }
    outjson = 'test_montage_scape_output.json'
    mod = MakeMontageScapeSectionStack(
        input_data=params, args=['--output_json', outjson])
    mod.run()
    zvalues = render.run(renderapi.stack.get_z_values_for_stack, output_stack)
    zs = [251, 252]
    for z in zs:
        assert(z in zvalues)
    # check for overwrite = False
    # Second run: append z 1022 as 253 without overwrite.
    params1 = {
        "render": render_params,
        "montage_stack": montage_stack,
        "output_stack": output_stack,
        "image_directory": downsample_sections_dir,
        "imgformat": "png",
        "scale": 0.1,
        "zstart": 1022,
        "zend": 1022,
        "set_new_z": True,
        "new_z_start": 253
    }
    mod = MakeMontageScapeSectionStack(
        input_data=params1, args=['--output_json', outjson])
    mod.run()
    zvalues = render.run(renderapi.stack.get_z_values_for_stack, output_stack)
    zs = [251, 252, 253]
    for z in zs:
        assert(z in zvalues)
    # Third run: re-import z 253 with overwrite_zlayer=True and close stack.
    params2 = {
        "render": render_params,
        "montage_stack": montage_stack,
        "output_stack": output_stack,
        "image_directory": downsample_sections_dir,
        "overwrite_zlayer": True,
        "imgformat": "png",
        "scale": 0.1,
        "zstart": 1022,
        "zend": 1022,
        "set_new_z": True,
        "new_z_start": 253,
        "close_stack": True
    }
    mod = MakeMontageScapeSectionStack(
        input_data=params2, args=['--output_json', outjson])
    mod.run()
    zvalues = render.run(renderapi.stack.get_z_values_for_stack, output_stack)
    zs = [251, 252, 253]
    for z in zs:
        assert(z in zvalues)
    yield output_stack
    renderapi.stack.delete_stack(output_stack, render=render)
@pytest.fixture(scope='module')
def rough_point_match_collection(render, rough_point_matches_from_json):
    """Import the rough point matches into a collection and yield its name.

    The collection is deleted again on teardown.
    """
    pt_match_collection = 'rough_point_match_collection'
    renderapi.pointmatch.import_matches(pt_match_collection,
                                        rough_point_matches_from_json,
                                        render=render)
    # Check that the point matches have actually been imported: importing a
    # non-empty match list must produce at least one group id.  (Previously
    # the group ids were fetched but never checked.)
    groupIds = render.run(
        renderapi.pointmatch.get_match_groupIds, pt_match_collection)
    assert len(groupIds) > 0
    yield pt_match_collection
    render.run(renderapi.pointmatch.delete_collection, pt_match_collection)
@pytest.fixture(scope='module')
def rough_mapped_pt_match_collection(
        render, rough_mapped_pt_matches_from_json):
    """Import the mapped rough point matches into a collection; yield its name."""
    collection_name = 'rough_mapped_point_match_collection'
    renderapi.pointmatch.import_matches(collection_name,
                                        rough_mapped_pt_matches_from_json,
                                        render=render)
    # Verify the import succeeded: the mapped matches span exactly 3 groups.
    group_ids = render.run(
        renderapi.pointmatch.get_match_groupIds, collection_name)
    assert len(group_ids) == 3
    yield collection_name
    # Teardown: drop the collection again.
    render.run(
        renderapi.pointmatch.delete_collection, collection_name)
@pytest.fixture(scope='module')
def test_do_rough_alignment(
        render, montage_scape_stack, rough_point_match_collection,
        tmpdir_factory, output_lowres_stack=None):
    """Run the rough-alignment solver on the montage-scape stack.

    Yields the name of the solved low-resolution stack and deletes it on
    teardown.

    Fix: removed a ``copy.deepcopy(solver_example)`` whose result was
    immediately overwritten by the ``dict(...)`` merge below (dead code).
    The merge builds fresh nested dicts, so ``solver_example`` itself is
    not mutated.
    """
    if output_lowres_stack is None:
        output_lowres_stack = '{}_DS_Rough'.format(montage_scape_stack)
    output_directory = str(tmpdir_factory.mktemp('output_json'))
    solver_ex = dict(solver_example, **{
        'output_json': os.path.join(output_directory, 'output.json'),
        'source_collection': dict(solver_example['source_collection'], **{
            'stack': montage_scape_stack}),
        'target_collection': dict(solver_example['target_collection'], **{
            'stack': output_lowres_stack}),
        'source_point_match_collection': dict(
            solver_example['source_point_match_collection'], **{
                'match_collection': rough_point_match_collection
            })
    })
    mod = SolveRoughAlignmentModule(input_data=solver_ex, args=[])
    mod.run()
    # The solved stack must contain exactly the z values of the input stack.
    zvalues = render.run(
        renderapi.stack.get_z_values_for_stack, output_lowres_stack)
    assert set(zvalues) == {1020, 1021, 1022}
    yield output_lowres_stack
    renderapi.stack.delete_stack(output_lowres_stack, render=render)
@pytest.fixture(scope="module")
def test_do_mapped_rough_alignment(
        render, montage_z_mapped_stack, rough_mapped_pt_match_collection,
        tmpdir_factory, output_lowres_stack=None):
    """Run the rough-alignment solver on the z-remapped stack (z 251-253).

    Yields the name of the solved low-resolution stack and deletes it on
    teardown.

    Fix: removed a ``copy.deepcopy(solver_example)`` whose result was
    immediately overwritten by the ``dict(...)`` merge below (dead code).
    """
    if output_lowres_stack is None:
        output_lowres_stack = '{}_DS_Rough'.format(montage_z_mapped_stack)
    output_directory = str(tmpdir_factory.mktemp('output_json'))
    solver_ex = dict(solver_example, **{
        'output_json': os.path.join(output_directory, 'output.json'),
        'first_section': 251,
        'last_section': 253,
        'source_collection': dict(solver_example['source_collection'], **{
            'stack': montage_z_mapped_stack}),
        'target_collection': dict(solver_example['target_collection'], **{
            'stack': output_lowres_stack}),
        'source_point_match_collection': dict(
            solver_example['source_point_match_collection'], **{
                'match_collection': rough_mapped_pt_match_collection
            })
    })
    mod = SolveRoughAlignmentModule(input_data=solver_ex, args=[])
    mod.run()
    # The solved stack must contain exactly the remapped z values.
    zvalues = render.run(
        renderapi.stack.get_z_values_for_stack, output_lowres_stack)
    assert set(zvalues) == {251, 252, 253}
    yield output_lowres_stack
    renderapi.stack.delete_stack(output_lowres_stack, render=render)
@pytest.fixture(scope='module')
def test_do_rough_alignment_with_scale(
        render, montage_scape_stack_with_scale, rough_point_match_collection,
        tmpdir_factory, output_lowres_stack=None):
    """Run the rough-alignment solver on the scale-applied montage-scape stack.

    Yields the name of the solved low-resolution stack and deletes it on
    teardown.

    Fix: removed a ``copy.deepcopy(solver_example)`` whose result was
    immediately overwritten by the ``dict(...)`` merge below (dead code).
    """
    if output_lowres_stack is None:
        output_lowres_stack = '{}_DS_Rough_scale'.format(
            montage_scape_stack_with_scale)
    output_directory = str(tmpdir_factory.mktemp('output_json'))
    solver_ex = dict(solver_example, **{
        'output_json': os.path.join(output_directory, 'output.json'),
        'source_collection': dict(solver_example['source_collection'], **{
            'stack': montage_scape_stack_with_scale}),
        'target_collection': dict(solver_example['target_collection'], **{
            'stack': output_lowres_stack}),
        'source_point_match_collection': dict(
            solver_example['source_point_match_collection'], **{
                'match_collection': rough_point_match_collection
            })
    })
    mod = SolveRoughAlignmentModule(input_data=solver_ex, args=[])
    mod.run()
    # The solved stack must contain exactly the z values of the input stack.
    zvalues = render.run(
        renderapi.stack.get_z_values_for_stack, output_lowres_stack)
    assert set(zvalues) == {1020, 1021, 1022}
    yield output_lowres_stack
    renderapi.stack.delete_stack(output_lowres_stack, render=render)
def test_mapped_apply_rough_alignment_transform(
        render, montage_stack, test_do_mapped_rough_alignment,
        tmpdir_factory, prealigned_stack=None, output_stack=None):
    """Apply rough alignment with z remapping and check the mapped z values."""
    test_input = dict(ex1, **{
        'render': dict(ex1['render'], **render_params),
        'montage_stack': montage_stack,
        'lowres_stack': test_do_mapped_rough_alignment,
        'prealigned_stack': None,
        'output_stack': '{}_Rough'.format(montage_stack),
        'tilespec_directory': str(tmpdir_factory.mktemp('scratch')),
        'old_z': [1020, 1021, 1022],
        'new_z': [251, 252, 253],
        'map_z': True,
        'scale': 0.1,
        'pool_size': pool_size,
        'output_json': str(tmpdir_factory.mktemp('output').join(
            'output.json')),
        'loglevel': 'DEBUG'
    })
    module = ApplyRoughAlignmentTransform(input_data=test_input, args=[])
    module.run()
    # The output stack carries the remapped z values, not the originals.
    mapped_zs = render.run(
        renderapi.stack.get_z_values_for_stack, test_input['output_stack'])
    assert set(mapped_zs) == {251, 252, 253}
    # test for map_z_start validation error
    # A negative new_z must be rejected by schema validation at construction.
    bad_input = dict(test_input, **{'new_z': [-1]})
    with pytest.raises(mm.ValidationError):
        ApplyRoughAlignmentTransform(input_data=bad_input, args=[])
def test_apply_rough_alignment_transform(
        render, montage_stack, test_do_rough_alignment,
        tmpdir_factory, prealigned_stack=None, output_stack=None):
    """Apply rough alignment without z mapping; compare resolved tiles per z."""
    test_input = dict(ex1, **{
        'render': dict(ex1['render'], **render_params),
        'montage_stack': montage_stack,
        'lowres_stack': test_do_rough_alignment,
        'prealigned_stack': None,
        'output_stack': '{}_Rough'.format(montage_stack),
        'tilespec_directory': str(tmpdir_factory.mktemp('scratch')),
        'old_z': [1020, 1021, 1022],
        'scale': 0.1,
        'pool_size': pool_size,
        'output_json': str(tmpdir_factory.mktemp('output').join(
            'output.json')),
        'loglevel': 'DEBUG'
    })
    module = ApplyRoughAlignmentTransform(input_data=test_input, args=[])
    module.run()
    z_range = range(1020, 1023)
    aligned_zs = render.run(
        renderapi.stack.get_z_values_for_stack, test_input['output_stack'])
    assert set(aligned_zs) == set(z_range)
    for z in z_range:
        # WARNING: montage stack should be different than output stack
        source_tiles = render.run(
            renderapi.resolvedtiles.get_resolved_tiles_from_z,
            test_input['montage_stack'], z)
        aligned_tiles = render.run(
            renderapi.resolvedtiles.get_resolved_tiles_from_z,
            test_input['output_stack'], z)
        # Shared transforms must exist and be carried over unchanged, and
        # every output tilespec must lead with a reference transform.
        assert source_tiles.transforms
        assert source_tiles.transforms == aligned_tiles.transforms
        assert all(
            isinstance(ts.tforms[0], renderapi.transform.ReferenceTransform)
            for ts in aligned_tiles.tilespecs)
def test_apply_rough_alignment_transform_with_scale(
        render, montage_stack, test_do_rough_alignment_with_scale,
        tmpdir_factory, prealigned_stack=None, output_stack=None):
    """Apply rough alignment with ``apply_scale`` and compare resolved tiles.

    Fix: removed an ``ex = copy.deepcopy(ex1)`` whose result was
    immediately overwritten by the ``dict(ex1, ...)`` merge below (dead
    code); the merge builds a fresh dict, so ``ex1`` is not mutated.
    """
    ex = dict(ex1, **{
        'render': dict(ex1['render'], **render_params),
        'montage_stack': montage_stack,
        'lowres_stack': test_do_rough_alignment_with_scale,
        'prealigned_stack': None,
        'output_stack': '{}_Rough_scaled'.format(montage_stack),
        'tilespec_directory': str(tmpdir_factory.mktemp('scratch')),
        'old_z': [1020, 1021, 1022],
        'scale': 0.1,
        'apply_scale': "True",
        'pool_size': pool_size,
        'output_json': str(tmpdir_factory.mktemp('output').join(
            'output.json')),
        'loglevel': 'DEBUG'
    })
    mod = ApplyRoughAlignmentTransform(input_data=ex, args=[])
    mod.run()
    zs = range(1020, 1023)
    zvalues = render.run(
        renderapi.stack.get_z_values_for_stack, ex['output_stack'])
    assert set(zvalues) == set(zs)
    for z in zs:
        # WARNING: montage stack should be different than output stack
        in_resolvedtiles = render.run(
            renderapi.resolvedtiles.get_resolved_tiles_from_z,
            ex['montage_stack'], z)
        out_resolvedtiles = render.run(
            renderapi.resolvedtiles.get_resolved_tiles_from_z,
            ex['output_stack'], z)
        # Shared transforms must exist and be carried over unchanged, and
        # every output tilespec must lead with a reference transform.
        assert in_resolvedtiles.transforms
        assert in_resolvedtiles.transforms == out_resolvedtiles.transforms
        assert all(
            isinstance(ts.tforms[0], renderapi.transform.ReferenceTransform)
            for ts in out_resolvedtiles.tilespecs)
def test_solver_default_options(
        render, montage_scape_stack, rough_point_match_collection,
        tmpdir_factory):
    """Exercise SolveRoughAlignmentModule failure modes with defaulted options.

    Three inputs are built from ``solver_example``:
      * solver_ex1: single-section range (1020-1020) with all optional
        collection fields explicitly None -> RenderModuleException.
      * solver_ex2: inverted section range (first 1022, last 1020)
        -> RenderModuleException.
      * solver_ex3: same as solver_ex2, run after MCRROOT is removed from
        the environment -> ValidationError.
    """
    output_lowres_stack = '{}_DS_Rough'.format(montage_scape_stack)
    output_directory = str(tmpdir_factory.mktemp('output_json'))
    solver_ex1 = copy.deepcopy(solver_example)
    solver_ex1 = dict(solver_ex1, **{
        'output_json': os.path.join(output_directory, 'output.json'),
        'source_collection': dict(solver_example['source_collection'], **{
            'stack': montage_scape_stack,
            'owner': None,
            'project': None,
            'service_host': None,
            'baseURL': None,
            'renderbinPath': None}),
        'target_collection': dict(solver_example['target_collection'], **{
            'stack': output_lowres_stack,
            'owner': None,
            'project': None,
            'service_host': None,
            'baseURL': None,
            'renderbinPath': None}),
        'source_point_match_collection': dict(
            solver_example['source_point_match_collection'], **{
                'match_collection': rough_point_match_collection,
                'server': None,
                'owner': None
            }),
        'first_section': 1020,
        'last_section': 1020})
    # Same input but with an inverted (invalid) section range.
    solver_ex2 = dict(solver_ex1, **{
        'first_section': 1022,
        'last_section': 1020})
    # Shallow copy: solver_ex3 shares solver_ex2's nested dicts.
    solver_ex3 = copy.copy(solver_ex2)
    # NOTE(review): these pops mutate the shared ``solver_example`` module
    # global.  The solver_ex1/2/3 dicts above hold freshly-built nested
    # dicts, so they are unaffected; presumably this exercises schema
    # defaults when the keys are absent -- confirm, and beware of
    # cross-test interference from mutating the shared example.
    solver_example['source_collection'].pop('owner', None)
    solver_example['source_collection'].pop('project', None)
    solver_example['source_collection'].pop('service_host', None)
    solver_example['source_collection'].pop('baseURL', None)
    solver_example['source_collection'].pop('renderbinPath', None)
    solver_example['target_collection'].pop('owner', None)
    solver_example['target_collection'].pop('project', None)
    solver_example['target_collection'].pop('service_host', None)
    solver_example['target_collection'].pop('baseURL', None)
    solver_example['target_collection'].pop('renderbinPath', None)
    mod = SolveRoughAlignmentModule(input_data=solver_ex1, args=[])
    with pytest.raises(RenderModuleException):
        mod.run()
    mod = SolveRoughAlignmentModule(input_data=solver_ex2, args=[])
    with pytest.raises(RenderModuleException):
        mod.run()
    # NOTE(review): presumably the solver requires MCRROOT to be set; with
    # it removed the input fails validation -- confirm against the module.
    os.environ.pop('MCRROOT', None)
    mod = SolveRoughAlignmentModule(input_data=solver_ex3, args=[])
    with pytest.raises(mm.ValidationError):
        mod.run()
| StarcoderdataPython |
3403627 | <filename>blqs/blqs/loops_test.py
# Copyright 2021 The Blqs Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pymore
import pytest
import blqs
def test_for_eq():
    """Equality of For statements depends on the loop body and else body."""
    eq = pymore.EqualsTester()

    def fresh_for():
        return blqs.For(blqs.Iterable("range(5)", blqs.Register("a")))

    # Empty For statements over the same iterable are all equal.
    eq.make_equality_group(fresh_for)

    # A non-empty loop body distinguishes the statement.
    loop_only = fresh_for()
    with loop_only.loop_block():
        blqs.Statement()
    eq.add_equality_group(loop_only)

    # Adding an else body distinguishes it again.
    with_else = fresh_for()
    with with_else.loop_block():
        blqs.Statement()
    with with_else.else_block():
        blqs.Statement()
    eq.add_equality_group(with_else)

    # A longer else body is yet another distinct group.
    longer_else = fresh_for()
    with longer_else.loop_block():
        blqs.Statement()
    with longer_else.else_block():
        blqs.Statement()
        blqs.Statement()
    eq.add_equality_group(longer_else)
def test_for_str():
    """String form shows the loop header, body, and optional else clause."""
    iterable = blqs.Iterable("range(5)", blqs.Register("a"))
    loop = blqs.For(iterable)
    with loop.loop_block():
        blqs.Op("MOV")(0, 1)
    assert str(loop) == "for R(a) in range(5):\n  MOV 0, 1"
    with loop.else_block():
        blqs.Op("H")(0)
    assert str(loop) == "for R(a) in range(5):\n  MOV 0, 1\nelse:\n  H 0"
def test_for_iterable_not_iterable():
    """Constructing a For from a non-iterable value raises an assertion."""
    with pytest.raises(AssertionError, match="SupportsIterable"):
        blqs.For(1)
def test_for_iterable():
    """A For exposes its iterable and the iterable's loop variables."""
    reg = blqs.Register("a")
    iterable = blqs.Iterable("range(5)", reg)
    loop = blqs.For(iterable)
    assert loop.iterable() == iterable
    assert loop.loop_vars() == (reg,)
def test_for_blocks():
    """Statements created inside the context managers land in the right block."""
    loop = blqs.For(blqs.Iterable("range(5)", blqs.Register("a")))
    with loop.loop_block():
        body_stmt = blqs.Statement()
    assert loop.loop_block() == blqs.Block.of(body_stmt)
    with loop.else_block():
        else_stmt = blqs.Statement()
    assert loop.else_block() == blqs.Block.of(else_stmt)
def test_while_eq():
    """Equality of While statements depends on the condition and body."""
    eq = pymore.EqualsTester()
    # Empty While statements over the same condition are equal; a different
    # condition is a different group.
    eq.make_equality_group(lambda: blqs.While(blqs.Register("a")))
    eq.add_equality_group(blqs.While(blqs.Register("b")))
    # A non-empty loop body distinguishes the statement.
    loop = blqs.While(blqs.Register("a"))
    with loop.loop_block():
        blqs.Statement()
    eq.add_equality_group(loop)
    # A two-statement loop body is yet another distinct group.
    loop = blqs.While(blqs.Register("a"))
    with loop.loop_block():
        blqs.Statement()
    # NOTE(review): the parallel group in test_for_eq uses else_block()
    # here; this second loop_block() may be intentional (longer loop body)
    # or a copy-paste slip -- confirm with the blqs authors.
    with loop.loop_block():
        blqs.Statement()
    eq.add_equality_group(loop)
def test_while_str():
    """String form shows the while header, body, and optional else clause."""
    loop = blqs.While(blqs.Register("a"))
    with loop.loop_block():
        blqs.Op("MOV")(0, 1)
    assert str(loop) == "while R(a):\n  MOV 0, 1\n"
    with loop.else_block():
        blqs.Op("H")(0)
    assert str(loop) == "while R(a):\n  MOV 0, 1\nelse:\n  H 0"
def test_while_condition_not_readable():
    """Constructing a While from a non-readable condition raises an assertion."""
    with pytest.raises(AssertionError, match="SupportsIsReadable"):
        blqs.While(True)
def test_while_condition():
    """The condition passed at construction is returned by condition()."""
    condition = blqs.Register("a")
    loop = blqs.While(condition)
    assert loop.condition() == condition
def test_while_blocks():
    """Statements created inside the context managers land in the right block."""
    loop = blqs.While(blqs.Register("a"))
    with loop.loop_block():
        body_stmt = blqs.Statement()
    assert loop.loop_block() == blqs.Block.of(body_stmt)
    with loop.else_block():
        else_stmt = blqs.Statement()
    assert loop.else_block() == blqs.Block.of(else_stmt)
| StarcoderdataPython |
9634244 | #! /usr/bin/env python3
import sys
sys.argv.append( '-b' ) # batch mode
import os
import ROOT
import yaml
import math
from ROOT import gROOT
# gROOT.LoadMacro("asdf.cxx")
from ROOT import TProfile2D,TProfile
import array
# Sum up the bins of the y-axis, return a list of (value, error) per y bin.
def SumUpProfile(Pf2, CentBin):
    """Project *Pf2* onto its y axis and collect (content, error) per y bin.

    Parameters
    ----------
    Pf2 : ROOT TProfile2D-like object
        The 2D profile to project.
    CentBin : int
        Centrality bin index; only used to give the projection a unique
        name so ROOT does not replace an existing object.

    Returns
    -------
    list of (float, float)
        (bin content, bin error) for each y bin, in axis order.

    Fixes: removed the unused ``nBinsX`` variable and dead commented-out
    code; the manual append loop is now a comprehension.
    """
    n_bins_y = Pf2.GetYaxis().GetNbins()
    y_proj = Pf2.ProfileY("%s_pfy_Cent%d" % (Pf2.GetName(), CentBin))
    # ROOT bins are 1-indexed; bin 0 is the underflow bin, so skip it.
    return [(y_proj.GetBinContent(j), y_proj.GetBinError(j))
            for j in range(1, n_bins_y + 1)]
def _report_eprs(local_dir, cent_bin, name_prefix, result_fmt):
    """Compute and print event-plane resolutions for one harmonic.

    For each of the six N values, reads the three TProfile2Ds named
    "<name_prefix>_CosD{1,2,3}_N<i>" from *local_dir* and applies the
    three-subevent formula  R = sqrt(<cos D1> * <cos D2> / <cos D3>).

    Parameters
    ----------
    local_dir : ROOT directory-like object supporting FindObject().
    cent_bin : int
        Centrality bin index into the per-bin lists from SumUpProfile.
    name_prefix : str
        Histogram name prefix, e.g. "EPR", "EP3R" or "EP4R".
    result_fmt : str
        printf-style format taking (N, value, uncertainty).
    """
    for i in range(6):  # harmonic index N = i + 1
        per_delta = []
        for j in range(3):  # the three sub-event combinations D1, D2, D3
            pf2 = local_dir.FindObject(
                "%s_CosD%d_N%d" % (name_prefix, j + 1, i + 1))
            per_delta.append(SumUpProfile(pf2, cent_bin))
        rn = 0
        rn_un = 0
        mean_d1, mean_d1_un = per_delta[0][cent_bin]
        mean_d2, mean_d2_un = per_delta[1][cent_bin]
        mean_d3, mean_d3_un = per_delta[2][cent_bin]
        # Guard the whole sqrt argument, not only the denominator, so a
        # negative numerator cannot raise a math domain error (the original
        # checked only mean_d3 > 0).  When the guard fails, zeros are
        # printed, matching the original behavior.
        if mean_d3 > 0. and mean_d1 * mean_d2 > 0.:
            rn = math.sqrt((mean_d1 * mean_d2) / mean_d3)
            # NOTE(review): standard propagation for R = sqrt(AB/C) would
            # put a factor 0.25 (not 0.5) on each squared relative error;
            # the original's 0.5 is kept here -- confirm intended.
            rn_un = rn * math.sqrt(
                0.5 * math.pow(mean_d1_un / mean_d1, 2)
                + 0.5 * math.pow(mean_d2_un / mean_d2, 2)
                + 0.5 * math.pow(mean_d3_un / mean_d3, 2))
        print(result_fmt % (i + 1, rn, rn_un))


def CalcEventPlaneResolution():
    """Read AnalysisResults.root and print event-plane resolutions.

    Scans the file for AliEmcalList objects produced by the Pi0Cand wagons
    (one per centrality bin) and prints R_n for event planes 2, 3 and 4
    using the three-subevent method.

    Fixes: the triplicated per-harmonic loops are factored into
    _report_eprs; the failed-open check now uses TFile.IsZombie() (a ROOT
    TFile does not compare equal to 0 on failure) and returns early; the
    builtin-shadowing name ``iter`` and several unused variables and dead
    commented-out code were removed.
    """
    print("---------------------------------")
    print("| Starting Event Plane Res Calc.|")
    print("---------------------------------")
    input_file_name = "AnalysisResults.root"
    input_file = ROOT.TFile(input_file_name)
    if not input_file or input_file.IsZombie():
        print("Could not open file")
        return
    # Collect every AliEmcalList in the file, keyed by name.
    key_iter = input_file.GetListOfKeys().MakeIterator()
    key = key_iter.Next()
    dirs = {}
    while key:
        if key.GetClassName() == 'AliEmcalList':
            obj = key.ReadObj()
            dir_name = obj.GetName()
            print("found directory", dir_name)
            dirs[dir_name] = obj
        key = key_iter.Next()
    for dir_name in dirs:
        print(dir_name)
        # Only the Pi0Cand wagons hold the event-plane-resolution profiles.
        if 'Pi0Cand' not in dir_name:
            print("\tNot using this for EPR")
            continue
        # The centrality bin index is the digit right after 'Cent' in the name.
        cent_index = 4 + dir_name.find('Cent')
        cent_bin = int(dir_name[cent_index:cent_index + 1])
        print("Cent Bin %d" % cent_bin)
        local_dir = input_file.Get(dir_name)
        print("Finding EPRs for Event Plane 2")
        _report_eprs(local_dir, cent_bin, "EPR",
                     "Found R_{%d,2} = %f \\pm %f")
        print("Finding EPRs for Event Plane 3")
        _report_eprs(local_dir, cent_bin, "EP3R",
                     "Found EP3 R_{%d,3} = %f \\pm %f")
        print("Finding EPRs for Event Plane 4")
        _report_eprs(local_dir, cent_bin, "EP4R",
                     "Found EP4 R_{%d,4} = %f \\pm %f")
# Entry point: run the event-plane-resolution calculation when executed
# as a script (ROOT batch mode is enabled via sys.argv at the top of the file).
if __name__ == '__main__':
    CalcEventPlaneResolution()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.