hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
95421c336ab0c839a22267c8bc9f64c8852c6b1f | 1,299 | py | Python | Discord Bots/Boba's Utilities/cogs/invite/tracker.py | LUNA761/Code-Archive | c455ca1b4faa230fbbd86c4222c1589ddb0df964 | [
"MIT"
] | 1 | 2021-10-04T08:30:04.000Z | 2021-10-04T08:30:04.000Z | Discord Bots/Boba's Utilities/cogs/invite/tracker.py | LUNA761/Code-Archive | c455ca1b4faa230fbbd86c4222c1589ddb0df964 | [
"MIT"
] | null | null | null | Discord Bots/Boba's Utilities/cogs/invite/tracker.py | LUNA761/Code-Archive | c455ca1b4faa230fbbd86c4222c1589ddb0df964 | [
"MIT"
] | 2 | 2022-01-06T19:07:19.000Z | 2022-01-07T14:52:15.000Z | import discord, time, asyncio, os, random, json
from discord.ext import commands, tasks
from discord.ext.commands import has_permissions, cooldown, MissingPermissions, check
from discord.utils import get
from datetime import datetime, date
from termcolor import colored
class Utils(commands.Cog):
    """Cog that reports how many invite uses each guild member has generated.

    Counts are computed live from the guild's current invite list, so uses of
    deleted invites are not included.
    """

    def __init__(self, client):
        self.client = client
        print("Invites - Tracker "+colored('Running', 'green'))

    @commands.command()
    async def invites(self, ctx, usr : discord.Member = None):
        """Show the total invite uses for `usr` (defaults to the caller)."""
        # `is None` is the idiomatic identity check (was `usr == None`).
        if usr is None:
            user = ctx.author
        else:
            user = usr
        total_invites = 0
        # Sum the use counts of every guild invite created by this member.
        for i in await ctx.guild.invites():
            if i.inviter == user:
                total_invites += i.uses
        embed=discord.Embed(title=f"{user.name}'s Invites!", description="Read below:", color=0x00ffff)
        embed.add_field(name="Invites:", value=str(total_invites), inline=False)
        embed.set_footer(text="Boba's Utilities - Invites")
        await ctx.send(embed=embed)

    @invites.error
    async def invites_error(self, ctx, error):
        """Send a friendly message when the member converter fails."""
        if isinstance(error, commands.MemberNotFound):
            await ctx.send("That user could not be found!")
            return
def setup(client):
    # discord.py extension entry point: register this cog on the bot.
    client.add_cog(Utils(client))
| 35.108108 | 103 | 0.643572 |
cd15131b8a5bb2baa9ba35e5db131fe2570f0c4d | 637 | py | Python | script/echo-client.py | FedeVerstraeten/smn-lidar-controller | 7850fd48702d5f2e00d07b499812b3b2fb2b7676 | [
"MIT"
] | null | null | null | script/echo-client.py | FedeVerstraeten/smn-lidar-controller | 7850fd48702d5f2e00d07b499812b3b2fb2b7676 | [
"MIT"
] | 1 | 2021-10-05T03:53:55.000Z | 2021-10-05T03:53:55.000Z | script/echo-client.py | FedeVerstraeten/smnar-lidar-controller | 7850fd48702d5f2e00d07b499812b3b2fb2b7676 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
import socket
# Licel transient-recorder controller endpoint (TCP command port).
HOST = '10.49.234.234'
PORT = 2055
def command_to_licel(licelcommand):
  """Send a single command to the Licel controller and return the raw reply.

  The command is terminated with CRLF as the device expects; at most 1024
  bytes of the response are read.
  """
  reply = None
  with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
    conn.connect((HOST, PORT))
    print('Command to execute:',licelcommand)
    conn.sendall(bytes(licelcommand+'\r\n','utf-8'))
    reply = conn.recv(1024)
  print("Len:",len(reply),"type:",type(reply))
  return reply
if __name__ == '__main__':
  # CLI entry point: forward the first argument as a Licel command and
  # persist the repr() of the reply to a local file for inspection.
  if len(sys.argv) > 1:
    rsp=repr(command_to_licel(str(sys.argv[1])))
    print('Received',rsp)
    with open('outputlicel', 'w') as f:
      f.write(rsp)
  else:
    print('ERROR arguments needed')
| 23.592593 | 62 | 0.657771 |
c75a61a479c9427e8b10e7cf1290dbf3dc2c36f2 | 13,657 | py | Python | FMNIST/Deep_AE.py | Sungyeop/IPRL | 6ee17f415998ac5cc058c63cea06a5cad40b267c | [
"MIT"
] | 2 | 2021-02-08T05:34:45.000Z | 2021-03-02T08:36:06.000Z | FMNIST/Deep_AE.py | Sungyeop/IPRL | 6ee17f415998ac5cc058c63cea06a5cad40b267c | [
"MIT"
] | null | null | null | FMNIST/Deep_AE.py | Sungyeop/IPRL | 6ee17f415998ac5cc058c63cea06a5cad40b267c | [
"MIT"
] | null | null | null | import numpy as np
import copy
import torch
import torchvision
from torch import nn, optim
from torchvision import transforms, datasets
from scipy.spatial.distance import pdist, squareform
from scipy.special import expit
import matplotlib.pyplot as plt
# Training Options
#==============================================================================================================
EPOCH = 50             # number of training epochs
batch = 100            # mini-batch size
n1 = 256               # the number of nodes in the first hidden layer (E1)
n2 = 128               # the number of nodes in the second hidden layer (E2)
n3 = 50                # the number of nodes in bottleneck layer (Z)
lr = 0.005             # learning rate
view = 15              # the number of sample images
gamma = 2              # constant in kernel bandwidth
alpha = 1.01           # order of Renyi's alpha-entropy
time_int = 'Iteration' # Time interval of Information Plane : iteration
# time_int = 'Epoch'   # Time interval of Information Plane : epoch
epsilon = 10**(-8)     # divergence regulator (keeps log2 argument positive)
DEVICE = "cpu"
#==============================================================================================================
# Data Load
#==============================================================================================================
# Fashion-MNIST, downloaded on first run; tensors scaled to [0, 1].
trainset = datasets.FashionMNIST(root = './.data/', train = True, download = True, transform = transforms.ToTensor())
testset = datasets.FashionMNIST(root = './.data/', train = False, download = True, transform = transforms.ToTensor())
train_loader = torch.utils.data.DataLoader(dataset = trainset, batch_size=batch, shuffle = True, num_workers=0)
test_loader = torch.utils.data.DataLoader(dataset = testset, batch_size=batch, shuffle = True, num_workers=0)
#==============================================================================================================
class Deep_AE(nn.Module):
    """Six-layer fully connected autoencoder with sigmoid activations.

    Encoder: input_dim -> n1 -> n2 -> n3 (bottleneck Z)
    Decoder: n3 -> n2 -> n1 -> input_dim

    Args:
        n1: width of the first hidden layer (E1 / D2).
        n2: width of the second hidden layer (E2 / D1).
        n3: width of the bottleneck layer (Z).
        input_dim: flattened input size. Defaults to 28*28 (Fashion-MNIST),
            so existing callers are unaffected; other image sizes can now be
            used without editing the class.
    """

    def __init__(self, n1, n2, n3, input_dim=28*28):
        super(Deep_AE, self).__init__()
        self.input_dim = input_dim
        self.fc1 = nn.Linear(input_dim, n1)
        self.fc2 = nn.Linear(n1, n2)
        self.fc3 = nn.Linear(n2, n3)
        self.fc4 = nn.Linear(n3, n2)
        self.fc5 = nn.Linear(n2, n1)
        self.fc6 = nn.Linear(n1, input_dim)

    def forward(self, x):
        """Encode then decode `x`; returns the reconstruction in [0, 1]."""
        X = x.view(-1, self.input_dim)
        E1 = torch.sigmoid(self.fc1(X))
        E2 = torch.sigmoid(self.fc2(E1))
        Z = torch.sigmoid(self.fc3(E2))
        D1 = torch.sigmoid(self.fc4(Z))
        D2 = torch.sigmoid(self.fc5(D1))
        Y = torch.sigmoid(self.fc6(D2))
        return Y
# Model, optimiser and reconstruction loss are created once at import time
# and shared by train()/main() below.
autoencoder = Deep_AE(n1,n2,n3).to(DEVICE)
optimizer = torch.optim.Adam(autoencoder.parameters(), lr = lr)
MSE = nn.MSELoss()
def train(autoencoder, train_loader, history_W1, history_b1, history_W2, history_b2, history_W3, history_b3, \
          history_W4, history_b4, history_W5, history_b5, history_W6, history_b6, history_trainloss, history_testloss):
    """Run one epoch, snapshotting all layer weights/biases and losses.

    At every mini-batch the current parameters of all six linear layers are
    copied (as detached NumPy arrays) into the supplied history lists, along
    with the train-batch MSE and full-test-set MSE.  The lists are mutated
    in place and also returned, so IP() can later estimate mutual
    information from the snapshots.

    NOTE(review): relies on the module-level `optimizer`, `MSE`, `testset`
    and `DEVICE`; evaluating the whole test set every iteration is costly.
    """
    autoencoder.train()
    for step, (x,label) in enumerate(train_loader):
        x = x.view(-1,28*28).to(DEVICE)
        y = x.view(-1,28*28).to(DEVICE)  # autoencoder target = the input itself
        label = label.to(DEVICE)
        Y = autoencoder(x)
        # Snapshot the current parameters as detached NumPy arrays.
        W1 = autoencoder.fc1.weight.data.detach().numpy()
        b1 = autoencoder.fc1.bias.data.detach().numpy()
        W2 = autoencoder.fc2.weight.data.detach().numpy()
        b2 = autoencoder.fc2.bias.data.detach().numpy()
        W3 = autoencoder.fc3.weight.data.detach().numpy()
        b3 = autoencoder.fc3.bias.data.detach().numpy()
        W4 = autoencoder.fc4.weight.data.detach().numpy()
        b4 = autoencoder.fc4.bias.data.detach().numpy()
        W5 = autoencoder.fc5.weight.data.detach().numpy()
        b5 = autoencoder.fc5.bias.data.detach().numpy()
        W6 = autoencoder.fc6.weight.data.detach().numpy()
        b6 = autoencoder.fc6.bias.data.detach().numpy()
        # deepcopy: the tensors above are views into live parameters that
        # the optimiser will keep updating.
        history_W1.append(copy.deepcopy(W1))
        history_b1.append(copy.deepcopy(b1))
        history_W2.append(copy.deepcopy(W2))
        history_b2.append(copy.deepcopy(b2))
        history_W3.append(copy.deepcopy(W3))
        history_b3.append(copy.deepcopy(b3))
        history_W4.append(copy.deepcopy(W4))
        history_b4.append(copy.deepcopy(b4))
        history_W5.append(copy.deepcopy(W5))
        history_b5.append(copy.deepcopy(b5))
        history_W6.append(copy.deepcopy(W6))
        history_b6.append(copy.deepcopy(b6))
        trainloss = MSE(Y, y)
        history_trainloss.append(trainloss.detach().numpy())
        # Evaluate reconstruction loss on the full test set.
        test_data = testset.data.view(-1,784).type(torch.FloatTensor)/255.
        output = autoencoder(test_data)
        testloss = MSE(output, test_data)
        history_testloss.append(testloss.detach().numpy())
        optimizer.zero_grad()
        trainloss.backward()
        optimizer.step()
    return (history_W1, history_b1, history_W2, history_b2, history_W3, history_b3, \
            history_W4, history_b4, history_W5, history_b5, history_W6, history_b6, history_trainloss, history_testloss)
def sigmoid(x):
    """Numerically stable logistic sigmoid (delegates to scipy's expit)."""
    return expit(x)
def Ent(X,gamma,alpha):
N = np.size(X,0)
d = np.size(X,1)
sigma = gamma*N**(-1/(4+d))
X_norm = X
pairwise_dist = squareform(pdist(X_norm, 'euclidean'))
K = np.exp(-pairwise_dist**2/(2*sigma**2))
A = 1/N*K
_, eigenv, _ = np.linalg.svd(A)
S = 1/(1-alpha)*np.log2(np.sum(eigenv**alpha)+epsilon).real
return A, S
def MI(X, Y, gamma, alpha):
    """Matrix-based Renyi mutual information between sample matrices X and Y.

    Returns (I, S_joint) where I = S_X + S_Y - S_XY, with the joint entropy
    computed from the trace-normalised Hadamard product of the two kernels.
    """
    A_X, S_X = Ent(X, gamma, alpha)
    A_Y, S_Y = Ent(Y, gamma, alpha)
    joint_kernel = A_X * A_Y / np.trace(A_X * A_Y)
    _, spectrum, _ = np.linalg.svd(joint_kernel)
    S_joint = np.log2(np.sum(spectrum ** alpha) + epsilon).real / (1 - alpha)
    return S_X + S_Y - S_joint, S_joint
def encoder(test, W1, b1, W2, b2, W3, b3):
    """Forward pass through the three encoder layers using NumPy weights.

    Equivalent to sigmoid(x @ W.T + b) at each layer; returns (E1, E2, Z).
    """
    E1 = sigmoid(test @ W1.T + b1)
    E2 = sigmoid(E1 @ W2.T + b2)
    Z = sigmoid(E2 @ W3.T + b3)
    return E1, E2, Z
def decoder(Z, W4, b4, W5, b5, W6, b6):
    """Forward pass through the three decoder layers using NumPy weights.

    Equivalent to sigmoid(x @ W.T + b) at each layer; returns (D1, D2, output).
    """
    D1 = sigmoid(Z @ W4.T + b4)
    D2 = sigmoid(D1 @ W5.T + b5)
    output = sigmoid(D2 @ W6.T + b6)
    return D1, D2, output
def IP(history_W1, history_b1, history_W2, history_b2, history_W3, history_b3, history_W4, history_b4,
       history_W5, history_b5, history_W6, history_b6, history_trainloss, history_testloss):
    """Estimate layer-wise mutual information over training and plot it.

    For each recorded parameter snapshot, runs the first `batch` test images
    through NumPy copies of the encoder/decoder and estimates I(X;T) and
    I(T;X') for every hidden layer T, then draws:
      1) the Information Plane (one panel per layer),
      2) data-processing-inequality curves for encoder and decoder, and
      3) the train/test loss curves.

    Relies on module-level `time_int`, `EPOCH`, `batch`, `gamma`, `alpha`
    and `testset`. Blocks on plt.show().
    """
    # Choose how many snapshots to evaluate and their indices.
    if time_int == 'Epoch':
        step = EPOCH
        ind = np.linspace(0,len(history_trainloss)*(1-1/step),step)
    elif time_int == 'Iteration':
        jump = 1
        # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int is the correct replacement.
        step = int(len(history_trainloss)/jump)
        ind = np.linspace(0,len(history_trainloss)-jump,step)

    # Per-snapshot mutual-information containers, one slot per sampled step.
    I_XE1_cont = np.zeros((step,))
    I_E1Y_cont = np.zeros((step,))
    I_XE2_cont = np.zeros((step,))
    I_E2Y_cont = np.zeros((step,))
    I_XZ_cont = np.zeros((step,))
    I_ZY_cont = np.zeros((step,))
    I_XD1_cont = np.zeros((step,))
    I_D1Y_cont = np.zeros((step,))
    I_XD2_cont = np.zeros((step,))
    I_D2Y_cont = np.zeros((step,))
    train_E_cont = np.zeros((step,))
    test_E_cont = np.zeros((step,))

    FMNIST_test = testset.data.view(-1,28*28).type(torch.FloatTensor)/255.
    FMNIST_test = FMNIST_test.detach().numpy()

    for j in range(step):
        i = int(ind[j])  # was np.int (removed in NumPy 1.24)
        # Reload the snapshot; biases are reshaped to row vectors so they
        # broadcast in encoder()/decoder().
        W1 = history_W1[i]
        b1 = history_b1[i]
        b1 = np.reshape(b1, (1,len(b1)))
        W2 = history_W2[i]
        b2 = history_b2[i]
        b2 = np.reshape(b2, (1,len(b2)))
        W3 = history_W3[i]
        b3 = history_b3[i]
        b3 = np.reshape(b3, (1,len(b3)))
        W4 = history_W4[i]
        b4 = history_b4[i]
        b4 = np.reshape(b4, (1,len(b4)))
        W5 = history_W5[i]
        b5 = history_b5[i]
        b5 = np.reshape(b5, (1,len(b5)))
        W6 = history_W6[i]
        b6 = history_b6[i]
        b6 = np.reshape(b6, (1,len(b6)))
        train_E = history_trainloss[i]
        test_E = history_testloss[i]
        # Mutual information is estimated on one test mini-batch.
        X = FMNIST_test[:batch,:]
        E1, E2, Z = encoder(X, W1, b1, W2, b2, W3, b3)
        D1, D2, Y = decoder(Z, W4, b4, W5, b5, W6, b6)
        I_XE1, H_XE1 = MI(X,E1,gamma,alpha)
        I_E1Y, H_E1Y = MI(E1,Y,gamma,alpha)
        I_XE2, H_XE2 = MI(X,E2,gamma,alpha)
        I_E2Y, H_E2Y = MI(E2,Y,gamma,alpha)
        I_XZ, H_XZ = MI(X,Z,gamma,alpha)
        I_ZY, H_ZY = MI(Z,Y,gamma,alpha)
        I_XD1, H_XD1 = MI(X,D1,gamma,alpha)
        I_D1Y, H_D1Y = MI(D1,Y,gamma,alpha)
        I_XD2, H_XD2 = MI(X,D2,gamma,alpha)
        I_D2Y, H_D2Y = MI(D2,Y,gamma,alpha)
        I_XE1_cont[j] = I_XE1
        I_E1Y_cont[j] = I_E1Y
        I_XE2_cont[j] = I_XE2
        I_E2Y_cont[j] = I_E2Y
        I_XZ_cont[j] = I_XZ
        I_ZY_cont[j] = I_ZY
        I_XD1_cont[j] = I_XD1
        I_D1Y_cont[j] = I_D1Y
        I_XD2_cont[j] = I_XD2
        I_D2Y_cont[j] = I_D2Y
        train_E_cont[j] = train_E
        test_E_cont[j] = test_E

    # Information plane: one scatter panel per layer, coloured by time.
    D = 7
    size = 7
    xx = np.linspace(0,D,500)
    yy = np.linspace(0,D,500)
    num = np.linspace(0,step,step)
    fig = plt.figure(figsize=(12,8))
    suptitle = fig.suptitle('Information Plane of Deep Autoencoder', y=1.01, fontsize='20')
    ax1 = plt.subplot(2,3,1)
    plt.plot(xx, yy, 'k--')
    im = plt.scatter(I_XE1_cont, I_E1Y_cont, c=num, cmap='rainbow', label = 'E1', s=size)
    plt.ylabel(r"$I(T;X')$", fontsize=13)
    ax1.axes.get_xaxis().set_ticks([])
    plt.legend(fontsize='15')
    ax2 = plt.subplot(2,3,2)
    plt.plot(xx, yy, 'k--')
    plt.scatter(I_XE2_cont, I_E2Y_cont, c=num, cmap='rainbow', label = 'E2', s=size)
    ax2.axes.get_xaxis().set_ticks([])
    ax2.axes.get_yaxis().set_ticks([])
    plt.legend(fontsize='15')
    ax3 = plt.subplot(2,3,3)
    plt.plot(xx, yy, 'k--')
    plt.scatter(I_XZ_cont, I_ZY_cont, c=num, cmap='rainbow', label = 'Z', s=size)
    ax3.axes.get_xaxis().set_ticks([])
    ax3.axes.get_yaxis().set_ticks([])
    plt.legend(fontsize='15')
    ax4 = plt.subplot(2,3,4)
    plt.plot(xx, yy, 'k--')
    plt.scatter(I_XZ_cont, I_ZY_cont, c=num, cmap='rainbow', label = 'Z', s=size)
    plt.xlabel(r'$I(X;T)$', fontsize=13)
    plt.ylabel(r"$I(T;X')$", fontsize=13)
    plt.legend(fontsize='15')
    ax5 = plt.subplot(2,3,5)
    plt.plot(xx, yy, 'k--')
    plt.scatter(I_XD1_cont, I_D1Y_cont, c=num, cmap='rainbow', label = 'D1', s=size)
    plt.xlabel(r'$I(X;T)$', fontsize=13)
    ax5.axes.get_yaxis().set_ticks([])
    plt.legend(fontsize='15')
    ax6 = plt.subplot(2,3,6)
    plt.plot(xx, yy, 'k--')
    plt.scatter(I_XD2_cont, I_D2Y_cont, c=num, cmap='rainbow', label = 'D2', s=size)
    plt.xlabel(r'$I(X;T)$', fontsize=13)
    ax6.axes.get_yaxis().set_ticks([])
    plt.legend(fontsize='15')
    plt.tight_layout()
    # Shared colorbar showing training progress.
    b_ax = fig.add_axes([1.02, 0.15, 0.02, 0.7])
    bar = fig.colorbar(im, cax=b_ax)
    bar.set_label('{}'.format(time_int))
    plt.show()

    # DPI curves and train/test loss.
    fig = plt.figure(figsize=(12,4))
    ax1 = plt.subplot(1,3,1)
    plt.plot(I_XE1_cont, label = r'$I(X;E_1)$')
    plt.plot(I_XE2_cont, label = r'$I(X;E_2)$')
    plt.plot(I_XZ_cont, label = 'I(X;Z)')
    plt.xlabel('{}'.format(time_int))
    plt.title('DPI of Encoder', fontsize=15)
    plt.legend()
    ax2 = plt.subplot(1,3,2)
    plt.plot(I_D2Y_cont, label = r'$I(D_2;Y)$')
    plt.plot(I_D1Y_cont, label = r'$I(D_1;Y)$')
    plt.plot(I_ZY_cont, label = 'I(Z;Y)')
    plt.xlabel('{}'.format(time_int))
    plt.title('DPI of Decoder', fontsize=15)
    plt.legend()
    ax3 = plt.subplot(1,3,3)
    plt.plot(np.log10(train_E_cont), label='Train')
    plt.plot(np.log10(test_E_cont), label='Test')
    plt.ylabel('log(Loss)')
    plt.xlabel('{}'.format(time_int))
    plt.title('Train/Test Loss', fontsize=15)
    plt.legend()
    plt.tight_layout()
    plt.show()
def main():
    """Train the autoencoder for EPOCH epochs, show sample reconstructions
    after the final epoch, then estimate and plot the Information Plane.

    Uses the module-level `autoencoder`, `train_loader`, `trainset`,
    `EPOCH`, `view` and `DEVICE`.
    """
    # Per-iteration snapshot containers filled in by train().
    history_W1 = []
    history_b1 = []
    history_W2 = []
    history_b2 = []
    history_W3 = []
    history_b3 = []
    history_W4 = []
    history_b4 = []
    history_W5 = []
    history_b5 = []
    history_W6 = []
    history_b6 = []
    history_trainloss = []
    history_testloss = []
    # A fixed set of training images used to visualise reconstructions.
    sample_data = trainset.data[:view].view(-1,28*28)
    sample_data = sample_data.type(torch.FloatTensor)/255.
    print('Training Starts!')
    for epoch in range(1,EPOCH+1):
        history_W1, history_b1, history_W2, history_b2, history_W3, history_b3, \
        history_W4, history_b4, history_W5, history_b5, history_W6, history_b6, history_trainloss, history_testloss = \
        train(autoencoder, train_loader, history_W1, history_b1, history_W2, history_b2, history_W3, history_b3, \
        history_W4, history_b4, history_W5, history_b5, history_W6, history_b6, history_trainloss, history_testloss)
        sample_x = sample_data.to(DEVICE)
        sample_y = autoencoder(sample_x)
        # After the last epoch, show originals (top row) vs reconstructions.
        if epoch == EPOCH:
            f,a = plt.subplots(2,view,figsize=(view,2))
            for i in range(view):
                img = np.reshape(sample_x.data.numpy()[i], (28,28))
                a[0][i].imshow(img, cmap='gray')
                a[0][i].set_xticks(()); a[0][i].set_yticks(())
            for i in range(view):
                img = np.reshape(sample_y.data.numpy()[i],(28,28))
                a[1][i].imshow(img, cmap='gray')
                a[1][i].set_xticks(()); a[1][i].set_yticks(())
            plt.show()
    print('Training Ends!')
    print('Estimating Mutual Information...')
    IP(history_W1, history_b1, history_W2, history_b2, history_W3, history_b3, history_W4, history_b4,\
       history_W5, history_b5, history_W6, history_b6, history_trainloss, history_testloss)
| 35.107969 | 122 | 0.580435 |
e86df1682ec167f63feee9194ecd88986182e864 | 1,798 | py | Python | python/vaccine_notifier.py | abishekvashok/vaccine-checker | 1c193db0ff9c13bfc30ffc89ef5ea5661f7b504a | [
"MIT"
] | 9 | 2021-06-03T10:02:49.000Z | 2021-06-16T16:17:51.000Z | python/vaccine_notifier.py | abishekvashok/vaccine-checker | 1c193db0ff9c13bfc30ffc89ef5ea5661f7b504a | [
"MIT"
] | null | null | null | python/vaccine_notifier.py | abishekvashok/vaccine-checker | 1c193db0ff9c13bfc30ffc89ef5ea5661f7b504a | [
"MIT"
] | null | null | null | import time
import requests
from playsound import playsound
# Interactive setup: ask for search mode, location, date and age.
print("Select mode: 1 - Pincode, 2 - District: ",end="")
mode = int(input())
pincode = ""
district = ""
district_code = ""
if(mode == 1):
    print("Input Pincode: ",end="")
    pincode = input()
elif(mode == 2):
    print("Enter District Name: ",end="")
    district = input()
    # Todo district mapping — district_code is currently hard-coded,
    # the entered district name is not actually used yet.
    district_code = "307"
else:
    # NOTE(review): an invalid choice only prints a message; the script
    # still falls through and keeps polling with empty parameters.
    print("Invalid choice!")
print("Enter date in DD-MM-YYYY format: ",end="")
date = input()
print("Enter Age: ",end="")
age = int(input())
# CoWIN public API endpoints: [0] = by district, [1] = by pincode.
urls = [
    "https://cdn-api.co-vin.in/api/v2/appointment/sessions/public/calendarByDistrict?district_id="+district_code+"&date="+date,
    "https://cdn-api.co-vin.in/api/v2/appointment/sessions/public/findByPin?pincode="+pincode+"&date="+date
]
# Browser-like headers; the CoWIN API rejects requests without them.
header = {
    "Accept": "application/json",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
    "Origin": "https://www.cowin.gov.in",
    "Sec-Fetch-Site": "cross-site",
    "Sec-Fetch-Mode": "cors",
    "Sec-Fetch-Dest": "empty",
    "Referer": "https://www.cowin.gov.in/",
    "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8"
}
# (typo "noitfy" is part of the user-visible string, left unchanged here)
print("Checking for open slots. Will noitfy when available...")
def makeRequest():
    """Query the CoWIN district-calendar endpoint and scan every centre's
    sessions for an open slot (uses module-level `urls` and `header`)."""
    resp = requests.get(urls[0], headers=header)
    for center in resp.json()["centers"]:
        checkEachSession(center["sessions"])
def checkEachSession(sessions):
    """Play an audio alert for every session that admits this age group
    and has capacity remaining (uses the module-level `age`)."""
    for session in sessions:
        if session["min_age_limit"] <= age and session["available_capacity"] > 0:
            print("We have a vaccine slot available!")
            playsound('./alert.mp3')
# Poll forever, every 5 seconds; the audio alert fires from
# checkEachSession() when a matching slot opens up.
while(True):
    makeRequest()
time.sleep(5) | 29.966667 | 135 | 0.63515 |
004d725f028f98d03fdbf9fe94c3ca5287b621e5 | 4,975 | py | Python | tests/test_webapp.py | rjw57/bdfu | 386d800738e6943ed9063f1bf904ece86410c7c7 | [
"MIT"
] | null | null | null | tests/test_webapp.py | rjw57/bdfu | 386d800738e6943ed9063f1bf904ece86410c7c7 | [
"MIT"
] | null | null | null | tests/test_webapp.py | rjw57/bdfu | 386d800738e6943ed9063f1bf904ece86410c7c7 | [
"MIT"
] | null | null | null | """
Basic functionality tests for web application.
"""
import uuid
from io import BytesIO
from flask import current_app
from flask.ext.testing import TestCase
import pytest
from mock import patch
from bdfu.auth import _jwt_token
from bdfu.webapp import app
def jwt_headers(*args, **kwargs):
    """Like _jwt_token() but return a dict of headers containing an HTTP
    Authorization bearer token using the JWT.
    """
    token = _jwt_token(*args, **kwargs).decode('ascii')
    return {'Authorization': 'Bearer ' + token}
def jwt_payload(user=None):
    """Create a minimally valid JWT payload. If user is None, use "testuser"."""
    return {'user': 'testuser' if user is None else user}
class WebAppTestCase(TestCase):
    """Functional tests for the bdfu upload endpoint."""

    def create_app(self):
        """Configure the Flask app with a fresh random JWT secret per test."""
        # Create and record a secret key
        self.secret = uuid.uuid4().hex
        # Configure application
        app.config['JWT_SECRET_KEY'] = self.secret
        app.debug = True
        return app

    def test_have_secret_key(self):
        """We have a key and that key is a string."""
        assert isinstance(current_app.config['JWT_SECRET_KEY'], str)

    def test_get_not_allowed(self):
        """GET-ing .../upload returns Method Not Allowed (405)."""
        assert self.client.get('/upload').status_code == 405

    def test_empty_post_fails(self):
        """POST-ing without JWT is Unauthorized (401)."""
        assert self.client.post('/upload').status_code == 401

    # BUG FIX: this method was previously also named test_empty_post_fails,
    # which silently shadowed the 401 test above so it never ran.
    def test_bad_jwt_post_fails(self):
        """POST-ing with incorrect JWT is a Bad Request (400)."""
        auth_headers = jwt_headers(jwt_payload(), 'this is not the secret')
        assert self.client.post('/upload', headers=auth_headers).status_code == 400

    def test_poorly_configured_app_fails(self):
        """POST-ing with JWT authentication but no secret raises an Exception in debug and
        is an Internal Server Error (500) in production.
        """
        # Check we have a secret at the moment
        assert self.secret is not None
        assert current_app.config['JWT_SECRET_KEY'] is not None
        # Remove secret
        current_app.config['JWT_SECRET_KEY'] = None
        # in debug...
        current_app.debug = True
        with pytest.raises(Exception):
            self.client.post(
                '/upload', headers=jwt_headers(jwt_payload(), self.secret)
            )
        # in production...
        current_app.debug = False
        assert self.client.post(
            '/upload', headers=jwt_headers(jwt_payload(), self.secret)
        ).status_code == 500

    def test_empty_authorised_post_fails(self):
        """POST-ing an authorised empty body is a Bad Request (400)."""
        auth_headers = jwt_headers(jwt_payload(), self.secret)
        assert self.client.post('/upload', headers=auth_headers).status_code == 400

    def test_authorised_post_needs_user(self):
        """POST-ing an authorized non-empty body with file results in a Bad
        Request (400) if the user is not present.
        """
        pl = jwt_payload()
        del pl['user']
        auth_headers = jwt_headers(pl, self.secret)
        # Create a random file contents
        file_contents = BytesIO(uuid.uuid4().bytes)
        resp = self.client.post(
            '/upload', headers=auth_headers,
            data=dict(file=(file_contents, 'test_file.bin')),
        )
        assert resp.status_code == 400

    @patch('bdfu.webapp._get_storage')
    def test_authorised_post_succeeds(self, gs_mock):
        """POST-ing an authorized non-empty body with file results in a file
        being Created (201).
        """
        auth_headers = jwt_headers(jwt_payload(user='myuser'), self.secret)
        # Create a random file contents
        file_contents = uuid.uuid4().bytes
        # Mock the storage's write method to record it's call values
        new_id = uuid.uuid4().hex
        stored_state = {}
        def side_effect(username, fobj):
            # HACK: we need to use str() here as a "copy" since the user is
            # passed as current_user which is only a proxy object for the real
            # username. Without the call to str(), the username would be
            # "None" by the time we check it.
            stored_state['username'] = str(username)
            stored_state['contents'] = fobj.read()
            return new_id
        gs_mock().write.side_effect = side_effect
        # Upload a file
        resp = self.client.post(
            '/upload', headers=auth_headers,
            data=dict(file=(BytesIO(file_contents), 'test_file.bin')),
        )
        # Check write was called exactly once
        assert gs_mock().write.call_count == 1
        # Check that the storage was passed the right values
        assert stored_state['username'] == 'myuser'
        assert stored_state['contents'] == file_contents
        # Check that we Created the correct file
        assert resp.status_code == 201
        assert resp.json['id'] == new_id
| 34.075342 | 90 | 0.633568 |
e14f1c2bff366a435f187df35d82e1b511ac0486 | 5,883 | py | Python | locations/spiders/calvin_klein.py | cmecklenborg/alltheplaces | e62b59fb0071b6e289c4622d368fdb203a28347e | [
"MIT"
] | null | null | null | locations/spiders/calvin_klein.py | cmecklenborg/alltheplaces | e62b59fb0071b6e289c4622d368fdb203a28347e | [
"MIT"
] | null | null | null | locations/spiders/calvin_klein.py | cmecklenborg/alltheplaces | e62b59fb0071b6e289c4622d368fdb203a28347e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import re
import scrapy
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
from scrapy.selector import Selector
class CalvinKleinSpider(scrapy.Spider):
    """Scrapes Calvin Klein store locations from three regional sources:

    * US/Canada: gotwww.com store-locator pages, one request per state,
      parsed out of the inline Leaflet-marker JavaScript.
    * Europe/MEA: the calvinklein.co.uk storelocator JSON API, one request
      per geo-node code.
    * Australia: the calvinklein.com.au dataAjax JSON endpoint.
    """

    name = "calvin_klein"
    item_attributes = {"brand": "Calvin Klein"}
    allowed_domains = ["gotwww.com", "calvinklein.com"]

    def start_requests(self):
        """Seed one request per US/CA state, per European geo node, and AU."""
        states = [
            "Alabama",
            "Arizona",
            "California",
            "Colorado",
            "Connecticut",
            "Delaware",
            "Florida",
            "Georgia",
            "Hawaii",
            "Illinois",
            "Indiana",
            "Kentucky",
            "Louisiana",
            "Maine",
            "Maryland",
            "Massachusetts",
            "Michigan",
            "Minnesota",
            "Mississippi",
            "Missouri",
            "Montana",
            "Nevada",
            "New Hampshire",
            "New Jersey",
            "New York",
            "Ohio",
            "Oregon",
            "Pennsylvania",
            "South Carolina",
            "Tennessee",
            "Texas",
            "Utah",
            "Virginia",
            "Washington",
            "Wisconsin",
            "Alberta",
            "British Columbia",
            "Manitoba",
            "Ontario",
            "Quebec",
        ]
        s_url = (
            "https://secure.gotwww.com/gotlocations.com/ck.com/ckna.php?address={state}"
        )
        for state in states:
            url = s_url.format(state=state)
            yield scrapy.Request(url=url, callback=self.parse_us_ca)

        ## UK, FR/LU, DE, BE/NL, ES, TR, RS, XK, AM, BY, GE, UH, LB, EG, IL, JO, RU, ZA
        ## Europe and MEA+
        europe = [
            "715837896",
            "715837898",
            "715837895",
            "715837892",
            "715837887",
            "715837894",
            "715837933",
            "715837936",
            "715837922",
            "715837917",
            "715837938",
            "715837908",
            "715837888",
            "715837900",
            "715837930",
            "715837912",
            "715837886",
            "715837934",
        ]
        e_url = "https://www.calvinklein.co.uk/wcs/resources/store/20027/storelocator/byGeoNode/{code}"
        for code in europe:
            url = e_url.format(code=code)
            yield scrapy.Request(url=url, callback=self.parse_europe)

        ## Australia
        yield scrapy.Request(
            url="https://www.calvinklein.com.au/stores/index/dataAjax/?_=1573849948857",
            callback=self.parse_au,
        )

    def parse_au(self, response):
        """Yield one item per Australian store from the JSON feed.

        NOTE(review): the short keys appear to be i=id, a=address parts,
        l/g=lat/lon, p=phone — inferred from usage, confirm against the feed.
        """
        data = response.json()
        for store in data:
            properties = {
                "ref": store["i"],
                "name": "Calvin Klein",
                "addr_full": store["a"][0],
                "city": store["a"][1],
                "state": store["a"][2],
                "postcode": store["a"][3],
                "country": "Australia",
                "lat": store["l"],
                "lon": store["g"],
                "phone": store["p"],
            }
            yield GeojsonPointItem(**properties)

    def parse_europe(self, response):
        """Yield one item per store from the Europe/MEA storelocator API."""
        data = response.json()
        for store in data["PhysicalStore"]:
            # Some stores omit a state/province; fall back to a region label.
            # (Was a bare `except:`; narrowed to the expected failure.)
            try:
                state = store["stateOrProvinceName"]
            except KeyError:
                state = "Europe"
            # Postal code may be missing or null.
            try:
                postal = store["postalCode"].strip()
            except (KeyError, AttributeError):
                postal = ""
            properties = {
                "ref": store["storeName"],
                "name": "Calvin Klein",
                "addr_full": store["addressLine"][0],
                "city": store["city"],
                "state": state,
                "postcode": postal,
                "country": store["country"],
                "lat": store["latitude"],
                "lon": store["longitude"],
            }
            yield GeojsonPointItem(**properties)

    def parse_us_ca(self, response):
        """Parse Leaflet markers embedded in the gotwww locator page script."""
        data = response.xpath('//script[contains(.,"map")]/text()').extract_first()
        places = re.findall("L.marker(.*)", data)
        for place in places:
            coordinates = re.search(r"\[(.*)\]", place).groups()[0]
            coords = coordinates.split(",")
            html = re.search(r"bindPopup\('(.*)'\);", place).groups()[0]
            info = (
                Selector(text=html)
                .xpath('//*[@class="map_text"]/table/tr[7]/td/text()')
                .getall()
            )
            # The popup table has 4 or 5 text rows depending on the listing.
            # NOTE(review): if len(info) is anything else, `country` below is
            # undefined (same behavior as the original code) — worth guarding.
            if len(info) == 5:
                country = info[3]
                phone = info[4]
                city = re.search(r"^(.*),", info[2]).groups()[0]
                state = re.search(r",\s([A-Z]{2})", info[2]).groups()[0]
                postal = re.search(r"[A-Z]{2}\s(.*)", info[2]).groups()[0]
            elif len(info) == 4:
                country = info[2]
                phone = info[3]
                city = re.search(r"^(.*),", info[1]).groups()[0]
                state = re.search(r",\s([A-Z]{2})", info[1]).groups()[0]
                postal = re.search(r"[A-Z]{2}\s(.*)", info[1]).groups()[0]
            ## Other countries show up in some searches
            # (Was `if country not in [...]: pass / else:`; inverted to the
            # positive membership test — identical behavior, clearer intent.)
            if country in ["United States", "Canada"]:
                properties = {
                    "ref": info[0],
                    "name": "Calvin Klein",
                    "addr_full": info[0],
                    "city": city,
                    "state": state,
                    "postcode": postal,
                    "country": country,
                    "lat": coords[0],
                    "lon": coords[1],
                    "phone": phone,
                }
                yield GeojsonPointItem(**properties)
| 30.324742 | 103 | 0.432942 |
ff4b7248e63cbc0bcfc0cf9fae33b3943466ff79 | 2,207 | py | Python | haas_lib_bundles/python/libraries/uln2003/uln2003.py | wstong999/AliOS-Things | 6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9 | [
"Apache-2.0"
] | null | null | null | haas_lib_bundles/python/libraries/uln2003/uln2003.py | wstong999/AliOS-Things | 6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9 | [
"Apache-2.0"
] | null | null | null | haas_lib_bundles/python/libraries/uln2003/uln2003.py | wstong999/AliOS-Things | 6554769cb5b797e28a30a4aa89b3f4cb2ef2f5d9 | [
"Apache-2.0"
] | null | null | null | """
Copyright (C) 2015-2021 Alibaba Group Holding Limited
MicroPython's driver for ULN2003
Author: HaaS
Date: 2022/03/15
"""
from driver import GPIO
from utime import sleep_ms
from micropython import const
import math
class ULN2003(object):
    """Driver for a unipolar stepper motor behind a ULN2003 Darlington array.

    Drives the four coil inputs (A, A', B, B') with a wave-drive sequence:
    exactly one coil energised per step.
    """

    # Wave-drive excitation patterns, one 4-tuple (A, A', B, B') per step,
    # in clockwise order.
    _SEQUENCE = (
        (1, 0, 0, 0),
        (0, 1, 0, 0),
        (0, 0, 1, 0),
        (0, 0, 0, 1),
    )

    def __init__(self, a, a_, b, b_):
        """Bind the four GPIO outputs driving coils A, A', B and B'.

        Raises:
            ValueError: if any argument is not a GPIO object.
        """
        if not isinstance(a, GPIO):
            raise ValueError("parameter is not an GPIO object")
        if not isinstance(a_, GPIO):
            raise ValueError("parameter is not an GPIO object")
        if not isinstance(b, GPIO):
            raise ValueError("parameter is not an GPIO object")
        if not isinstance(b_, GPIO):
            raise ValueError("parameter is not an GPIO object")
        # make ULN2003's internal object points to gpio
        self._a = a
        self._a_ = a_
        self._b = b
        self._b_ = b_

    def _apply(self, pattern):
        """Write one (A, A', B, B') 0/1 pattern to the coil outputs."""
        va, va_, vb, vb_ = pattern
        self._a.write(va)
        self._a_.write(va_)
        self._b.write(vb)
        self._b_.write(vb_)

    def motorCw(self, speed=4):
        """Step one full wave-drive cycle clockwise.

        Args:
            speed: delay in milliseconds between steps (smaller = faster).
        """
        for pattern in self._SEQUENCE:
            self._apply(pattern)
            sleep_ms(speed)

    def motorCcw(self, speed=4):
        """Step one full wave-drive cycle counter-clockwise.

        Args:
            speed: delay in milliseconds between steps (smaller = faster).
        """
        for pattern in self._SEQUENCE[::-1]:
            self._apply(pattern)
            sleep_ms(speed)

    def motorStop(self):
        """De-energise all four coils."""
        self._apply((0, 0, 0, 0))
| 23.231579 | 63 | 0.550974 |
618998507d02efc1d9a11f2200dfdca4c11ecb18 | 3,617 | py | Python | curie/vsan_node_util.py | mike0615/curie | e25691f465c23cf53c39be157fcfa2eea4978b26 | [
"MIT"
] | 4 | 2019-02-26T05:18:13.000Z | 2020-07-15T00:34:41.000Z | curie/vsan_node_util.py | nutanix/curie | e25691f465c23cf53c39be157fcfa2eea4978b26 | [
"MIT"
] | 3 | 2021-03-31T18:55:50.000Z | 2021-04-20T17:13:31.000Z | curie/vsan_node_util.py | mike0615/curie | e25691f465c23cf53c39be157fcfa2eea4978b26 | [
"MIT"
] | 2 | 2020-01-09T02:24:00.000Z | 2020-11-04T23:09:02.000Z | #
# Copyright (c) 2015 Nutanix Inc. All rights reserved.
#
# VSAN Specific node utilities.
#
# VSAN reference:
# http://www.vmware.com/files/pdf/products/vsan/VSAN-Troubleshooting-Reference-Manual.pdf
import logging
from pyVmomi import vmodl
from curie.node_util import NodeUtil
from curie.vsphere_vcenter import VsphereVcenter
# Module-level logger for VSAN node utilities.
log = logging.getLogger(__name__)
class VsanNodeUtil(NodeUtil):
  """NodeUtil implementation for nodes in VSAN-backed vSphere clusters."""

  @classmethod
  def _use_handler(cls, node):
    """Returns True if 'node' should use this class as its handler."""
    software_info = node.cluster().metadata().cluster_software_info
    return software_info.HasField("vsan_info")

  def __init__(self, node):
    """Caches the node and the vCenter connection info from its cluster
    metadata for later queries.
    """
    self.__node = node
    self.__cluster_metadata = self.__node.cluster().metadata()
    self.__vcenter_info = \
      self.__cluster_metadata.cluster_management_server_info.vcenter_info

  def is_ready(self):
    """See 'NodeUtil.is_ready' documentation for further details.

    Confirms node is ready by requesting node's health info from Vsphere
    corresponding to the node.

    Raises:
      CurieTestException: If the VsanNodeUtil's node is not found (passed
      through from __get_vim_host).
    """
    with VsphereVcenter(
        self.__vcenter_info.vcenter_host,
        self.__vcenter_info.decrypt_field("vcenter_user"),
        self.__vcenter_info.decrypt_field("vcenter_password")) as vcenter:
      vim_host = self.__get_vim_host(vcenter)
      return self.__vim_host_is_ready(vim_host)

  def __get_vim_host(self, vsphere_vcenter):
    """Returns the HostSystem associated with this VsanNodeUtil.

    Args:
      vsphere_vcenter: VsphereVcenter instance. Must already be open.

    Returns:
      HostSystem instance.

    Raises:
      CurieTestException: If the VsanNodeUtil's node is not found.
    """
    vim_datacenter = vsphere_vcenter.lookup_datacenter(
      self.__vcenter_info.vcenter_datacenter_name)
    vim_cluster = vsphere_vcenter.lookup_cluster(
      vim_datacenter,
      self.__vcenter_info.vcenter_cluster_name)
    # Raises CurieTestException if not found.
    vim_host = vsphere_vcenter.lookup_hosts(vim_cluster,
                                            [self.__node.node_id()])[0]
    return vim_host

  def __vim_host_is_ready(self, vim_host):
    """Returns True if the VSAN service on a HostSystem is active and healthy.

    Args:
      vim_host: HostSystem instance.

    Returns:
      True if active and healthy; otherwise, False.

    Note:
      There are three normal roles for a host master, agent, and backup. The
      first two are self explanatory. Backup is the host designated to take
      over if the master fails. See the VSAN Troubleshooting guide linked at
      the top this module for more information.
    """
    try:
      host_status = vim_host.configManager.vsanSystem.QueryHostStatus()
    except vmodl.RuntimeFault:
      log.warning("Error querying VSAN node %s status",
                  self.__node.node_id(), exc_info=True)
      return False
    log.info("node: %s; host_status.nodeState.state == \"%s\"; "
             "host_status.health == \"%s\"", self.__node.node_id(),
             host_status.nodeState.state, host_status.health)
    # Any healthy master/backup/agent role counts as ready (was a verbose
    # if/else returning True/False).
    return (host_status.nodeState.state in ("master", "agent", "backup") and
            host_status.health == "healthy")
| 33.803738 | 90 | 0.700304 |
32256d70e67e585b347cd93ba09815b0ffd000d9 | 256 | py | Python | anyflow/err.py | Cologler/anyflow-python | cde20b0c74faf18cb7dc503072d4c2f99d5681de | [
"MIT"
] | null | null | null | anyflow/err.py | Cologler/anyflow-python | cde20b0c74faf18cb7dc503072d4c2f99d5681de | [
"MIT"
] | null | null | null | anyflow/err.py | Cologler/anyflow-python | cde20b0c74faf18cb7dc503072d4c2f99d5681de | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (c) 2020~2999 - Cologler <skyoflw@gmail.com>
# ----------
#
# ----------
class Abort(Exception):
    """Raised to abort a flow, carrying the abort reason.

    The first positional argument is stored as ``reason``; any remaining
    arguments are forwarded to ``Exception`` unchanged.
    """

    def __init__(self, reason, *args, **kwargs):
        # Record why the abort happened, then let Exception handle the rest.
        self.reason = reason
        super().__init__(*args, **kwargs)
| 21.333333 | 56 | 0.546875 |
5019e3d715df3bdf6d8dd075785c3a9112295d91 | 13,577 | py | Python | acme/jax/running_statistics.py | ostap-viniavskyi/acme | 8fbae90217557a35e1d773aa63ab80890e799765 | [
"Apache-2.0"
] | 1 | 2022-03-31T17:24:10.000Z | 2022-03-31T17:24:10.000Z | acme/jax/running_statistics.py | ostap-viniavskyi/acme | 8fbae90217557a35e1d773aa63ab80890e799765 | [
"Apache-2.0"
] | null | null | null | acme/jax/running_statistics.py | ostap-viniavskyi/acme | 8fbae90217557a35e1d773aa63ab80890e799765 | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions to compute running statistics."""
import dataclasses
from typing import Any, Optional, Tuple, Union
from acme import types
from acme.jax import utils
from acme.utils import tree_utils
import chex
import jax
import jax.numpy as jnp
import numpy as np
import tree
Path = Tuple[Any, ...]
"""Path in a nested structure.
A path is a tuple of indices (normally strings for maps and integers for
arrays and tuples) that uniquely identifies a subtree in the nested structure.
See
https://tree.readthedocs.io/en/latest/api.html#tree.map_structure_with_path
for more details.
"""
def _is_prefix(a: Path, b: Path) -> bool:
"""Returns whether `a` is a prefix of `b`."""
return b[:len(a)] == a
@chex.dataclass(frozen=True)
class NestedMeanStd:
  """A container for running statistics (mean, std) of possibly nested data."""
  # Nested structures (mirroring the data's structure) holding the per-element
  # running mean and standard deviation.
  mean: types.NestedArray
  std: types.NestedArray
@chex.dataclass(frozen=True)
class RunningStatisticsState(NestedMeanStd):
  """Full state of running statistics computation."""
  # Total (possibly weighted) number of data points accumulated so far.
  count: Union[int, jnp.ndarray]
  # Nested structure of running sums of squared deviations from the mean,
  # from which `std` is derived.
  summed_variance: types.NestedArray
@dataclasses.dataclass(frozen=True)
class NestStatisticsConfig:
  """Specifies how to compute statistics for Nests with the same structure.

  Attributes:
    paths: A sequence of Nest paths to compute statistics for. If there is a
      collision between paths (one is a prefix of the other), the shorter path
      takes precedence.
  """
  # The default of `((),)` (just the root path) selects the entire nest.
  paths: Tuple[Path, ...] = ((),)
def _is_path_included(config: NestStatisticsConfig, path: Path) -> bool:
  """Returns whether the path is included in the config."""
  # A node is covered when it sits inside a subtree rooted at any of the
  # config's paths, i.e. some config path is a prefix of `path`.
  for config_path in config.paths:
    if _is_prefix(config_path, path):
      return True
  return False
def init_state(nest: types.Nest) -> RunningStatisticsState:
  """Initializes the running statistics for the given nested structure."""
  # Accumulate in float64 when x64 mode is enabled to reduce precision loss
  # over long runs (see the note on `update`).
  dtype = jnp.float64 if jax.config.jax_enable_x64 else jnp.float32

  return RunningStatisticsState(
      count=0.,
      mean=utils.zeros_like(nest, dtype=dtype),
      summed_variance=utils.zeros_like(nest, dtype=dtype),
      # Initialize with ones to make sure normalization works correctly
      # in the initial state.
      std=utils.ones_like(nest, dtype=dtype))
def _validate_batch_shapes(batch: types.NestedArray,
                           reference_sample: types.NestedArray,
                           batch_dims: Tuple[int, ...]) -> None:
  """Verifies shapes of the batch leaves against the reference sample.

  Checks that batch dimensions are the same in all leaves in the batch.
  Checks that non-batch dimensions for all leaves in the batch are the same
  as in the reference sample.

  Arguments:
    batch: the nested batch of data to be verified.
    reference_sample: the nested array to check non-batch dimensions.
    batch_dims: a Tuple of indices of batch dimensions in the batch shape.

  Returns:
    None.
  """
  def validate_node_shape(reference_sample: jnp.ndarray,
                          batch: jnp.ndarray) -> None:
    # Each batch leaf must be exactly batch_dims + the reference leaf shape;
    # anything else would silently broadcast downstream.
    expected_shape = batch_dims + reference_sample.shape
    assert batch.shape == expected_shape, f'{batch.shape} != {expected_shape}'

  tree_utils.fast_map_structure(validate_node_shape, reference_sample, batch)
def update(state: RunningStatisticsState,
           batch: types.NestedArray,
           *,
           config: NestStatisticsConfig = NestStatisticsConfig(),
           weights: Optional[jnp.ndarray] = None,
           std_min_value: float = 1e-6,
           std_max_value: float = 1e6,
           pmap_axis_name: Optional[str] = None,
           validate_shapes: bool = True) -> RunningStatisticsState:
  """Updates the running statistics with the given batch of data.

  Note: data batch and state elements (mean, etc.) must have the same
  structure.

  Note: by default will use int32 for counts and float32 for accumulated
  variance. This results in an integer overflow after 2^31 data points and
  degrading precision after 2^24 batch updates or even earlier if variance
  updates have large dynamic range.
  To improve precision, consider setting jax_enable_x64 to True, see
  https://jax.readthedocs.io/en/latest/notebooks/Common_Gotchas_in_JAX.html#double-64bit-precision

  Arguments:
    state: The running statistics before the update.
    batch: The data to be used to update the running statistics.
    config: The config that specifies which leaves of the nested structure
      should the running statistics be computed for.
    weights: Weights of the batch data. Should match the batch dimensions.
      Passing a weight of 2. should be equivalent to updating on the
      corresponding data point twice.
    std_min_value: Minimum value for the standard deviation.
    std_max_value: Maximum value for the standard deviation.
    pmap_axis_name: Name of the pmapped axis, if any.
    validate_shapes: If true, the shapes of all leaves of the batch will be
      validated. Enabled by default. Doesn't impact performance when jitted.

  Returns:
    Updated running statistics.
  """
  # We require exactly the same structure to avoid issues when flattened
  # batch and state have different order of elements.
  tree.assert_same_structure(batch, state.mean)
  batch_shape = tree.flatten(batch)[0].shape
  # We assume the batch dimensions always go first.
  batch_dims = batch_shape[:len(batch_shape) - tree.flatten(state.mean)[0].ndim]
  batch_axis = range(len(batch_dims))
  if weights is None:
    step_increment = np.prod(batch_dims)
  else:
    step_increment = jnp.sum(weights)
  if pmap_axis_name is not None:
    # Counts must be aggregated across devices so each replica sees the
    # global count.
    step_increment = jax.lax.psum(step_increment, axis_name=pmap_axis_name)
  count = state.count + step_increment

  # Validation is important. If the shapes don't match exactly, but are
  # compatible, arrays will be silently broadcasted resulting in incorrect
  # statistics.
  if validate_shapes:
    if weights is not None:
      if weights.shape != batch_dims:
        raise ValueError(f'{weights.shape} != {batch_dims}')
    _validate_batch_shapes(batch, state.mean, batch_dims)

  def _compute_node_statistics(
      path: Path, mean: jnp.ndarray, summed_variance: jnp.ndarray,
      batch: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]:
    assert isinstance(mean, jnp.ndarray), type(mean)
    assert isinstance(summed_variance, jnp.ndarray), type(summed_variance)
    if not _is_path_included(config, path):
      # Return unchanged.
      return mean, summed_variance
    # The mean and the sum of past variances are updated with Welford's
    # algorithm using batches (see https://stackoverflow.com/q/56402955).
    diff_to_old_mean = batch - mean
    if weights is not None:
      # Broadcast weights over the non-batch (feature) dimensions.
      expanded_weights = jnp.reshape(
          weights,
          list(weights.shape) + [1] * (batch.ndim - weights.ndim))
      diff_to_old_mean = diff_to_old_mean * expanded_weights
    mean_update = jnp.sum(diff_to_old_mean, axis=batch_axis) / count
    if pmap_axis_name is not None:
      mean_update = jax.lax.psum(
          mean_update, axis_name=pmap_axis_name)
    mean = mean + mean_update

    # NOTE: diff_to_new_mean must use the *updated* mean; the product of the
    # two diffs is what makes the batched Welford update correct.
    diff_to_new_mean = batch - mean
    variance_update = diff_to_old_mean * diff_to_new_mean
    variance_update = jnp.sum(variance_update, axis=batch_axis)
    if pmap_axis_name is not None:
      variance_update = jax.lax.psum(variance_update, axis_name=pmap_axis_name)
    summed_variance = summed_variance + variance_update
    return mean, summed_variance

  updated_stats = tree_utils.fast_map_structure_with_path(
      _compute_node_statistics, state.mean, state.summed_variance, batch)
  # map_structure_up_to is slow, so shortcut if we know the input is not
  # structured.
  if isinstance(state.mean, jnp.ndarray):
    mean, summed_variance = updated_stats
  else:
    # Reshape the updated stats from `nest(mean, summed_variance)` to
    # `nest(mean), nest(summed_variance)`.
    mean, summed_variance = [
        tree.map_structure_up_to(
            state.mean, lambda s, i=idx: s[i], updated_stats)
        for idx in range(2)
    ]

  def compute_std(path: Path, summed_variance: jnp.ndarray,
                  std: jnp.ndarray) -> jnp.ndarray:
    assert isinstance(summed_variance, jnp.ndarray)
    if not _is_path_included(config, path):
      return std
    # Summed variance can get negative due to rounding errors.
    summed_variance = jnp.maximum(summed_variance, 0)
    std = jnp.sqrt(summed_variance / count)
    # Clipping keeps later divisions by std well conditioned.
    std = jnp.clip(std, std_min_value, std_max_value)
    return std

  std = tree_utils.fast_map_structure_with_path(compute_std, summed_variance,
                                                state.std)

  return RunningStatisticsState(
      count=count, mean=mean, summed_variance=summed_variance, std=std)
def normalize(batch: types.NestedArray,
              mean_std: NestedMeanStd,
              max_abs_value: Optional[float] = None) -> types.NestedArray:
  """Applies (x - mean) / std to every inexact-typed leaf of `batch`.

  Leaves with exact dtypes (integers/bools) are passed through unchanged.
  When `max_abs_value` is given, normalized values are additionally clipped
  to [-max_abs_value, max_abs_value].
  """

  def _normalize_leaf(data: jnp.ndarray, mean: jnp.ndarray,
                      std: jnp.ndarray) -> jnp.ndarray:
    if not jnp.issubdtype(data.dtype, jnp.inexact):
      # Exact (integer/bool) leaves, e.g. ids, are not normalized.
      return data
    normalized = (data - mean) / std
    if max_abs_value is None:
      return normalized
    # TODO(b/124318564): remove pylint directive
    # pylint: disable=invalid-unary-operand-type
    return jnp.clip(normalized, -max_abs_value, +max_abs_value)

  return tree_utils.fast_map_structure(_normalize_leaf, batch, mean_std.mean,
                                       mean_std.std)
def denormalize(batch: types.NestedArray,
                mean_std: NestedMeanStd) -> types.NestedArray:
  """Maps every inexact-typed leaf of `batch` through x * std + mean.

  The inverse of `normalize`. Only values of inexact types are denormalized;
  see https://numpy.org/doc/stable/_images/dtype-hierarchy.png for the Numpy
  type hierarchy.

  Args:
    batch: a nested structure containing batch of data.
    mean_std: mean and standard deviation used for denormalization.

  Returns:
    Nested structure with denormalized values.
  """

  def _denormalize_leaf(data: jnp.ndarray, mean: jnp.ndarray,
                        std: jnp.ndarray) -> jnp.ndarray:
    if np.issubdtype(data.dtype, np.inexact):
      return data * std + mean
    # Exact (integer/bool) leaves pass through untouched.
    return data

  return tree_utils.fast_map_structure(_denormalize_leaf, batch, mean_std.mean,
                                       mean_std.std)
@dataclasses.dataclass(frozen=True)
class NestClippingConfig:
  """Specifies how to clip Nests with the same structure.

  Attributes:
    path_map: A map that specifies how to clip values in Nests with the same
      structure. Keys correspond to paths in the nest. Values are maximum
      absolute values to use for clipping. If there is a collision between paths
      (one path is a prefix of the other), the behavior is undefined.
  """
  # The default (empty map) performs no clipping at all.
  path_map: Tuple[Tuple[Path, float], ...] = ()
def get_clip_config_for_path(config: NestClippingConfig,
                             path: Path) -> NestClippingConfig:
  """Projects a clipping config onto the subtree rooted at `path`.

  If some existing entry covers `path` itself (its path is a prefix of
  `path`), the whole subtree inherits that entry's max-abs value. Otherwise
  entries living strictly below `path` are kept, re-rooted at the subtree.
  """
  trimmed_entries = []
  for map_path, max_abs_value in config.path_map:
    if _is_prefix(map_path, path):
      # An ancestor (or exact) entry applies to the entire subtree.
      return NestClippingConfig(path_map=(((), max_abs_value),))
    if _is_prefix(path, map_path):
      # A descendant entry survives with `path` stripped from its front.
      trimmed_entries.append((map_path[len(path):], max_abs_value))
  return NestClippingConfig(path_map=tuple(trimmed_entries))
def clip(batch: types.NestedArray,
         clipping_config: NestClippingConfig) -> types.NestedArray:
  """Clips leaves of `batch` according to `clipping_config`.

  Each leaf is clipped to [-v, v], where v comes from the first config entry
  whose path is a prefix of the leaf's path. Leaves with no matching entry
  pass through unchanged.
  """

  def _limit_for_path(path: Path, leaf: jnp.ndarray) -> Optional[float]:
    del leaf  # Unused; required by the map_structure_with_path interface.
    # First matching entry wins, mirroring the config's declaration order.
    for clipping_path, max_abs_value in clipping_config.path_map:
      if _is_prefix(clipping_path, path):
        return max_abs_value
    return None

  limits = tree_utils.fast_map_structure_with_path(_limit_for_path, batch)

  def _clip_leaf(data: jnp.ndarray,
                 max_abs_value: Optional[float]) -> jnp.ndarray:
    if max_abs_value is None:
      return data
    # TODO(b/124318564): remove pylint directive
    # pylint: disable=invalid-unary-operand-type
    return jnp.clip(data, -max_abs_value, +max_abs_value)

  return tree_utils.fast_map_structure(_clip_leaf, batch, limits)
@dataclasses.dataclass(frozen=True)
class NestNormalizationConfig:
  """Specifies how to normalize Nests with the same structure.

  Attributes:
    stats_config: A config that defines how to compute running statistics to be
      used for normalization.
    clip_config: A config that defines how to clip normalized values.
  """
  # Defaults: gather statistics over the full nest and perform no clipping.
  stats_config: NestStatisticsConfig = NestStatisticsConfig()
  clip_config: NestClippingConfig = NestClippingConfig()
| 38.791429 | 105 | 0.716138 |
5bb45a79bb093014d830467491cd6aeaca7a8193 | 14,763 | py | Python | gemini/scripts/gemini_install.py | drmjc/gemini | d735735a9dc506a506f455a1e42ed6aa9cd1c2a5 | [
"MIT"
] | 1 | 2019-07-29T02:53:51.000Z | 2019-07-29T02:53:51.000Z | gemini/scripts/gemini_install.py | drmjc/gemini | d735735a9dc506a506f455a1e42ed6aa9cd1c2a5 | [
"MIT"
] | 1 | 2021-09-11T14:30:32.000Z | 2021-09-11T14:30:32.000Z | gemini/scripts/gemini_install.py | drmjc/gemini | d735735a9dc506a506f455a1e42ed6aa9cd1c2a5 | [
"MIT"
] | 2 | 2016-12-19T02:27:46.000Z | 2019-07-29T02:53:54.000Z | #!/usr/bin/env python
"""Installer for gemini: a lightweight db framework for disease and population genetics.
https://github.com/arq5x/gemini
Handles installation of:
- Required third party software
- Required Python libraries
- Gemini application
- Associated data files
Requires: Python 2.7, git, and compilers (gcc, g++)
Run gemini_install.py -h for usage.
"""
import argparse
import platform
import os
import shutil
import subprocess
import sys
import urllib2
remotes = {"requirements_pip":
"https://raw.github.com/arq5x/gemini/master/requirements.txt",
"requirements_conda":
"",
"versioned_installations":
"https://raw.githubusercontent.com/arq5x/gemini/master/versioning/",
"cloudbiolinux":
"https://github.com/chapmanb/cloudbiolinux.git",
"gemini":
"https://github.com/arq5x/gemini.git",
"anaconda":
"http://repo.continuum.io/miniconda/Miniconda-3.5.5-%s-x86_64.sh"}
def main(args):
    """Drive the full install: deps check, python env, gemini, tools, data.

    Works inside a temporary directory under the current working directory,
    which is removed on success.
    """
    check_dependencies()
    work_dir = os.path.join(os.getcwd(), "tmpgemini_install")
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)
    os.chdir(work_dir)
    if args.gemini_version != 'latest':
        # Pinned installs pull versioned requirement files; fail early if the
        # requested version does not exist upstream.
        requirements_pip = os.path.join(remotes['versioned_installations'], args.gemini_version, 'requirements_pip.txt')
        requirements_conda = os.path.join(remotes['versioned_installations'], args.gemini_version, 'requirements_conda.txt')
        try:
            urllib2.urlopen(requirements_pip)
        except:
            sys.exit('Gemini version %s could not be found. Try the latest version.' % args.gemini_version)
        remotes.update({'requirements_pip': requirements_pip, 'requirements_conda': requirements_conda})
    print "Installing isolated base python installation"
    make_dirs(args)
    anaconda = install_anaconda_python(args, remotes)
    print "Installing gemini..."
    install_conda_pkgs(anaconda, remotes, args)
    gemini = install_gemini(anaconda, remotes, args.datadir, args.tooldir, args.sudo)
    if args.install_tools:
        cbl = get_cloudbiolinux(remotes["cloudbiolinux"])
        fabricrc = write_fabricrc(cbl["fabricrc"], args.tooldir, args.datadir,
                                  "ubuntu", args.sudo)
        print "Installing associated tools..."
        install_tools(gemini["fab"], cbl["tool_fabfile"], fabricrc)
    # Installation steps may chdir; return to the work dir between them.
    os.chdir(work_dir)
    install_data(gemini["python"], gemini["data_script"], args)
    os.chdir(work_dir)
    test_script = install_testbase(args.datadir, remotes["gemini"], gemini)
    print "Finished: gemini, tools and data installed"
    print " Tools installed in:\n %s" % args.tooldir
    print " Data installed in:\n %s" % args.datadir
    print " Run tests with:\n cd %s && bash %s" % (os.path.dirname(test_script),
                                                   os.path.basename(test_script))
    print " NOTE: be sure to add %s/bin to your PATH." % args.tooldir
    print " NOTE: Install data files for GERP_bp & CADD_scores (not installed by default).\n "
    shutil.rmtree(work_dir)
def install_gemini(anaconda, remotes, datadir, tooldir, use_sudo):
    """Install gemini plus python dependencies inside isolated Anaconda environment.

    Also links the gemini/python/pip entry points into tooldir/bin and
    returns the paths needed by later install steps (fab, data script,
    python binary and the gemini command itself).
    """
    # Work around issue with distribute where asks for 'distribute==0.0'
    # try:
    #     subprocess.check_call([anaconda["easy_install"], "--upgrade", "distribute"])
    # except subprocess.CalledProcessError:
    #     try:
    #         subprocess.check_call([anaconda["pip"], "install", "--upgrade", "distribute"])
    #     except subprocess.CalledProcessError:
    #         pass
    # Ensure latest version of fabric for running CloudBioLinux
    subprocess.check_call([anaconda["pip"], "install", "fabric>=1.7.0"])
    # allow downloads excluded in recent pip (1.5 or greater) versions
    try:
        p = subprocess.Popen([anaconda["pip"], "--version"], stdout=subprocess.PIPE)
        pip_version = p.communicate()[0].split()[1]
    except:
        pip_version = ""
    pip_compat = []
    if pip_version >= "1.5":
        for req in ["python-graph-core", "python-graph-dot"]:
            pip_compat += ["--allow-external", req, "--allow-unverified", req]
    subprocess.check_call([anaconda["pip"], "install"] + pip_compat + ["-r", remotes["requirements_pip"]])
    python_bin = os.path.join(anaconda["dir"], "bin", "python")
    _cleanup_problem_files(anaconda["dir"])
    _add_missing_inits(python_bin)
    # Symlink environment entry points into the shared tool bin directory.
    for final_name, ve_name in [("gemini", "gemini"), ("gemini_python", "python"),
                                ("gemini_pip", "pip")]:
        final_script = os.path.join(tooldir, "bin", final_name)
        ve_script = os.path.join(anaconda["dir"], "bin", ve_name)
        sudo_cmd = ["sudo"] if use_sudo else []
        if os.path.lexists(final_script):
            subprocess.check_call(sudo_cmd + ["rm", "-f", final_script])
        else:
            subprocess.check_call(sudo_cmd + ["mkdir", "-p", os.path.dirname(final_script)])
        cmd = ["ln", "-s", ve_script, final_script]
        subprocess.check_call(sudo_cmd + cmd)
    # Locate the installed gemini package to find its bundled data installer.
    library_loc = subprocess.check_output("%s -c 'import gemini; print gemini.__file__'" % python_bin,
                                          shell=True)
    return {"fab": os.path.join(anaconda["dir"], "bin", "fab"),
            "data_script": os.path.join(os.path.dirname(library_loc.strip()), "install-data.py"),
            "python": python_bin,
            "cmd": os.path.join(anaconda["dir"], "bin", "gemini")}
def install_conda_pkgs(anaconda, remotes, args):
    """Install gemini's binary dependencies into the Anaconda environment.

    Versioned installs use a pinned conda requirements file; otherwise a
    fixed package list is installed from the bcbio binstar channel.
    """
    if args.gemini_version != 'latest':
        pkgs = ["--file", remotes['requirements_conda']]
    else:
        pkgs = ["bx-python", "conda", "cython", "ipython", "jinja2", "nose", "numpy",
                "pip", "pycrypto", "pyparsing", "pysam", "pyyaml",
                "pyzmq", "pandas", "scipy"]
    channels = ["-c", "https://conda.binstar.org/bcbio"]
    subprocess.check_call([anaconda["conda"], "install", "--yes"] + channels + pkgs)
def install_anaconda_python(args, remotes):
    """Provide isolated installation of Anaconda python.
    http://docs.continuum.io/anaconda/index.html

    Returns a dict of paths to the environment's conda, pip, easy_install
    and root directory.
    """
    anaconda_dir = os.path.join(args.datadir, "anaconda")
    bindir = os.path.join(anaconda_dir, "bin")
    conda = os.path.join(bindir, "conda")
    if platform.mac_ver()[0]:
        distribution = "macosx"
    else:
        distribution = "linux"
    if not os.path.exists(anaconda_dir) or not os.path.exists(conda):
        # Missing or incomplete environment: wipe and reinstall from scratch.
        if os.path.exists(anaconda_dir):
            shutil.rmtree(anaconda_dir)
        url = remotes["anaconda"] % ("MacOSX" if distribution == "macosx" else "Linux")
        if not os.path.exists(os.path.basename(url)):
            subprocess.check_call(["wget", url])
        # -b: batch (no prompts); -p: install prefix.
        subprocess.check_call("bash %s -b -p %s" %
                              (os.path.basename(url), anaconda_dir), shell=True)
    return {"conda": conda,
            "pip": os.path.join(bindir, "pip"),
            "easy_install": os.path.join(bindir, "easy_install"),
            "dir": anaconda_dir}
def _add_missing_inits(python_bin):
    """pip/setuptools strips __init__.py files with namespace declarations.
    I have no idea why, but this adds them back.
    """
    # Ask the target interpreter where pygraph landed, then re-create the
    # namespace __init__.py one directory above if it is missing.
    library_loc = subprocess.check_output("%s -c 'import pygraph.classes.graph; "
                                          "print pygraph.classes.graph.__file__'" % python_bin,
                                          shell=True)
    pygraph_init = os.path.normpath(os.path.join(os.path.dirname(library_loc.strip()), os.pardir,
                                                 "__init__.py"))
    if not os.path.exists(pygraph_init):
        with open(pygraph_init, "w") as out_handle:
            out_handle.write("__import__('pkg_resources').declare_namespace(__name__)\n")
def _cleanup_problem_files(venv_dir):
"""Remove problem bottle items in PATH which conflict with site-packages
"""
for cmd in ["bottle.py", "bottle.pyc"]:
bin_cmd = os.path.join(venv_dir, "bin", cmd)
if os.path.exists(bin_cmd):
os.remove(bin_cmd)
def install_tools(fab_cmd, fabfile, fabricrc):
    """Install 3rd party tools used by Gemini using a custom CloudBioLinux flavor.

    Writes a minimal flavor description (main.yaml/custom.yaml) listing only
    the tools gemini needs, then hands it to CloudBioLinux via fabric.
    """
    tools = ["tabix", "grabix", "samtools", "bedtools"]
    flavor_dir = os.path.join(os.getcwd(), "gemini-flavor")
    if not os.path.exists(flavor_dir):
        os.makedirs(flavor_dir)
    with open(os.path.join(flavor_dir, "main.yaml"), "w") as out_handle:
        out_handle.write("packages:\n")
        out_handle.write(" - bio_nextgen\n")
        out_handle.write("libraries:\n")
    with open(os.path.join(flavor_dir, "custom.yaml"), "w") as out_handle:
        out_handle.write("bio_nextgen:\n")
        for tool in tools:
            out_handle.write(" - %s\n" % tool)
    # Run CloudBioLinux against localhost using the generated flavor.
    cmd = [fab_cmd, "-f", fabfile, "-H", "localhost", "-c", fabricrc,
           "install_biolinux:target=custom,flavor=%s" % flavor_dir]
    subprocess.check_call(cmd)
def install_data(python_cmd, data_script, args):
    """Install biological data used by gemini.

    Delegates to gemini's bundled install-data.py; passes --nodata when the
    user opted out so the script still performs its non-data setup.
    """
    # Shared python installs get a dedicated gemini_data subdirectory so data
    # from multiple projects does not collide.
    data_dir = os.path.join(args.datadir, "gemini_data") if args.sharedpy else args.datadir
    cmd = [python_cmd, data_script, data_dir]
    if args.install_data:
        print "Installing gemini data..."
    else:
        cmd.append("--nodata")
    subprocess.check_call(cmd)
def install_testbase(datadir, repo, gemini):
    """Clone or update gemini code so we have the latest test suite.

    Returns the path to the suite's master-test.sh driver script.
    """
    gemini_dir = os.path.join(datadir, "gemini")
    cur_dir = os.getcwd()
    needs_git = True
    if os.path.exists(gemini_dir):
        os.chdir(gemini_dir)
        try:
            subprocess.check_call(["git", "pull", "origin", "master", "--tags"])
            needs_git = False
        except:
            # Pull failed (e.g. corrupt checkout): fall back to a fresh clone.
            os.chdir(cur_dir)
            shutil.rmtree(gemini_dir)
    if needs_git:
        os.chdir(os.path.split(gemini_dir)[0])
        subprocess.check_call(["git", "clone", repo])
    os.chdir(gemini_dir)
    # Pin the checkout to match the installed gemini version when possible.
    _update_testdir_revision(gemini["cmd"])
    os.chdir(cur_dir)
    return os.path.join(gemini_dir, "master-test.sh")
def _update_testdir_revision(gemini_cmd):
    """Update test directory to be in sync with a tagged installed version or development.
    """
    # Best-effort: read the installed gemini version; empty on any failure.
    try:
        p = subprocess.Popen([gemini_cmd, "--version"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        gversion = p.communicate()[0].split()[1]
    except:
        gversion = ""
    tag = ""
    if gversion:
        # Look for a git tag matching the installed version string.
        try:
            p = subprocess.Popen("git tag -l | grep %s" % gversion, stdout=subprocess.PIPE, shell=True)
            tag = p.communicate()[0].strip()
        except:
            tag = ""
    if tag:
        # Installed version matches a release tag: pin tests to that tag.
        subprocess.check_call(["git", "checkout", "tags/%s" % tag])
        pass
    else:
        # Development install: reset to the latest code on the current branch.
        subprocess.check_call(["git", "reset", "--hard", "HEAD"])
def write_fabricrc(base_file, tooldir, datadir, distribution, use_sudo):
    """Write a customized CloudBioLinux fabricrc into the current directory.

    Copies `base_file` line by line, overriding install paths, data
    location, distribution and sudo settings, and forcing the minimal
    edition plus a galaxy home under the data directory. Returns the path
    of the generated file.
    """
    out_file = os.path.join(os.getcwd(), os.path.basename(base_file))
    # (prefix to match, full replacement line) -- first match wins, mirroring
    # the order the settings appear in the stock fabricrc.
    replacements = [
        ("system_install", "system_install = %s\n" % tooldir),
        ("local_install", "local_install = %s/install\n" % tooldir),
        ("data_files", "data_files = %s\n" % datadir),
        ("distribution", "distribution = %s\n" % distribution),
        ("use_sudo", "use_sudo = %s\n" % use_sudo),
        ("edition", "edition = minimal\n"),
        ("#galaxy_home", "galaxy_home = %s\n" % os.path.join(datadir, "galaxy")),
    ]
    with open(base_file) as in_handle, open(out_file, "w") as out_handle:
        for line in in_handle:
            for prefix, replacement in replacements:
                if line.startswith(prefix):
                    line = replacement
                    break
            out_handle.write(line)
    return out_file
def make_dirs(args):
    """Create the tool and data directories, owned by the invoking user.

    Uses sudo for mkdir/chown when allowed; existing directories are left
    untouched.
    """
    sudo_cmd = ["sudo"] if args.sudo else []
    for dname in [args.datadir, args.tooldir]:
        if not os.path.exists(dname):
            subprocess.check_call(sudo_cmd + ["mkdir", "-p", dname])
            # Hand ownership back to the current user so later non-sudo
            # steps can write into the directory.
            username = subprocess.check_output("echo $USER", shell=True).strip()
            subprocess.check_call(sudo_cmd + ["chown", username, dname])
def get_cloudbiolinux(repo):
    """Clone CloudBioLinux (if not already present) into the current directory.

    Returns paths to its fabricrc config template and tool fabfile.
    """
    base_dir = os.path.join(os.getcwd(), "cloudbiolinux")
    if not os.path.exists(base_dir):
        subprocess.check_call(["git", "clone", repo])
    return {"fabricrc": os.path.join(base_dir, "config", "fabricrc.txt"),
            "tool_fabfile": os.path.join(base_dir, "fabfile.py")}
def check_dependencies():
    """Ensure required tools for installation are present.

    Probes git, wget and curl by invoking each with --version; raises
    OSError naming the missing tool and its homepage otherwise.
    """
    print "Checking required dependencies..."
    for cmd, url in [("git", "http://git-scm.com/"),
                     ("wget", "http://www.gnu.org/software/wget/"),
                     ("curl", "http://curl.haxx.se/")]:
        try:
            retcode = subprocess.call([cmd, "--version"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except OSError:
            # Executable missing entirely: normalize to the shell's 127 code.
            retcode = 127
        if retcode == 127:
            raise OSError("gemini requires %s (%s)" % (cmd, url))
        else:
            print " %s found" % cmd
if __name__ == "__main__":
    # Command line interface: two positional install locations plus flags
    # toggling the optional tool/data installation steps.
    parser = argparse.ArgumentParser(description="Automated installer for gemini framework.")
    parser.add_argument("tooldir", help="Directory to install 3rd party software tools",
                        type=os.path.abspath)
    parser.add_argument("datadir", help="Directory to install gemini data files",
                        type=os.path.abspath)
    parser.add_argument("--gemini-version", dest="gemini_version", default="latest",
                        help="Install one specific gemini version with a fixed dependency chain.")
    parser.add_argument("--nosudo", help="Specify we cannot use sudo for commands",
                        dest="sudo", action="store_false", default=True)
    parser.add_argument("--notools", help="Do not install tool dependencies",
                        dest="install_tools", action="store_false", default=True)
    parser.add_argument("--nodata", help="Do not install data dependencies",
                        dest="install_data", action="store_false", default=True)
    parser.add_argument("--sharedpy", help=("Indicate we share an Anaconda Python directory with "
                                            "another project. Creates unique gemini data directory."),
                        action="store_true", default=False)
    if len(sys.argv) == 1:
        # No arguments given: show usage instead of failing on missing args.
        parser.print_help()
    else:
        main(parser.parse_args())
| 45.146789 | 126 | 0.617354 |
5b10ca17c81fd79b2b5d1638dc56886e76045970 | 101,974 | py | Python | env/Lib/site-packages/plotly/graph_objs/_scatter3d.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 7 | 2022-01-16T12:28:16.000Z | 2022-03-04T15:31:45.000Z | packages/python/plotly/plotly/graph_objs/_scatter3d.py | jiangrongbo/plotly.py | df19fc702b309586cc24e25373b87e8bdbb3ff60 | [
"MIT"
] | 14 | 2021-10-20T23:33:47.000Z | 2021-12-21T04:50:37.000Z | packages/python/plotly/plotly/graph_objs/_scatter3d.py | jiangrongbo/plotly.py | df19fc702b309586cc24e25373b87e8bdbb3ff60 | [
"MIT"
] | 1 | 2021-11-29T22:55:05.000Z | 2021-11-29T22:55:05.000Z | from plotly.basedatatypes import BaseTraceType as _BaseTraceType
import copy as _copy
class Scatter3d(_BaseTraceType):
    # NOTE(review): this module appears to be plotly.py codegen output;
    # changes should normally go into the generator, not be made by hand.

    # class properties
    # --------------------
    _parent_path_str = ""
    _path_str = "scatter3d"
    # The complete set of property names this trace type accepts.
    _valid_props = {
        "connectgaps",
        "customdata",
        "customdatasrc",
        "error_x",
        "error_y",
        "error_z",
        "hoverinfo",
        "hoverinfosrc",
        "hoverlabel",
        "hovertemplate",
        "hovertemplatesrc",
        "hovertext",
        "hovertextsrc",
        "ids",
        "idssrc",
        "legendgroup",
        "legendgrouptitle",
        "legendrank",
        "line",
        "marker",
        "meta",
        "metasrc",
        "mode",
        "name",
        "opacity",
        "projection",
        "scene",
        "showlegend",
        "stream",
        "surfaceaxis",
        "surfacecolor",
        "text",
        "textfont",
        "textposition",
        "textpositionsrc",
        "textsrc",
        "texttemplate",
        "texttemplatesrc",
        "type",
        "uid",
        "uirevision",
        "visible",
        "x",
        "xcalendar",
        "xhoverformat",
        "xsrc",
        "y",
        "ycalendar",
        "yhoverformat",
        "ysrc",
        "z",
        "zcalendar",
        "zhoverformat",
        "zsrc",
    }
    # connectgaps
    # -----------
    @property
    def connectgaps(self):
        """
        Determines whether or not gaps (i.e. {nan} or missing values)
        in the provided data arrays are connected.

        The 'connectgaps' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self["connectgaps"]

    @connectgaps.setter
    def connectgaps(self, val):
        # Stored via the base type's item assignment (self[...] = val).
        self["connectgaps"] = val
    # customdata
    # ----------
    @property
    def customdata(self):
        """
        Assigns extra data each datum. This may be useful when
        listening to hover, click and selection events. Note that,
        "scatter" traces also appends customdata items in the markers
        DOM elements

        The 'customdata' property is an array that may be specified as a tuple,
        list, numpy array, or pandas Series

        Returns
        -------
        numpy.ndarray
        """
        return self["customdata"]

    @customdata.setter
    def customdata(self, val):
        # Stored via the base type's item assignment (self[...] = val).
        self["customdata"] = val
    # customdatasrc
    # -------------
    @property
    def customdatasrc(self):
        """
        Sets the source reference on Chart Studio Cloud for
        `customdata`.

        The 'customdatasrc' property must be specified as a string or
        as a plotly.grid_objs.Column object

        Returns
        -------
        str
        """
        return self["customdatasrc"]

    @customdatasrc.setter
    def customdatasrc(self, val):
        # Stored via the base type's item assignment (self[...] = val).
        self["customdatasrc"] = val
# error_x
# -------
@property
def error_x(self):
"""
The 'error_x' property is an instance of ErrorX
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.ErrorX`
- A dict of string/value properties that will be passed
to the ErrorX constructor
Supported dict properties:
array
Sets the data corresponding the length of each
error bar. Values are plotted relative to the
underlying data.
arrayminus
Sets the data corresponding the length of each
error bar in the bottom (left) direction for
vertical (horizontal) bars Values are plotted
relative to the underlying data.
arrayminussrc
Sets the source reference on Chart Studio Cloud
for `arrayminus`.
arraysrc
Sets the source reference on Chart Studio Cloud
for `array`.
color
Sets the stoke color of the error bars.
copy_zstyle
symmetric
Determines whether or not the error bars have
the same length in both direction (top/bottom
for vertical bars, left/right for horizontal
bars.
thickness
Sets the thickness (in px) of the error bars.
traceref
tracerefminus
type
Determines the rule used to generate the error
bars. If *constant`, the bar lengths are of a
constant value. Set this constant in `value`.
If "percent", the bar lengths correspond to a
percentage of underlying data. Set this
percentage in `value`. If "sqrt", the bar
lengths correspond to the square of the
underlying data. If "data", the bar lengths are
set with data set `array`.
value
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars.
valueminus
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars in the bottom
(left) direction for vertical (horizontal) bars
visible
Determines whether or not this set of error
bars is visible.
width
Sets the width (in px) of the cross-bar at both
ends of the error bars.
Returns
-------
plotly.graph_objs.scatter3d.ErrorX
"""
return self["error_x"]
    # Setter for the 'error_x' property; accepts an ErrorX instance or a
    # dict of ErrorX constructor properties (see the getter docstring).
    @error_x.setter
    def error_x(self, val):
        self["error_x"] = val
# error_y
# -------
@property
def error_y(self):
"""
The 'error_y' property is an instance of ErrorY
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.ErrorY`
- A dict of string/value properties that will be passed
to the ErrorY constructor
Supported dict properties:
array
Sets the data corresponding the length of each
error bar. Values are plotted relative to the
underlying data.
arrayminus
Sets the data corresponding the length of each
error bar in the bottom (left) direction for
vertical (horizontal) bars Values are plotted
relative to the underlying data.
arrayminussrc
Sets the source reference on Chart Studio Cloud
for `arrayminus`.
arraysrc
Sets the source reference on Chart Studio Cloud
for `array`.
color
Sets the stoke color of the error bars.
copy_zstyle
symmetric
Determines whether or not the error bars have
the same length in both direction (top/bottom
for vertical bars, left/right for horizontal
bars.
thickness
Sets the thickness (in px) of the error bars.
traceref
tracerefminus
type
Determines the rule used to generate the error
bars. If *constant`, the bar lengths are of a
constant value. Set this constant in `value`.
If "percent", the bar lengths correspond to a
percentage of underlying data. Set this
percentage in `value`. If "sqrt", the bar
lengths correspond to the square of the
underlying data. If "data", the bar lengths are
set with data set `array`.
value
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars.
valueminus
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars in the bottom
(left) direction for vertical (horizontal) bars
visible
Determines whether or not this set of error
bars is visible.
width
Sets the width (in px) of the cross-bar at both
ends of the error bars.
Returns
-------
plotly.graph_objs.scatter3d.ErrorY
"""
return self["error_y"]
@error_y.setter
def error_y(self, val):
self["error_y"] = val
# error_z
# -------
@property
def error_z(self):
"""
The 'error_z' property is an instance of ErrorZ
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.ErrorZ`
- A dict of string/value properties that will be passed
to the ErrorZ constructor
Supported dict properties:
array
Sets the data corresponding the length of each
error bar. Values are plotted relative to the
underlying data.
arrayminus
Sets the data corresponding the length of each
error bar in the bottom (left) direction for
vertical (horizontal) bars Values are plotted
relative to the underlying data.
arrayminussrc
Sets the source reference on Chart Studio Cloud
for `arrayminus`.
arraysrc
Sets the source reference on Chart Studio Cloud
for `array`.
color
Sets the stoke color of the error bars.
symmetric
Determines whether or not the error bars have
the same length in both direction (top/bottom
for vertical bars, left/right for horizontal
bars.
thickness
Sets the thickness (in px) of the error bars.
traceref
tracerefminus
type
Determines the rule used to generate the error
bars. If *constant`, the bar lengths are of a
constant value. Set this constant in `value`.
If "percent", the bar lengths correspond to a
percentage of underlying data. Set this
percentage in `value`. If "sqrt", the bar
lengths correspond to the square of the
underlying data. If "data", the bar lengths are
set with data set `array`.
value
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars.
valueminus
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars in the bottom
(left) direction for vertical (horizontal) bars
visible
Determines whether or not this set of error
bars is visible.
width
Sets the width (in px) of the cross-bar at both
ends of the error bars.
Returns
-------
plotly.graph_objs.scatter3d.ErrorZ
"""
return self["error_z"]
@error_z.setter
def error_z(self, val):
self["error_z"] = val
# hoverinfo
# ---------
@property
def hoverinfo(self):
"""
Determines which trace information appear on hover. If `none`
or `skip` are set, no information is displayed upon hovering.
But, if `none` is set, click and hover events are still fired.
The 'hoverinfo' property is a flaglist and may be specified
as a string containing:
- Any combination of ['x', 'y', 'z', 'text', 'name'] joined with '+' characters
(e.g. 'x+y')
OR exactly one of ['all', 'none', 'skip'] (e.g. 'skip')
- A list or array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["hoverinfo"]
@hoverinfo.setter
def hoverinfo(self, val):
self["hoverinfo"] = val
# hoverinfosrc
# ------------
@property
def hoverinfosrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`hoverinfo`.
The 'hoverinfosrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hoverinfosrc"]
@hoverinfosrc.setter
def hoverinfosrc(self, val):
self["hoverinfosrc"] = val
# hoverlabel
# ----------
@property
def hoverlabel(self):
"""
The 'hoverlabel' property is an instance of Hoverlabel
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Hoverlabel`
- A dict of string/value properties that will be passed
to the Hoverlabel constructor
Supported dict properties:
align
Sets the horizontal alignment of the text
content within hover label box. Has an effect
only if the hover label text spans more two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for `align`.
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for `bgcolor`.
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for `bordercolor`.
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for `namelength`.
Returns
-------
plotly.graph_objs.scatter3d.Hoverlabel
"""
return self["hoverlabel"]
@hoverlabel.setter
def hoverlabel(self, val):
self["hoverlabel"] = val
# hovertemplate
# -------------
@property
def hovertemplate(self):
"""
Template string used for rendering the information that appear
on hover box. Note that this will override `hoverinfo`.
Variables are inserted using %{variable}, for example "y: %{y}"
as well as %{xother}, {%_xother}, {%_xother_}, {%xother_}. When
showing info for several points, "xother" will be added to
those with different x positions from the first point. An
underscore before or after "(x|y)other" will add a space on
that side, only when this field is shown. Numbers are formatted
using d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format for
details on the formatting syntax. Dates are formatted using
d3-time-format's syntax %{variable|d3-time-format}, for example
"Day: %{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the date
formatting syntax. The variables available in `hovertemplate`
are the ones emitted as event data described at this link
https://plotly.com/javascript/plotlyjs-events/#event-data.
Additionally, every attributes that can be specified per-point
(the ones that are `arrayOk: true`) are available. Anything
contained in tag `<extra>` is displayed in the secondary box,
for example "<extra>{fullData.name}</extra>". To hide the
secondary box completely, use an empty tag `<extra></extra>`.
The 'hovertemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["hovertemplate"]
@hovertemplate.setter
def hovertemplate(self, val):
self["hovertemplate"] = val
# hovertemplatesrc
# ----------------
@property
def hovertemplatesrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`hovertemplate`.
The 'hovertemplatesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hovertemplatesrc"]
@hovertemplatesrc.setter
def hovertemplatesrc(self, val):
self["hovertemplatesrc"] = val
# hovertext
# ---------
@property
def hovertext(self):
"""
Sets text elements associated with each (x,y,z) triplet. If a
single string, the same string appears over all the data
points. If an array of string, the items are mapped in order to
the this trace's (x,y,z) coordinates. To be seen, trace
`hoverinfo` must contain a "text" flag.
The 'hovertext' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["hovertext"]
@hovertext.setter
def hovertext(self, val):
self["hovertext"] = val
# hovertextsrc
# ------------
@property
def hovertextsrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`hovertext`.
The 'hovertextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hovertextsrc"]
@hovertextsrc.setter
def hovertextsrc(self, val):
self["hovertextsrc"] = val
# ids
# ---
@property
def ids(self):
"""
Assigns id labels to each datum. These ids for object constancy
of data points during animation. Should be an array of strings,
not numbers or any other type.
The 'ids' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["ids"]
@ids.setter
def ids(self, val):
self["ids"] = val
# idssrc
# ------
@property
def idssrc(self):
"""
Sets the source reference on Chart Studio Cloud for `ids`.
The 'idssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["idssrc"]
@idssrc.setter
def idssrc(self, val):
self["idssrc"] = val
# legendgroup
# -----------
@property
def legendgroup(self):
"""
Sets the legend group for this trace. Traces part of the same
legend group hide/show at the same time when toggling legend
items.
The 'legendgroup' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["legendgroup"]
@legendgroup.setter
def legendgroup(self, val):
self["legendgroup"] = val
# legendgrouptitle
# ----------------
@property
def legendgrouptitle(self):
"""
The 'legendgrouptitle' property is an instance of Legendgrouptitle
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Legendgrouptitle`
- A dict of string/value properties that will be passed
to the Legendgrouptitle constructor
Supported dict properties:
font
Sets this legend group's title font.
text
Sets the title of the legend group.
Returns
-------
plotly.graph_objs.scatter3d.Legendgrouptitle
"""
return self["legendgrouptitle"]
@legendgrouptitle.setter
def legendgrouptitle(self, val):
self["legendgrouptitle"] = val
# legendrank
# ----------
@property
def legendrank(self):
"""
Sets the legend rank for this trace. Items and groups with
smaller ranks are presented on top/left side while with
`*reversed* `legend.traceorder` they are on bottom/right side.
The default legendrank is 1000, so that you can use ranks less
than 1000 to place certain items before all unranked items, and
ranks greater than 1000 to go after all unranked items.
The 'legendrank' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["legendrank"]
@legendrank.setter
def legendrank(self, val):
self["legendrank"] = val
# line
# ----
@property
def line(self):
"""
The 'line' property is an instance of Line
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Line`
- A dict of string/value properties that will be passed
to the Line constructor
Supported dict properties:
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `line.colorscale`. Has an effect
only if in `line.color`is set to a numerical
array. In case `colorscale` is unspecified or
`autocolorscale` is true, the default palette
will be chosen according to whether numbers in
the `color` array are all positive, all
negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `line.color`) or the bounds set in
`line.cmin` and `line.cmax` Has an effect only
if in `line.color`is set to a numerical array.
Defaults to `false` when `line.cmin` and
`line.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has
an effect only if in `line.color`is set to a
numerical array. Value should have the same
units as in `line.color` and if set,
`line.cmin` must be set as well.
cmid
Sets the mid-point of the color domain by
scaling `line.cmin` and/or `line.cmax` to be
equidistant to this point. Has an effect only
if in `line.color`is set to a numerical array.
Value should have the same units as in
`line.color`. Has no effect when `line.cauto`
is `false`.
cmin
Sets the lower bound of the color domain. Has
an effect only if in `line.color`is set to a
numerical array. Value should have the same
units as in `line.color` and if set,
`line.cmax` must be set as well.
color
Sets thelinecolor. It accepts either a specific
color or an array of numbers that are mapped to
the colorscale relative to the max and min
values of the array or relative to `line.cmin`
and `line.cmax` if set.
coloraxis
Sets a reference to a shared color axis.
References to these shared color axes are
"coloraxis", "coloraxis2", "coloraxis3", etc.
Settings for these shared color axes are set in
the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple
color scales can be linked to the same color
axis.
colorbar
:class:`plotly.graph_objects.scatter3d.line.Col
orBar` instance or dict with compatible
properties
colorscale
Sets the colorscale. Has an effect only if in
`line.color`is set to a numerical array. The
colorscale must be an array containing arrays
mapping a normalized value to an rgb, rgba,
hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
`[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`.
To control the bounds of the colorscale in
color space, use`line.cmin` and `line.cmax`.
Alternatively, `colorscale` may be a palette
name string of the following list: Blackbody,Bl
uered,Blues,Cividis,Earth,Electric,Greens,Greys
,Hot,Jet,Picnic,Portland,Rainbow,RdBu,Reds,Viri
dis,YlGnBu,YlOrRd.
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
dash
Sets the dash style of the lines.
reversescale
Reverses the color mapping if true. Has an
effect only if in `line.color`is set to a
numerical array. If true, `line.cmin` will
correspond to the last color in the array and
`line.cmax` will correspond to the first color.
showscale
Determines whether or not a colorbar is
displayed for this trace. Has an effect only if
in `line.color`is set to a numerical array.
width
Sets the line width (in px).
Returns
-------
plotly.graph_objs.scatter3d.Line
"""
return self["line"]
@line.setter
def line(self, val):
self["line"] = val
# marker
# ------
@property
def marker(self):
"""
The 'marker' property is an instance of Marker
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Marker`
- A dict of string/value properties that will be passed
to the Marker constructor
Supported dict properties:
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `marker.colorscale`. Has an
effect only if in `marker.color`is set to a
numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the
default palette will be chosen according to
whether numbers in the `color` array are all
positive, all negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `marker.color`) or the bounds set in
`marker.cmin` and `marker.cmax` Has an effect
only if in `marker.color`is set to a numerical
array. Defaults to `false` when `marker.cmin`
and `marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has
an effect only if in `marker.color`is set to a
numerical array. Value should have the same
units as in `marker.color` and if set,
`marker.cmin` must be set as well.
cmid
Sets the mid-point of the color domain by
scaling `marker.cmin` and/or `marker.cmax` to
be equidistant to this point. Has an effect
only if in `marker.color`is set to a numerical
array. Value should have the same units as in
`marker.color`. Has no effect when
`marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has
an effect only if in `marker.color`is set to a
numerical array. Value should have the same
units as in `marker.color` and if set,
`marker.cmax` must be set as well.
color
Sets themarkercolor. It accepts either a
specific color or an array of numbers that are
mapped to the colorscale relative to the max
and min values of the array or relative to
`marker.cmin` and `marker.cmax` if set.
coloraxis
Sets a reference to a shared color axis.
References to these shared color axes are
"coloraxis", "coloraxis2", "coloraxis3", etc.
Settings for these shared color axes are set in
the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple
color scales can be linked to the same color
axis.
colorbar
:class:`plotly.graph_objects.scatter3d.marker.C
olorBar` instance or dict with compatible
properties
colorscale
Sets the colorscale. Has an effect only if in
`marker.color`is set to a numerical array. The
colorscale must be an array containing arrays
mapping a normalized value to an rgb, rgba,
hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
`[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`.
To control the bounds of the colorscale in
color space, use`marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may
be a palette name string of the following list:
Blackbody,Bluered,Blues,Cividis,Earth,Electric,
Greens,Greys,Hot,Jet,Picnic,Portland,Rainbow,Rd
Bu,Reds,Viridis,YlGnBu,YlOrRd.
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
line
:class:`plotly.graph_objects.scatter3d.marker.L
ine` instance or dict with compatible
properties
opacity
Sets the marker opacity. Note that the marker
opacity for scatter3d traces must be a scalar
value for performance reasons. To set a
blending opacity value (i.e. which is not
transparent), set "marker.color" to an rgba
color and use its alpha channel.
reversescale
Reverses the color mapping if true. Has an
effect only if in `marker.color`is set to a
numerical array. If true, `marker.cmin` will
correspond to the last color in the array and
`marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is
displayed for this trace. Has an effect only if
in `marker.color`is set to a numerical array.
size
Sets the marker size (in px).
sizemin
Has an effect only if `marker.size` is set to a
numerical array. Sets the minimum size (in px)
of the rendered marker points.
sizemode
Has an effect only if `marker.size` is set to a
numerical array. Sets the rule for which the
data in `size` is converted to pixels.
sizeref
Has an effect only if `marker.size` is set to a
numerical array. Sets the scale factor used to
determine the rendered size of marker points.
Use with `sizemin` and `sizemode`.
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
symbol
Sets the marker symbol type.
symbolsrc
Sets the source reference on Chart Studio Cloud
for `symbol`.
Returns
-------
plotly.graph_objs.scatter3d.Marker
"""
return self["marker"]
@marker.setter
def marker(self, val):
self["marker"] = val
# meta
# ----
@property
def meta(self):
"""
Assigns extra meta information associated with this trace that
can be used in various text attributes. Attributes such as
trace `name`, graph, axis and colorbar `title.text`, annotation
`text` `rangeselector`, `updatemenues` and `sliders` `label`
text all support `meta`. To access the trace `meta` values in
an attribute in the same trace, simply use `%{meta[i]}` where
`i` is the index or key of the `meta` item in question. To
access trace `meta` in layout attributes, use
`%{data[n[.meta[i]}` where `i` is the index or key of the
`meta` and `n` is the trace index.
The 'meta' property accepts values of any type
Returns
-------
Any|numpy.ndarray
"""
return self["meta"]
@meta.setter
def meta(self, val):
self["meta"] = val
# metasrc
# -------
@property
def metasrc(self):
"""
Sets the source reference on Chart Studio Cloud for `meta`.
The 'metasrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["metasrc"]
@metasrc.setter
def metasrc(self, val):
self["metasrc"] = val
# mode
# ----
@property
def mode(self):
"""
Determines the drawing mode for this scatter trace. If the
provided `mode` includes "text" then the `text` elements appear
at the coordinates. Otherwise, the `text` elements appear on
hover. If there are less than 20 points and the trace is not
stacked then the default is "lines+markers". Otherwise,
"lines".
The 'mode' property is a flaglist and may be specified
as a string containing:
- Any combination of ['lines', 'markers', 'text'] joined with '+' characters
(e.g. 'lines+markers')
OR exactly one of ['none'] (e.g. 'none')
Returns
-------
Any
"""
return self["mode"]
@mode.setter
def mode(self, val):
self["mode"] = val
# name
# ----
@property
def name(self):
"""
Sets the trace name. The trace name appear as the legend item
and on hover.
The 'name' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["name"]
@name.setter
def name(self, val):
self["name"] = val
# opacity
# -------
@property
def opacity(self):
"""
Sets the opacity of the trace.
The 'opacity' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self["opacity"]
@opacity.setter
def opacity(self, val):
self["opacity"] = val
# projection
# ----------
@property
def projection(self):
"""
The 'projection' property is an instance of Projection
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Projection`
- A dict of string/value properties that will be passed
to the Projection constructor
Supported dict properties:
x
:class:`plotly.graph_objects.scatter3d.projecti
on.X` instance or dict with compatible
properties
y
:class:`plotly.graph_objects.scatter3d.projecti
on.Y` instance or dict with compatible
properties
z
:class:`plotly.graph_objects.scatter3d.projecti
on.Z` instance or dict with compatible
properties
Returns
-------
plotly.graph_objs.scatter3d.Projection
"""
return self["projection"]
@projection.setter
def projection(self, val):
self["projection"] = val
# scene
# -----
@property
def scene(self):
"""
Sets a reference between this trace's 3D coordinate system and
a 3D scene. If "scene" (the default value), the (x,y,z)
coordinates refer to `layout.scene`. If "scene2", the (x,y,z)
coordinates refer to `layout.scene2`, and so on.
The 'scene' property is an identifier of a particular
subplot, of type 'scene', that may be specified as the string 'scene'
optionally followed by an integer >= 1
(e.g. 'scene', 'scene1', 'scene2', 'scene3', etc.)
Returns
-------
str
"""
return self["scene"]
@scene.setter
def scene(self, val):
self["scene"] = val
# showlegend
# ----------
@property
def showlegend(self):
"""
Determines whether or not an item corresponding to this trace
is shown in the legend.
The 'showlegend' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showlegend"]
@showlegend.setter
def showlegend(self, val):
self["showlegend"] = val
# stream
# ------
@property
def stream(self):
"""
The 'stream' property is an instance of Stream
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Stream`
- A dict of string/value properties that will be passed
to the Stream constructor
Supported dict properties:
maxpoints
Sets the maximum number of points to keep on
the plots from an incoming stream. If
`maxpoints` is set to 50, only the newest 50
points will be displayed on the plot.
token
The stream id number links a data trace on a
plot with a stream. See https://chart-
studio.plotly.com/settings for more details.
Returns
-------
plotly.graph_objs.scatter3d.Stream
"""
return self["stream"]
@stream.setter
def stream(self, val):
self["stream"] = val
# surfaceaxis
# -----------
@property
def surfaceaxis(self):
"""
If "-1", the scatter points are not fill with a surface If 0,
1, 2, the scatter points are filled with a Delaunay surface
about the x, y, z respectively.
The 'surfaceaxis' property is an enumeration that may be specified as:
- One of the following enumeration values:
[-1, 0, 1, 2]
Returns
-------
Any
"""
return self["surfaceaxis"]
@surfaceaxis.setter
def surfaceaxis(self, val):
self["surfaceaxis"] = val
# surfacecolor
# ------------
@property
def surfacecolor(self):
"""
Sets the surface fill color.
The 'surfacecolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["surfacecolor"]
@surfacecolor.setter
def surfacecolor(self, val):
self["surfacecolor"] = val
# text
# ----
@property
def text(self):
"""
Sets text elements associated with each (x,y,z) triplet. If a
single string, the same string appears over all the data
points. If an array of string, the items are mapped in order to
the this trace's (x,y,z) coordinates. If trace `hoverinfo`
contains a "text" flag and "hovertext" is not set, these
elements will be seen in the hover labels.
The 'text' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["text"]
@text.setter
def text(self, val):
self["text"] = val
# textfont
# --------
@property
def textfont(self):
"""
The 'textfont' property is an instance of Textfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter3d.Textfont`
- A dict of string/value properties that will be passed
to the Textfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for `color`.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
sizesrc
Sets the source reference on Chart Studio Cloud
for `size`.
Returns
-------
plotly.graph_objs.scatter3d.Textfont
"""
return self["textfont"]
@textfont.setter
def textfont(self, val):
self["textfont"] = val
# textposition
# ------------
@property
def textposition(self):
"""
Sets the positions of the `text` elements with respects to the
(x,y) coordinates.
The 'textposition' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top left', 'top center', 'top right', 'middle left',
'middle center', 'middle right', 'bottom left', 'bottom
center', 'bottom right']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["textposition"]
@textposition.setter
def textposition(self, val):
self["textposition"] = val
# textpositionsrc
# ---------------
@property
def textpositionsrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`textposition`.
The 'textpositionsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["textpositionsrc"]
@textpositionsrc.setter
def textpositionsrc(self, val):
self["textpositionsrc"] = val
# textsrc
# -------
@property
def textsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `text`.
The 'textsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["textsrc"]
@textsrc.setter
def textsrc(self, val):
self["textsrc"] = val
# texttemplate
# ------------
@property
def texttemplate(self):
"""
Template string used for rendering the information text that
appear on points. Note that this will override `textinfo`.
Variables are inserted using %{variable}, for example "y:
%{y}". Numbers are formatted using d3-format's syntax
%{variable:d3-format}, for example "Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format for
details on the formatting syntax. Dates are formatted using
d3-time-format's syntax %{variable|d3-time-format}, for example
"Day: %{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the date
formatting syntax. Every attributes that can be specified per-
point (the ones that are `arrayOk: true`) are available.
The 'texttemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["texttemplate"]
@texttemplate.setter
def texttemplate(self, val):
self["texttemplate"] = val
# texttemplatesrc
# ---------------
@property
def texttemplatesrc(self):
"""
Sets the source reference on Chart Studio Cloud for
`texttemplate`.
The 'texttemplatesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["texttemplatesrc"]
@texttemplatesrc.setter
def texttemplatesrc(self, val):
self["texttemplatesrc"] = val
# uid
# ---
@property
def uid(self):
"""
Assign an id to this trace, Use this to provide object
constancy between traces during animations and transitions.
The 'uid' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["uid"]
@uid.setter
def uid(self, val):
self["uid"] = val
# uirevision
# ----------
@property
def uirevision(self):
"""
Controls persistence of some user-driven changes to the trace:
`constraintrange` in `parcoords` traces, as well as some
`editable: true` modifications such as `name` and
`colorbar.title`. Defaults to `layout.uirevision`. Note that
other user-driven trace attribute changes are controlled by
`layout` attributes: `trace.visible` is controlled by
`layout.legend.uirevision`, `selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)` (accessible
with `config: {editable: true}`) is controlled by
`layout.editrevision`. Trace changes are tracked by `uid`,
which only falls back on trace index if no `uid` is provided.
So if your app can add/remove traces before the end of the
`data` array, such that the same trace has a different index,
you can still preserve user-driven changes if you give each
trace a `uid` that stays with it as it moves.
The 'uirevision' property accepts values of any type
Returns
-------
Any
"""
return self["uirevision"]
@uirevision.setter
def uirevision(self, val):
self["uirevision"] = val
# visible
# -------
@property
def visible(self):
"""
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as a
legend item (provided that the legend itself is visible).
The 'visible' property is an enumeration that may be specified as:
- One of the following enumeration values:
[True, False, 'legendonly']
Returns
-------
Any
"""
return self["visible"]
@visible.setter
def visible(self, val):
self["visible"] = val
# x
# -
@property
def x(self):
"""
Sets the x coordinates.
The 'x' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# xcalendar
# ---------
@property
def xcalendar(self):
"""
Sets the calendar system to use with `x` date data.
The 'xcalendar' property is an enumeration that may be specified as:
- One of the following enumeration values:
['chinese', 'coptic', 'discworld', 'ethiopian',
'gregorian', 'hebrew', 'islamic', 'jalali', 'julian',
'mayan', 'nanakshahi', 'nepali', 'persian', 'taiwan',
'thai', 'ummalqura']
Returns
-------
Any
"""
return self["xcalendar"]
@xcalendar.setter
def xcalendar(self, val):
self["xcalendar"] = val
# xhoverformat
# ------------
@property
def xhoverformat(self):
"""
Sets the hover text formatting rulefor `x` using d3 formatting
mini-languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display *09~15~23.46*By default the values
are formatted using `xaxis.hoverformat`.
The 'xhoverformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["xhoverformat"]
@xhoverformat.setter
def xhoverformat(self, val):
self["xhoverformat"] = val
# xsrc
# ----
@property
def xsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `x`.
The 'xsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["xsrc"]
@xsrc.setter
def xsrc(self, val):
self["xsrc"] = val
# y
# -
@property
def y(self):
"""
Sets the y coordinates.
The 'y' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# ycalendar
# ---------
@property
def ycalendar(self):
"""
Sets the calendar system to use with `y` date data.
The 'ycalendar' property is an enumeration that may be specified as:
- One of the following enumeration values:
['chinese', 'coptic', 'discworld', 'ethiopian',
'gregorian', 'hebrew', 'islamic', 'jalali', 'julian',
'mayan', 'nanakshahi', 'nepali', 'persian', 'taiwan',
'thai', 'ummalqura']
Returns
-------
Any
"""
return self["ycalendar"]
@ycalendar.setter
def ycalendar(self, val):
self["ycalendar"] = val
# yhoverformat
# ------------
@property
def yhoverformat(self):
"""
Sets the hover text formatting rulefor `y` using d3 formatting
mini-languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display *09~15~23.46*By default the values
are formatted using `yaxis.hoverformat`.
The 'yhoverformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["yhoverformat"]
@yhoverformat.setter
def yhoverformat(self, val):
self["yhoverformat"] = val
# ysrc
# ----
@property
def ysrc(self):
"""
Sets the source reference on Chart Studio Cloud for `y`.
The 'ysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["ysrc"]
@ysrc.setter
def ysrc(self, val):
self["ysrc"] = val
# z
# -
@property
def z(self):
"""
Sets the z coordinates.
The 'z' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["z"]
@z.setter
def z(self, val):
self["z"] = val
# zcalendar
# ---------
@property
def zcalendar(self):
"""
Sets the calendar system to use with `z` date data.
The 'zcalendar' property is an enumeration that may be specified as:
- One of the following enumeration values:
['chinese', 'coptic', 'discworld', 'ethiopian',
'gregorian', 'hebrew', 'islamic', 'jalali', 'julian',
'mayan', 'nanakshahi', 'nepali', 'persian', 'taiwan',
'thai', 'ummalqura']
Returns
-------
Any
"""
return self["zcalendar"]
@zcalendar.setter
def zcalendar(self, val):
self["zcalendar"] = val
# zhoverformat
# ------------
@property
def zhoverformat(self):
"""
Sets the hover text formatting rulefor `z` using d3 formatting
mini-languages which are very similar to those in Python. For
numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format. And for
dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to d3's date
formatter: "%h" for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display *09~15~23.46*By default the values
are formatted using `zaxis.hoverformat`.
The 'zhoverformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["zhoverformat"]
@zhoverformat.setter
def zhoverformat(self, val):
self["zhoverformat"] = val
# zsrc
# ----
@property
def zsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `z`.
The 'zsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["zsrc"]
@zsrc.setter
def zsrc(self, val):
self["zsrc"] = val
# type
# ----
@property
def type(self):
return self._props["type"]
# Self properties description
# ---------------------------
    @property
    def _prop_descriptions(self):
        """Plain-text description of each property of this trace, as a
        single multi-line string (one indented entry per property)."""
        return """\
        connectgaps
            Determines whether or not gaps (i.e. {nan} or missing
            values) in the provided data arrays are connected.
        customdata
            Assigns extra data each datum. This may be useful when
            listening to hover, click and selection events. Note
            that, "scatter" traces also appends customdata items in
            the markers DOM elements
        customdatasrc
            Sets the source reference on Chart Studio Cloud for
            `customdata`.
        error_x
            :class:`plotly.graph_objects.scatter3d.ErrorX` instance
            or dict with compatible properties
        error_y
            :class:`plotly.graph_objects.scatter3d.ErrorY` instance
            or dict with compatible properties
        error_z
            :class:`plotly.graph_objects.scatter3d.ErrorZ` instance
            or dict with compatible properties
        hoverinfo
            Determines which trace information appear on hover. If
            `none` or `skip` are set, no information is displayed
            upon hovering. But, if `none` is set, click and hover
            events are still fired.
        hoverinfosrc
            Sets the source reference on Chart Studio Cloud for
            `hoverinfo`.
        hoverlabel
            :class:`plotly.graph_objects.scatter3d.Hoverlabel`
            instance or dict with compatible properties
        hovertemplate
            Template string used for rendering the information that
            appear on hover box. Note that this will override
            `hoverinfo`. Variables are inserted using %{variable},
            for example "y: %{y}" as well as %{xother}, {%_xother},
            {%_xother_}, {%xother_}. When showing info for several
            points, "xother" will be added to those with different
            x positions from the first point. An underscore before
            or after "(x|y)other" will add a space on that side,
            only when this field is shown. Numbers are formatted
            using d3-format's syntax %{variable:d3-format}, for
            example "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. The variables available in
            `hovertemplate` are the ones emitted as event data
            described at this link
            https://plotly.com/javascript/plotlyjs-events/#event-
            data. Additionally, every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available. Anything contained in tag `<extra>` is
            displayed in the secondary box, for example
            "<extra>{fullData.name}</extra>". To hide the secondary
            box completely, use an empty tag `<extra></extra>`.
        hovertemplatesrc
            Sets the source reference on Chart Studio Cloud for
            `hovertemplate`.
        hovertext
            Sets text elements associated with each (x,y,z)
            triplet. If a single string, the same string appears
            over all the data points. If an array of string, the
            items are mapped in order to the this trace's (x,y,z)
            coordinates. To be seen, trace `hoverinfo` must contain
            a "text" flag.
        hovertextsrc
            Sets the source reference on Chart Studio Cloud for
            `hovertext`.
        ids
            Assigns id labels to each datum. These ids for object
            constancy of data points during animation. Should be an
            array of strings, not numbers or any other type.
        idssrc
            Sets the source reference on Chart Studio Cloud for
            `ids`.
        legendgroup
            Sets the legend group for this trace. Traces part of
            the same legend group hide/show at the same time when
            toggling legend items.
        legendgrouptitle
            :class:`plotly.graph_objects.scatter3d.Legendgrouptitle
            ` instance or dict with compatible properties
        legendrank
            Sets the legend rank for this trace. Items and groups
            with smaller ranks are presented on top/left side while
            with `*reversed* `legend.traceorder` they are on
            bottom/right side. The default legendrank is 1000, so
            that you can use ranks less than 1000 to place certain
            items before all unranked items, and ranks greater than
            1000 to go after all unranked items.
        line
            :class:`plotly.graph_objects.scatter3d.Line` instance
            or dict with compatible properties
        marker
            :class:`plotly.graph_objects.scatter3d.Marker` instance
            or dict with compatible properties
        meta
            Assigns extra meta information associated with this
            trace that can be used in various text attributes.
            Attributes such as trace `name`, graph, axis and
            colorbar `title.text`, annotation `text`
            `rangeselector`, `updatemenues` and `sliders` `label`
            text all support `meta`. To access the trace `meta`
            values in an attribute in the same trace, simply use
            `%{meta[i]}` where `i` is the index or key of the
            `meta` item in question. To access trace `meta` in
            layout attributes, use `%{data[n[.meta[i]}` where `i`
            is the index or key of the `meta` and `n` is the trace
            index.
        metasrc
            Sets the source reference on Chart Studio Cloud for
            `meta`.
        mode
            Determines the drawing mode for this scatter trace. If
            the provided `mode` includes "text" then the `text`
            elements appear at the coordinates. Otherwise, the
            `text` elements appear on hover. If there are less than
            20 points and the trace is not stacked then the default
            is "lines+markers". Otherwise, "lines".
        name
            Sets the trace name. The trace name appear as the
            legend item and on hover.
        opacity
            Sets the opacity of the trace.
        projection
            :class:`plotly.graph_objects.scatter3d.Projection`
            instance or dict with compatible properties
        scene
            Sets a reference between this trace's 3D coordinate
            system and a 3D scene. If "scene" (the default value),
            the (x,y,z) coordinates refer to `layout.scene`. If
            "scene2", the (x,y,z) coordinates refer to
            `layout.scene2`, and so on.
        showlegend
            Determines whether or not an item corresponding to this
            trace is shown in the legend.
        stream
            :class:`plotly.graph_objects.scatter3d.Stream` instance
            or dict with compatible properties
        surfaceaxis
            If "-1", the scatter points are not fill with a surface
            If 0, 1, 2, the scatter points are filled with a
            Delaunay surface about the x, y, z respectively.
        surfacecolor
            Sets the surface fill color.
        text
            Sets text elements associated with each (x,y,z)
            triplet. If a single string, the same string appears
            over all the data points. If an array of string, the
            items are mapped in order to the this trace's (x,y,z)
            coordinates. If trace `hoverinfo` contains a "text"
            flag and "hovertext" is not set, these elements will be
            seen in the hover labels.
        textfont
            :class:`plotly.graph_objects.scatter3d.Textfont`
            instance or dict with compatible properties
        textposition
            Sets the positions of the `text` elements with respects
            to the (x,y) coordinates.
        textpositionsrc
            Sets the source reference on Chart Studio Cloud for
            `textposition`.
        textsrc
            Sets the source reference on Chart Studio Cloud for
            `text`.
        texttemplate
            Template string used for rendering the information text
            that appear on points. Note that this will override
            `textinfo`. Variables are inserted using %{variable},
            for example "y: %{y}". Numbers are formatted using
            d3-format's syntax %{variable:d3-format}, for example
            "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. Every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available.
        texttemplatesrc
            Sets the source reference on Chart Studio Cloud for
            `texttemplate`.
        uid
            Assign an id to this trace, Use this to provide object
            constancy between traces during animations and
            transitions.
        uirevision
            Controls persistence of some user-driven changes to the
            trace: `constraintrange` in `parcoords` traces, as well
            as some `editable: true` modifications such as `name`
            and `colorbar.title`. Defaults to `layout.uirevision`.
            Note that other user-driven trace attribute changes are
            controlled by `layout` attributes: `trace.visible` is
            controlled by `layout.legend.uirevision`,
            `selectedpoints` is controlled by
            `layout.selectionrevision`, and `colorbar.(x|y)`
            (accessible with `config: {editable: true}`) is
            controlled by `layout.editrevision`. Trace changes are
            tracked by `uid`, which only falls back on trace index
            if no `uid` is provided. So if your app can add/remove
            traces before the end of the `data` array, such that
            the same trace has a different index, you can still
            preserve user-driven changes if you give each trace a
            `uid` that stays with it as it moves.
        visible
            Determines whether or not this trace is visible. If
            "legendonly", the trace is not drawn, but can appear as
            a legend item (provided that the legend itself is
            visible).
        x
            Sets the x coordinates.
        xcalendar
            Sets the calendar system to use with `x` date data.
        xhoverformat
            Sets the hover text formatting rulefor `x` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `xaxis.hoverformat`.
        xsrc
            Sets the source reference on Chart Studio Cloud for
            `x`.
        y
            Sets the y coordinates.
        ycalendar
            Sets the calendar system to use with `y` date data.
        yhoverformat
            Sets the hover text formatting rulefor `y` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `yaxis.hoverformat`.
        ysrc
            Sets the source reference on Chart Studio Cloud for
            `y`.
        z
            Sets the z coordinates.
        zcalendar
            Sets the calendar system to use with `z` date data.
        zhoverformat
            Sets the hover text formatting rulefor `z` using d3
            formatting mini-languages which are very similar to
            those in Python. For numbers, see:
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
            And for dates see: https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format. We add two items to
            d3's date formatter: "%h" for half of the year as a
            decimal number as well as "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display *09~15~23.46*By default the values are
            formatted using `zaxis.hoverformat`.
        zsrc
            Sets the source reference on Chart Studio Cloud for
            `z`.
        """
def __init__(
self,
arg=None,
connectgaps=None,
customdata=None,
customdatasrc=None,
error_x=None,
error_y=None,
error_z=None,
hoverinfo=None,
hoverinfosrc=None,
hoverlabel=None,
hovertemplate=None,
hovertemplatesrc=None,
hovertext=None,
hovertextsrc=None,
ids=None,
idssrc=None,
legendgroup=None,
legendgrouptitle=None,
legendrank=None,
line=None,
marker=None,
meta=None,
metasrc=None,
mode=None,
name=None,
opacity=None,
projection=None,
scene=None,
showlegend=None,
stream=None,
surfaceaxis=None,
surfacecolor=None,
text=None,
textfont=None,
textposition=None,
textpositionsrc=None,
textsrc=None,
texttemplate=None,
texttemplatesrc=None,
uid=None,
uirevision=None,
visible=None,
x=None,
xcalendar=None,
xhoverformat=None,
xsrc=None,
y=None,
ycalendar=None,
yhoverformat=None,
ysrc=None,
z=None,
zcalendar=None,
zhoverformat=None,
zsrc=None,
**kwargs
):
"""
Construct a new Scatter3d object
The data visualized as scatter point or lines in 3D dimension
is set in `x`, `y`, `z`. Text (appearing either on the chart or
on hover only) is via `text`. Bubble charts are achieved by
setting `marker.size` and/or `marker.color` Projections are
achieved via `projection`. Surface fills are achieved via
`surfaceaxis`.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.Scatter3d`
connectgaps
Determines whether or not gaps (i.e. {nan} or missing
values) in the provided data arrays are connected.
customdata
Assigns extra data each datum. This may be useful when
listening to hover, click and selection events. Note
that, "scatter" traces also appends customdata items in
the markers DOM elements
customdatasrc
Sets the source reference on Chart Studio Cloud for
`customdata`.
error_x
:class:`plotly.graph_objects.scatter3d.ErrorX` instance
or dict with compatible properties
error_y
:class:`plotly.graph_objects.scatter3d.ErrorY` instance
or dict with compatible properties
error_z
:class:`plotly.graph_objects.scatter3d.ErrorZ` instance
or dict with compatible properties
hoverinfo
Determines which trace information appear on hover. If
`none` or `skip` are set, no information is displayed
upon hovering. But, if `none` is set, click and hover
events are still fired.
hoverinfosrc
Sets the source reference on Chart Studio Cloud for
`hoverinfo`.
hoverlabel
:class:`plotly.graph_objects.scatter3d.Hoverlabel`
instance or dict with compatible properties
hovertemplate
Template string used for rendering the information that
appear on hover box. Note that this will override
`hoverinfo`. Variables are inserted using %{variable},
for example "y: %{y}" as well as %{xother}, {%_xother},
{%_xother_}, {%xother_}. When showing info for several
points, "xother" will be added to those with different
x positions from the first point. An underscore before
or after "(x|y)other" will add a space on that side,
only when this field is shown. Numbers are formatted
using d3-format's syntax %{variable:d3-format}, for
example "Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the
date formatting syntax. The variables available in
`hovertemplate` are the ones emitted as event data
described at this link
https://plotly.com/javascript/plotlyjs-events/#event-
data. Additionally, every attributes that can be
specified per-point (the ones that are `arrayOk: true`)
are available. Anything contained in tag `<extra>` is
displayed in the secondary box, for example
"<extra>{fullData.name}</extra>". To hide the secondary
box completely, use an empty tag `<extra></extra>`.
hovertemplatesrc
Sets the source reference on Chart Studio Cloud for
`hovertemplate`.
hovertext
Sets text elements associated with each (x,y,z)
triplet. If a single string, the same string appears
over all the data points. If an array of string, the
items are mapped in order to the this trace's (x,y,z)
coordinates. To be seen, trace `hoverinfo` must contain
a "text" flag.
hovertextsrc
Sets the source reference on Chart Studio Cloud for
`hovertext`.
ids
Assigns id labels to each datum. These ids for object
constancy of data points during animation. Should be an
array of strings, not numbers or any other type.
idssrc
Sets the source reference on Chart Studio Cloud for
`ids`.
legendgroup
Sets the legend group for this trace. Traces part of
the same legend group hide/show at the same time when
toggling legend items.
legendgrouptitle
:class:`plotly.graph_objects.scatter3d.Legendgrouptitle
` instance or dict with compatible properties
legendrank
Sets the legend rank for this trace. Items and groups
with smaller ranks are presented on top/left side while
with `*reversed* `legend.traceorder` they are on
bottom/right side. The default legendrank is 1000, so
that you can use ranks less than 1000 to place certain
items before all unranked items, and ranks greater than
1000 to go after all unranked items.
line
:class:`plotly.graph_objects.scatter3d.Line` instance
or dict with compatible properties
marker
:class:`plotly.graph_objects.scatter3d.Marker` instance
or dict with compatible properties
meta
Assigns extra meta information associated with this
trace that can be used in various text attributes.
Attributes such as trace `name`, graph, axis and
colorbar `title.text`, annotation `text`
`rangeselector`, `updatemenues` and `sliders` `label`
text all support `meta`. To access the trace `meta`
values in an attribute in the same trace, simply use
`%{meta[i]}` where `i` is the index or key of the
`meta` item in question. To access trace `meta` in
layout attributes, use `%{data[n[.meta[i]}` where `i`
is the index or key of the `meta` and `n` is the trace
index.
metasrc
Sets the source reference on Chart Studio Cloud for
`meta`.
mode
Determines the drawing mode for this scatter trace. If
the provided `mode` includes "text" then the `text`
elements appear at the coordinates. Otherwise, the
`text` elements appear on hover. If there are less than
20 points and the trace is not stacked then the default
is "lines+markers". Otherwise, "lines".
name
Sets the trace name. The trace name appear as the
legend item and on hover.
opacity
Sets the opacity of the trace.
projection
:class:`plotly.graph_objects.scatter3d.Projection`
instance or dict with compatible properties
scene
Sets a reference between this trace's 3D coordinate
system and a 3D scene. If "scene" (the default value),
the (x,y,z) coordinates refer to `layout.scene`. If
"scene2", the (x,y,z) coordinates refer to
`layout.scene2`, and so on.
showlegend
Determines whether or not an item corresponding to this
trace is shown in the legend.
stream
:class:`plotly.graph_objects.scatter3d.Stream` instance
or dict with compatible properties
surfaceaxis
If "-1", the scatter points are not fill with a surface
If 0, 1, 2, the scatter points are filled with a
Delaunay surface about the x, y, z respectively.
surfacecolor
Sets the surface fill color.
text
Sets text elements associated with each (x,y,z)
triplet. If a single string, the same string appears
over all the data points. If an array of string, the
items are mapped in order to the this trace's (x,y,z)
coordinates. If trace `hoverinfo` contains a "text"
flag and "hovertext" is not set, these elements will be
seen in the hover labels.
textfont
:class:`plotly.graph_objects.scatter3d.Textfont`
instance or dict with compatible properties
textposition
Sets the positions of the `text` elements with respects
to the (x,y) coordinates.
textpositionsrc
Sets the source reference on Chart Studio Cloud for
`textposition`.
textsrc
Sets the source reference on Chart Studio Cloud for
`text`.
texttemplate
Template string used for rendering the information text
that appear on points. Note that this will override
`textinfo`. Variables are inserted using %{variable},
for example "y: %{y}". Numbers are formatted using
d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the
date formatting syntax. Every attributes that can be
specified per-point (the ones that are `arrayOk: true`)
are available.
texttemplatesrc
Sets the source reference on Chart Studio Cloud for
`texttemplate`.
uid
Assign an id to this trace, Use this to provide object
constancy between traces during animations and
transitions.
uirevision
Controls persistence of some user-driven changes to the
trace: `constraintrange` in `parcoords` traces, as well
as some `editable: true` modifications such as `name`
and `colorbar.title`. Defaults to `layout.uirevision`.
Note that other user-driven trace attribute changes are
controlled by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)`
(accessible with `config: {editable: true}`) is
controlled by `layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on trace index
if no `uid` is provided. So if your app can add/remove
traces before the end of the `data` array, such that
the same trace has a different index, you can still
preserve user-driven changes if you give each trace a
`uid` that stays with it as it moves.
visible
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as
a legend item (provided that the legend itself is
visible).
x
Sets the x coordinates.
xcalendar
Sets the calendar system to use with `x` date data.
xhoverformat
Sets the hover text formatting rulefor `x` using d3
formatting mini-languages which are very similar to
those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display *09~15~23.46*By default the values are
formatted using `xaxis.hoverformat`.
xsrc
Sets the source reference on Chart Studio Cloud for
`x`.
y
Sets the y coordinates.
ycalendar
Sets the calendar system to use with `y` date data.
yhoverformat
Sets the hover text formatting rulefor `y` using d3
formatting mini-languages which are very similar to
those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display *09~15~23.46*By default the values are
formatted using `yaxis.hoverformat`.
ysrc
Sets the source reference on Chart Studio Cloud for
`y`.
z
Sets the z coordinates.
zcalendar
Sets the calendar system to use with `z` date data.
zhoverformat
Sets the hover text formatting rulefor `z` using d3
formatting mini-languages which are very similar to
those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see: https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two items to
d3's date formatter: "%h" for half of the year as a
decimal number as well as "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display *09~15~23.46*By default the values are
formatted using `zaxis.hoverformat`.
zsrc
Sets the source reference on Chart Studio Cloud for
`z`.
Returns
-------
Scatter3d
"""
super(Scatter3d, self).__init__("scatter3d")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.Scatter3d
constructor must be a dict or
an instance of :class:`plotly.graph_objs.Scatter3d`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("connectgaps", None)
_v = connectgaps if connectgaps is not None else _v
if _v is not None:
self["connectgaps"] = _v
_v = arg.pop("customdata", None)
_v = customdata if customdata is not None else _v
if _v is not None:
self["customdata"] = _v
_v = arg.pop("customdatasrc", None)
_v = customdatasrc if customdatasrc is not None else _v
if _v is not None:
self["customdatasrc"] = _v
_v = arg.pop("error_x", None)
_v = error_x if error_x is not None else _v
if _v is not None:
self["error_x"] = _v
_v = arg.pop("error_y", None)
_v = error_y if error_y is not None else _v
if _v is not None:
self["error_y"] = _v
_v = arg.pop("error_z", None)
_v = error_z if error_z is not None else _v
if _v is not None:
self["error_z"] = _v
_v = arg.pop("hoverinfo", None)
_v = hoverinfo if hoverinfo is not None else _v
if _v is not None:
self["hoverinfo"] = _v
_v = arg.pop("hoverinfosrc", None)
_v = hoverinfosrc if hoverinfosrc is not None else _v
if _v is not None:
self["hoverinfosrc"] = _v
_v = arg.pop("hoverlabel", None)
_v = hoverlabel if hoverlabel is not None else _v
if _v is not None:
self["hoverlabel"] = _v
_v = arg.pop("hovertemplate", None)
_v = hovertemplate if hovertemplate is not None else _v
if _v is not None:
self["hovertemplate"] = _v
_v = arg.pop("hovertemplatesrc", None)
_v = hovertemplatesrc if hovertemplatesrc is not None else _v
if _v is not None:
self["hovertemplatesrc"] = _v
_v = arg.pop("hovertext", None)
_v = hovertext if hovertext is not None else _v
if _v is not None:
self["hovertext"] = _v
_v = arg.pop("hovertextsrc", None)
_v = hovertextsrc if hovertextsrc is not None else _v
if _v is not None:
self["hovertextsrc"] = _v
_v = arg.pop("ids", None)
_v = ids if ids is not None else _v
if _v is not None:
self["ids"] = _v
_v = arg.pop("idssrc", None)
_v = idssrc if idssrc is not None else _v
if _v is not None:
self["idssrc"] = _v
_v = arg.pop("legendgroup", None)
_v = legendgroup if legendgroup is not None else _v
if _v is not None:
self["legendgroup"] = _v
_v = arg.pop("legendgrouptitle", None)
_v = legendgrouptitle if legendgrouptitle is not None else _v
if _v is not None:
self["legendgrouptitle"] = _v
_v = arg.pop("legendrank", None)
_v = legendrank if legendrank is not None else _v
if _v is not None:
self["legendrank"] = _v
_v = arg.pop("line", None)
_v = line if line is not None else _v
if _v is not None:
self["line"] = _v
_v = arg.pop("marker", None)
_v = marker if marker is not None else _v
if _v is not None:
self["marker"] = _v
_v = arg.pop("meta", None)
_v = meta if meta is not None else _v
if _v is not None:
self["meta"] = _v
_v = arg.pop("metasrc", None)
_v = metasrc if metasrc is not None else _v
if _v is not None:
self["metasrc"] = _v
_v = arg.pop("mode", None)
_v = mode if mode is not None else _v
if _v is not None:
self["mode"] = _v
_v = arg.pop("name", None)
_v = name if name is not None else _v
if _v is not None:
self["name"] = _v
_v = arg.pop("opacity", None)
_v = opacity if opacity is not None else _v
if _v is not None:
self["opacity"] = _v
_v = arg.pop("projection", None)
_v = projection if projection is not None else _v
if _v is not None:
self["projection"] = _v
_v = arg.pop("scene", None)
_v = scene if scene is not None else _v
if _v is not None:
self["scene"] = _v
_v = arg.pop("showlegend", None)
_v = showlegend if showlegend is not None else _v
if _v is not None:
self["showlegend"] = _v
_v = arg.pop("stream", None)
_v = stream if stream is not None else _v
if _v is not None:
self["stream"] = _v
_v = arg.pop("surfaceaxis", None)
_v = surfaceaxis if surfaceaxis is not None else _v
if _v is not None:
self["surfaceaxis"] = _v
_v = arg.pop("surfacecolor", None)
_v = surfacecolor if surfacecolor is not None else _v
if _v is not None:
self["surfacecolor"] = _v
_v = arg.pop("text", None)
_v = text if text is not None else _v
if _v is not None:
self["text"] = _v
_v = arg.pop("textfont", None)
_v = textfont if textfont is not None else _v
if _v is not None:
self["textfont"] = _v
_v = arg.pop("textposition", None)
_v = textposition if textposition is not None else _v
if _v is not None:
self["textposition"] = _v
_v = arg.pop("textpositionsrc", None)
_v = textpositionsrc if textpositionsrc is not None else _v
if _v is not None:
self["textpositionsrc"] = _v
_v = arg.pop("textsrc", None)
_v = textsrc if textsrc is not None else _v
if _v is not None:
self["textsrc"] = _v
_v = arg.pop("texttemplate", None)
_v = texttemplate if texttemplate is not None else _v
if _v is not None:
self["texttemplate"] = _v
_v = arg.pop("texttemplatesrc", None)
_v = texttemplatesrc if texttemplatesrc is not None else _v
if _v is not None:
self["texttemplatesrc"] = _v
_v = arg.pop("uid", None)
_v = uid if uid is not None else _v
if _v is not None:
self["uid"] = _v
_v = arg.pop("uirevision", None)
_v = uirevision if uirevision is not None else _v
if _v is not None:
self["uirevision"] = _v
_v = arg.pop("visible", None)
_v = visible if visible is not None else _v
if _v is not None:
self["visible"] = _v
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("xcalendar", None)
_v = xcalendar if xcalendar is not None else _v
if _v is not None:
self["xcalendar"] = _v
_v = arg.pop("xhoverformat", None)
_v = xhoverformat if xhoverformat is not None else _v
if _v is not None:
self["xhoverformat"] = _v
_v = arg.pop("xsrc", None)
_v = xsrc if xsrc is not None else _v
if _v is not None:
self["xsrc"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("ycalendar", None)
_v = ycalendar if ycalendar is not None else _v
if _v is not None:
self["ycalendar"] = _v
_v = arg.pop("yhoverformat", None)
_v = yhoverformat if yhoverformat is not None else _v
if _v is not None:
self["yhoverformat"] = _v
_v = arg.pop("ysrc", None)
_v = ysrc if ysrc is not None else _v
if _v is not None:
self["ysrc"] = _v
_v = arg.pop("z", None)
_v = z if z is not None else _v
if _v is not None:
self["z"] = _v
_v = arg.pop("zcalendar", None)
_v = zcalendar if zcalendar is not None else _v
if _v is not None:
self["zcalendar"] = _v
_v = arg.pop("zhoverformat", None)
_v = zhoverformat if zhoverformat is not None else _v
if _v is not None:
self["zhoverformat"] = _v
_v = arg.pop("zsrc", None)
_v = zsrc if zsrc is not None else _v
if _v is not None:
self["zsrc"] = _v
# Read-only literals
# ------------------
self._props["type"] = "scatter3d"
arg.pop("type", None)
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| 37.203211 | 89 | 0.551346 |
ca819a11e0021c3ffb57bb0edd34a15a32280388 | 666 | py | Python | PINNFramework/models/activations/snake.py | varun1423/NeuralSolvers | 0036319ec3e7cd570a029aa5e154d0bc995ae9ce | [
"MIT"
] | 59 | 2020-07-18T07:22:16.000Z | 2022-03-28T10:50:35.000Z | PINNFramework/models/activations/snake.py | varun1423/NeuralSolvers | 0036319ec3e7cd570a029aa5e154d0bc995ae9ce | [
"MIT"
] | 40 | 2020-07-16T10:03:33.000Z | 2021-12-22T12:13:40.000Z | PINNFramework/models/activations/snake.py | varun1423/NeuralSolvers | 0036319ec3e7cd570a029aa5e154d0bc995ae9ce | [
"MIT"
] | 24 | 2020-10-01T08:50:33.000Z | 2022-03-29T12:48:22.000Z | import torch
import torch.nn as nn
class Snake(nn.Module,):
""" Implementation of the snake activation function as a torch nn module
The result of the activation function a(x) is calculated by a(x) = x + sin^2(x)
With alpha is a trainab
"""
def __init__(self,frequency=10):
"""Constructor function that initialize the torch module
"""
super(Snake, self).__init__()
# making beta trainable by activating gradient calculation
self.a = nn.Parameter(torch.tensor([float(frequency)], requires_grad=True))
def forward(self, x):
return x + ((torch.sin(self.a* x)) ** 2) / self.a | 33.3 | 83 | 0.636637 |
e3d3f5f5565090131ec78975c86e1609a671954e | 11,212 | py | Python | traffic_signs_detection/traffic_signs_classification/code/classification.py | YaelBenShalom/Traffic-Sign-Recognition-and-Classification | cdedeac6ce9d103109c2d7eb53b4a44914f5f9f4 | [
"MIT"
] | 2 | 2021-11-30T15:29:57.000Z | 2022-01-18T22:02:40.000Z | traffic_signs_detection/traffic_signs_classification/code/classification.py | YaelBenShalom/Traffic-Sign-Recognition-and-Classification | cdedeac6ce9d103109c2d7eb53b4a44914f5f9f4 | [
"MIT"
] | null | null | null | traffic_signs_detection/traffic_signs_classification/code/classification.py | YaelBenShalom/Traffic-Sign-Recognition-and-Classification | cdedeac6ce9d103109c2d7eb53b4a44914f5f9f4 | [
"MIT"
] | 1 | 2022-03-30T07:14:37.000Z | 2022-03-30T07:14:37.000Z | import argparse
import os
import numpy as np
import random
import cv2
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
from load_data import load_dataset
from read_data import ReadDataset
from run_model import run_model, predict
from cnn import BaselineNet, ResNet
def load_arguments(args):
"""
This function loads the image argument.
If an image was added as argument, the program will predict its class after training and testing the model.
If no image was added as argument, the program will only train and test the results on the datasets.
Inputs:
args: the input arguments of the program in the form of a dictionary {"image" : <argument>}.
if args exist, <argument> is the input image, else <argument> is None.
Output:
test_image: the input image that should be tested by the model.
"""
test_image = plt.imread(args["image"])
plt.figure()
plt.imshow(test_image)
plt.show()
return test_image
def dataset_properties(trainset_name, validset_name, testset_name, class_names, data_dir):
"""
This function finds the dataset properties.
This function is for information only.
Inputs:
trainset_name: the name of the training set file.
validset_name: the name of the validation set file.
testset_name: the name of the testing set file.
Output: None
"""
train_features, train_labels = load_dataset(
trainset_name, base_folder=data_dir)
valid_features, valid_labels = load_dataset(
validset_name, base_folder=data_dir)
test_features, test_labels = load_dataset(
testset_name, base_folder=data_dir)
# print(f"train_features shape: {train_features.shape}")
# print(f"train_labels shape: {train_labels.shape}")
print(f"train dataset size: {len(train_features)}")
# print(f"valid_features: {valid_features.shape}")
# print(f"valid_labels: {valid_labels.shape}")
print(f"validation dataset size: {len(valid_features)}")
# print(f"test_features: {test_features.shape}")
# print(f"test_labels: {test_labels.shape}")
print(f"test dataset size: {len(test_labels)}")
# Finding the number of classes in the dataset
classes_num = len(set(train_labels))
print(f"Number of classes: {classes_num}")
# Finding the size of the images in the dataset
image_shape = train_features[0].shape[:2]
print(f"images shape: {image_shape}")
# Plotting class distribution for training set
fig, ax = plt.subplots()
ax.bar(range(classes_num), np.bincount(train_labels))
ax.set_title('Class Distribution in the Train Set', fontsize=20)
ax.set_xlabel('Class Number')
ax.set_ylabel('Number of Events')
plt.savefig('images/Class_Distribution.png')
plt.show()
# Plotting random 40 images from train set
plt.figure(figsize=(12, 12))
for i in range(40):
feature_index = random.randint(0, train_labels.shape[0])
plt.subplot(6, 8, i + 1)
plt.subplots_adjust(left=0.1, bottom=0.03, right=0.9,
top=0.92, wspace=0.2, hspace=0.2)
plt.axis('off')
plt.imshow(train_features[feature_index])
plt.suptitle('Random Training Images', fontsize=20)
plt.savefig('images/Random_Training_Images.png')
plt.show()
# Plotting images for every class from train set
plt.figure(figsize=(14, 14))
for i in range(classes_num):
feature_index = random.choice(np.where(train_labels == i)[0])
plt.subplot(6, 8, i + 1)
plt.subplots_adjust(left=0.1, bottom=0.03, right=0.9,
top=0.92, wspace=0.2, hspace=0.2)
plt.axis('off')
plt.title(class_names[i], fontsize=10)
plt.imshow(train_features[feature_index])
plt.suptitle('Random training images from different classes', fontsize=20)
plt.savefig('images/Random_Training_Images_Different_Class.png')
plt.show()
def class_names_fun(data_dir):
"""
This function returns a dictionary with the classes numbers and names.
Inputs: None
Output:
class_names: a dictionary with the classes numbers and names.
"""
# Class names path
class_names_path = os.path.join(data_dir, "label_names.csv")
class_names_rows = open(class_names_path).read().strip().split("\n")[1:]
# Defining class names dictionary
class_names = {}
for row in class_names_rows:
label, label_name = row.strip().split(",")
class_names[int(label)] = label_name
return class_names
def plot_training_results(train_loss_list, valid_loss_list, valid_accuracy_list, epoch_num):
"""
This function plots the results of training the network.
Inputs:
train_loss_list: list of loss value on the entire training dataset.
valid_loss_list: list of loss value on the entire validation dataset.
valid_accuracy_list: list of accuracy on the entire validation dataset.
Output: None
"""
# Plotting training and validation loss vs. epoch number
plt.figure()
plt.plot(range(len(train_loss_list)),
train_loss_list, label='Training Loss')
plt.plot(range(len(valid_loss_list)),
valid_loss_list, label='Validation Loss')
plt.title(
f'Training and Validation Loss Vs. Epoch Number ({epoch_num} Epochs)')
plt.xlabel('Epoch Number')
plt.ylabel('Loss')
plt.legend(loc="best")
plt.savefig(f"images/Losses ({epoch_num} Epochs).png")
plt.show()
# Plotting validation accuracy vs. epoch number
plt.figure()
plt.plot(range(len(valid_accuracy_list)),
valid_accuracy_list, label='Validation Accuracy')
plt.title(f'Validation Accuracy Vs. Epoch Number ({epoch_num} Epochs)')
plt.xlabel('Epoch Number')
plt.ylabel('Accuracy')
plt.xlim([0, len(train_loss_list)])
plt.ylim([0, 100])
plt.legend(loc="best")
plt.savefig(f"images/Accuracy ({epoch_num} Epochs).png")
plt.show()
def main(args):
""" Main function of the program
Inputs:
args: the input arguments of the program in the form of a dictionary {"image" : <argument>}.
if args exist, <argument> is the input image, else <argument> is None.
Output: None
"""
# Define dataset directory
data_dir = "data"
# Define dataset files
trainset_name = "train.p"
validset_name = "valid.p"
testset_name = "test.p"
# Finding dataset properties
class_names = class_names_fun(data_dir)
# Visualizing the dataset
dataset_properties(trainset_name, validset_name,
testset_name, class_names, data_dir)
# Define the device parameters
torch.manual_seed(1)
use_cuda = torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")
# Define the model
model = BaselineNet().to(device)
# Define the training properties
epoch_num = 100
criterion = nn.CrossEntropyLoss()
learning_rate = 1e-3
batch_size = 64
stop_threshold = 1e-4
# Computing data transformation to normalize data
# from https://pytorch.org/docs/stable/torchvision/transforms.html
mean = (0.485, 0.456, 0.406)
std = (0.229, 0.224, 0.225) # -"-
transform = transforms.Compose([transforms.ToTensor(),
transforms.Resize((32, 32)),
transforms.Normalize(mean=mean, std=std)])
# Reading the datasets
train_dataset = ReadDataset(trainset_name, transform=transform)
valid_dataset = ReadDataset(validset_name, transform=transform)
test_dataset = ReadDataset(testset_name, transform=transform)
# If no input model - training a new model
if not args["model"]:
# Defining the model
model_path = os.path.abspath("model")
# Train the network
model, train_loss_list, valid_loss_list, valid_accuracy_list = run_model(model, running_mode='train',
train_set=train_dataset,
valid_set=valid_dataset,
test_set=test_dataset,
batch_size=batch_size, epoch_num=epoch_num,
learning_rate=learning_rate,
stop_thr=stop_threshold,
criterion=criterion, device=device)
# Plot the results of training the network
plot_training_results(train_loss_list, valid_loss_list,
valid_accuracy_list, epoch_num)
# Save the trained model
torch.save(model.state_dict(), model_path)
# If input model - load the existing model
else:
# Defining the model
model_path = os.path.abspath(args["model"])
# Load the trained model
model.load_state_dict(torch.load(model_path, map_location=device))
# Test the network
test_loss, test_accuracy = run_model(model, running_mode='test', train_set=train_dataset,
valid_set=valid_dataset, test_set=test_dataset,
batch_size=batch_size, epoch_num=epoch_num,
learning_rate=learning_rate, stop_thr=stop_threshold,
criterion=criterion, device=device)
print(f"Test loss: {test_loss:.3f}")
print(f"Test accuracy: {test_accuracy:.2f}%")
# Check if image argument exists
if args["image"]:
# Load the image argument
test_image = load_arguments(args)
test_image_resized = cv2.resize(test_image, (32, 32))
test_image_tensor = transforms.ToTensor()(np.array(test_image_resized))
# Transform tested image
test_image_transform4d = test_image_tensor.unsqueeze(0)
# Predict the class of the tested image
prediction = int(predict(model, test_image_transform4d)[0])
print(
f"Test prediction: {prediction} -> Class: {class_names[prediction]}")
# Plot the image with the predicted class
plt.figure()
plt.axis('off')
plt.title(class_names[prediction], fontsize=10)
plt.imshow(test_image)
plt.suptitle('Image Classification', fontsize=18)
plt.savefig('images/Image_Classification')
plt.show()
if __name__ == "__main__":
# construct the argument parse and parse the arguments
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--image", help="path to the input image")
parser.add_argument("-m", "--model", help="path to the input image")
args = vars(parser.parse_args())
main(args)
| 37.624161 | 124 | 0.62995 |
f864c718d281db027412cce01662761f44fbdc79 | 295 | py | Python | src/App/Python/Lib/site-packages/inputimeout/__version__.py | Shekhar9521/covid-vaccine-booking | 545ec88aadec069278b82a7edfc0aae34bf34ba5 | [
"MIT"
] | 9 | 2019-06-28T10:09:47.000Z | 2022-02-11T19:59:10.000Z | support/App/Python/Lib/site-packages/inputimeout/__version__.py | harshit-gh27/covid-vaccine-booking | 3513bdfcae5890aa8bd7aa9fc69bfdb866e922da | [
"MIT"
] | 8 | 2018-09-02T09:37:07.000Z | 2021-06-25T15:26:29.000Z | support/App/Python/Lib/site-packages/inputimeout/__version__.py | harshit-gh27/covid-vaccine-booking | 3513bdfcae5890aa8bd7aa9fc69bfdb866e922da | [
"MIT"
] | 12 | 2019-11-28T14:11:09.000Z | 2022-02-14T01:47:20.000Z | __title__ = 'inputimeout'
__description__ = 'Multi platform standard input with timeout'
__url__ = 'http://github.com/johejo/inutimeout'
__version__ = '1.0.4'
__author__ = 'Mitsuo Heijo'
__author_email__ = 'mitsuo_h@outlook.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2018 Mitsuo Heijo'
| 32.777778 | 62 | 0.772881 |
e242a997219b0d73bc2993d0a5162e0158ea578a | 3,278 | py | Python | src/pyams_utils/context.py | Py-AMS/pyams-utils | 65b166596a8b9f66fb092a69ce5d53ac6675685e | [
"ZPL-2.1"
] | null | null | null | src/pyams_utils/context.py | Py-AMS/pyams-utils | 65b166596a8b9f66fb092a69ce5d53ac6675685e | [
"ZPL-2.1"
] | null | null | null | src/pyams_utils/context.py | Py-AMS/pyams-utils | 65b166596a8b9f66fb092a69ce5d53ac6675685e | [
"ZPL-2.1"
] | null | null | null | #
# Copyright (c) 2008-2015 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""PyAMS_utils.context module
This module provides a "context" selector which can be used as Pyramid's subscriber
predicate. Matching argument can be a class or an interface: for subscriber to be actually called,
subscriber's argument should inherit from it (if it's a class) or implement it (if it's an
interface).
"""
import sys
from contextlib import contextmanager
from io import StringIO
__docformat__ = 'restructuredtext'
@contextmanager
def capture(func, *args, **kwargs):
"""Context manager used to capture standard output"""
out, sys.stdout = sys.stdout, StringIO()
try:
result = func(*args, **kwargs)
sys.stdout.seek(0)
yield result, sys.stdout.read()
finally:
sys.stdout = out
@contextmanager
def capture_stderr(func, *args, **kwargs):
"""Context manager used to capture error output"""
err, sys.stderr = sys.stderr, StringIO()
try:
result = func(*args, **kwargs)
sys.stderr.seek(0)
yield result, sys.stderr.read()
finally:
sys.stderr = err
@contextmanager
def capture_all(func, *args, **kwargs):
"""Context manager used to capture standard output and standard error output"""
out, sys.stdout, err, sys.stderr = sys.stdout, StringIO(), sys.stderr, StringIO()
try:
result = func(*args, **kwargs)
sys.stdout.seek(0)
sys.stderr.seek(0)
yield result, sys.stdout.read(), sys.stderr.read()
finally:
sys.stdout, sys.stderr = out, err
class ContextSelector: # pylint: disable=too-few-public-methods
"""Interface based context selector
This selector can be used as a predicate to define a class or an interface that the context
must inherit from or implement for the subscriber to be called:
.. code-block:: python
from zope.lifecycleevent.interfaces import IObjectModifiedEvent
from pyams_site.interfaces import ISiteRoot
@subscriber(IObjectModifiedEvent, context_selector=ISiteRoot)
def siteroot_modified_event_handler(event):
'''This is an event handler for an ISiteRoot object modification event'''
"""
def __init__(self, ifaces, config): # pylint: disable=unused-argument
if not isinstance(ifaces, (list, tuple, set)):
ifaces = (ifaces,)
self.interfaces = ifaces
def text(self):
"""Return selector """
return 'context_selector = %s' % str(self.interfaces)
phash = text
def __call__(self, event):
for intf in self.interfaces:
try:
if intf.providedBy(event.object):
return True
except (AttributeError, TypeError):
if isinstance(event.object, intf):
return True
return False
| 32.137255 | 98 | 0.671141 |
207da5a3bbfd08be32d252b2c9d7d5981aa5f712 | 2,152 | py | Python | managers/operatorsSignaling/reconstructer.py | HarshKhilawala/cerebmodels | d2a2f2ef947ef9dc23ddce6e55159240cd3233cb | [
"BSD-3-Clause"
] | null | null | null | managers/operatorsSignaling/reconstructer.py | HarshKhilawala/cerebmodels | d2a2f2ef947ef9dc23ddce6e55159240cd3233cb | [
"BSD-3-Clause"
] | 9 | 2020-03-24T17:09:03.000Z | 2021-05-17T16:11:17.000Z | managers/operatorsSignaling/reconstructer.py | myHBPwork/cerebmodels | 371ea7f1bbe388f1acade17c7128b8ca6ab8fb7a | [
"BSD-3-Clause"
] | 1 | 2021-05-21T03:08:41.000Z | 2021-05-21T03:08:41.000Z | # ../managers/operatorsSignaling/reconstructer.py
import efel
#from quantities import mV
class Reconstructer(object):
"""
**Available methods:**
+-------------------------------------------------+--------------------+
| Method name | Method type |
+-------------------------------------------------+--------------------+
| :py:meth:`.construct_base_efel_trace_individual`| static method |
+-------------------------------------------------+--------------------+
| :py:meth:`.construct_base_efel_trace_overall` | class method |
+-------------------------------------------------+--------------------+
"""
@staticmethod
def construct_base_efel_trace_individual(timestamp, datavalue):
"""Returns **a** dictionary with the keys: `"T"`, `"V"`, `"stim_start"` and `"stim_end"`.
**Arguments:**
+----------+-----------------+
| Argument | Value type |
+==========+=================+
| first | array of times |
+----------+-----------------+
| second | array of values |
+----------+-----------------+
"""
a_trace = {}
a_trace["V"] = datavalue
a_trace["T"] = timestamp
a_trace["stim_start"] = [ timestamp[0] ]
a_trace["stim_end"] = [ timestamp[-1] ]
return a_trace
@classmethod
def construct_base_efel_trace_overall(cls, timestamps, datavalues):
"""Returns traces; a list of dictionaries with the keys: `"T"`, `"V"`, `"stim_start"` and `"stim_end"`.
**Arguments:**
+----------+-------------------------+
| Argument | Value type |
+==========+=========================+
| first | list of array of times |
+----------+-------------------------+
| second | list of array of values |
+----------+-------------------------+
"""
return [ cls.construct_base_efel_trace_individual( timestamps[i], datavalues[i] )
for i in range(len(timestamps)) ] # [ trace0, trace1, trace2, ... ]
| 37.103448 | 111 | 0.392658 |
42cb2acf886e03ff6996407a8d1f83f539ceb7d5 | 653 | py | Python | .history/myblog/views_20200416102151.py | abhinavmarwaha/demo-django-blog | c80a7d825e44d7e1589d9272c3583764562a2515 | [
"MIT"
] | null | null | null | .history/myblog/views_20200416102151.py | abhinavmarwaha/demo-django-blog | c80a7d825e44d7e1589d9272c3583764562a2515 | [
"MIT"
] | null | null | null | .history/myblog/views_20200416102151.py | abhinavmarwaha/demo-django-blog | c80a7d825e44d7e1589d9272c3583764562a2515 | [
"MIT"
] | null | null | null | from django.shortcuts import render , get_object_or_404
from django.views import generic
from .models import Post
from .forms import CommentForm
class PostList(generic.ListView):
queryset = Post.objects.filter(status=1).order_by('-created_on')
template_name = 'index.html'
class Postdetail(request, slug):
template_name = 'post_detail.html'
post = get_object_or_404(Post, slug = slug)
comments = post.comments.filter(active = True)
new_comment = None
if request.method == 'POST':
comment_form = CommentForm(data=request.POST)
if comment_form.is_valid():
new_comment = comment_form.save(Comm)
| 29.681818 | 68 | 0.719755 |
fdcfaeaf4b86130f772b5ba9682e874192eb5b94 | 3,248 | py | Python | ModelUtils/actionlist.py | tema-tut/tema-tg | 9c3f119c8bf5cc565e6a3e8e9e6205037e326d89 | [
"MIT"
] | null | null | null | ModelUtils/actionlist.py | tema-tut/tema-tg | 9c3f119c8bf5cc565e6a3e8e9e6205037e326d89 | [
"MIT"
] | null | null | null | ModelUtils/actionlist.py | tema-tut/tema-tg | 9c3f119c8bf5cc565e6a3e8e9e6205037e326d89 | [
"MIT"
] | 1 | 2021-03-27T21:27:32.000Z | 2021-03-27T21:27:32.000Z | #!/usr/bin/env python
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import sys
import re
from optparse import OptionParser
_aw_re = re.compile('.* -> "(.*end_aw.*)"')
_kw_re = re.compile('.* -> "(~?[kv]w_.*)"')
def action_list(file_object):
aws = set()
kws = set()
for line in file_object:
if not line.startswith("#"):
m = _aw_re.match(line)
if m:
aws.add(m.group(1))
m = _kw_re.match(line)
if m:
kws.add(m.group(1))
return sorted(list(aws)),sorted(list(kws))
def read_args(argv):
usagemessage = "usage: %prog [rules_file] [options]\n\nIf no rules_file is given or rules_file is -, reads from standard input."
description = "Lists all actions in a test model."
parser = OptionParser(usage=usagemessage,description=description)
parser.add_option( "--keywords", action="store_true",default=False,
help="List also keywords")
parser.add_option( "-v", "--verbose", action="store_true",default=False,
help="Be more verbose")
options,args = parser.parse_args(argv)
if len(args) == 0:
rules_file = "-"
elif len(args) == 1:
rules_file = args[0]
else:
parser.error("Can only read one rules file.")
return options,rules_file
def _main(options,rules_file):
aws = []
kws = []
if rules_file == "-":
rules_input_file = sys.stdin
else:
rules_input_file = open(rules_file,'r')
try:
try:
aws,kws = action_list(rules_input_file)
except KeyboardInterrupt:
pass
finally:
if rules_file != "-":
rules_input_file.close()
if options.verbose:
print "Action words"
print "------------"
print os.linesep.join(aws)
if options.verbose and options.keywords:
print
print "Keywords"
print "--------"
print os.linesep.join(kws)
elif options.keywords:
print
print os.linesep.join(kws)
if __name__ == "__main__":
options,rules_file = read_args(sys.argv[1:])
_main(options,rules_file)
| 32.48 | 132 | 0.648399 |
ab8a08a28cf41808ca3ee78ee155df8fa78269fc | 491 | py | Python | fxa/5_Machine_Learning/cachedir/joblib/__main__--Users-joelricci-Documents-Doctorado-Proteinas_modelo-ML-Ensemble-Docking-CDK2-ML_ensemble_docking_repo-fxa-5_Machine_Learning-<ipython-input-c9b2d1ca51cc>/_format_results_to_df/func_code.py | jRicciL/ml_and_ensemble_docking | d2bf7010d6df34710e860b0c01f2746b4dc8e09a | [
"MIT"
] | 7 | 2021-05-11T18:39:26.000Z | 2022-01-28T14:41:49.000Z | fxa/5_Machine_Learning/cachedir/joblib/__main__--Users-joelricci-Documents-Doctorado-Proteinas_modelo-ML-Ensemble-Docking-CDK2-ML_ensemble_docking_repo-fxa-5_Machine_Learning-<ipython-input-c9b2d1ca51cc>/_format_results_to_df/func_code.py | jRicciL/ml_and_ensemble_docking | d2bf7010d6df34710e860b0c01f2746b4dc8e09a | [
"MIT"
] | null | null | null | fxa/5_Machine_Learning/cachedir/joblib/__main__--Users-joelricci-Documents-Doctorado-Proteinas_modelo-ML-Ensemble-Docking-CDK2-ML_ensemble_docking_repo-fxa-5_Machine_Learning-<ipython-input-c9b2d1ca51cc>/_format_results_to_df/func_code.py | jRicciL/ml_and_ensemble_docking | d2bf7010d6df34710e860b0c01f2746b4dc8e09a | [
"MIT"
] | 3 | 2021-12-04T13:42:19.000Z | 2022-01-28T14:41:50.000Z | # first line: 1
@memory.cache
def _format_results_to_df(metrics, results, n):
# Format into a dataframe
# Create the metric names and repeat them
n_metrics = len(metrics)
index_names = [*metrics.keys()]*n
# convert to a dataframe
df_res = pd.DataFrame(
results,
index= pd.MultiIndex.from_tuples(
zip(index_names,
np.repeat(range(n), n_metrics))
))
df_res = df_res.sort_index()
return df_res
| 25.842105 | 47 | 0.604888 |
ab5c9ad6a0782d8501a307eb53ce0933dcf89d3d | 1,526 | py | Python | scripts/eye/start_capture.py | mpsamurai/neochi | d7920201743a6d86f1f308fa76cbe0d2475ff03c | [
"MIT"
] | null | null | null | scripts/eye/start_capture.py | mpsamurai/neochi | d7920201743a6d86f1f308fa76cbe0d2475ff03c | [
"MIT"
] | 10 | 2019-11-12T13:01:07.000Z | 2022-02-10T00:40:48.000Z | scripts/eye/start_capture.py | mpsamurai/neochi | d7920201743a6d86f1f308fa76cbe0d2475ff03c | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2019 Morning Project Samurai Inc. (MPS)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
__author__ = 'Junya Kaneko <junya@mpsamurai.org>'
from neochi.core.dataflow.backends import caches
from neochi.neochi import settings
from neochi.eye.eye import Eye
if __name__ == '__main__':
    # Connect to the cache backend named in the dataflow settings on the
    # local host; this is the shared state store for the eye service.
    cache = caches.get_cache(settings.DATAFLOW['BACKEND']['CACHE']['MODULE'], host='localhost')
    eye = Eye(cache)
    # Turn the capturing flag on so the eye starts grabbing frames.
    eye.update_state(is_capturing=True)
    # Echo the state timestamp and value for a quick sanity check.
    # NOTE(review): reads the private _state attribute — confirm Eye exposes
    # a public accessor for the timestamp.
    print(eye._state.timestamp, eye.state)
2dbca85a1d76f5f6611458a3f6a6388b1f15eff2 | 9,812 | py | Python | smu/geometry/utilities.py | ParikhKadam/google-research | 00a282388e389e09ce29109eb050491c96cfab85 | [
"Apache-2.0"
] | 2 | 2022-01-21T18:15:34.000Z | 2022-01-25T15:21:34.000Z | smu/geometry/utilities.py | ParikhKadam/google-research | 00a282388e389e09ce29109eb050491c96cfab85 | [
"Apache-2.0"
] | 110 | 2021-10-01T18:22:38.000Z | 2021-12-27T22:08:31.000Z | smu/geometry/utilities.py | admariner/google-research | 7cee4b22b925581d912e8d993625c180da2a5a4f | [
"Apache-2.0"
] | 1 | 2022-02-10T10:43:10.000Z | 2022-02-10T10:43:10.000Z | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for SMU."""
import math
from typing import Any, List
import numpy as np
from rdkit import Chem
from smu import dataset_pb2
from smu.parser import smu_utils_lib
# Conversion factor from Bohr (atomic units) to Angstroms.
# NOTE(review): name is missing a 'G' ("ANSTROM"); kept for compatibility
# with existing callers.
BOHR2ANSTROM = 0.529177
# Bin count for discretizing interatomic distances.
# NOTE(review): not used in this chunk — confirm intended resolution with
# the code that consumes it.
DISTANCE_BINS = 10000
def distance_between_atoms(geom, a1, a2):
  """Return the Euclidean distance between atoms `a1` and `a2` in `geom`.

  Coordinates in `geom` are in Bohr; the result is converted to Angstroms.

  Args:
    geom: Geometry proto with an `atom_positions` repeated field.
    a1: index of the first atom.
    a2: index of the second atom.

  Returns:
    Distance in Angstroms.
  """
  p1 = geom.atom_positions[a1]
  p2 = geom.atom_positions[a2]
  dx = p1.x - p2.x
  dy = p1.y - p2.y
  dz = p1.z - p2.z
  return smu_utils_lib.bohr_to_angstroms(
      math.sqrt(dx * dx + dy * dy + dz * dz))
def bonded(bond_topology):
  """Return the symmetric adjacency matrix of bond types in `bond_topology`.

  Args:
    bond_topology: object with `atoms` (sequence) and `bonds` (each bond has
      atom_a, atom_b and bond_type).

  Returns:
    A natoms x natoms numpy int32 array; entry [i, j] is the bond type between
    atoms i and j, or 0 when they are not bonded.
  """
  natoms = len(bond_topology.atoms)
  adjacency = np.zeros((natoms, natoms), dtype=np.int32)
  for bond in bond_topology.bonds:
    adjacency[bond.atom_a, bond.atom_b] = bond.bond_type
    adjacency[bond.atom_b, bond.atom_a] = bond.bond_type
  return adjacency
def btype_to_nbonds(btype):
  """Convert bond-type enum `btype` to its bond count.

  The BondType enum values are laid out so that plain integer conversion
  yields the bond order (single=1, double=2, triple=3).

  Args:
    btype: BondType enum value.

  Returns:
    Number of bonds as an int.
  """
  return int(btype)
def number_bonds(bt):
  """Return, per atom in `bt`, the total bond order on that atom.

  Single bonds count 1, double 2, triple 3.

  Args:
    bt: BondTopology.

  Returns:
    Numpy float array with len(bt.atoms) entries.
  """
  totals = np.zeros(len(bt.atoms))
  for bond in bt.bonds:
    order = btype_to_nbonds(bond.bond_type)
    totals[bond.atom_a] += order
    totals[bond.atom_b] += order
  return totals
def distances(geometry):
  """Return the symmetric matrix of interatomic distances in `geometry`.

  Args:
    geometry: Geometry proto.

  Returns:
    A natoms x natoms numpy float32 array of distances in Angstroms, with
    zeros on the diagonal.
  """
  natoms = len(geometry.atom_positions)
  result = np.zeros((natoms, natoms), dtype=np.float32)
  for i in range(natoms):
    for j in range(i + 1, natoms):
      dist = distance_between_atoms(geometry, i, j)
      result[i, j] = dist
      result[j, i] = dist
  return result
def rdkit_atom_to_atom_type(atom):
  """Map an RDKit atom to the corresponding dataset_pb2 AtomType.

  Nitrogen and oxygen are charge-sensitive: any non-zero formal charge maps
  them to ATOM_NPOS / ATOM_ONEG respectively.

  Args:
    atom: rdkit Atom.

  Returns:
    dataset_pb2.BondTopology AtomType value.

  Raises:
    ValueError: for atomic numbers other than H, C, N, O, F.
  """
  atomic_num = atom.GetAtomicNum()
  if atomic_num == 1:
    return dataset_pb2.BondTopology.ATOM_H
  if atomic_num == 6:
    return dataset_pb2.BondTopology.ATOM_C
  if atomic_num == 7:
    if atom.GetFormalCharge():
      return dataset_pb2.BondTopology.ATOM_NPOS
    return dataset_pb2.BondTopology.ATOM_N
  if atomic_num == 8:
    if atom.GetFormalCharge():
      return dataset_pb2.BondTopology.ATOM_ONEG
    return dataset_pb2.BondTopology.ATOM_O
  if atomic_num == 9:
    return dataset_pb2.BondTopology.ATOM_F
  raise ValueError(f"Unrecognized atom type {atomic_num}")
def rdkit_bond_type_to_btype(bond_type):
  """Map an RDKit bond type to the corresponding dataset_pb2 BondType.

  Args:
    bond_type: rdkit Chem.rdchem.BondType value.

  Returns:
    dataset_pb2.BondTopology.BondType value.

  Raises:
    ValueError: for bond types other than single/double/triple.
  """
  mapping = {
      Chem.rdchem.BondType.SINGLE:
          dataset_pb2.BondTopology.BondType.BOND_SINGLE,
      Chem.rdchem.BondType.DOUBLE:
          dataset_pb2.BondTopology.BondType.BOND_DOUBLE,
      Chem.rdchem.BondType.TRIPLE:
          dataset_pb2.BondTopology.BondType.BOND_TRIPLE,
  }
  if bond_type not in mapping:
    raise ValueError(f"Unrecognized bond type #{bond_type}")
  return mapping[bond_type]
def molecule_to_bond_topology(mol):
  """Build a dataset_pb2 BondTopology from an RDKit molecule.

  Args:
    mol: rdkit molecule.

  Returns:
    dataset_pb2.BondTopology with one atom per RDKit atom and one bond per
    RDKit bond (endpoints are RDKit atom indices).
  """
  topology = dataset_pb2.BondTopology()
  for atom in mol.GetAtoms():
    topology.atoms.append(rdkit_atom_to_atom_type(atom))
  for bond in mol.GetBonds():
    new_bond = dataset_pb2.BondTopology.Bond()
    new_bond.atom_a = bond.GetBeginAtom().GetIdx()
    new_bond.atom_b = bond.GetEndAtom().GetIdx()
    new_bond.bond_type = rdkit_bond_type_to_btype(bond.GetBondType())
    topology.bonds.append(new_bond)
  return topology
def canonical_bond_topology(bond_topology):
  """Put the bonds of `bond_topology` into canonical form, in place.

  Each bond is oriented so that atom_a <= atom_b, then bonds are sorted by
  (atom_a, atom_b).  After this, two topologies describing the same bonding
  compare equal with same_bond_topology.

  Fixes vs. the previous version: the old `len(bonds) < 2` early exit left a
  single mis-oriented bond (atom_a > atom_b) un-normalized, so identical
  one-bond topologies could compare unequal; and the docstring claimed a
  BondTopology return value although the proto is mutated in place.

  Args:
    bond_topology: dataset_pb2.BondTopology, modified in place.

  Returns:
    None.
  """
  for bond in bond_topology.bonds:
    if bond.atom_a > bond.atom_b:
      bond.atom_a, bond.atom_b = bond.atom_b, bond.atom_a
  bond_topology.bonds.sort(key=lambda b: (b.atom_a, b.atom_b))
def same_bond_topology(bt1, bt2):
  """Return True if `bt1` and `bt2` have identical atoms and bonds.

  The protos are compared field by field in order; no canonicalisation is
  attempted (run canonical_bond_topology first if bond order may differ).

  Args:
    bt1: BondTopology.
    bt2: BondTopology.

  Returns:
    Bool.
  """
  if len(bt1.atoms) != len(bt2.atoms):
    return False
  if len(bt1.bonds) != len(bt2.bonds):
    return False
  if any(a1 != a2 for a1, a2 in zip(bt1.atoms, bt2.atoms)):
    return False
  return all(
      b1.atom_a == b2.atom_a and b1.atom_b == b2.atom_b and
      b1.bond_type == b2.bond_type
      for b1, b2 in zip(bt1.bonds, bt2.bonds))
def visit(nbrs, atom, visited):
  """Mark every node reachable from `atom` in the graph given by `nbrs`.

  Entries of `visited` are set to 1 as nodes are reached; nodes already
  marked non-zero are skipped.  Implemented iteratively with an explicit
  stack (equivalent to the DFS recursion it replaces).

  Args:
    nbrs: per-node sequences of neighbour indices.
    atom: start node.
    visited: mutable int array, updated in place.

  Returns:
    The number of nodes newly visited, including `atom`.
  """
  stack = [atom]
  visited[atom] = 1
  count = 1
  while stack:
    node = stack.pop()
    for nbr in nbrs[node]:
      if not visited[nbr]:
        visited[nbr] = 1
        count += 1
        stack.append(nbr)
  return count
def is_single_fragment(bond_topology):
  """Return True if `bond_topology` is a single connected fragment.

  Strategy: dispose of small topologies with counting arguments, reject any
  topology with a detached atom, pre-mark hydrogens and singly-connected
  heavy atoms as visited, then flood-fill (visit) from one multiply-connected
  atom and check that every atom was reached.

  Args:
    bond_topology: dataset_pb2.BondTopology.

  Returns:
    True if `bond_topology` is a single fragment.
  """
  natoms = len(bond_topology.atoms)
  nbonds = len(bond_topology.bonds)
  # Some special cases are easy: with this few atoms/bonds the counts alone
  # guarantee connectivity.
  if natoms == 1:
    return True
  if natoms == 2 and nbonds == 1:
    return True
  if natoms == 3 and nbonds == 2:
    return True
  if natoms == nbonds and natoms <= 4:
    return True
  # Symmetric natoms x natoms matrix of bond types (0 = not bonded).
  connection_matrix = bonded(bond_topology)
  # Any row with all zero means a detached atom, so not a single fragment.
  if np.sum(connection_matrix.any(axis=1)) != natoms:
    return False
  # For each atom, the indices of its neighbours.
  attached: List[Any] = []
  for i in range(0, natoms):
    attached.append(np.ravel(np.argwhere(connection_matrix[i,])))
  # neighbours = np.argwhere(connection_matrix > 0)
  visited = np.zeros(natoms, dtype=np.int32)
  # Mark anything with a single connection as visited.
  # Record the index of an atom that has multiple connections.
  a_multiply_connected_atom = -1
  for i in range(0, natoms):
    if bond_topology.atoms[i] == dataset_pb2.BondTopology.AtomType.ATOM_H:
      # Hydrogens cannot bridge fragments; pre-mark them as visited.
      visited[i] = 1
      continue
    if len(attached[i]) > 1:
      a_multiply_connected_atom = i
      continue
    # A singly connected heavy atom. Mark visited if not of a two atom fragment.
    if len(attached[attached[i][0]]) > 1:
      visited[i] = 1
  if a_multiply_connected_atom < 0:  # Cannot happen
    return False
  # Flood fill from the multiply-connected atom; if all atoms end up visited
  # the topology is one fragment.
  number_visited = np.count_nonzero(visited) + visit(
      attached, a_multiply_connected_atom, visited)
  return number_visited == natoms
def geom_to_angstroms(geometry):
  """Return a copy of `geometry` with coordinates converted to Angstroms.

  The input coordinates are assumed to be in Bohr; `geometry` itself is not
  modified.

  Args:
    geometry: starting Geometry proto.

  Returns:
    New Geometry with coordinates in Angstroms.
  """
  converted = dataset_pb2.Geometry()
  for position in geometry.atom_positions:
    new_position = dataset_pb2.Geometry.AtomPos()
    new_position.x = smu_utils_lib.bohr_to_angstroms(position.x)
    new_position.y = smu_utils_lib.bohr_to_angstroms(position.y)
    new_position.z = smu_utils_lib.bohr_to_angstroms(position.z)
    converted.atom_positions.append(new_position)
  return converted
def max_bonds_any_form(atype):
  """Return the max number of bonds for any charged/neutral form of `atype`.

  For atom types whose bond limit does not depend on charge state, the value
  comes from the shared lookup table; neutral N and O- are special-cased to
  the limit of their alternate charge forms.

  Args:
    atype: a dataset_pb2 atom type.

  Returns:
    Max number of bonds.

  Raises:
    ValueError: on unsupported atype.
  """
  if atype in [
      dataset_pb2.BondTopology.ATOM_C, dataset_pb2.BondTopology.ATOM_NPOS,
      dataset_pb2.BondTopology.ATOM_O, dataset_pb2.BondTopology.ATOM_F,
      dataset_pb2.BondTopology.ATOM_H
  ]:
    return smu_utils_lib.ATOM_TYPE_TO_MAX_BONDS[atype]
  # Neutral nitrogen may also appear as N+, which can carry 4 bonds.
  if atype == dataset_pb2.BondTopology.ATOM_N:
    return 4
  # O- may also appear as neutral oxygen, which can carry 2 bonds.
  if atype == dataset_pb2.BondTopology.ATOM_ONEG:
    return 2
  raise ValueError(f"Unsupported AtomType {atype}")
def ring_atom_count_bt(bt):
  """Return the number of ring atoms in bond topology `bt`.

  Converts `bt` to an RDKit molecule and delegates the ring analysis.

  Args:
    bt: dataset_pb2.BondTopology.

  Returns:
    Integer.
  """
  return ring_atom_count_mol(smu_utils_lib.bond_topology_to_molecule(bt))
def ring_atom_count_mol(mol):
  """Return the number of atoms in `mol` that are in at least one ring.

  Ring perception (GetSymmSSSR) is run first so RingInfo is populated.

  Args:
    mol: rdkit molecule.

  Returns:
    Integer count of ring atoms.
  """
  mol.UpdatePropertyCache()
  Chem.GetSymmSSSR(mol)
  ring_info = mol.GetRingInfo()
  if ring_info.NumRings() == 0:
    return 0
  return sum(
      1 for i in range(mol.GetNumAtoms()) if ring_info.NumAtomRings(i) > 0)
| 23.99022 | 80 | 0.68987 |
cb9652a4d6ca6d1f8f1da64cb3dbec2c19d63941 | 20,757 | py | Python | sportsreference/mlb/constants.py | JosephDErwin/sportsreference | f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd | [
"MIT"
] | null | null | null | sportsreference/mlb/constants.py | JosephDErwin/sportsreference | f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd | [
"MIT"
] | null | null | null | sportsreference/mlb/constants.py | JosephDErwin/sportsreference | f026366bec91fdf4bebef48e3a4bfd7c5bfab4bd | [
"MIT"
] | 1 | 2020-07-08T16:05:25.000Z | 2020-07-08T16:05:25.000Z | PARSING_SCHEME = {
'name': 'a',
'league': 'td[data-stat="lg_ID"]:first',
'games': 'td[data-stat="G"]:first',
'wins': 'td[data-stat="W"]:first',
'losses': 'td[data-stat="L"]:first',
'win_percentage': 'td[data-stat="win_loss_perc"]:first',
'streak': 'td[data-stat="winning_streak"]:first',
'runs': 'td[data-stat="R"]:first',
'runs_against': 'td[data-stat="RA"]:first',
'run_difference': 'td[data-stat="run_diff"]:first',
'strength_of_schedule': 'td[data-stat="strength_of_schedule"]:first',
'simple_rating_system': 'td[data-stat="simple_rating_system"]:first',
'pythagorean_win_loss': 'td[data-stat="record_pythag"]:first',
'luck': 'td[data-stat="luck_pythag"]:first',
'interleague_record': 'td[data-stat="record_interleague"]:first',
'home_record': 'td[data-stat="record_home"]:first',
'away_record': 'td[data-stat="record_road"]:first',
'extra_inning_record': 'td[data-stat="record_xinn"]:first',
'single_run_record': 'td[data-stat="record_one_run"]:first',
'record_vs_right_handed_pitchers': 'td[data-stat="record_vs_rhp"]:first',
'record_vs_left_handed_pitchers': 'td[data-stat="record_vs_lhp"]:first',
'record_vs_teams_over_500': 'td[data-stat="record_vs_over_500"]:first',
'record_vs_teams_under_500': 'td[data-stat="record_vs_under_500"]:first',
'last_ten_games_record': 'td[data-stat="record_last_10"]:first',
'last_twenty_games_record': 'td[data-stat="record_last_20"]:first',
'last_thirty_games_record': 'td[data-stat="record_last_30"]:first',
'number_players_used': 'td[data-stat="batters_used"]:first',
'average_batter_age': 'td[data-stat="age_bat"]:first',
'plate_appearances': 'td[data-stat="PA"]:first',
'at_bats': 'td[data-stat="AB"]:first',
'total_runs': 'td[data-stat="R"]:first',
'hits': 'td[data-stat="H"]:first',
'doubles': 'td[data-stat="2B"]:first',
'triples': 'td[data-stat="3B"]:first',
'home_runs': 'td[data-stat="HR"]:first',
'runs_batted_in': 'td[data-stat="RBI"]:first',
'stolen_bases': 'td[data-stat="SB"]:first',
'times_caught_stealing': 'td[data-stat="CS"]:first',
'bases_on_balls': 'td[data-stat="BB"]:first',
'times_struck_out': 'td[data-stat="SO"]:first',
'batting_average': 'td[data-stat="batting_avg"]:first',
'on_base_percentage': 'td[data-stat="onbase_perc"]:first',
'slugging_percentage': 'td[data-stat="slugging_perc"]:first',
'on_base_plus_slugging_percentage':
'td[data-stat="onbase_plus_slugging"]:first',
'on_base_plus_slugging_percentage_plus':
'td[data-stat="onbase_plus_slugging_plus"]:first',
'total_bases': 'td[data-stat="TB"]:first',
'grounded_into_double_plays': 'td[data-stat="GIDP"]:first',
'times_hit_by_pitch': 'td[data-stat="HBP"]:first',
'sacrifice_hits': 'td[data-stat="SH"]:first',
'sacrifice_flies': 'td[data-stat="SF"]:first',
'intentional_bases_on_balls': 'td[data-stat="IBB"]:first',
'runners_left_on_base': 'td[data-stat="LOB"]:first',
'number_of_pitchers': 'td[data-stat="pitchers_used"]:first',
'average_pitcher_age': 'td[data-stat="age_pitch"]:first',
'runs_allowed_per_game': 'td[data-stat="runs_allowed_per_game"]:first',
'earned_runs_against': 'td[data-stat="earned_run_avg"]:first',
'games_finished': 'td[data-stat="GF"]:first',
'complete_games': 'td[data-stat="CG"]:first',
'shutouts': 'td[data-stat="SHO_team"]:first',
'complete_game_shutouts': 'td[data-stat="SHO_cg"]:first',
'saves': 'td[data-stat="SV"]:first',
'innings_pitched': 'td[data-stat="IP"]:first',
'hits_allowed': 'td[data-stat="H"]:first',
'home_runs_against': 'td[data-stat="HR"]:first',
'bases_on_walks_given': 'td[data-stat="BB"]:first',
'strikeouts': 'td[data-stat="SO"]:first',
'hit_pitcher': 'td[data-stat="HBP"]:first',
'balks': 'td[data-stat="BK"]:first',
'wild_pitches': 'td[data-stat="WP"]:first',
'batters_faced': 'td[data-stat="batters_faced"]:first',
'earned_runs_against_plus': 'td[data-stat="earned_run_avg_plus"]:first',
'fielding_independent_pitching': 'td[data-stat="fip"]:first',
'whip': 'td[data-stat="whip"]:first',
'hits_per_nine_innings': 'td[data-stat="hits_per_nine"]:first',
'home_runs_per_nine_innings': 'td[data-stat="home_runs_per_nine"]:first',
'bases_on_walks_given_per_nine_innings':
'td[data-stat="bases_on_balls_per_nine"]:first',
'strikeouts_per_nine_innings': 'td[data-stat="strikeouts_per_nine"]:first',
'strikeouts_per_base_on_balls':
'td[data-stat="strikeouts_per_base_on_balls"]:first',
'opposing_runners_left_on_base': 'td[data-stat="LOB"]:first'
}
TEAM_ELEMENT = {
'home_wins': 0,
'home_losses': 1,
'away_wins': 0,
'away_losses': 1,
'extra_inning_wins': 0,
'extra_inning_losses': 1,
'single_run_wins': 0,
'single_run_losses': 1,
'wins_vs_right_handed_pitchers': 0,
'losses_vs_right_handed_pitchers': 1,
'wins_vs_left_handed_pitchers': 0,
'losses_vs_left_handed_pitchers': 1,
'wins_vs_teams_over_500': 0,
'losses_vs_teams_over_500': 1,
'wins_vs_teams_under_500': 0,
'losses_vs_teams_under_500': 1,
'wins_last_ten_games': 0,
'losses_last_ten_games': 1,
'wins_last_twenty_games': 0,
'losses_last_twenty_games': 1,
'wins_last_thirty_games': 0,
'losses_last_thirty_games': 1
}
SCHEDULE_SCHEME = {
'game': 'th[data-stat="team_game"]:first',
'date': 'td[data-stat="date_game"]:first',
'location': 'td[data-stat="homeORvis"]:first',
'opponent_abbr': 'td[data-stat="opp_ID"]:first',
'result': 'td[data-stat="win_loss_result"]:first',
'runs_scored': 'td[data-stat="R"]:first',
'runs_allowed': 'td[data-stat="RA"]:first',
'innings': 'td[data-stat="extra_innings"]:first',
'record': 'td[data-stat="win_loss_record"]:first',
'rank': 'td[data-stat="rank"]:first',
'games_behind': 'td[data-stat="games_back"]:first',
'winner': 'td[data-stat="winning_pitcher"]:first',
'loser': 'td[data-stat="losing_pitcher"]:first',
'save': 'td[data-stat="saving_pitcher"]:first',
'game_duration': 'td[data-stat="time_of_game"]:first',
'day_or_night': 'td[data-stat="day_or_night"]:first',
'attendance': 'td[data-stat="attendance"]:first',
'streak': 'td[data-stat="win_loss_streak"]:first'
}
ELEMENT_INDEX = {
'total_runs': 1,
'bases_on_walks_given': 1,
'hits_allowed': 1,
'strikeouts': 1,
'home_runs_against': 1,
'opposing_runners_left_on_base': 1
}
BOXSCORE_SCHEME = {
'game_info': 'div[class="scorebox_meta"]',
'away_name': 'a[itemprop="name"]:first',
'home_name': 'a[itemprop="name"]:last',
'winner': 'td[data-stat=""]',
'winning_name': 'td[data-stat=""]',
'winning_abbr': 'td[data-stat=""]',
'losing_name': 'td[data-stat=""]',
'losing_abbr': 'td[data-stat=""]',
'losing_abbr': 'td[data-stat=""]',
'away_at_bats': 'tfoot td[data-stat="AB"]',
'away_runs': 'tfoot td[data-stat="R"]',
'away_hits': 'tfoot td[data-stat="H"]',
'away_rbi': 'tfoot td[data-stat="RBI"]',
'away_earned_runs': 'tfoot td[data-stat="earned_run_avg"]',
'away_bases_on_balls': 'tfoot td[data-stat="BB"]',
'away_strikeouts': 'tfoot td[data-stat="SO"]',
'away_plate_appearances': 'tfoot td[data-stat="PA"]',
'away_batting_average': 'tfoot td[data-stat="batting_avg"]',
'away_on_base_percentage': 'tfoot td[data-stat="onbase_perc"]',
'away_slugging_percentage': 'tfoot td[data-stat="slugging_perc"]',
'away_on_base_plus': 'tfoot td[data-stat="onbase_plus_slugging"]',
'away_pitches': 'tfoot td[data-stat="pitches"]',
'away_strikes': 'tfoot td[data-stat="strikes_total"]',
'away_win_probability_for_offensive_player':
'tfoot td[data-stat="wpa_bat"]',
'away_average_leverage_index': 'tfoot td[data-stat="leverage_index_avg"]',
'away_win_probability_added': 'tfoot td[data-stat="wpa_bat_pos"]',
'away_win_probability_subtracted': 'tfoot td[data-stat="wpa_bat_neg"]',
'away_base_out_runs_added': 'tfoot td[data-stat="re24_bat"]',
'away_putouts': 'tfoot td[data-stat="PO"]',
'away_assists': 'tfoot td[data-stat="A"]',
'away_innings_pitched': 'tfoot td[data-stat="IP"]',
'away_home_runs': 'tfoot td[data-stat="HR"]',
'away_strikes_by_contact': 'tfoot td[data-stat="strikes_contact"]',
'away_strikes_swinging': 'tfoot td[data-stat="strikes_swinging"]',
'away_strikes_looking': 'tfoot td[data-stat="strikes_looking"]',
'away_grounded_balls': 'tfoot td[data-stat="inplay_gb_total"]',
'away_fly_balls': 'tfoot td[data-stat="inplay_fb_total"]',
'away_line_drives': 'tfoot td[data-stat="inplay_ld"]',
'away_unknown_bat_type': 'tfoot td[data-stat="inplay_unk"]',
'away_game_score': 'tfoot td[data-stat="game_score"]',
'away_inherited_runners': 'tfoot td[data-stat="inherited_runners"]',
'away_inherited_score': 'tfoot td[data-stat="inherited_score"]',
'away_win_probability_by_pitcher': 'tfoot td[data-stat="wpa_def"]',
'away_average_leverage_index': 'tfoot td[data-stat="leverage_index_avg"]',
'away_base_out_runs_saved': 'tfoot td[data-stat="re24_def"]',
'home_at_bats': 'tfoot td[data-stat="AB"]',
'home_runs': 'tfoot td[data-stat="R"]',
'home_hits': 'tfoot td[data-stat="H"]',
'home_rbi': 'tfoot td[data-stat="RBI"]',
'home_earned_runs': 'tfoot td[data-stat="earned_run_avg"]',
'home_bases_on_balls': 'tfoot td[data-stat="BB"]',
'home_strikeouts': 'tfoot td[data-stat="SO"]',
'home_plate_appearances': 'tfoot td[data-stat="PA"]',
'home_batting_average': 'tfoot td[data-stat="batting_avg"]',
'home_on_base_percentage': 'tfoot td[data-stat="onbase_perc"]',
'home_slugging_percentage': 'tfoot td[data-stat="slugging_perc"]',
'home_on_base_plus': 'tfoot td[data-stat="onbase_plus_slugging"]',
'home_pitches': 'tfoot td[data-stat="pitches"]',
'home_strikes': 'tfoot td[data-stat="strikes_total"]',
'home_win_probability_for_offensive_player':
'tfoot td[data-stat="wpa_bat"]',
'home_average_leverage_index': 'tfoot td[data-stat="leverage_index_avg"]',
'home_win_probability_added': 'tfoot td[data-stat="wpa_bat_pos"]',
'home_win_probability_subtracted': 'tfoot td[data-stat="wpa_bat_neg"]',
'home_base_out_runs_added': 'tfoot td[data-stat="re24_bat"]',
'home_putouts': 'tfoot td[data-stat="PO"]',
'home_assists': 'tfoot td[data-stat="A"]',
'home_innings_pitched': 'tfoot td[data-stat="IP"]',
'home_home_runs': 'tfoot td[data-stat="HR"]',
'home_strikes_by_contact': 'tfoot td[data-stat="strikes_contact"]',
'home_strikes_swinging': 'tfoot td[data-stat="strikes_swinging"]',
'home_strikes_looking': 'tfoot td[data-stat="strikes_looking"]',
'home_grounded_balls': 'tfoot td[data-stat="inplay_gb_total"]',
'home_fly_balls': 'tfoot td[data-stat="inplay_fb_total"]',
'home_line_drives': 'tfoot td[data-stat="inplay_ld"]',
'home_unknown_bat_type': 'tfoot td[data-stat="inplay_unk"]',
'home_game_score': 'tfoot td[data-stat="game_score"]',
'home_inherited_runners': 'tfoot td[data-stat="inherited_runners"]',
'home_inherited_score': 'tfoot td[data-stat="inherited_score"]',
'home_win_probability_by_pitcher': 'tfoot td[data-stat="wpa_def"]',
'home_average_leverage_index': 'tfoot td[data-stat="leverage_index_avg"]',
'home_base_out_runs_saved': 'tfoot td[data-stat="re24_def"]'
}
BOXSCORE_ELEMENT_INDEX = {
'date': 0,
'time': 1,
'attendance': 2,
'venue': 3,
'duration': 4,
'time_of_day': 5,
'away_at_bats': 0,
'away_runs': 0,
'away_hits': 0,
'away_rbi': 0,
'away_earned_runs': 1,
'away_bases_on_balls': 0,
'away_strikeouts': 0,
'away_plate_appearances': 0,
'away_batting_average': 0,
'away_on_base_percentage': 0,
'away_slugging_percentage': 0,
'away_on_base_plus': 0,
'away_pitches': 1,
'away_strikes': 0,
'away_win_probability_for_offensive_player': 0,
'away_average_leverage_index': 1,
'away_win_probability_added': 0,
'away_win_probability_subtracted': 0,
'away_base_out_runs_added': 0,
'away_putouts': 0,
'away_assists': 0,
'away_innings_pitched': 0,
'away_home_runs': 0,
'away_strikes_by_contact': 1,
'away_strikes_swinging': 1,
'away_strikes_looking': 1,
'away_grounded_balls': 1,
'away_fly_balls': 1,
'away_line_drives': 1,
'away_unknown_bat_type': 1,
'away_game_score': 0,
'away_inherited_runners': 0,
'away_inherited_score': 0,
'away_win_probability_by_pitcher': 0,
'away_base_out_runs_saved': 0,
'home_at_bats': 1,
'home_runs': 1,
'home_hits': 1,
'home_rbi': 1,
'home_earned_runs': 0,
'home_bases_on_balls': 1,
'home_strikeouts': 1,
'home_plate_appearances': 1,
'home_batting_average': 1,
'home_on_base_percentage': 1,
'home_slugging_percentage': 1,
'home_on_base_plus': 1,
'home_pitches': 0,
'home_strikes': 1,
'home_win_probability_for_offensive_player': 1,
'home_average_leverage_index': 0,
'home_win_probability_added': 1,
'home_win_probability_subtracted': 1,
'home_base_out_runs_added': 1,
'home_putouts': 1,
'home_assists': 1,
'home_innings_pitched': 1,
'home_home_runs': 1,
'home_strikes_by_contact': 0,
'home_strikes_swinging': 0,
'home_strikes_looking': 0,
'home_grounded_balls': 0,
'home_fly_balls': 0,
'home_line_drives': 0,
'home_unknown_bat_type': 0,
'home_game_score': 1,
'home_inherited_runners': 1,
'home_inherited_score': 1,
'home_win_probability_by_pitcher': 1,
'home_base_out_runs_saved': 1
}
PLAYER_SCHEME = {
'season': 'th[data-stat="year_ID"]',
'name': 'h1[itemprop="name"]',
'team_abbreviation': 'td[data-stat="team_ID"]',
'position': 'td[data-stat="pos"]',
'height': 'p span[itemprop="height"]',
'weight': 'p span[itemprop="weight"]',
'birth_date': 'td[data-stat=""]',
'nationality': 'td[data-stat=""]',
'games': 'td[data-stat="G"]',
'games_started': 'td[data-stat="GS"]',
'plate_appearances': 'td[data-stat="PA"]',
'at_bats': 'td[data-stat="AB"]',
'runs': 'td[data-stat="R"]',
'hits': 'td[data-stat="H"]',
'doubles': 'td[data-stat="2B"]',
'triples': 'td[data-stat="3B"]',
'home_runs': 'td[data-stat="HR"]',
'runs_batted_in': 'td[data-stat="RBI"]',
'stolen_bases': 'td[data-stat="SB"]',
'times_caught_stealing': 'td[data-stat="CS"]',
'bases_on_balls': 'td[data-stat="BB"]',
'times_struck_out': 'td[data-stat="SO"]',
'batting_average': 'td[data-stat="batting_avg"]',
'on_base_percentage': 'td[data-stat="onbase_perc"]',
'slugging_percentage': 'td[data-stat="slugging_perc"]',
'on_base_plus_slugging_percentage': 'td[data-stat="onbase_plus_slugging"]',
'on_base_plus_slugging_percentage_plus':
'td[data-stat="onbase_plus_slugging_plus"]',
'total_bases': 'td[data-stat="TB"]',
'grounded_into_double_plays': 'td[data-stat="GIDP"]',
'times_hit_by_pitch': 'td[data-stat="HBP"]',
'sacrifice_hits': 'td[data-stat="SH"]',
'sacrifice_flies': 'td[data-stat="SF"]',
'intentional_bases_on_balls': 'td[data-stat="IBB"]',
'complete_games': 'td[data-stat="CG"]',
'innings_played': 'td[data-stat="Inn_def"]',
'defensive_chances': 'td[data-stat="chances"]',
'putouts': 'td[data-stat="PO"]',
'assists': 'td[data-stat="A"]',
'errors': 'td[data-stat="E_def"]',
'double_plays_turned': 'td[data-stat="DP_def"]',
'fielding_percentage': 'td[data-stat="fielding_perc"]',
'total_fielding_runs_above_average': 'td[data-stat="tz_runs_total"]',
'defensive_runs_saved_above_average': 'td[data-stat="bis_runs_total"]',
'total_fielding_runs_above_average_per_innings':
'td[data-stat="tz_runs_total_per_season"]',
'defensive_runs_saved_above_average_per_innings':
'td[data-stat="bis_runs_total_per_season"]',
'range_factor_per_nine_innings': 'td[data-stat="range_factor_per_nine"]',
'range_factor_per_game': 'td[data-stat="range_factor_per_game"]',
'league_fielding_percentage': 'td[data-stat="fielding_perc_lg"]',
'league_range_factor_per_nine_innings':
'td[data-stat="range_factor_per_nine_lg"]',
'league_range_factor_per_game': 'td[data-stat="range_factor_per_game_lg"]',
'games_in_batting_order': 'td[data-stat="G_batting"]',
'games_in_defensive_lineup': 'td[data-stat="G_defense"]',
'games_pitcher': 'td[data-stat="G_p_app"]',
'games_catcher': 'td[data-stat="G_c"]',
'games_first_baseman': 'td[data-stat="G_1b"]',
'games_second_baseman': 'td[data-stat="G_2b"]',
'games_third_baseman': 'td[data-stat="G_3b"]',
'games_shortstop': 'td[data-stat="G_ss"]',
'games_left_fielder': 'td[data-stat="G_lf_app"]',
'games_center_fielder': 'td[data-stat="G_cf_app"]',
'games_right_fielder': 'td[data-stat="G_rf_app"]',
'games_outfielder': 'td[data-stat="G_of_app"]',
'games_designated_hitter': 'td[data-stat="G_dh"]',
'games_pinch_hitter': 'td[data-stat="G_ph"]',
'games_pinch_runner': 'td[data-stat="G_pr"]',
'wins': 'td[data-stat="W"]',
'losses': 'td[data-stat="L"]',
'win_percentage': 'td[data-stat="win_loss_perc"]',
'era': 'td[data-stat="earned_run_avg"]',
'games_finished': 'td[data-stat="GF"]',
'shutouts': 'td[data-stat="SHO"]',
'saves': 'td[data-stat="SV"]',
'hits_allowed': 'td[data-stat="H"]',
'runs_allowed': 'td[data-stat="R"]',
'earned_runs_allowed': 'td[data-stat="ER"]',
'home_runs_allowed': 'td[data-stat="HR"]',
'bases_on_balls_given': 'td[data-stat="BB"]',
'intentional_bases_on_balls_given': 'td[data-stat="IBB"]',
'strikeouts': 'td[data-stat="SO"]',
'times_hit_player': 'td[data-stat="HBP"]',
'balks': 'td[data-stat="BK"]',
'wild_pitches': 'td[data-stat="WP"]',
'batters_faced': 'td[data-stat="batters_faced"]',
'era_plus': 'td[data-stat="earned_run_avg_plus"]',
'fielding_independent_pitching': 'td[data-stat="fip"]',
'whip': 'td[data-stat="whip"]',
'hits_against_per_nine_innings': 'td[data-stat="hits_per_nine"]',
'home_runs_against_per_nine_innings': 'td[data-stat="home_runs_per_nine"]',
'bases_on_balls_given_per_nine_innings':
'td[data-stat="bases_on_balls_per_nine"]',
'batters_struckout_per_nine_innings':
'td[data-stat="strikeouts_per_nine"]',
'strikeouts_thrown_per_walk':
'td[data-stat="strikeouts_per_base_on_balls"]'
}
PLAYER_ELEMENT_INDEX = {
'bases_on_balls_given': 1,
'hits_allowed': 1,
'home_runs_allowed': 1,
'intentional_bases_on_balls_given': 1,
'runs_allowed': 1,
'strikeouts': 1,
'times_hit_player': 1
}
NATIONALITY = {
'af': 'Afghanistan',
'as': 'American Samoa',
'aw': 'Aruba',
'au': 'Australia',
'at': 'Austria',
'bs': 'Bahamas',
'be': 'Belgium',
'bz': 'Belize',
'br': 'Brazil',
'ca': 'Canada',
'cn': 'China',
'co': 'Colombia',
'cu': 'Cuba',
'cw': 'Curacao',
'cz': 'Czech Republic',
'dk': 'Denmark',
'do': 'Dominican Replubic',
'fi': 'Finland',
'fr': 'France',
'de': 'Germany',
'gr': 'Greece',
'gu': 'Guam',
'hn': 'Honduras',
'hk': 'Hong Kong',
'id': 'Indonesia',
'ie': 'Ireland',
'it': 'Italy',
'jm': 'Jamaica',
'jp': 'Japan',
'lv': 'Latvia',
'lt': 'Lithuania',
'mx': 'Mexico',
'nl': 'Netherlands',
'ni': 'Nicaragua',
'no': 'Norway',
'pa': 'Panama',
'ph': 'Philippines',
'pl': 'Poland',
'pt': 'Portugal',
'pr': 'Puerto Rico',
'ru': 'Russian Federation',
'sa': 'Saudi Arabia',
'si': 'Singapore',
'sk': 'Slovakia',
'za': 'South Africa',
'kr': 'South Korea',
'es': 'Spain',
'se': 'Sweden',
'ch': 'Switzerland',
'tw': 'Taiwan',
'us': 'United States of America',
'vi': 'U.S. Virgin Islands',
'gb': 'United Kingdom',
've': 'Venezuela',
'vn': 'Viet Nam'
}
DOUBLE_HEADER_INDICES = {
'time': 'start time',
'attendance': 'attendance',
'venue': 'venue',
'duration': 'game duration'
}
STANDINGS_URL = ('https://www.baseball-reference.com/leagues/MLB/'
'%s-standings.shtml')
TEAM_STATS_URL = 'https://www.baseball-reference.com/leagues/MLB/%s.shtml'
SCHEDULE_URL = ('https://www.baseball-reference.com/teams/%s/'
'%s-schedule-scores.shtml')
BOXSCORE_URL = 'https://www.baseball-reference.com/boxes/%s.shtml'
BOXSCORES_URL = ('https://www.baseball-reference.com/boxes/'
'?year=%s&month=%s&day=%s')
PLAYER_URL = 'https://www.baseball-reference.com/players/%s/%s.shtml'
ROSTER_URL = 'https://www.baseball-reference.com/teams/%s/%s.shtml'
NIGHT = 'Night'
DAY = 'Day'
| 41.597194 | 79 | 0.65443 |
d1b0ffb6323b7aa0f8d7cbbb04d833a15925a384 | 6,757 | py | Python | test/Root/src/main/validate_results.py | royadityak94/Interview | 40a7f7e2edddbb525bc6b71ea72d6cd2bda5708f | [
"MIT"
] | null | null | null | test/Root/src/main/validate_results.py | royadityak94/Interview | 40a7f7e2edddbb525bc6b71ea72d6cd2bda5708f | [
"MIT"
] | null | null | null | test/Root/src/main/validate_results.py | royadityak94/Interview | 40a7f7e2edddbb525bc6b71ea72d6cd2bda5708f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Helper scripts to validate the output by the individual implementations against the verified implementation
import warnings
warnings.filterwarnings("ignore")
import pandas as pd
from pandas.util.testing import assert_frame_equal
import os
import mmap
from parse import parse
from utilities import RunningConstants
import argparse
def load_file_to_pandas(file_path: str) -> pd.DataFrame:
"""Module to load structured file data into pandas dataframe
Parameters
----------
file_path*: Support both relative, absolute references to the input file location.
(* - Required parameters)
Returns
-------
pandas dataframe
"""
# Return exception should the file be inexistent
if not os.path.exists(file_path):
raise FileNotFoundError
file_output = {}
with open(file_path, mode='r', encoding='utf-8') as file:
with mmap.mmap(file.fileno(), length=0, access=mmap.ACCESS_READ) as mmap_obj:
try:
chunks = mmap_obj.read().decode('utf-8')+'\n'
for chunk in chunks.split('\n'):
if len(chunk) > 0:
non_zero_parse = parse(
'{driver}: {distance} miles @ {speed} mph', chunk) \
or parse(
'{driver}: {distance} miles', chunk)
if not non_zero_parse:
raise SyntaxError("The format of the line processed is unexpected " + chunk)
non_zero_parse = non_zero_parse.named
if len(non_zero_parse) not in range(2, 4):
raise ValueError("The value of the line processed is unexpected " + chunk)
elif len(non_zero_parse) == 3:
driver_info = non_zero_parse['driver']
miles_info = non_zero_parse['distance']
speed = non_zero_parse['speed']
file_output[driver_info] = {'distance': miles_info, 'speed': speed}
else:
file_output[non_zero_parse['driver']] = {'distance': 0, 'speed': 0}
except AttributeError as ae:
raise AttributeError("Attribute Error encountered, possibly with : ", non_zero_parse)
except IOError as ioe:
raise IOError('I/O Error({0}): {1}'.format(ioe.errno, ioe.strerror))
except Exception as ex:
raise Exception("Error: ", ex)
# Load the file into dataframe and return the dataframe
return pd.DataFrame.from_dict(file_output, orient='index').reset_index().rename(columns={'index': 'driver'})
def prepareDf(df:pd.DataFrame) -> pd.DataFrame:
    """Helper module to sort the pandas dataframe.
    Brings consistency to compared dataframes: two records with the same miles
    driven may appear on different rows in the output files, so both sides are
    put into a single canonical row order before comparison.
    Parameters
    ----------
    df*: Pandas dataframe (with columns: distance, speed, driver)
    (* - Required parameters)
    Returns
    -------
    pandas dataframe sorted descending by distance, speed, driver with a
    fresh 0..n-1 index
    """
    sort_keys = ['distance', 'speed', 'driver']
    canonical = df.sort_values(by=sort_keys, ascending=False)
    return canonical.reset_index(drop=True)
def validate_results(table1: str, table2: str, tolerance:int = 0) -> dict:
    """Module to verify if the contents of two files are same or different
    within a given tolerance value.
    Parameters
    ----------
    table1*: Support both relative, absolute references to the file location
    table2*: Support both relative, absolute references to the file location
    tolerance: Supports mismatch upto a certain count (default: 0, which
               falls back to RunningConstants.RESULTSET_TOLERANCE)
    (* - Required parameters)
    Returns
    -------
    dictionary:
        Tuple:
            0: status- boolean value (True if matched, False elsewise)
            1: mismatch count (if matched under tolerance) or
               entire mismatch (in case of a failed match)
    """
    # Load both the reference and baseline dataframes and normalise row order
    base_table = prepareDf(load_file_to_pandas(table1))
    reference_table = prepareDf(load_file_to_pandas(table2))
    if not tolerance:
        # tolerance 0/None means "use the project-wide default"
        tolerance = RunningConstants.RESULTSET_TOLERANCE.value
    try:
        assert_frame_equal(base_table, reference_table, check_dtype=False, check_like=True)
        return {'status': (True, 0)}
    except AssertionError:
        # Exact comparison failed; count numeric mismatches per column and
        # accept the result while each column stays within the tolerance.
        mismatch = reference_table.compare(base_table).reset_index(drop=True)
        all_mismatched = 0
        for level in mismatch.columns.levels[0]:
            if level not in ['speed', 'distance']:
                continue
            compared = mismatch[level]
            compared['self'] = pd.to_numeric(compared['self'])
            compared['other'] = pd.to_numeric(compared['other'])
            # NOTE(review): the per-value threshold of 1 unit is hard-coded
            # and independent of `tolerance` (which bounds the mismatch
            # count) -- confirm this is intended.
            compared = compared.query("abs(self-other) > 1")
            mismatched_count = compared.count()[0]
            if mismatched_count > tolerance:
                return {'status': (False, mismatch)}
            all_mismatched += mismatched_count
        return {'status': (True, all_mismatched)}
if __name__ == '__main__':
    # Command-line entry point: compare two result-table files given as paths.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-t1",
        "--table1",
        help="First table file path (Relative/Absolute)",
        type=str
    )
    parser.add_argument(
        "-t2",
        "--table2",
        help="Second table file path (Relative/Absolute)",
        type=str
    )
    args = parser.parse_args()
    # Throwing error, should the required input arguments be missing
    # (parser.error() exits, so the second name is only checked when the
    # first one is present -- same order as before).
    for required_name in ("table1", "table2"):
        if not getattr(args, required_name):
            parser.error("Missing *required --%s argument" % required_name)
    flag, others = validate_results(args.table1, args.table2)['status']
    # Comprehending the comparison results as appropriate
    if not flag:
        print ("Comparison was unsuccessful. Detailed mismatch is as follows: \n", others)
    elif not others:
        print ("Successful. Additional Details: Both tables have matched completely")
    else:
        print ("Successful. Additional Details: Mismatch %d under tolerance" % others)
| 43.593548 | 113 | 0.60293 |
418f26418ee16694e7fd0f15c8765894b28750b3 | 17,468 | py | Python | examples/imagenet/main_fp16_optimizer.py | bcbcbcbcbcl/apex | 7b3ac7221367dc7b7527a68e34cf08b5eeb0fc47 | [
"BSD-3-Clause"
] | 2 | 2021-06-24T18:31:04.000Z | 2021-06-24T20:34:44.000Z | examples/imagenet/main_fp16_optimizer.py | bcbcbcbcbcl/apex | 7b3ac7221367dc7b7527a68e34cf08b5eeb0fc47 | [
"BSD-3-Clause"
] | null | null | null | examples/imagenet/main_fp16_optimizer.py | bcbcbcbcbcl/apex | 7b3ac7221367dc7b7527a68e34cf08b5eeb0fc47 | [
"BSD-3-Clause"
] | null | null | null | import argparse
import os
import shutil
import time
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.distributed as dist
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import numpy as np
try:
from apex.parallel import DistributedDataParallel as DDP
from apex.fp16_utils import *
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to run this example.")
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('data', metavar='DIR',
help='path to dataset')
parser.add_argument('--arch', '-a', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=90, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N', help='mini-batch size per process (default: 256)')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='Initial learning rate. Will be scaled by <global batch size>/256: args.lr = args.lr*float(args.batch_size*args.world_size)/256. A warmup schedule will also be applied over the first 5 epochs.')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)')
parser.add_argument('--print-freq', '-p', default=10, type=int,
metavar='N', help='print frequency (default: 10)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('--fp16', action='store_true',
help='Run model fp16 mode.')
parser.add_argument('--static-loss-scale', type=float, default=1,
help='Static loss scale, positive power of 2 values can improve fp16 convergence.')
parser.add_argument('--dynamic-loss-scale', action='store_true',
help='Use dynamic loss scaling. If supplied, this argument supersedes ' +
'--static-loss-scale.')
parser.add_argument('--prof', dest='prof', action='store_true',
help='Only run 10 iterations for profiling.')
parser.add_argument('--deterministic', action='store_true')
parser.add_argument("--local_rank", default=0, type=int)
parser.add_argument('--sync_bn', action='store_true',
help='enabling apex sync BN.')
cudnn.benchmark = True
def fast_collate(batch):
    """Collate a list of (PIL image, label) pairs into a uint8 NCHW tensor
    plus an int64 label tensor, skipping torchvision's ToTensor/normalize
    (normalization happens on the GPU in data_prefetcher).

    All images are assumed to share the size of the first one. Fix: the
    original created an unused `tens = torch.from_numpy(...)` per image --
    dead work, removed.
    """
    imgs = [img[0] for img in batch]
    targets = torch.tensor([target[1] for target in batch], dtype=torch.int64)
    w = imgs[0].size[0]
    h = imgs[0].size[1]
    tensor = torch.zeros( (len(imgs), 3, h, w), dtype=torch.uint8 )
    for i, img in enumerate(imgs):
        nump_array = np.asarray(img, dtype=np.uint8)
        if(nump_array.ndim < 3):
            # grayscale: add a trailing channel axis so the rollaxis below works;
            # the (1, h, w) result broadcasts across the 3 output channels.
            nump_array = np.expand_dims(nump_array, axis=-1)
        nump_array = np.rollaxis(nump_array, 2)  # HWC -> CHW
        tensor[i] += torch.from_numpy(nump_array)
    return tensor, targets
# Best validation top-1 seen so far; updated in main() via `global`.
best_prec1 = 0
args = parser.parse_args()
if args.deterministic:
    # Trade cuDNN autotuning for reproducible kernel selection; each rank
    # seeds with its own local_rank.
    cudnn.benchmark = False
    cudnn.deterministic = True
    torch.manual_seed(args.local_rank)
def main():
    """End-to-end training driver: distributed init, model/optimizer build,
    optional checkpoint resume, data pipeline construction, then the
    epoch loop of train()/validate()/save_checkpoint()."""
    global best_prec1, args
    # WORLD_SIZE is set by torch.distributed.launch; >1 means multi-process.
    args.distributed = False
    if 'WORLD_SIZE' in os.environ:
        args.distributed = int(os.environ['WORLD_SIZE']) > 1
    args.gpu = 0
    args.world_size = 1
    if args.distributed:
        # One process per GPU: pin this process to its device before NCCL init.
        args.gpu = args.local_rank % torch.cuda.device_count()
        torch.cuda.set_device(args.gpu)
        torch.distributed.init_process_group(backend='nccl',
                                             init_method='env://')
        args.world_size = torch.distributed.get_world_size()
    if args.fp16:
        assert torch.backends.cudnn.enabled, "fp16 mode requires cudnn backend to be enabled."
    if args.static_loss_scale != 1.0:
        if not args.fp16:
            print("Warning: if --fp16 is not used, static_loss_scale will be ignored.")
    # create model
    if args.pretrained:
        print("=> using pre-trained model '{}'".format(args.arch))
        model = models.__dict__[args.arch](pretrained=True)
    else:
        print("=> creating model '{}'".format(args.arch))
        model = models.__dict__[args.arch]()
    if args.sync_bn:
        import apex
        print("using apex synced BN")
        model = apex.parallel.convert_syncbn_model(model)
    model = model.cuda()
    if args.fp16:
        # apex helper: casts the network to half while keeping BN in fp32.
        model = network_to_half(model)
    if args.distributed:
        # By default, apex.parallel.DistributedDataParallel overlaps communication with
        # computation in the backward pass.
        # model = DDP(model)
        # delay_allreduce delays all communication to the end of the backward pass.
        model = DDP(model, delay_allreduce=True)
    # define loss function (criterion) and optimizer
    criterion = nn.CrossEntropyLoss().cuda()
    # Scale learning rate based on global batch size
    args.lr = args.lr*float(args.batch_size*args.world_size)/256.
    optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                momentum=args.momentum,
                                weight_decay=args.weight_decay)
    if args.fp16:
        # Wraps the optimizer so it keeps fp32 master weights and applies
        # loss scaling; must be created after the model is halved.
        optimizer = FP16_Optimizer(optimizer,
                                   static_loss_scale=args.static_loss_scale,
                                   dynamic_loss_scale=args.dynamic_loss_scale)
    # Optionally resume from a checkpoint
    if args.resume:
        # Use a local scope to avoid dangling references
        def resume():
            if os.path.isfile(args.resume):
                print("=> loading checkpoint '{}'".format(args.resume))
                checkpoint = torch.load(args.resume, map_location = lambda storage, loc: storage.cuda(args.gpu))
                args.start_epoch = checkpoint['epoch']
                # NOTE(review): this assigns a *local* best_prec1 (no `global`
                # inside resume()), so the module-level best_prec1 is NOT
                # restored from the checkpoint -- looks like a bug; confirm.
                best_prec1 = checkpoint['best_prec1']
                model.load_state_dict(checkpoint['state_dict'])
                # An FP16_Optimizer instance's state dict internally stashes the master params.
                optimizer.load_state_dict(checkpoint['optimizer'])
                print("=> loaded checkpoint '{}' (epoch {})"
                      .format(args.resume, checkpoint['epoch']))
            else:
                print("=> no checkpoint found at '{}'".format(args.resume))
        resume()
    # Data loading code
    traindir = os.path.join(args.data, 'train')
    valdir = os.path.join(args.data, 'val')
    if(args.arch == "inception_v3"):
        crop_size = 299
        val_size = 320 # I chose this value arbitrarily, we can adjust.
    else:
        crop_size = 224
        val_size = 256
    # No ToTensor/Normalize here: fast_collate emits raw uint8 and
    # data_prefetcher normalizes on the GPU.
    train_dataset = datasets.ImageFolder(
        traindir,
        transforms.Compose([
            transforms.RandomResizedCrop(crop_size),
            transforms.RandomHorizontalFlip(),
            # transforms.ToTensor(), Too slow
            # normalize,
        ]))
    val_dataset = datasets.ImageFolder(valdir, transforms.Compose([
            transforms.Resize(val_size),
            transforms.CenterCrop(crop_size),
        ]))
    train_sampler = None
    val_sampler = None
    if args.distributed:
        # Each rank sees a disjoint shard of the dataset.
        train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset)
        val_sampler = torch.utils.data.distributed.DistributedSampler(val_dataset)
    train_loader = torch.utils.data.DataLoader(
        train_dataset, batch_size=args.batch_size, shuffle=(train_sampler is None),
        num_workers=args.workers, pin_memory=True, sampler=train_sampler, collate_fn=fast_collate)
    val_loader = torch.utils.data.DataLoader(
        val_dataset,
        batch_size=args.batch_size, shuffle=False,
        num_workers=args.workers, pin_memory=True,
        sampler=val_sampler,
        collate_fn=fast_collate)
    if args.evaluate:
        validate(val_loader, model, criterion)
        return
    for epoch in range(args.start_epoch, args.epochs):
        if args.distributed:
            # Reshuffle the shard assignment each epoch.
            train_sampler.set_epoch(epoch)
        # train for one epoch
        train(train_loader, model, criterion, optimizer, epoch)
        if args.prof:
            break
        # evaluate on validation set
        prec1 = validate(val_loader, model, criterion)
        # remember best prec@1 and save checkpoint (rank 0 only)
        if args.local_rank == 0:
            is_best = prec1 > best_prec1
            best_prec1 = max(prec1, best_prec1)
            save_checkpoint({
                'epoch': epoch + 1,
                'arch': args.arch,
                'state_dict': model.state_dict(),
                'best_prec1': best_prec1,
                'optimizer' : optimizer.state_dict(),
            }, is_best)
class data_prefetcher():
    """Wraps a DataLoader iterator and overlaps host->device copies with
    compute by staging the next batch on a side CUDA stream, where it is
    also cast (fp16/fp32) and channel-normalized.

    Fix: `.cuda(async=True)` is a SyntaxError on Python 3.7+ (`async` became
    a reserved keyword); the argument was renamed `non_blocking` in PyTorch.
    """
    def __init__(self, loader):
        self.loader = iter(loader)
        self.stream = torch.cuda.Stream()
        # ImageNet channel mean/std scaled to the uint8 [0, 255] range,
        # shaped (1, 3, 1, 1) for broadcasting over NCHW batches.
        self.mean = torch.tensor([0.485 * 255, 0.456 * 255, 0.406 * 255]).cuda().view(1,3,1,1)
        self.std = torch.tensor([0.229 * 255, 0.224 * 255, 0.225 * 255]).cuda().view(1,3,1,1)
        if args.fp16:
            self.mean = self.mean.half()
            self.std = self.std.half()
        self.preload()
    def preload(self):
        """Fetch the next batch and start its async upload/normalization."""
        try:
            self.next_input, self.next_target = next(self.loader)
        except StopIteration:
            # Exhausted: next() will return (None, None) and callers stop.
            self.next_input = None
            self.next_target = None
            return
        with torch.cuda.stream(self.stream):
            # non_blocking requires pinned host memory (pin_memory=True in
            # the DataLoader) for the copy to actually be asynchronous.
            self.next_input = self.next_input.cuda(non_blocking=True)
            self.next_target = self.next_target.cuda(non_blocking=True)
            if args.fp16:
                self.next_input = self.next_input.half()
            else:
                self.next_input = self.next_input.float()
            self.next_input = self.next_input.sub_(self.mean).div_(self.std)
    def next(self):
        """Return the staged batch and kick off prefetching of the next one."""
        # Make the compute stream wait until the staged copy has finished.
        torch.cuda.current_stream().wait_stream(self.stream)
        input = self.next_input
        target = self.next_target
        self.preload()
        return input, target
def train(train_loader, model, criterion, optimizer, epoch):
    """Run one training epoch, updating `model` in place.

    Batches come through data_prefetcher (GPU-side normalization), the LR
    schedule is stepped per iteration, and in distributed runs the reported
    loss/accuracy are averaged across workers for logging only.
    """
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # switch to train mode
    model.train()
    end = time.time()
    prefetcher = data_prefetcher(train_loader)
    input, target = prefetcher.next()
    i = -1
    while input is not None:
        i += 1
        adjust_learning_rate(optimizer, epoch, i, len(train_loader))
        if args.prof:
            # Profiling mode: stop after a handful of iterations.
            if i > 10:
                break
        # measure data loading time
        data_time.update(time.time() - end)
        # compute output
        output = model(input)
        loss = criterion(output, target)
        # measure accuracy and record loss
        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
        if args.distributed:
            # Average the logged metrics across workers.
            reduced_loss = reduce_tensor(loss.data)
            prec1 = reduce_tensor(prec1)
            prec5 = reduce_tensor(prec5)
        else:
            reduced_loss = loss.data
        losses.update(to_python_float(reduced_loss), input.size(0))
        top1.update(to_python_float(prec1), input.size(0))
        top5.update(to_python_float(prec5), input.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        if args.fp16:
            # FP16_Optimizer scales the loss before backward to avoid
            # gradient underflow in half precision.
            optimizer.backward(loss)
        else:
            loss.backward()
        optimizer.step()
        # Synchronize so the measured batch time includes the GPU work.
        torch.cuda.synchronize()
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        input, target = prefetcher.next()
        if args.local_rank == 0 and i % args.print_freq == 0 and i > 1:
            print('Epoch: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Speed {3:.3f} ({4:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                  epoch, i, len(train_loader),
                  args.world_size * args.batch_size / batch_time.val,
                  args.world_size * args.batch_size / batch_time.avg,
                  batch_time=batch_time,
                  data_time=data_time, loss=losses, top1=top1, top5=top5))
def validate(val_loader, model, criterion):
    """Evaluate `model` over `val_loader` (no gradients) and return the
    epoch-average top-1 accuracy; metrics are cross-worker averaged in
    distributed runs."""
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # switch to evaluate mode
    model.eval()
    end = time.time()
    prefetcher = data_prefetcher(val_loader)
    input, target = prefetcher.next()
    i = -1
    while input is not None:
        i += 1
        # compute output
        with torch.no_grad():
            output = model(input)
            loss = criterion(output, target)
        # measure accuracy and record loss
        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
        if args.distributed:
            # Average the logged metrics across workers.
            reduced_loss = reduce_tensor(loss.data)
            prec1 = reduce_tensor(prec1)
            prec5 = reduce_tensor(prec5)
        else:
            reduced_loss = loss.data
        losses.update(to_python_float(reduced_loss), input.size(0))
        top1.update(to_python_float(prec1), input.size(0))
        top5.update(to_python_float(prec5), input.size(0))
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if args.local_rank == 0 and i % args.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Speed {2:.3f} ({3:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                  i, len(val_loader),
                  args.world_size * args.batch_size / batch_time.val,
                  args.world_size * args.batch_size / batch_time.avg,
                  batch_time=batch_time, loss=losses,
                  top1=top1, top5=top5))
        input, target = prefetcher.next()
    print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
          .format(top1=top1, top5=top5))
    return top1.avg
def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):
    """Serialize `state` to `filename`; when `is_best`, mirror the file to
    'model_best.pth.tar' in the working directory."""
    torch.save(state, filename)
    if not is_best:
        return
    shutil.copyfile(filename, 'model_best.pth.tar')
class AverageMeter(object):
    """Tracks the most recent value plus a running sum, count and average."""
    def __init__(self):
        self.reset()
    def reset(self):
        """Zero out all tracked statistics."""
        self.count = 0
        self.sum = 0
        self.avg = 0
        self.val = 0
    def update(self, val, n=1):
        """Record `val` observed `n` times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
def adjust_learning_rate(optimizer, epoch, step, len_epoch):
    """LR schedule that should yield 76% converged accuracy with batch size 256.

    Decays the base LR 10x every 30 epochs (with one extra drop from epoch 80)
    and applies a per-iteration linear warmup over the first 5 epochs.
    """
    decay_exponent = epoch // 30
    if epoch >= 80:
        decay_exponent += 1
    lr = args.lr * (0.1 ** decay_exponent)
    if epoch < 5:
        # Warmup: ramp linearly with the global iteration count.
        warmup_fraction = float(1 + step + epoch * len_epoch) / (5. * len_epoch)
        lr = lr * warmup_fraction
    # if(args.local_rank == 0):
    #     print("epoch = {}, step = {}, lr = {}".format(epoch, step, lr))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k.

    Args:
        output: (batch, num_classes) score tensor.
        target: (batch,) ground-truth class indices.
        topk: tuple of k values to report.
    Returns:
        list of 1-element tensors, each the percentage of samples whose
        target is within the top-k predictions.
    """
    maxk = max(topk)
    batch_size = target.size(0)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    res = []
    for k in topk:
        # reshape(-1) instead of view(-1): view() raises on tensors whose
        # strides are incompatible with the flattened shape; upstream
        # pytorch/examples applied the same fix for newer PyTorch versions.
        correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
def reduce_tensor(tensor):
    """All-reduce `tensor` across all workers and return the mean.

    Operates on a clone so the caller's tensor is untouched. Fix:
    `dist.reduce_op` was deprecated (and later removed) in favor of
    `dist.ReduceOp`.
    """
    rt = tensor.clone()
    dist.all_reduce(rt, op=dist.ReduceOp.SUM)
    rt /= args.world_size
    return rt
if __name__ == '__main__':
    # Entry point (guarded so the module's helpers can be imported elsewhere).
    main()
| 35.942387 | 235 | 0.601786 |
ccc6e582a423f88ca9c5986939f8f7c6810dd378 | 6,915 | py | Python | src/azure-cli-testsdk/azure/cli/testsdk/checkers.py | YuanyuanNi/azure-cli | 63844964374858bfacd209bfe1b69eb456bd64ca | [
"MIT"
] | 3,287 | 2016-07-26T17:34:33.000Z | 2022-03-31T09:52:13.000Z | src/azure-cli-testsdk/azure/cli/testsdk/checkers.py | YuanyuanNi/azure-cli | 63844964374858bfacd209bfe1b69eb456bd64ca | [
"MIT"
] | 19,206 | 2016-07-26T07:04:42.000Z | 2022-03-31T23:57:09.000Z | src/azure-cli-testsdk/azure/cli/testsdk/checkers.py | YuanyuanNi/azure-cli | 63844964374858bfacd209bfe1b69eb456bd64ca | [
"MIT"
] | 2,575 | 2016-07-26T06:44:40.000Z | 2022-03-31T22:56:06.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import re
import collections
import jmespath
from .exceptions import JMESPathCheckAssertionError
class JMESPathCheck(object):  # pylint: disable=too-few-public-methods
    """Assert that a JMESPath query over the command's JSON output equals
    an expected value, optionally ignoring case."""
    def __init__(self, query, expected_result, case_sensitive=True):
        self._query = query
        self._expected_result = expected_result
        self._case_sensitive = case_sensitive

    def __call__(self, execution_result):
        payload = execution_result.get_output_in_json()
        actual_result = None
        try:
            actual_result = jmespath.search(self._query, payload,
                                            jmespath.Options(collections.OrderedDict))
        except jmespath.exceptions.JMESPathTypeError:
            raise JMESPathCheckAssertionError(self._query, self._expected_result, actual_result,
                                              execution_result.output)
        # A match is either direct equality or equality of string forms.
        if self._case_sensitive:
            matched = (actual_result == self._expected_result
                       or str(actual_result) == str(self._expected_result))
        else:
            matched = (actual_result == self._expected_result
                       or str(actual_result).lower() == str(self._expected_result).lower())
        if matched:
            return
        shown = actual_result if actual_result else 'None'
        raise JMESPathCheckAssertionError(self._query, self._expected_result, shown,
                                          execution_result.output)
class JMESPathCheckExists(object):  # pylint: disable=too-few-public-methods
    """Assert that a JMESPath query over the JSON output yields a truthy value."""
    def __init__(self, query):
        self._query = query

    def __call__(self, execution_result):
        payload = execution_result.get_output_in_json()
        found = jmespath.search(self._query, payload,
                                jmespath.Options(collections.OrderedDict))
        if found:
            return
        raise JMESPathCheckAssertionError(self._query, 'some value', found,
                                          execution_result.output)
class JMESPathCheckNotExists(object):  # pylint: disable=too-few-public-methods
    """Assert that a JMESPath query over the JSON output yields nothing (falsy)."""
    def __init__(self, query):
        self._query = query

    def __call__(self, execution_result):
        payload = execution_result.get_output_in_json()
        found = jmespath.search(self._query, payload,
                                jmespath.Options(collections.OrderedDict))
        if not found:
            return
        raise JMESPathCheckAssertionError(self._query, 'some value', found,
                                          execution_result.output)
class JMESPathCheckGreaterThan(object):  # pylint: disable=too-few-public-methods
    """Assert that the value selected by a JMESPath query is strictly
    greater than an expected threshold."""
    def __init__(self, query, expected_result):
        self._query = query
        self._expected_result = expected_result

    def __call__(self, execution_result):
        payload = execution_result.get_output_in_json()
        actual_result = jmespath.search(self._query, payload,
                                        jmespath.Options(collections.OrderedDict))
        if actual_result > self._expected_result:
            return
        expected_result_format = "> {}".format(self._expected_result)
        shown = actual_result if actual_result else 'None'
        raise JMESPathCheckAssertionError(self._query, expected_result_format, shown,
                                          execution_result.output)
class JMESPathPatternCheck(object):  # pylint: disable=too-few-public-methods
    """Assert that the string form of the queried value matches a regular
    expression (case-insensitive, anchored at the start via re.match)."""
    def __init__(self, query, expected_result):
        self._query = query
        self._expected_result = expected_result

    def __call__(self, execution_result):
        payload = execution_result.get_output_in_json()
        actual_result = jmespath.search(self._query, payload,
                                        jmespath.Options(collections.OrderedDict))
        matched = re.match(self._expected_result, str(actual_result), re.IGNORECASE)
        if not matched:
            raise JMESPathCheckAssertionError(self._query, self._expected_result, actual_result,
                                              execution_result.output)
class NoneCheck(object):  # pylint: disable=too-few-public-methods
    """Assert that the command produced no meaningful output: empty/whitespace,
    '[]', '{}' or 'false'."""
    def __call__(self, execution_result):  # pylint: disable=no-self-use
        none_strings = ['[]', '{}', 'false']
        data = execution_result.output.strip()
        if data and data not in none_strings:
            raise AssertionError("Actual value '{}' != Expected value falsy (None, '', []) or "
                                 "string in {}".format(data, none_strings))
class StringCheck(object):  # pylint: disable=too-few-public-methods
    """Assert that the command output (whitespace- and quote-stripped)
    equals the expected string exactly."""
    def __init__(self, expected_result):
        self.expected_result = expected_result

    def __call__(self, execution_result):
        actual = execution_result.output.strip().strip('"')
        if actual != self.expected_result:
            raise AssertionError(
                "Actual value '{}' != Expected value {}".format(actual, self.expected_result))
class StringContainCheck(object):  # pylint: disable=too-few-public-methods
    """Assert that the command output (quote-stripped only) contains the
    expected substring, case-sensitively."""
    def __init__(self, expected_result):
        self.expected_result = expected_result

    def __call__(self, execution_result):
        actual = execution_result.output.strip('"')
        if self.expected_result not in actual:
            raise AssertionError(
                "Actual value '{}' doesn't contain Expected value {}".format(actual,
                                                                             self.expected_result))
class StringContainCheckIgnoreCase(object):  # pylint: disable=too-few-public-methods
    """Assert that the command output contains the expected substring,
    ignoring case (both sides lower-cased before comparing)."""
    def __init__(self, expected_result):
        self.expected_result = expected_result.lower()

    def __call__(self, execution_result):
        actual = execution_result.output.strip('"').lower()
        if self.expected_result not in actual:
            raise AssertionError(
                "Actual value '{}' doesn't contain Expected value {}".format(actual,
                                                                             self.expected_result))
| 46.409396 | 111 | 0.60752 |
3e53c064bff073a8c4e1360bafdd3256b1fa08c5 | 26,671 | py | Python | monk/gluon/finetune/level_3_training_base.py | take2rohit/monk_v1 | 9c567bf2c8b571021b120d879ba9edf7751b9f92 | [
"Apache-2.0"
] | 542 | 2019-11-10T12:09:31.000Z | 2022-03-28T11:39:07.000Z | monk/gluon/finetune/level_3_training_base.py | take2rohit/monk_v1 | 9c567bf2c8b571021b120d879ba9edf7751b9f92 | [
"Apache-2.0"
] | 117 | 2019-11-12T09:39:24.000Z | 2022-03-12T00:20:41.000Z | monk/gluon/finetune/level_3_training_base.py | take2rohit/monk_v1 | 9c567bf2c8b571021b120d879ba9edf7751b9f92 | [
"Apache-2.0"
] | 246 | 2019-11-09T21:53:24.000Z | 2022-03-29T00:57:07.000Z | from monk.gluon.finetune.imports import *
from monk.system.imports import *
from monk.gluon.finetune.level_2_model_base import finetune_model
class finetune_training(finetune_model):
'''
Base class for training and associated functions
Args:
verbose (int): Set verbosity levels
0 - Print Nothing
1 - Print desired details
'''
    @accepts("self", verbose=int, post_trace=False)
    #@TraceFunction(trace_args=True, trace_rv=True)
    def __init__(self, verbose=1):
        # All shared state (system_dict, logging paths, etc.) is set up by
        # the finetune_model base class.
        super().__init__(verbose=verbose);
###############################################################################################################################################
    @accepts("self", post_trace=False)
    #@TraceFunction(trace_args=True, trace_rv=True)
    def get_training_estimate(self):
        '''
        Get estimated time for training a single epoch based on all set parameters.
        Times one tenth of the train and val iterations and extrapolates (x10).
        Args:
            None
        Returns:
            float: Total time per epoch in seconds
        '''
        total_time_per_epoch = 0;
        # Materialise scheduler/optimizer/loss exactly as real training would,
        # so the timed iterations are representative.
        self.system_dict = load_scheduler(self.system_dict);
        self.system_dict = load_optimizer(self.system_dict);
        self.system_dict = load_loss(self.system_dict);
        # Only a tenth of each loader is timed; the wall time is scaled by 10 below.
        num_iterations_train = len(self.system_dict["local"]["data_loaders"]["train"])//10;
        num_iterations_val = len(self.system_dict["local"]["data_loaders"]["val"])//10;
        since = time.time();
        train_loss = 0;
        for i, batch in enumerate(self.system_dict["local"]["data_loaders"]["train"]):
            # Shard the batch across all configured contexts (CPU/GPUs).
            data = mx.gluon.utils.split_and_load(batch[0], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
            label = mx.gluon.utils.split_and_load(batch[1], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
            with ag.record():
                outputs = [self.system_dict["local"]["model"](X) for X in data]
                loss = [self.system_dict["local"]["criterion"](yhat, y) for yhat, y in zip(outputs, label)]
            # Backward pass included so the estimate reflects real training cost.
            for l in loss:
                l.backward()
            train_loss += sum([l.mean().asscalar() for l in loss]) / len(loss)
            if(i==num_iterations_train):
                break;
        # Validation pass timed without backward (forward + loss only).
        for i, batch in enumerate(self.system_dict["local"]["data_loaders"]["val"]):
            data = mx.gluon.utils.split_and_load(batch[0], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
            label = mx.gluon.utils.split_and_load(batch[1], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
            with ag.record():
                outputs = [self.system_dict["local"]["model"](X) for X in data]
                loss = [self.system_dict["local"]["criterion"](yhat, y) for yhat, y in zip(outputs, label)]
            if(i==num_iterations_val):
                break;
        # Extrapolate the sampled tenth to a full epoch.
        total_time_per_epoch = (time.time() - since)*10;
        return total_time_per_epoch;
###############################################################################################################################################
###############################################################################################################################################
    @accepts("self", post_trace=False)
    #@TraceFunction(trace_args=True, trace_rv=True)
    def set_training_evaluation(self):
        '''
        Base function for running validation while training.
        Args:
            None
        Returns:
            tuple: mxnet metric (name, value) pair for the validation set
            float: Test Loss (sum over batches of the per-batch mean loss)
        '''
        num_batch = len(self.system_dict["local"]["data_loaders"]["val"]);
        # Single-label data is scored with plain accuracy; multi-label data
        # uses the project-defined custom metric.
        if(self.system_dict["dataset"]["label_type"] == "single"):
            metric = mx.metric.Accuracy();
        else:
            metric = mx.metric.CustomMetric(feval=self.custom_metric)
        if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
            pbar = tqdm(total=num_batch);
        test_loss = 0;
        for i, batch in enumerate(self.system_dict["local"]["data_loaders"]["val"]):
            if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                pbar.update();
            # Shard the batch across all configured contexts (CPU/GPUs).
            data = mx.gluon.utils.split_and_load(batch[0], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
            label = mx.gluon.utils.split_and_load(batch[1], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
            with ag.record():
                outputs = [self.system_dict["local"]["model"](X) for X in data]
                loss = [self.system_dict["local"]["criterion"](yhat, y) for yhat, y in zip(outputs, label)]
            test_loss += sum([l.mean().asscalar() for l in loss]) / len(loss)
            metric.update(label, outputs)
        return metric.get(), test_loss;
###############################################################################################################################################
###############################################################################################################################################
    @accepts("self", post_trace=False)
    #@TraceFunction(trace_args=True, trace_rv=True)
    def set_training_final(self):
        '''
        Main training function.

        Handles three mutually exclusive experiment states:
          - resume_train: reload scheduler/optimizer/loss and previous log
            histories, then continue training, skipping epochs already done.
          - eval_infer:   training is not allowed; raises ConstraintError.
          - otherwise:    train from scratch with fresh log histories.

        Args:
            None

        Returns:
            None
        '''
        if(self.system_dict["states"]["resume_train"]):
            self.custom_print("Training Resume");
            # Re-create training objects from the saved experiment state.
            self.system_dict = load_scheduler(self.system_dict);
            self.system_dict = load_optimizer(self.system_dict);
            self.system_dict = load_loss(self.system_dict);
            # Single-label -> plain accuracy; multi-label -> custom metric.
            if(self.system_dict["dataset"]["label_type"] == "single"):
                metric = mx.metric.Accuracy();
            else:
                metric = mx.metric.CustomMetric(feval=self.custom_metric)
            trainer = mx.gluon.Trainer(self.system_dict["local"]["model"].collect_params(), optimizer=self.system_dict["local"]["optimizer"]);
            self.system_dict["training"]["status"] = False;
            since = time.time()
            pid = os.getpid();
            # Reload the logged histories so curves continue seamlessly.
            if(self.system_dict["training"]["settings"]["save_training_logs"]):
                val_acc_history = list(np.load(self.system_dict["log_dir"] + "val_acc_history.npy", allow_pickle=True));
                train_acc_history = list(np.load(self.system_dict["log_dir"] + "train_acc_history.npy", allow_pickle=True));
                val_loss_history = list(np.load(self.system_dict["log_dir"] + "val_loss_history.npy", allow_pickle=True));
                train_loss_history = list(np.load(self.system_dict["log_dir"] + "train_loss_history.npy", allow_pickle=True));
            num_batch_train = len(self.system_dict["local"]["data_loaders"]["train"]);
            num_batch_val = len(self.system_dict["local"]["data_loaders"]["val"]);
            best_acc = 0.0;
            best_acc_epoch = 0;
            max_gpu_usage = 0;
            for epoch in range(self.system_dict["hyper-parameters"]["num_epochs"]):
                if(self.system_dict["training"]["settings"]["display_progress"]):
                    self.custom_print(' Epoch {}/{}'.format(epoch+1, self.system_dict["hyper-parameters"]["num_epochs"]))
                    self.custom_print(' ' + '-' * 10)
                # Fast-forward through epochs finished before the resume.
                if(epoch < self.system_dict["training"]["outputs"]["epochs_completed"]):
                    self.custom_print("Skipping Current Epoch");
                    self.custom_print("");
                    self.custom_print("");
                    continue;
                since = time.time();
                train_loss = 0
                metric.reset()
                if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                    pbar = tqdm(total=num_batch_train);
                for i, batch in enumerate(self.system_dict["local"]["data_loaders"]["train"]):
                    if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                        pbar.update();
                    # Split the batch across all available contexts (GPUs/CPU).
                    data = mx.gluon.utils.split_and_load(batch[0], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
                    label = mx.gluon.utils.split_and_load(batch[1], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
                    # Record the forward pass for autograd, then backprop.
                    with ag.record():
                        outputs = [self.system_dict["local"]["model"](X) for X in data]
                        loss = [self.system_dict["local"]["criterion"](yhat, y) for yhat, y in zip(outputs, label)]
                    for l in loss:
                        l.backward()
                    trainer.step(self.system_dict["dataset"]["params"]["batch_size"]);
                    train_loss += sum([l.mean().asscalar() for l in loss]) / len(loss)
                    metric.update(label, outputs)
                _, train_acc = metric.get()
                train_loss /= num_batch_train;
                # Validate on the held-out split.
                val_acc, val_loss = self.set_training_evaluation();
                val_acc = val_acc[1];
                val_loss /= num_batch_val;
                # Track peak GPU memory usage (skipped on Kaggle working dir).
                if(not os.getcwd() == "/kaggle/working"):
                    if(self.system_dict["model"]["params"]["use_gpu"]):
                        GPUs = GPUtil.getGPUs()
                        gpuMemoryUsed = GPUs[0].memoryUsed
                        if(self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] < int(gpuMemoryUsed)):
                            self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] = int(gpuMemoryUsed);
                    else:
                        gpuMemoryUsed = 0;
                        self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] = 0;
                if(self.system_dict["training"]["settings"]["save_training_logs"]):
                    val_acc_history.append(val_acc);
                    val_loss_history.append(val_loss);
                    train_acc_history.append(train_acc);
                    train_loss_history.append(train_loss);
                # New best validation accuracy -> snapshot the model.
                if(val_acc > best_acc):
                    best_acc = val_acc;
                    best_acc_epoch = epoch;
                    if(self.system_dict["training"]["settings"]["save_intermediate_models"]):
                        self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + self.system_dict["training"]["settings"]["intermediate_model_prefix"],
                            epoch=epoch)
                    self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + "best_model", epoch=0);
                    self.system_dict["training"]["outputs"]["best_val_acc"] = "{:4f}".format(best_acc);
                    self.system_dict["training"]["outputs"]["best_val_acc_epoch_num"] = best_acc_epoch;
                # Accumulate wall-clock training time across resume sessions.
                # It is stored as a "<M>m <S>s" string and parsed back here.
                time_elapsed_since = time.time() - since;
                if("training_time" in self.system_dict["training"]["outputs"].keys()):
                    minutes, seconds = self.system_dict["training"]["outputs"]["training_time"].split(" ");
                    minutes = int(minutes[:len(minutes)-1]);
                    seconds = int(seconds[:len(seconds)-1]);
                    time_elapsed_since += minutes*60 + seconds;
                self.system_dict["training"]["outputs"]["training_time"] = "{:.0f}m {:.0f}s".format(time_elapsed_since // 60, time_elapsed_since % 60);
                # Persist per-epoch logs and refresh the training curves.
                if(self.system_dict["training"]["settings"]["save_training_logs"]):
                    np.save(self.system_dict["log_dir"] + "val_acc_history.npy", np.array(val_acc_history));
                    np.save(self.system_dict["log_dir"] + "val_loss_history.npy", np.array(val_loss_history));
                    np.save(self.system_dict["log_dir"] + "train_acc_history.npy", np.array(train_acc_history));
                    np.save(self.system_dict["log_dir"] + "train_loss_history.npy", np.array(train_loss_history));
                    create_train_test_plots_accuracy([train_acc_history, val_acc_history], ["Epoch Num", "Accuracy"], self.system_dict["log_dir"], show_img=False, save_img=True);
                    create_train_test_plots_loss([train_loss_history, val_loss_history], ["Epoch Num", "Loss"], self.system_dict["log_dir"], show_img=False, save_img=True);
                # Checkpoint so a later run can resume from this exact point.
                self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + "resume_state", epoch=0);
                if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                    self.custom_print("");
                    self.custom_print("");
                if(self.system_dict["training"]["settings"]["display_progress"]):
                    curr_lr = trainer.learning_rate
                    self.custom_print(" curr_lr - {}".format(curr_lr));
                    self.custom_print(' [Epoch %d] Train-acc: %.3f, Train-loss: %.3f | Val-acc: %3f, Val-loss: %.3f, | time: %.1f sec' %
                        (epoch+1, train_acc, train_loss, val_acc, val_loss, time.time() - since));
                    self.custom_print("");
                self.system_dict["training"]["outputs"]["epochs_completed"] = epoch+1;
                save(self.system_dict);
        elif(self.system_dict["states"]["eval_infer"]):
            # Experiments loaded for evaluation/inference must not be retrained.
            msg = "Cannot train in testing (eval_infer) mode.\n";
            msg += "Tip - use new_experiment function with a copy_from argument.\n";
            raise ConstraintError(msg);
        else:
            # Fresh training run: same loop as the resume branch, but with
            # empty log histories and no epoch skipping.
            self.custom_print("Training Start");
            self.system_dict = load_scheduler(self.system_dict);
            self.system_dict = load_optimizer(self.system_dict);
            self.system_dict = load_loss(self.system_dict);
            if(self.system_dict["dataset"]["label_type"] == "single"):
                metric = mx.metric.Accuracy();
            else:
                metric = mx.metric.CustomMetric(feval=self.custom_metric)
            trainer = mx.gluon.Trainer(self.system_dict["local"]["model"].collect_params(), optimizer=self.system_dict["local"]["optimizer"]);
            self.system_dict["training"]["status"] = False;
            pid = os.getpid()
            if(self.system_dict["training"]["settings"]["save_training_logs"]):
                val_acc_history = [];
                train_acc_history = [];
                val_loss_history = [];
                train_loss_history = [];
            num_batch_train = len(self.system_dict["local"]["data_loaders"]["train"]);
            num_batch_val = len(self.system_dict["local"]["data_loaders"]["val"]);
            best_acc = 0.0;
            best_acc_epoch = 0;
            max_gpu_usage = 0;
            for epoch in range(self.system_dict["hyper-parameters"]["num_epochs"]):
                if(self.system_dict["training"]["settings"]["display_progress"]):
                    self.custom_print(' Epoch {}/{}'.format(epoch+1, self.system_dict["hyper-parameters"]["num_epochs"]))
                    self.custom_print(' ' + '-' * 10)
                since = time.time();
                train_loss = 0
                metric.reset()
                if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                    pbar = tqdm(total=num_batch_train);
                for i, batch in enumerate(self.system_dict["local"]["data_loaders"]["train"]):
                    if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                        pbar.update();
                    # Split the batch across all available contexts (GPUs/CPU).
                    data = mx.gluon.utils.split_and_load(batch[0], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
                    label = mx.gluon.utils.split_and_load(batch[1], ctx_list=self.system_dict["local"]["ctx"], batch_axis=0, even_split=False)
                    with ag.record():
                        outputs = [self.system_dict["local"]["model"](X) for X in data]
                        loss = [self.system_dict["local"]["criterion"](yhat, y) for yhat, y in zip(outputs, label)]
                    for l in loss:
                        l.backward()
                    trainer.step(self.system_dict["dataset"]["params"]["batch_size"]);
                    train_loss += sum([l.mean().asscalar() for l in loss]) / len(loss)
                    metric.update(label, outputs)
                _, train_acc = metric.get()
                train_loss /= num_batch_train;
                val_acc, val_loss = self.set_training_evaluation();
                val_acc = val_acc[1];
                val_loss /= num_batch_val;
                # Track peak GPU memory usage (skipped on Kaggle working dir).
                if(not os.getcwd() == "/kaggle/working"):
                    if(self.system_dict["model"]["params"]["use_gpu"]):
                        GPUs = GPUtil.getGPUs()
                        gpuMemoryUsed = GPUs[0].memoryUsed
                        if(self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] < int(gpuMemoryUsed)):
                            self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] = int(gpuMemoryUsed);
                    else:
                        gpuMemoryUsed = 0;
                        self.system_dict["training"]["outputs"]["max_gpu_memory_usage"] = 0;
                if(self.system_dict["training"]["settings"]["save_training_logs"]):
                    val_acc_history.append(val_acc);
                    val_loss_history.append(val_loss);
                    train_acc_history.append(train_acc);
                    train_loss_history.append(train_loss);
                # Note: unlike the resume branch, intermediate snapshots are
                # saved every epoch here, not only on a new best accuracy.
                if(self.system_dict["training"]["settings"]["save_intermediate_models"]):
                    self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + self.system_dict["training"]["settings"]["intermediate_model_prefix"],
                        epoch=epoch)
                if(val_acc > best_acc):
                    best_acc = val_acc;
                    best_acc_epoch = epoch;
                    self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + "best_model", epoch=0);
                    self.system_dict["training"]["outputs"]["best_val_acc"] = "{:4f}".format(best_acc);
                    self.system_dict["training"]["outputs"]["best_val_acc_epoch_num"] = best_acc_epoch;
                # Accumulate wall-clock training time ("<M>m <S>s" string).
                time_elapsed_since = time.time() - since;
                if("training_time" in self.system_dict["training"]["outputs"].keys()):
                    minutes, seconds = self.system_dict["training"]["outputs"]["training_time"].split(" ");
                    minutes = int(minutes[:len(minutes)-1]);
                    seconds = int(seconds[:len(seconds)-1]);
                    time_elapsed_since += minutes*60 + seconds;
                self.system_dict["training"]["outputs"]["training_time"] = "{:.0f}m {:.0f}s".format(time_elapsed_since // 60, time_elapsed_since % 60);
                if(self.system_dict["training"]["settings"]["save_training_logs"]):
                    np.save(self.system_dict["log_dir"] + "val_acc_history.npy", np.array(val_acc_history));
                    np.save(self.system_dict["log_dir"] + "val_loss_history.npy", np.array(val_loss_history));
                    np.save(self.system_dict["log_dir"] + "train_acc_history.npy", np.array(train_acc_history));
                    np.save(self.system_dict["log_dir"] + "train_loss_history.npy", np.array(train_loss_history));
                    create_train_test_plots_accuracy([train_acc_history, val_acc_history], ["Epoch Num", "Accuracy"], self.system_dict["log_dir"], show_img=False, save_img=True);
                    create_train_test_plots_loss([train_loss_history, val_loss_history], ["Epoch Num", "Loss"], self.system_dict["log_dir"], show_img=False, save_img=True);
                # Checkpoint so a later run can resume from this exact point.
                self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + "resume_state", epoch=0);
                if(self.system_dict["training"]["settings"]["display_progress_realtime"] and self.system_dict["verbose"]):
                    self.custom_print("");
                    self.custom_print("");
                if(self.system_dict["training"]["settings"]["display_progress"]):
                    curr_lr = trainer.learning_rate
                    self.custom_print(" curr_lr - {}".format(curr_lr));
                    self.custom_print(' [Epoch %d] Train-acc: %.3f, Train-loss: %.3f | Val-acc: %3f, Val-loss: %.3f, | time: %.1f sec' %
                        (epoch+1, train_acc, train_loss, val_acc, val_loss, time.time() - since));
                    self.custom_print("");
                self.system_dict["training"]["outputs"]["epochs_completed"] = epoch+1;
                save(self.system_dict);
        # Post-training summary, final export and log bookkeeping.
        if(self.system_dict["training"]["settings"]["display_progress"]):
            self.custom_print(' Training completed in: {:.0f}m {:.0f}s'.format(time_elapsed_since // 60, time_elapsed_since % 60))
            self.custom_print(' Best val Acc: {:4f}'.format(best_acc))
            self.custom_print("")
        if(not self.system_dict["states"]["eval_infer"]):
            self.custom_print("Training End");
            self.custom_print("");
        self.system_dict["training"]["outputs"]["best_val_acc"] = "{:4f}".format(best_acc);
        self.system_dict["training"]["outputs"]["best_val_acc_epoch_num"] = best_acc_epoch;
        self.system_dict["training"]["outputs"]["training_time"] = "{:.0f}m {:.0f}s".format(time_elapsed_since // 60, time_elapsed_since % 60);
        self.system_dict["training"]["outputs"]["max_gpu_usage"] = str(self.system_dict["training"]["outputs"]["max_gpu_memory_usage"]) + " Mb";
        self.system_dict["local"]["model"].export(self.system_dict["model_dir"] + "final", epoch=0);
        if(self.system_dict["training"]["settings"]["save_training_logs"]):
            self.custom_print("Training Outputs");
            self.custom_print(" Model Dir: {}".format(self.system_dict["model_dir"]));
            self.custom_print(" Log Dir: {}".format(self.system_dict["log_dir"]));
            self.custom_print(" Final model: {}".format("final"));
            self.custom_print(" Best model: {}".format("best_model"));
            self.custom_print(" Log 1 - Validation accuracy history log: {}".format("val_acc_history.npy"));
            self.custom_print(" Log 2 - Validation loss history log: {}".format("val_loss_history.npy"));
            self.custom_print(" Log 3 - Training accuracy history log: {}".format("train_acc_history.npy"));
            self.custom_print(" Log 4 - Training loss history log: {}".format("train_loss_history.npy"));
            # NOTE(review): Log 5/6 both print "train_loss_history.npy" -
            # looks like a copy-paste slip in the message text; confirm the
            # intended plot file names before changing user-facing output.
            self.custom_print(" Log 5 - Training curve: {}".format("train_loss_history.npy"));
            self.custom_print(" Log 6 - Validation curve: {}".format("train_loss_history.npy"));
            self.custom_print("");
            np.save(self.system_dict["log_dir"] + "val_acc_history.npy", np.array(val_acc_history));
            np.save(self.system_dict["log_dir"] + "val_loss_history.npy", np.array(val_loss_history));
            np.save(self.system_dict["log_dir"] + "train_acc_history.npy", np.array(train_acc_history));
            np.save(self.system_dict["log_dir"] + "train_loss_history.npy", np.array(train_loss_history));
            self.system_dict["training"]["outputs"]["log_val_acc_history"] = self.system_dict["log_dir"] + "val_acc_history.npy";
            self.system_dict["training"]["outputs"]["log_val_loss_history"] = self.system_dict["log_dir"] + "val_loss_history.npy";
            self.system_dict["training"]["outputs"]["log_train_acc_history"] = self.system_dict["log_dir"] + "train_acc_history.npy";
            self.system_dict["training"]["outputs"]["log_train_loss_history"] = self.system_dict["log_dir"] + "train_loss_history.npy";
            self.system_dict["training"]["outputs"]["log_val_acc_history_relative"] = self.system_dict["log_dir_relative"] + "val_acc_history.npy";
            self.system_dict["training"]["outputs"]["log_val_loss_history_relative"] = self.system_dict["log_dir_relative"] + "val_loss_history.npy";
            self.system_dict["training"]["outputs"]["log_train_acc_history_relative"] = self.system_dict["log_dir_relative"] + "train_acc_history.npy";
            self.system_dict["training"]["outputs"]["log_train_loss_history_relative"] = self.system_dict["log_dir_relative"] + "train_loss_history.npy";
            create_train_test_plots_accuracy([train_acc_history, val_acc_history], ["Epoch Num", "Accuracy"], self.system_dict["log_dir"], show_img=False, save_img=True);
            create_train_test_plots_loss([train_loss_history, val_loss_history], ["Epoch Num", "Loss"], self.system_dict["log_dir"], show_img=False, save_img=True);
        self.system_dict["training"]["status"] = True;
###############################################################################################################################################
###############################################################################################################################################
def custom_metric(self, labels, raw_scores):
num_correct = 0;
total_labels = 0;
list_classes = [];
for i in range(labels.shape[0]):
for j in range(labels.shape[1]):
score = logistic.cdf(raw_scores[i][j])
pred = False
if(score > 0.5):
pred = True
else:
pred = False
if(pred and labels[i][j]):
num_correct += 1;
if(labels[i][j]):
total_labels += 1;
return num_correct/total_labels;
###############################################################################################################################################
| 54.319756 | 178 | 0.547898 |
6ff7e41bfbfd74ddcb52806817d87195abbe54c2 | 742 | py | Python | Src/StdLib/Lib/site-packages/win32/lib/winxptheme.py | cwensley/ironpython2 | f854444e1e08afc8850cb7c1a739a7dd2d10d32a | [
"Apache-2.0"
] | 1,078 | 2016-07-19T02:48:30.000Z | 2022-03-30T21:22:34.000Z | Src/StdLib/Lib/site-packages/win32/lib/winxptheme.py | cwensley/ironpython2 | f854444e1e08afc8850cb7c1a739a7dd2d10d32a | [
"Apache-2.0"
] | 576 | 2017-05-21T12:36:48.000Z | 2022-03-30T13:47:03.000Z | Src/StdLib/Lib/site-packages/win32/lib/winxptheme.py | cwensley/ironpython2 | f854444e1e08afc8850cb7c1a739a7dd2d10d32a | [
"Apache-2.0"
] | 269 | 2017-05-21T04:44:47.000Z | 2022-03-31T16:18:13.000Z | """A useful wrapper around the "_winxptheme" module.
Unlike _winxptheme, this module will load on any version of Windows.
If _winxptheme is not available, then this module will have only 2 functions -
IsAppThemed() and IsThemeActive, which will both always return False.
If _winxptheme is available, this module will have all methods in that module,
including real implementations of IsAppThemed() and IsThemeActive().
"""
import win32api
try:
    # Probe for the XP theming DLL; LoadLibrary raises win32api.error when
    # Uxtheme.dll is unavailable (i.e. pre-XP Windows). FreeLibrary releases
    # the probe handle immediately - we only needed to know it loads.
    win32api.FreeLibrary(win32api.LoadLibrary("Uxtheme.dll"))
    # Life is good, everything is available.
    from _winxptheme import *
except win32api.error:
    # Probably not running XP.
    # Provide stub fallbacks so callers can query theming unconditionally:
    # without Uxtheme.dll, theming can never be active.
    def IsAppThemed():
        return False
    def IsThemeActive():
        return False
# Keep the module namespace clean - win32api was only needed for the probe.
del win32api
| 30.916667 | 78 | 0.749326 |
1d94cbeafee4ffdc13159ff7f1286c4ea22e0ea5 | 8,337 | py | Python | examples/get_temperatures_inventory.py | jixj5/python-redfish-lenovo | 0b4a5d633a2b1fcee3bf22f06fd35f59b7806d70 | [
"Apache-2.0"
] | null | null | null | examples/get_temperatures_inventory.py | jixj5/python-redfish-lenovo | 0b4a5d633a2b1fcee3bf22f06fd35f59b7806d70 | [
"Apache-2.0"
] | null | null | null | examples/get_temperatures_inventory.py | jixj5/python-redfish-lenovo | 0b4a5d633a2b1fcee3bf22f06fd35f59b7806d70 | [
"Apache-2.0"
] | null | null | null | ###
#
# Lenovo Redfish examples - Get temperatures inventory
#
# Copyright Notice:
#
# Copyright 2018 Lenovo Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
###
import sys
import redfish
import json
import traceback
import lenovo_utils as utils
def get_temperatures_inventory(ip, login_account, login_password):
    """Get temperatures inventory

    Walks Chassis -> (ThermalSubsystem/ThermalMetrics | Thermal) and collects
    every temperature sensor reading, stripping Redfish bookkeeping keys.

    :params ip: BMC IP address
    :type ip: string
    :params login_account: BMC user name
    :type login_account: string
    :params login_password: BMC user password
    :type login_password: string
    :returns: returns get temperatures inventory result when succeeded or error message when failed
    """
    result = {}
    login_host = "https://" + ip

    # Connect using the BMC address, account name, and password
    # Create a REDFISH object
    REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,
                                         password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)

    # Login into the server and create a session
    try:
        REDFISH_OBJ.login(auth=utils.g_AUTH)
    except:
        traceback.print_exc()
        result = {'ret': False, 'msg': "Please check the username, password, IP is correct\n"}
        return result

    # Get ServiceBase resource
    try:
        response_base_url = REDFISH_OBJ.get('/redfish/v1', None)
        # Get response_base_url
        if response_base_url.status == 200:
            chassis_url = response_base_url.dict['Chassis']['@odata.id']
        else:
            error_message = utils.get_extended_error(response_base_url)
            result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                '/redfish/v1', response_base_url.status, error_message)}
            return result

        response_chassis_url = REDFISH_OBJ.get(chassis_url, None)
        if response_chassis_url.status == 200:
            rt_list_temperatures = []
            # get temperatures inventory from every chassis member
            for request in response_chassis_url.dict['Members']:
                request_url = request['@odata.id']
                response_url = REDFISH_OBJ.get(request_url, None)
                if response_url.status == 200:
                    # if chassis is not normal skip it
                    if len(response_chassis_url.dict['Members']) > 1 and ("Links" not in response_url.dict or
                            "ComputerSystems" not in response_url.dict["Links"]):
                        continue
                    if 'ThermalSubsystem' in response_url.dict and '@odata.id' in response_url.dict['ThermalSubsystem']:
                        # Newer Redfish schema: Chassis -> ThermalSubsystem -> ThermalMetrics
                        thermal_subsystem_url = response_url.dict["ThermalSubsystem"]['@odata.id']
                        response_thermal_url = REDFISH_OBJ.get(thermal_subsystem_url, None)
                        if response_thermal_url.status == 200:
                            temperatures_url = response_thermal_url.dict["ThermalMetrics"]['@odata.id']
                            temperatures_response = REDFISH_OBJ.get(temperatures_url, None)
                            # Fix: verify the ThermalMetrics response before
                            # dereferencing .dict (was previously unchecked).
                            if temperatures_response.status != 200:
                                error_message = utils.get_extended_error(temperatures_response)
                                result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                                    temperatures_url, temperatures_response.status, error_message)}
                                return result
                            for temperature in temperatures_response.dict["TemperatureReadingsCelsius"]:
                                tmp_temperatures_item = {}
                                # Each reading only carries a pointer to the
                                # actual sensor resource; fetch it.
                                data_source_uri = temperature['DataSourceUri']
                                readings_celsius = REDFISH_OBJ.get(data_source_uri, None)
                                if readings_celsius.status != 200:
                                    error_message = utils.get_extended_error(readings_celsius)
                                    result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                                        data_source_uri, readings_celsius.status, error_message)}
                                    return result
                                # Keep only the sensor payload, drop OData noise.
                                for key in readings_celsius.dict:
                                    if key not in ["Description", "@odata.context", "@odata.id", "@odata.type",
                                                   "@odata.etag", "Links", "Actions", "RelatedItem"]:
                                        tmp_temperatures_item[key] = readings_celsius.dict[key]
                                rt_list_temperatures.append(tmp_temperatures_item)
                        else:
                            error_message = utils.get_extended_error(response_thermal_url)
                            result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                                thermal_subsystem_url, response_thermal_url.status, error_message)}
                            return result
                    else:
                        # Legacy schema: Chassis -> Thermal -> Temperatures
                        if "Thermal" not in response_url.dict:
                            continue
                        thermal_url = response_url.dict["Thermal"]['@odata.id']
                        response_thermal_url = REDFISH_OBJ.get(thermal_url, None)
                        if response_thermal_url.status == 200:
                            list_temperatures = response_thermal_url.dict["Temperatures"]
                            for temperatures_item in list_temperatures:
                                tmp_temperatures_item = {}
                                for key in temperatures_item:
                                    if key not in ["Description", "@odata.context", "@odata.id", "@odata.type",
                                                   "@odata.etag", "Links", "Actions", "RelatedItem"]:
                                        tmp_temperatures_item[key] = temperatures_item[key]
                                rt_list_temperatures.append(tmp_temperatures_item)
                        else:
                            error_message = utils.get_extended_error(response_thermal_url)
                            result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                                thermal_url, response_thermal_url.status, error_message)}
                            return result
                else:
                    error_message = utils.get_extended_error(response_url)
                    result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                        request_url, response_url.status, error_message)}
                    return result
            result["ret"] = True
            result["entries"] = rt_list_temperatures
            return result
        else:
            error_message = utils.get_extended_error(response_chassis_url)
            result = {'ret': False, 'msg': "Url '%s' response Error code %s\nerror_message: %s" % (
                chassis_url, response_chassis_url.status, error_message)}
            return result
    except Exception as e:
        traceback.print_exc()
        result = {'ret': False, 'msg': 'exception msg %s' % e}
        return result
    finally:
        # Always terminate the Redfish session; ignore logout failures.
        try:
            REDFISH_OBJ.logout()
        except:
            pass
if __name__ == '__main__':
    # Collect connection parameters from config.ini and/or the command line.
    parser = utils.create_common_parameter_list()
    parameter_info = utils.parse_parameter(parser.parse_args())

    # Query the BMC for its temperature sensor inventory.
    result = get_temperatures_inventory(
        parameter_info['ip'], parameter_info["user"], parameter_info["passwd"])

    if result['ret'] is True:
        # Success: dump the sensor entries as pretty-printed JSON on stdout.
        del result['ret']
        sys.stdout.write(json.dumps(result['entries'], sort_keys=True, indent=2))
    else:
        # Failure: report the error message on stderr and exit non-zero.
        sys.stderr.write(result['msg'])
        sys.exit(1)
3266c36e1b4c28786d2ce29efc95bc06f9195afb | 7,151 | py | Python | src/ginkgo/auxFunctions.py | lbg251/ginkgo | dda16a2525b1b89cb093af65dccb3ee236e043a7 | [
"MIT"
] | 3 | 2021-11-30T14:25:48.000Z | 2022-03-02T15:05:55.000Z | src/ginkgo/auxFunctions.py | lbg251/ginkgo | dda16a2525b1b89cb093af65dccb3ee236e043a7 | [
"MIT"
] | null | null | null | src/ginkgo/auxFunctions.py | lbg251/ginkgo | dda16a2525b1b89cb093af65dccb3ee236e043a7 | [
"MIT"
] | 1 | 2021-06-10T15:42:56.000Z | 2021-06-10T15:42:56.000Z | import numpy as np
import pickle
import time
import logging
import pyro
from .utils import get_logger
logger = get_logger(level=logging.INFO)
def traversePhi(jet, node_id, constPhiList, PhiDeltaList, PhiDeltaListRel):
    """
    Depth-first traversal of the binary jet tree rooted at ``node_id``.

    Every leaf contributes its azimuthal angle phi to ``constPhiList``.
    Every internal node contributes the angle of its splitting vector
    Delta = (p_R - p_L) / 2 to ``PhiDeltaList``, and the absolute angle of
    Delta relative to the (pseudo)jet direction to ``PhiDeltaListRel``.
    The three lists are mutated in place and also returned.
    """
    tree = jet["tree"]
    content = jet["content"]
    if tree[node_id, 0] == -1:
        # Leaf node: record its azimuthal angle and stop recursing.
        constPhiList.append(np.arctan2(content[node_id][0], content[node_id][1]))
        return constPhiList, PhiDeltaList, PhiDeltaListRel
    # Inner node: half the difference of the children's momenta is Delta.
    left_id = tree[node_id][0]
    right_id = tree[node_id][1]
    half_diff = (content[right_id] - content[left_id]) / 2
    # arctan2 keeps the correct quadrant for both angles.
    parent_phi = np.arctan2(content[node_id][0], content[node_id][1])
    delta_phi = np.arctan2(half_diff[0], half_diff[1])
    PhiDeltaList.append(delta_phi)
    PhiDeltaListRel.append(abs(delta_phi - parent_phi))
    # Recurse left subtree first, then right, matching list ordering.
    traversePhi(jet, left_id, constPhiList, PhiDeltaList, PhiDeltaListRel)
    traversePhi(jet, right_id, constPhiList, PhiDeltaList, PhiDeltaListRel)
    return constPhiList, PhiDeltaList, PhiDeltaListRel
def traverse(
    root,
    jetContent,
    jetTree=None,
    Nleaves=None,
):
    """
    Build a jet tree structure starting from ``root`` by delegating to the
    recursive helper ``_traverse_flipLR``, which randomly swaps the
    left/right order of each pair of children.

    :param root: root node id
    :param jetContent: array with the momentum of all the nodes of the jet tree
    :param jetTree: mapping from a parent node id to the ids of its 2 children
    :param Nleaves: number of constituents (leaves)
    :return: (tree, content, leaves, node_id, tree_ancestors); note that
        node_id and tree_ancestors are returned empty by this implementation.
    """
    tree, content, leaves = [], [], []
    node_id, tree_ancestors = [], []
    # Shared fair coin consumed by _traverse_flipLR for each L/R swap.
    globals()["Bernoulli_dist"] = pyro.distributions.Bernoulli(probs=0.5)
    _traverse_flipLR(
        root,
        -1,
        False,
        tree,
        content,
        jetContent,
        leaves,
        jetTree=jetTree,
        Nleaves=Nleaves,
    )
    return tree, content, leaves, node_id, tree_ancestors
def _traverse_flipLR(
    root,
    parent_id,
    is_left,
    tree,
    content,
    jetContent,
    leaves,
    jetTree=None,
    Nleaves=None,
    # flip = False,
):
    """
    Recursive function to build the jet tree structure, randomly flipping the
    left/right order of each pair of children (a Bernoulli(0.5) draw per
    inner node, via the module-global ``Bernoulli_dist`` set by ``traverse``).

    ``tree``, ``content`` and ``leaves`` are mutated in place:
      - tree: flat list, 2 slots per node holding the child ids (-1 for leaf)
      - content: momentum vector of each visited node, in visit order
      - leaves: momentum vectors of the leaf nodes, in visit order

    :param root: current node id in the original jet
    :param parent_id: id of this node's parent in the NEW tree (-1 for root)
    :param is_left: bool, whether this node is its parent's left child
    :param jetContent: array with the momentum of all the nodes of the jet tree (both leaves and inners).
    :param jetTree: mapping from a parent node id to the ids of its 2 children
    :param Nleaves: Number of constituents (leaves)
    """
    """"
    (With each momentum vector we increase the content array by one element and the tree array by 2 elements.
    But then we take id=tree.size()//2, so the id increases by 1.)
    """
    id = len(tree) // 2
    if parent_id >= 0:
        if is_left:
            """Insert in the tree list, the location of the lef child in the content array."""
            tree[2 * parent_id] = id
        else:
            """Insert in the tree list, the location of the right child in the content array."""
            tree[2 * parent_id + 1] = id
    """Insert 2 new nodes to the vector that constitutes the tree. If the current node is a parent, then we will replace the -1 with its children idx in the content array"""
    tree.append(-1)
    tree.append(-1)
    """ Append node momentum to content list """
    content.append(jetContent[root])
    """ Move from the root down recursively until we get to the leaves. """
    if jetTree[root][0] != -1:
        children = jetTree[root]
        logger.debug(f"Children = {children}")
        # One coin flip per inner node decides whether L and R are swapped.
        # The sample site name is keyed on the node id for reproducibility
        # under pyro's tracing.
        flip = pyro.sample("Bernoulli" + str(root), Bernoulli_dist)
        if flip:
            L_idx = children[1]
            R_idx = children[0]
        else:
            L_idx = children[0]
            R_idx = children[1]
        _traverse_flipLR(L_idx,
                         id,
                         True,
                         tree,
                         content,
                         jetContent,
                         leaves,
                         jetTree,
                         Nleaves=Nleaves,
                         )
        _traverse_flipLR(R_idx,
                         id,
                         False,
                         tree,
                         content,
                         jetContent,
                         leaves,
                         jetTree,
                         Nleaves=Nleaves,
                         )
    else:
        """ If the node is a leaf, then append idx to node_id and its ancestors as a new row of tree_ancestors """
        leaves.append(jetContent[root])
| 31.641593 | 382 | 0.608586 |
d85f367d2100e88473b37e317a32084594c8dd1c | 2,955 | py | Python | tests/test_api.py | OrquestraDigital/aboutcode-toolkit | d9ff859735a72635563fb5a9e265ecd7023d401a | [
"Apache-2.0"
] | 1 | 2021-08-31T10:58:29.000Z | 2021-08-31T10:58:29.000Z | tests/test_api.py | sthagen/aboutcode-toolkit | cd74f15bcc223c7e1b7424f169481af8e55e0f38 | [
"Apache-2.0"
] | null | null | null | tests/test_api.py | sthagen/aboutcode-toolkit | cd74f15bcc223c7e1b7424f169481af8e55e0f38 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf8 -*-
# ============================================================================
# Copyright (c) nexB Inc. http://www.nexb.com/ - All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import unittest
import mock
from attributecode import api
from attributecode import ERROR
from attributecode import Error
class FakeResponse(object):
    """Minimal stand-in for a ``urlopen`` response, exposing only ``read()``."""

    # Class-level default so the attribute exists even before __init__ runs.
    response_content = None

    def __init__(self, response_content):
        self.response_content = response_content

    def read(self):
        """Return the canned response payload supplied at construction."""
        return self.response_content
class ApiTest(unittest.TestCase):
    """Exercise the license helpers of the ``api`` module with mocked I/O."""

    @mock.patch.object(api, 'request_license_data')
    def test_api_get_license_details_from_api(self, mock_request):
        # Simulate a successful license-data fetch with no errors.
        mock_request.return_value = (
            {
                'name': 'Apache License 2.0',
                'full_text': 'Apache License Version 2.0 ...',
                'key': 'apache-2.0',
            },
            [],
        )

        result = api.get_license_details_from_api(
            api_url='api_url', api_key='api_key', license_key='license_key')

        assert result == (
            'Apache License 2.0',
            'apache-2.0',
            'Apache License Version 2.0 ...',
            [],
        )

    @mock.patch.object(api, 'urlopen')
    def test_api_request_license_data_with_result(self, mock_urlopen):
        # API returns exactly one matching license record.
        mock_urlopen.return_value = FakeResponse(
            b'{"count":1,"results":[{"name":"Apache 2.0","key":"apache-2.0","text":"Text"}]}'
        )

        result = api.request_license_data(
            api_url='http://fake.url/', api_key='api_key',
            license_key='apache-2.0')

        assert result == (
            {'name': 'Apache 2.0', 'key': 'apache-2.0', 'text': 'Text'},
            [],
        )

    @mock.patch.object(api, 'urlopen')
    def test_api_request_license_data_without_result(self, mock_urlopen):
        # No match: expect empty license data plus a single error.
        mock_urlopen.return_value = FakeResponse(b'{"count":0,"results":[]}')

        result = api.request_license_data(
            api_url='http://fake.url/', api_key='api_key',
            license_key='apache-2.0')

        assert result == ({}, [Error(ERROR, "Invalid 'license': apache-2.0")])
3df964f98a6cfcc3da4682ac3de21c34988f878b | 3,118 | py | Python | src/cms/views/users/region_user_list_view.py | Integreat/integreat-cms | b3f80964a6182d714f26ac229342b47e1c7c4f29 | [
"Apache-2.0"
] | 14 | 2020-12-03T07:56:30.000Z | 2021-10-30T13:09:50.000Z | src/cms/views/users/region_user_list_view.py | Integreat/integreat-cms | b3f80964a6182d714f26ac229342b47e1c7c4f29 | [
"Apache-2.0"
] | 367 | 2020-11-20T00:34:20.000Z | 2021-12-14T15:20:42.000Z | src/cms/views/users/region_user_list_view.py | Integreat/integreat-cms | b3f80964a6182d714f26ac229342b47e1c7c4f29 | [
"Apache-2.0"
] | 3 | 2021-02-09T18:46:52.000Z | 2021-12-07T10:41:39.000Z | import logging
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.generic import TemplateView
from backend.settings import PER_PAGE
from ...forms import ObjectSearchForm
from ...decorators import region_permission_required, permission_required
from ...models import Region
from ...utils.user_utils import search_users
logger = logging.getLogger(__name__)
@method_decorator(login_required, name="dispatch")
@method_decorator(region_permission_required, name="dispatch")
@method_decorator(permission_required("cms.view_user"), name="dispatch")
class RegionUserListView(TemplateView):
    """
    View for listing region users
    """

    #: The template to render (see :class:`~django.views.generic.base.TemplateResponseMixin`)
    template_name = "users/region/list.html"
    #: The context dict passed to the template (see :class:`~django.views.generic.base.ContextMixin`)
    base_context = {"current_menu_item": "region_users"}

    def get(self, request, *args, **kwargs):
        """
        Render region user list

        :param request: The current request
        :type request: ~django.http.HttpResponse

        :param args: The supplied arguments
        :type args: list

        :param kwargs: The supplied keyword arguments
        :type kwargs: dict

        :return: The rendered template response
        :rtype: ~django.template.response.TemplateResponse
        """
        region = Region.get_current_region(request)

        region_users = (
            region.users.select_related("organization")
            .prefetch_related("groups__role")
            .order_by("username")
        )

        # Narrow the queryset down when a valid search query was supplied
        query = None
        search_form = ObjectSearchForm(kwargs.get("search_data"))
        if search_form.is_valid():
            query = search_form.cleaned_data["query"]
            matching_keys = search_users(region, query).values("pk")
            region_users = region_users.filter(pk__in=matching_keys)

        # for consistent pagination querysets should be ordered
        paginator = Paginator(region_users, PER_PAGE)
        user_chunk = paginator.get_page(request.GET.get("page"))

        return render(
            request,
            self.template_name,
            {
                **self.base_context,
                "users": user_chunk,
                "region_slug": region.slug,
                "search_query": query,
            },
        )

    def post(self, request, *args, **kwargs):
        """
        Apply the query and filter the rendered users

        :param request: The current request
        :type request: ~django.http.HttpResponse

        :param args: The supplied arguments
        :type args: list

        :param kwargs: The supplied keyword arguments
        :type kwargs: dict

        :return: The rendered template response
        :rtype: ~django.template.response.TemplateResponse
        """
        return self.get(request, *args, **kwargs, search_data=request.POST)
c6eebf3041086f1831e1395918c648219e498149 | 85,786 | py | Python | lib/spack/spack/installer.py | dreaqdp/spack | 6d22e9cd7b427f2d3d24bf1246215e0b9ba248e2 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | lib/spack/spack/installer.py | dreaqdp/spack | 6d22e9cd7b427f2d3d24bf1246215e0b9ba248e2 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 32 | 2020-12-15T17:29:20.000Z | 2022-03-21T15:08:31.000Z | lib/spack/spack/installer.py | dreaqdp/spack | 6d22e9cd7b427f2d3d24bf1246215e0b9ba248e2 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2021-07-19T20:31:27.000Z | 2021-07-19T21:14:14.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This module encapsulates package installation functionality.
The PackageInstaller coordinates concurrent builds of packages for the same
Spack instance by leveraging the dependency DAG and file system locks. It
also proceeds with the installation of non-dependent packages of failed
dependencies in order to install as many dependencies of a package as possible.
Bottom-up traversal of the dependency DAG while prioritizing packages with no
uninstalled dependencies allows multiple processes to perform concurrent builds
of separate packages associated with a spec.
File system locks enable coordination such that no two processes attempt to
build the same or a failed dependency package.
Failures to install dependency packages result in removal of their dependents'
build tasks from the current process. A failure file is also written (and
locked) so that other processes can detect the failure and adjust their build
tasks accordingly.
This module supports the coordination of local and distributed concurrent
installations of packages in a Spack instance.
"""
import copy
import glob
import heapq
import itertools
import os
import shutil
import sys
import time
from collections import defaultdict
import six
import llnl.util.filesystem as fs
import llnl.util.lock as lk
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
from llnl.util.tty.log import log_output
import spack.binary_distribution as binary_distribution
import spack.compilers
import spack.error
import spack.hooks
import spack.monitor
import spack.package
import spack.package_prefs as prefs
import spack.repo
import spack.store
from spack.util.environment import dump_environment
from spack.util.executable import which
from spack.util.timer import Timer
#: Counter to support unique spec sequencing that is used to ensure packages
#: with the same priority are (initially) processed in the order in which they
#: were added (see https://docs.python.org/2/library/heapq.html).
_counter = itertools.count(0)

#: Build status indicating task has been added/queued.
STATUS_ADDED = 'queued'

#: Build status indicating the spec failed to install
STATUS_FAILED = 'failed'

#: Build status indicating the spec is being installed (possibly by another
#: process)
STATUS_INSTALLING = 'installing'

#: Build status indicating the spec was successfully installed
STATUS_INSTALLED = 'installed'

#: Build status indicating the task has been popped from the queue
STATUS_DEQUEUED = 'dequeued'

#: Build status indicating task has been removed (to maintain priority
#: queue invariants).
STATUS_REMOVED = 'removed'
def _check_last_phase(pkg):
"""
Ensures the specified package has a valid last phase before proceeding
with its installation.
The last phase is also set to None if it is the last phase of the
package already.
Args:
pkg (PackageBase): the package being installed
Raises:
``BadInstallPhase`` if stop_before or last phase is invalid
"""
if pkg.stop_before_phase and pkg.stop_before_phase not in pkg.phases:
raise BadInstallPhase(pkg.name, pkg.stop_before_phase)
if pkg.last_phase and pkg.last_phase not in pkg.phases:
raise BadInstallPhase(pkg.name, pkg.last_phase)
# If we got a last_phase, make sure it's not already last
if pkg.last_phase and pkg.last_phase == pkg.phases[-1]:
pkg.last_phase = None
def _handle_external_and_upstream(pkg, explicit):
"""
Determine if the package is external or upstream and register it in the
database if it is external package.
Args:
pkg (Package): the package whose installation is under consideration
explicit (bool): the package was explicitly requested by the user
Return:
(bool): ``True`` if the package is external or upstream (so not to
be installed locally), otherwise, ``True``
"""
# For external packages the workflow is simplified, and basically
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
_print_installed_pkg('{0} (external {1})'
.format(pkg.prefix, package_id(pkg)))
return True
if pkg.installed_upstream:
tty.verbose('{0} is installed in an upstream Spack instance at {1}'
.format(package_id(pkg), pkg.spec.prefix))
_print_installed_pkg(pkg.prefix)
# This will result in skipping all post-install hooks. In the case
# of modules this is considered correct because we want to retrieve
# the module from the upstream Spack instance.
return True
return False
def _do_fake_install(pkg):
    """
    Make a fake install directory containing fake executables, headers,
    and libraries.

    Args:
        pkg (PackageBase): the package whose installation is to be faked
    """
    name = pkg.name
    # Avoid double 'lib' for packages whose names already start with lib
    library = name if name.startswith('lib') else 'lib' + name
    dso_suffix = '.dylib' if sys.platform == 'darwin' else '.so'

    # Fake executable, marked executable via chmod
    fs.mkdirp(pkg.prefix.bin)
    command_path = os.path.join(pkg.prefix.bin, name)
    fs.touch(command_path)
    chmod = which('chmod')
    chmod('+x', command_path)

    # Fake header file
    fs.mkdirp(pkg.prefix.include)
    fs.touch(os.path.join(pkg.prefix.include, name + '.h'))

    # Fake shared and static libraries
    fs.mkdirp(pkg.prefix.lib)
    for suffix in (dso_suffix, '.a'):
        fs.touch(os.path.join(pkg.prefix.lib, library + suffix))

    # Fake man page directory
    fs.mkdirp(pkg.prefix.man.man1)

    # Record (fake) build provenance alongside the install
    packages_dir = spack.store.layout.build_packages_path(pkg.spec)
    dump_packages(pkg.spec, packages_dir)
def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
    """
    Return a list of packages required to bootstrap `pkg`s compiler

    Checks Spack's compiler configuration for a compiler that
    matches the package spec.

    Args:
        compiler (CompilerSpec): the compiler to bootstrap
        architecture (ArchSpec): the architecture for which to bootstrap the
            compiler
        pkgs (list of PackageBase): the packages that may need their compiler
            installed

    Return:
        (list) list of tuples, (PackageBase, bool), for concretized compiler-
            related packages that need to be installed and bool values
            specify whether the package is the bootstrap compiler
            (``True``) or one of its dependencies (``False``).  The list
            will be empty if there are no compilers.
    """
    tty.debug('Bootstrapping {0} compiler'.format(compiler))
    compilers = spack.compilers.compilers_for_spec(
        compiler, arch_spec=architecture)
    if compilers:
        # A configured compiler already satisfies the request: nothing to do.
        return []
    dep = spack.compilers.pkg_spec_for_compiler(compiler)
    # Set the architecture for the compiler package in a way that allows the
    # concretizer to back off if needed for the older bootstrapping compiler
    dep.constrain('platform=%s' % str(architecture.platform))
    dep.constrain('os=%s' % str(architecture.os))
    # Open-ended target range ('family:') so an older compiler can be built
    # for a generic member of the target family.
    dep.constrain('target=%s:' %
                  architecture.target.microarchitecture.family.name)
    # concrete CompilerSpec has less info than concrete Spec
    # concretize as Spec to add that information
    dep.concretize()
    # Mark the compiler as depended-on by the packages that use it.
    # NOTE(review): this pokes the private ``_dependents`` mapping of the
    # concretized spec directly rather than going through a public API.
    for pkg in pkgs:
        dep._dependents[pkg.name] = spack.spec.DependencySpec(
            pkg.spec, dep, ('build',))
    # Post-order traversal: dependencies first (flagged False), then the
    # compiler package itself (flagged True).
    packages = [(s.package, False) for
                s in dep.traverse(order='post', root=False)]
    packages.append((dep.package, True))
    return packages
def _hms(seconds):
"""
Convert seconds to hours, minutes, seconds
Args:
seconds (int): time to be converted in seconds
Return:
(str) String representation of the time as #h #m #.##s
"""
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
parts = []
if h:
parts.append("%dh" % h)
if m:
parts.append("%dm" % m)
if s:
parts.append("%.2fs" % s)
return ' '.join(parts)
def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
                        full_hash_match=False):
    """
    Extract the package from binary cache

    Args:
        pkg (PackageBase): the package to install from the binary cache
        cache_only (bool): only extract from binary cache
        explicit (bool): ``True`` if installing the package was explicitly
            requested by the user, otherwise, ``False``
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
        full_hash_match (bool): require the full hash to match when looking
            for a binary cache entry

    Return:
        (bool) ``True`` if the package was extracted from binary cache,
            ``False`` otherwise
    """
    extracted = _try_install_from_binary_cache(
        pkg, explicit, unsigned=unsigned, full_hash_match=full_hash_match)
    pkg_id = package_id(pkg)

    if extracted:
        tty.debug('Successfully extracted {0} from binary cache'
                  .format(pkg_id))
        _print_installed_pkg(pkg.spec.prefix)
        spack.hooks.post_install(pkg.spec)
        return True

    pre = 'No binary for {0} found'.format(pkg_id)
    if cache_only:
        # Cache-only installs cannot fall back to a source build.
        tty.die('{0} when cache-only specified'.format(pre))

    tty.msg('{0}: installing from source'.format(pre))
    return False
def _print_installed_pkg(message):
    """
    Output a message with a package icon.

    Args:
        message (str): message to be output
    """
    # Green '[+]' marker matches the icon used by `spack find`.
    print('{0}{1}'.format(colorize('@*g{[+]} '), message))
def _process_external_package(pkg, explicit):
    """
    Helper function to run post install hooks and register external packages.

    Args:
        pkg (Package): the external package
        explicit (bool): if the package was requested explicitly by the user,
            ``False`` if it was pulled in as a dependency of an explicit
            package.
    """
    assert pkg.spec.external, \
        'Expected to post-install/register an external package.'
    pre = '{s.name}@{s.version} :'.format(s=pkg.spec)
    spec = pkg.spec
    if spec.external_modules:
        tty.msg('{0} has external module in {1}'
                .format(pre, spec.external_modules))
        tty.debug('{0} is actually installed in {1}'
                  .format(pre, spec.external_path))
    else:
        tty.debug('{0} externally installed in {1}'
                  .format(pre, spec.external_path))
    try:
        # Check if the package was already registered in the DB.
        # If this is the case, then just exit.
        # NOTE(review): the only call in this ``try`` that can raise
        # ``KeyError`` is ``update_explicit`` (via a DB record lookup), and
        # it only runs when ``explicit`` is truthy.  A *non-explicit*
        # external that is absent from the DB therefore never reaches the
        # ``except`` branch and is never registered — confirm whether a
        # ``spack.store.db.get_record(spec)`` probe was intended here.
        tty.debug('{0} already registered in DB'.format(pre))
        # Update the explicit state if it is necessary
        if explicit:
            spack.store.db.update_explicit(spec, explicit)
    except KeyError:
        # If not, register it and generate the module file.
        # For external packages we just need to run
        # post-install hooks to generate module files.
        tty.debug('{0} generating module file'.format(pre))
        spack.hooks.post_install(spec)
        # Add to the DB
        tty.debug('{0} registering into DB'.format(pre))
        spack.store.db.add(spec, None, explicit=explicit)
def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
                                  preferred_mirrors=None):
    """
    Process the binary cache tarball.

    Args:
        pkg (PackageBase): the package being installed
        binary_spec (Spec): the spec whose cache has been confirmed
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
        preferred_mirrors (list): Optional list of urls to prefer when
            attempting to download the tarball

    Return:
        (bool) ``True`` if the package was extracted from binary cache,
            else ``False``
    """
    tarball = binary_distribution.download_tarball(
        binary_spec, preferred_mirrors=preferred_mirrors)

    # see #10063 : install from source if tarball doesn't exist
    if tarball is None:
        tty.msg('{0} exists in binary cache but with different hash'
                .format(pkg.name))
        return False

    tty.msg('Extracting {0} from binary cache'.format(package_id(pkg)))
    binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
                                        unsigned=unsigned, force=False)

    # Record provenance and register the new install in the database.
    pkg.installed_from_binary_cache = True
    spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
    return True
def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
                                   full_hash_match=False):
    """
    Try to extract the package from binary cache.

    Args:
        pkg (PackageBase): the package to be extracted from binary cache
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
        full_hash_match (bool): require the full hash to match when looking
            for a binary cache entry
    """
    tty.debug('Searching for binary cache of {0}'.format(package_id(pkg)))
    matches = binary_distribution.get_mirrors_for_spec(
        pkg.spec, full_hash_match=full_hash_match)

    if not matches:
        return False

    # In the absence of guidance from user or some other reason to prefer one
    # mirror over another, any match will suffice, so just pick the first one.
    mirror_urls = [match['mirror_url'] for match in matches]
    return _process_binary_cache_tarball(
        pkg, matches[0]['spec'], explicit, unsigned,
        preferred_mirrors=mirror_urls)
def clear_failures():
    """
    Remove all failure tracking markers for the Spack instance.
    """
    # The database layer owns the failure marks/locks, so simply delegate.
    spack.store.db.clear_all_failures()
def combine_phase_logs(phase_log_files, log_path):
    """
    Read set or list of logs and combine them into one file.

    Each phase will produce it's own log, so this function aims to cat all the
    separate phase log output files into the pkg.log_path. It is written
    generally to accept some list of files, and a log path to combine them to.

    The logs are treated as opaque bytes: build output is not guaranteed to
    be valid UTF-8, so text-mode reads could raise ``UnicodeDecodeError``.
    ``shutil.copyfileobj`` streams each log in chunks instead of loading
    whole files into memory.

    Args:
        phase_log_files (list): a list or iterator of logs to combine
        log_path (path): the path to combine them to
    """
    with open(log_path, 'bw') as log_file:
        for phase_log_file in phase_log_files:
            with open(phase_log_file, 'br') as phase_log:
                shutil.copyfileobj(phase_log, log_file)
def dump_packages(spec, path):
    """
    Dump all package information for a spec and its dependencies.

    This creates a package repository within path for every namespace in the
    spec DAG, and fills the repos with package files and patch files for every
    node in the DAG.

    Args:
        spec (Spec): the Spack spec whose package information is to be dumped
        path (str): the path to the build packages directory
    """
    fs.mkdirp(path)
    # Copy in package.py files from any dependencies.
    # Note that we copy them in as they are in the *install* directory
    # NOT as they are in the repository, because we want a snapshot of
    # how *this* particular build was done.
    for node in spec.traverse(deptype=all):
        if node is not spec:
            # Locate the dependency package in the install tree and find
            # its provenance information.
            source = spack.store.layout.build_packages_path(node)
            source_repo_root = os.path.join(source, node.namespace)
            # If there's no provenance installed for the package, skip it.
            # If it's external, skip it because it either:
            # 1) it wasn't built with Spack, so it has no Spack metadata
            # 2) it was built by another Spack instance, and we do not
            # (currently) use Spack metadata to associate repos with externals
            # built by other Spack instances.
            # Spack can always get something current from the builtin repo.
            if node.external or not os.path.isdir(source_repo_root):
                continue
            # Create a source repo and get the pkg directory out of it.
            try:
                source_repo = spack.repo.Repo(source_repo_root)
                source_pkg_dir = source_repo.dirname_for_package_name(
                    node.name)
            except spack.repo.RepoError as err:
                # Provenance is best-effort: warn and carry on without it
                # (source_pkg_dir = None suppresses the copy below).
                tty.debug('Failed to create source repo for {0}: {1}'
                          .format(node.name, str(err)))
                source_pkg_dir = None
                tty.warn("Warning: Couldn't copy in provenance for {0}"
                         .format(node.name))
        # Create a destination repository for this node's namespace
        dest_repo_root = os.path.join(path, node.namespace)
        if not os.path.exists(dest_repo_root):
            spack.repo.create_repo(dest_repo_root)
        repo = spack.repo.Repo(dest_repo_root)
        # Get the location of the package in the dest repo.
        dest_pkg_dir = repo.dirname_for_package_name(node.name)
        if node is spec:
            # The root spec's provenance comes from the active repo path
            spack.repo.path.dump_provenance(node, dest_pkg_dir)
        elif source_pkg_dir:
            fs.install_tree(source_pkg_dir, dest_pkg_dir)
def get_dependent_ids(spec):
    """
    Return a list of package ids for the spec's dependents

    Args:
        spec (Spec): Concretized spec

    Returns:
        (list of str): list of package ids
    """
    dependent_pkgs = (dep.package for dep in spec.dependents())
    return [package_id(pkg) for pkg in dependent_pkgs]
def install_msg(name, pid):
    """
    Colorize the name/id of the package being installed

    Args:
        name (str): Name/id of the package being installed
        pid (id): id of the installer process

    Return:
        (str) Colorized installing message
    """
    # Only prefix the installer pid when pid display is enabled.
    if tty.show_pid():
        prefix = '{0}: '.format(pid)
    else:
        prefix = ''
    return prefix + colorize('@*{Installing} @*g{%s}' % name)
def log(pkg):
    """
    Copy provenance into the install directory on success

    Args:
        pkg (Package): the package that was built and installed
    """
    packages_dir = spack.store.layout.build_packages_path(pkg.spec)
    # Remove first if we're overwriting another build
    try:
        # log and env install paths are inside this
        shutil.rmtree(packages_dir)
    except Exception as e:
        # FIXME : this potentially catches too many things...
        tty.debug(e)
    # Archive the whole stdout + stderr for the package
    fs.install(pkg.log_path, pkg.install_log_path)
    # Archive all phase log paths
    for phase_log in pkg.phase_log_files:
        log_file = os.path.basename(phase_log)
        log_file = os.path.join(os.path.dirname(packages_dir), log_file)
        fs.install(phase_log, log_file)
    # Archive the environment used for the build
    fs.install(pkg.env_path, pkg.install_env_path)
    if os.path.exists(pkg.configure_args_path):
        # Archive the args used for the build
        fs.install(pkg.configure_args_path, pkg.install_configure_args_path)
    # Finally, archive files that are specific to each package
    with fs.working_dir(pkg.stage.path):
        errors = six.StringIO()
        target_dir = os.path.join(
            spack.store.layout.metadata_path(pkg.spec), 'archived-files')
        for glob_expr in pkg.archive_files:
            # Check that we are trying to copy things that are
            # in the stage tree (not arbitrary files)
            # NOTE(review): this is a substring containment test, not a
            # path-prefix check; e.g. '/tmp/stage2/x' would pass for a stage
            # of '/tmp/stage' — confirm whether a commonpath check was meant.
            abs_expr = os.path.realpath(glob_expr)
            if os.path.realpath(pkg.stage.path) not in abs_expr:
                errors.write('[OUTSIDE SOURCE PATH]: {0}\n'.format(glob_expr))
                continue
            # Now that we are sure that the path is within the correct
            # folder, make it relative and check for matches
            if os.path.isabs(glob_expr):
                glob_expr = os.path.relpath(glob_expr, pkg.stage.path)
            files = glob.glob(glob_expr)
            for f in files:
                try:
                    target = os.path.join(target_dir, f)
                    # We must ensure that the directory exists before
                    # copying a file in
                    fs.mkdirp(os.path.dirname(target))
                    fs.install(f, target)
                except Exception as e:
                    tty.debug(e)
                    # Here try to be conservative, and avoid discarding
                    # the whole install procedure because of copying a
                    # single file failed
                    errors.write('[FAILED TO ARCHIVE]: {0}'.format(f))
        if errors.getvalue():
            # Persist the archive problems next to the archived files
            error_file = os.path.join(target_dir, 'errors.txt')
            fs.mkdirp(target_dir)
            with open(error_file, 'w') as err:
                err.write(errors.getvalue())
            tty.warn('Errors occurred when archiving files.\n\t'
                     'See: {0}'.format(error_file))
    dump_packages(pkg.spec, packages_dir)
def package_id(pkg):
    """A "unique" package identifier for installation purposes

    The identifier is used to track build tasks, locks, install, and
    failure statuses.

    The identifier needs to distinguish between combinations of compilers
    and packages for combinatorial environments.

    Args:
        pkg (PackageBase): the package from which the identifier is derived
    """
    spec = pkg.spec
    # Only concrete specs have a stable DAG hash to embed in the id.
    if not spec.concrete:
        raise ValueError("Cannot provide a unique, readable id when "
                         "the spec is not concretized.")

    return "{0}-{1}-{2}".format(pkg.name, pkg.version, spec.dag_hash())
class PackageInstaller(object):
'''
Class for managing the install process for a Spack instance based on a
bottom-up DAG approach.
This installer can coordinate concurrent batch and interactive, local
and distributed (on a shared file system) builds for the same Spack
instance.
'''
def __init__(self, installs=[]):
""" Initialize the installer.
Args:
installs (list of (pkg, install_args)): list of tuples, where each
tuple consists of a package (PackageBase) and its associated
install arguments (dict)
Return:
(PackageInstaller) instance
"""
# List of build requests
self.build_requests = [BuildRequest(pkg, install_args)
for pkg, install_args in installs]
# Priority queue of build tasks
self.build_pq = []
# Mapping of unique package ids to build task
self.build_tasks = {}
# Cache of package locks for failed packages, keyed on package's ids
self.failed = {}
# Cache the PID for distributed build messaging
self.pid = os.getpid()
# Cache of installed packages' unique ids
self.installed = set()
# Data store layout
self.layout = spack.store.layout
# Locks on specs being built, keyed on the package's unique id
self.locks = {}
# Cache fail_fast option to ensure if one build request asks to fail
# fast then that option applies to all build requests.
self.fail_fast = False
def __repr__(self):
"""Returns a formal representation of the package installer."""
rep = '{0}('.format(self.__class__.__name__)
for attr, value in self.__dict__.items():
rep += '{0}={1}, '.format(attr, value.__repr__())
return '{0})'.format(rep.strip(', '))
def __str__(self):
"""Returns a printable version of the package installer."""
requests = '#requests={0}'.format(len(self.build_requests))
tasks = '#tasks={0}'.format(len(self.build_tasks))
failed = 'failed ({0}) = {1}'.format(len(self.failed), self.failed)
installed = 'installed ({0}) = {1}'.format(
len(self.installed), self.installed)
return '{0}: {1}; {2}; {3}; {4}'.format(
self.pid, requests, tasks, installed, failed)
def _add_bootstrap_compilers(
self, compiler, architecture, pkgs, request, all_deps):
"""
Add bootstrap compilers and dependencies to the build queue.
Args:
compiler: the compiler to boostrap
architecture: the architecture for which to bootstrap the compiler
pkgs (PackageBase): the package with possible compiler dependencies
request (BuildRequest): the associated install request
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
packages = _packages_needed_to_bootstrap_compiler(
compiler, architecture, pkgs)
for (comp_pkg, is_compiler) in packages:
if package_id(comp_pkg) not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
def _add_init_task(self, pkg, request, is_compiler, all_deps):
"""
Creates and queus the initial build task for the package.
Args:
pkg (Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
is_compiler (bool): whether task is for a bootstrap compiler
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED,
self.installed)
for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg))
self._push_task(task)
def _check_db(self, spec):
"""Determine if the spec is flagged as installed in the database
Args:
spec (Spec): spec whose database install status is being checked
Return:
(rec, installed_in_db) tuple where rec is the database record, or
None, if there is no matching spec, and installed_in_db is
``True`` if the spec is considered installed and ``False``
otherwise
"""
try:
rec = spack.store.db.get_record(spec)
installed_in_db = rec.installed if rec else False
except KeyError:
# KeyError is raised if there is no matching spec in the database
# (versus no matching specs that are installed).
rec = None
installed_in_db = False
return rec, installed_in_db
    def _check_deps_status(self, request):
        """Check the install status of the requested package

        Args:
            request (BuildRequest): the associated install request
        """
        err = 'Cannot proceed with {0}: {1}'
        for dep in request.traverse_dependencies():
            dep_pkg = dep.package
            dep_id = package_id(dep_pkg)
            # Check for failure since a prefix lock is not required
            if spack.store.db.prefix_failed(dep):
                action = "'spack install' the dependency"
                msg = '{0} is marked as an install failure: {1}' \
                    .format(dep_id, action)
                raise InstallError(err.format(request.pkg_id, msg))
            # Attempt to get a write lock to ensure another process does not
            # uninstall the dependency while the requested spec is being
            # installed
            ltype, lock = self._ensure_locked('write', dep_pkg)
            if lock is None:
                # A None lock means another process holds it: abort early.
                msg = '{0} is write locked by another process'.format(dep_id)
                raise InstallError(err.format(request.pkg_id, msg))
            # Flag external and upstream packages as being installed
            if dep_pkg.spec.external or dep_pkg.installed_upstream:
                self._flag_installed(dep_pkg)
                continue
            # Check the database to see if the dependency has been installed
            # and flag as such if appropriate.  A DB-installed dep is *not*
            # flagged when it is slated for overwrite and its recorded
            # installation predates the overwrite request.
            rec, installed_in_db = self._check_db(dep)
            if installed_in_db and (
                    dep.dag_hash() not in request.overwrite or
                    rec.installation_time > request.overwrite_time):
                tty.debug('Flagging {0} as installed per the database'
                          .format(dep_id))
                self._flag_installed(dep_pkg)
    def _prepare_for_install(self, task):
        """
        Check the database and leftover installation directories/files and
        prepare for a new install attempt for an uninstalled package.

        Preparation includes cleaning up installation and stage directories
        and ensuring the database is up-to-date.

        Args:
            task (BuildTask): the build task whose associated package is
                being checked
        """
        install_args = task.request.install_args
        keep_prefix = install_args.get('keep_prefix')
        keep_stage = install_args.get('keep_stage')
        restage = install_args.get('restage')
        # Make sure the package is ready to be locally installed.
        self._ensure_install_ready(task.pkg)
        # Skip file system operations if we've already gone through them for
        # this spec.
        if task.pkg_id in self.installed:
            # Already determined the spec has been installed
            return
        # Determine if the spec is flagged as installed in the database
        rec, installed_in_db = self._check_db(task.pkg.spec)
        if not installed_in_db:
            # Ensure there is no other installed spec with the same prefix dir
            if spack.store.db.is_occupied_install_prefix(task.pkg.spec.prefix):
                raise InstallError(
                    "Install prefix collision for {0}".format(task.pkg_id),
                    long_msg="Prefix directory {0} already used by another "
                    "installed spec.".format(task.pkg.spec.prefix))
            # Make sure the installation directory is in the desired state
            # for uninstalled specs.
            if os.path.isdir(task.pkg.spec.prefix):
                if not keep_prefix:
                    task.pkg.remove_prefix()
                else:
                    # keep_prefix: leave the partial prefix for debugging
                    tty.debug('{0} is partially installed'.format(task.pkg_id))
        # Destroy the stage for a locally installed, non-DIYStage, package
        if restage and task.pkg.stage.managed_by_spack:
            task.pkg.stage.destroy()
        # Consider the spec installed unless it is slated for overwrite and
        # its recorded installation predates the overwrite request.
        if installed_in_db and (
                rec.spec.dag_hash() not in task.request.overwrite or
                rec.installation_time > task.request.overwrite_time
        ):
            self._update_installed(task)
            # Only update the explicit entry once for the explicit package
            if task.explicit:
                spack.store.db.update_explicit(task.pkg.spec, True)
            # In case the stage directory has already been created, this
            # check ensures it is removed after we checked that the spec is
            # installed.
            if not keep_stage:
                task.pkg.stage.destroy()
def _cleanup_all_tasks(self):
    """Tear down every outstanding build task, releasing locks and
    failure markers along the way."""
    # Drop any prefix locks still held for packages we touched.
    for locked_id in self.locks:
        self._release_lock(locked_id)

    # Clear failure tracking for packages we marked as failed.
    for failed_id in self.failed:
        self._cleanup_failed(failed_id)

    # Snapshot the ids first since removal mutates the mapping.
    for task_id in list(self.build_tasks):
        try:
            self._remove_task(task_id)
        except Exception:
            # Best-effort cleanup: a task that cannot be removed is
            # simply left behind.
            pass
def _cleanup_failed(self, pkg_id):
    """
    Remove the persistent failure marker held for the package, if any.

    Args:
        pkg_id (str): identifier for the failed package
    """
    mark = self.failed.get(pkg_id, None)
    if mark is None:
        # No failure lock was ever taken for this package.
        return

    try:
        tty.verbose('Removing failure mark on {0}'.format(pkg_id))
        mark.release_write()
    except Exception as exc:
        # Failure-marker cleanup is best-effort; warn and continue.
        tty.warn("{0} exception when removing failure tracking for {1}: {2}"
                 .format(exc.__class__.__name__, pkg_id, str(exc)))
def _cleanup_task(self, pkg):
    """
    Remove the build task for ``pkg`` while retaining a read lock.

    Args:
        pkg (PackageBase): the package being installed
    """
    pkg_id = package_id(pkg)
    self._remove_task(pkg_id)

    # Hold a read lock so no other process can uninstall the spec
    # while we are still installing its dependents.
    self._ensure_locked('read', pkg)
def _ensure_install_ready(self, pkg):
    """
    Verify that ``pkg`` can be installed by this process: it must not
    be external or upstream, and its prefix must already be locked.

    Args:
        pkg (PackageBase): the package being locally installed

    Raises:
        ExternalPackageError: if the spec is external
        UpstreamPackageError: if the package is installed upstream
        InstallLockError: if no prefix lock is held for the package
    """
    pkg_id = package_id(pkg)
    pre = "{0} cannot be installed locally:".format(pkg_id)

    # Checks run in order; predicates are lazy so later ones are only
    # evaluated when the earlier ones pass.
    checks = (
        (lambda: pkg.spec.external, ExternalPackageError, 'is external'),
        (lambda: pkg.installed_upstream, UpstreamPackageError,
         'is upstream'),
        (lambda: pkg_id not in self.locks, InstallLockError, 'not locked'),
    )
    for predicate, error_cls, reason in checks:
        if predicate():
            raise error_cls('{0} {1}'.format(pre, reason))
def _ensure_locked(self, lock_type, pkg):
    """
    Add a prefix lock of the specified type for the package spec.

    If the lock exists, then adjust accordingly.  That is, read locks
    will be upgraded to write locks if a write lock is requested and
    write locks will be downgraded to read locks if a read lock is
    requested.

    The lock timeout for write locks is deliberately near zero seconds in
    order to ensure the current process proceeds as quickly as possible to
    the next spec.

    Args:
        lock_type (str): 'read' for a read lock, 'write' for a write lock
        pkg (PackageBase): the package whose spec is being installed

    Return:
        (lock_type, lock) tuple where lock will be None if it could not
            be obtained
    """
    assert lock_type in ['read', 'write'], \
        '"{0}" is not a supported package management lock type' \
        .format(lock_type)

    pkg_id = package_id(pkg)
    ltype, lock = self.locks.get(pkg_id, (lock_type, None))
    # Fast path: a lock of the requested type is already held.
    if lock and ltype == lock_type:
        return ltype, lock

    desc = '{0} lock'.format(lock_type)
    msg = '{0} a {1} on {2} with timeout {3}'
    err = 'Failed to {0} a {1} for {2} due to {3}: {4}'

    if lock_type == 'read':
        # Wait until the other process finishes if there are no more
        # build tasks with priority 0 (i.e., with no uninstalled
        # dependencies).
        no_p0 = len(self.build_tasks) == 0 or not self._next_is_pri0()
        timeout = None if no_p0 else 3
    else:
        timeout = 1e-9  # Near 0 to iterate through install specs quickly
    try:
        if lock is None:
            # No lock is held yet: take a brand-new prefix lock of the
            # requested type.
            tty.debug(msg.format('Acquiring', desc, pkg_id, timeout))
            op = 'acquire'
            lock = spack.store.db.prefix_lock(pkg.spec, timeout)
            if timeout != lock.default_timeout:
                tty.warn('Expected prefix lock timeout {0}, not {1}'
                         .format(timeout, lock.default_timeout))
            if lock_type == 'read':
                lock.acquire_read()
            else:
                lock.acquire_write()
        elif lock_type == 'read':  # write -> read
            # Only get here if the current lock is a write lock, which
            # must be downgraded to be a read lock
            # Retain the original lock timeout, which is in the lock's
            # default_timeout setting.
            tty.debug(msg.format('Downgrading to', desc, pkg_id,
                                 lock.default_timeout))
            op = 'downgrade to'
            lock.downgrade_write_to_read()
        else:  # read -> write
            # Only get here if the current lock is a read lock, which
            # must be upgraded to be a write lock
            tty.debug(msg.format('Upgrading to', desc, pkg_id, timeout))
            op = 'upgrade to'
            lock.upgrade_read_to_write(timeout)
        tty.verbose('{0} is now {1} locked'.format(pkg_id, lock_type))

    except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
        # Expected lock contention: report at debug level and signal
        # failure to the caller with a None lock.
        tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__,
                             str(exc)))
        lock = None

    except (Exception, KeyboardInterrupt, SystemExit) as exc:
        # Unexpected error: release all held resources before
        # propagating.
        tty.error(err.format(op, desc, pkg_id, exc.__class__.__name__,
                             str(exc)))
        self._cleanup_all_tasks()
        raise

    # Cache the (possibly None) lock so later calls can adjust it.
    self.locks[pkg_id] = (lock_type, lock)
    return self.locks[pkg_id]
def _add_tasks(self, request, all_deps):
    """Add tasks to the priority queue for the given build request.

    It also tracks all dependents associated with each dependency in
    order to ensure proper tracking of uninstalled dependencies.

    Args:
        request (BuildRequest): the associated install request
        all_deps (defaultdict(set)): dictionary of all dependencies and
            associated dependents
    """
    tty.debug('Initializing the build queue for {0}'
              .format(request.pkg.name))

    # Ensure not attempting to perform an installation when user didn't
    # want to go that far for the requested package.
    try:
        _check_last_phase(request.pkg)
    except BadInstallPhase as err:
        tty.warn('Installation request refused: {0}'.format(str(err)))
        return

    # Skip out early if the spec is not being installed locally (i.e., if
    # external or upstream).
    #
    # External and upstream packages need to get flagged as installed to
    # ensure proper status tracking for environment build.
    not_local = _handle_external_and_upstream(request.pkg, True)
    if not_local:
        self._flag_installed(request.pkg)
        return

    install_compilers = spack.config.get(
        'config:install_missing_compilers', False)

    install_deps = request.install_args.get('install_deps')
    # Bootstrap compilers first
    if install_deps and install_compilers:
        # Bucket the requested package and all of its dependencies by
        # (compiler, architecture) so each missing compiler can be
        # bootstrapped once per target.
        packages_per_compiler = {}

        for dep in request.traverse_dependencies():
            dep_pkg = dep.package
            compiler = dep_pkg.spec.compiler
            arch = dep_pkg.spec.architecture
            if compiler not in packages_per_compiler:
                packages_per_compiler[compiler] = {}

            if arch not in packages_per_compiler[compiler]:
                packages_per_compiler[compiler][arch] = []

            packages_per_compiler[compiler][arch].append(dep_pkg)

        # The requested package itself is added to the same buckets.
        compiler = request.pkg.spec.compiler
        arch = request.pkg.spec.architecture

        if compiler not in packages_per_compiler:
            packages_per_compiler[compiler] = {}

        if arch not in packages_per_compiler[compiler]:
            packages_per_compiler[compiler][arch] = []

        packages_per_compiler[compiler][arch].append(request.pkg)

        for compiler, archs in packages_per_compiler.items():
            for arch, packages in archs.items():
                self._add_bootstrap_compilers(
                    compiler, arch, packages, request, all_deps)

    if install_deps:
        for dep in request.traverse_dependencies():
            dep_pkg = dep.package

            dep_id = package_id(dep_pkg)
            if dep_id not in self.build_tasks:
                self._add_init_task(dep_pkg, request, False, all_deps)

            # Clear any persistent failure markings _unless_ they are
            # associated with another process in this parallel build
            # of the spec.
            spack.store.db.clear_failure(dep, force=False)

    install_package = request.install_args.get('install_package')
    if install_package and request.pkg_id not in self.build_tasks:

        # Be sure to clear any previous failure
        spack.store.db.clear_failure(request.spec, force=True)

        # If not installing dependencies, then determine their
        # installation status before proceeding
        if not install_deps:
            self._check_deps_status(request)

        # Now add the package itself, if appropriate
        self._add_init_task(request.pkg, request, False, all_deps)

    # Ensure if one request is to fail fast then all requests will.
    fail_fast = request.install_args.get('fail_fast')
    self.fail_fast = self.fail_fast or fail_fast
def _install_task(self, task):
    """
    Perform the installation of the requested spec and/or dependency
    represented by the build task.

    The binary cache is tried first (when enabled); otherwise the build
    runs in a child process and the result is recorded in the database
    by this (parent) process.

    Args:
        task (BuildTask): the installation build task for a package"""

    install_args = task.request.install_args
    cache_only = install_args.get('cache_only')
    explicit = task.explicit
    full_hash_match = install_args.get('full_hash_match')
    tests = install_args.get('tests')
    unsigned = install_args.get('unsigned')
    use_cache = install_args.get('use_cache')

    pkg, pkg_id = task.pkg, task.pkg_id

    tty.msg(install_msg(pkg_id, self.pid))
    # Keep the original start time across re-attempts.
    task.start = task.start or time.time()
    task.status = STATUS_INSTALLING

    # Use the binary cache if requested
    if use_cache and \
            _install_from_cache(pkg, cache_only, explicit, unsigned,
                                full_hash_match):
        self._update_installed(task)
        if task.compiler:
            spack.compilers.add_compilers_to_config(
                spack.compilers.find_compilers([pkg.spec.prefix]))
        return

    # ``tests`` may be True (test everything) or a collection of names.
    pkg.run_tests = (tests is True or tests and pkg.name in tests)

    # hook that allows tests to inspect the Package before installation
    # see unit_test_check() docs.
    if not pkg.unit_test_check():
        return

    try:
        self._setup_install_dir(pkg)

        # Create a child process to do the actual installation.
        # Preserve verbosity settings across installs.
        spack.package.PackageBase._verbose = (
            spack.build_environment.start_build_process(
                pkg, build_process, install_args)
        )

        # Note: PARENT of the build process adds the new package to
        # the database, so that we don't need to re-read from file.
        spack.store.db.add(pkg.spec, spack.store.layout,
                           explicit=explicit)

        # If a compiler, ensure it is added to the configuration
        if task.compiler:
            spack.compilers.add_compilers_to_config(
                spack.compilers.find_compilers([pkg.spec.prefix]))
    except spack.build_environment.StopPhase as e:
        # A StopPhase exception means that do_install was asked to
        # stop early from clients, and is not an error at this point
        pid = '{0}: '.format(pkg.pid) if tty.show_pid() else ''
        tty.debug('{0}{1}'.format(pid, str(e)))
        tty.debug('Package stage directory: {0}'
                  .format(pkg.stage.source_path))
def _next_is_pri0(self):
    """
    Determine if the next build task has priority 0 (i.e., it has no
    uninstalled dependencies).

    Return:
        bool: True if the queue is non-empty and its next task has
            priority 0, False otherwise
    """
    # Leverage the fact that the first entry in the queue is the next
    # one that will be processed.  Guard against an empty queue so an
    # IndexError cannot escape to callers.
    if not self.build_pq:
        return False
    return self.build_pq[0][1].priority == 0
def _pop_task(self):
    """
    Pop and return the highest-priority live build task, or None.

    Entries flagged as removed were left in the heap when invalidated
    and are lazily discarded here.

    Source: Variant of function at docs.python.org/2/library/heapq.html
    """
    while self.build_pq:
        _, candidate = heapq.heappop(self.build_pq)
        if candidate.status == STATUS_REMOVED:
            # Stale heap entry; skip it.
            continue
        del self.build_tasks[candidate.pkg_id]
        candidate.status = STATUS_DEQUEUED
        return candidate
    return None
def _push_task(self, task):
    """
    Queue (or re-queue) the build task for its package.

    Tasks whose packages are already recorded as installed or failed
    are dropped since that status may have been set by another process.

    Source: Customization of "add_task" function at
        docs.python.org/2/library/heapq.html

    Args:
        task (BuildTask): the installation build task for a package
    """
    pkg_id = task.pkg_id

    # Never (re-)queue a package another process already resolved.
    for resolved, reason in ((self.installed, 'installed'),
                             (self.failed, 'failed')):
        if pkg_id in resolved:
            tty.debug('Skipping requeue of task for {0}: {1}'
                      .format(pkg_id, reason))
            return

    # Drop any stale task first: the replacement gets a fresh sequence
    # number so it sorts after equal-priority peers.  This matters when
    # re-queueing a task whose priority dropped because one of its
    # dependencies finished installing.
    self._remove_task(pkg_id)

    verb = 'Queueing' if task.attempts == 0 else 'Requeueing'
    tty.verbose("{0} a build task for {1} with status '{2}'"
                .format(verb, pkg_id, task.status))

    self.build_tasks[pkg_id] = task
    heapq.heappush(self.build_pq, (task.key, task))
def _release_lock(self, pkg_id):
    """
    Release any lock held on the package's prefix.

    Args:
        pkg_id (str): identifier for the package whose lock is be released
    """
    if pkg_id not in self.locks:
        return

    ltype, lock = self.locks[pkg_id]
    if lock is None:
        # The lock could not be obtained earlier; nothing to release.
        return

    release = lock.release_read if ltype == 'read' else lock.release_write
    try:
        tty.verbose('Releasing {0} lock on {1}'.format(ltype, pkg_id))
        release()
    except Exception as exc:
        # Releasing is best-effort during cleanup; warn and move on.
        tty.warn("{0} exception when releasing {1} lock for {2}: {3}"
                 .format(exc.__class__.__name__, ltype,
                         pkg_id, str(exc)))
def _remove_task(self, pkg_id):
    """
    Flag the package's build task as removed and return it.

    The task is popped from the task map but intentionally left in the
    priority queue; ``_pop_task`` discards removed entries lazily.

    Source: Variant of function at docs.python.org/2/library/heapq.html

    Args:
        pkg_id (str): identifier for the package to be removed

    Returns:
        BuildTask or None: the removed task, or None if none existed
    """
    task = self.build_tasks.pop(pkg_id, None)
    if task is None:
        return None

    tty.verbose('Removing build task for {0} from list'
                .format(pkg_id))
    task.status = STATUS_REMOVED
    return task
def _requeue_task(self, task):
    """
    Requeues a task that appears to be in progress by another process.

    Args:
        task (BuildTask): the installation build task for a package
    """
    # Only announce the hand-off the first time we notice it.
    if task.status not in (STATUS_INSTALLED, STATUS_INSTALLING):
        tty.debug('{0} {1}'.format(install_msg(task.pkg_id, self.pid),
                                   'in progress by another process'))

    requeued = task.next_attempt(self.installed)
    requeued.status = STATUS_INSTALLING
    self._push_task(requeued)
def _setup_install_dir(self, pkg):
    """
    Create and ensure proper access controls for the install directory.
    Write a small metadata file with the current spack environment.

    Args:
        pkg (Package): the package to be built and installed
    """
    if not os.path.exists(pkg.spec.prefix):
        # Fresh install: let the layout create the prefix tree.
        tty.verbose('Creating the installation directory {0}'
                    .format(pkg.spec.prefix))
        spack.store.layout.create_install_directory(pkg.spec)
    else:
        # Prefix already exists (e.g. partial install): fix ownership
        # and permissions instead of recreating it.
        # Set the proper group for the prefix
        group = prefs.get_package_group(pkg.spec)
        if group:
            fs.chgrp(pkg.spec.prefix, group)

        # Set the proper permissions.
        # This has to be done after group because changing groups blows
        # away the sticky group bit on the directory
        mode = os.stat(pkg.spec.prefix).st_mode
        perms = prefs.get_package_dir_permissions(pkg.spec)
        if mode != perms:
            os.chmod(pkg.spec.prefix, perms)

        # Ensure the metadata path exists as well
        fs.mkdirp(spack.store.layout.metadata_path(pkg.spec), mode=perms)

    # Always write host environment - we assume this can change
    spack.store.layout.write_host_environment(pkg.spec)
def _update_failed(self, task, mark=False, exc=None):
    """
    Mark the task -- and, recursively, its dependents -- as failed and
    remove their build tasks; optionally persist the failure marker.

    Args:
        task (BuildTask): the build task for the failed package
        mark (bool): ``True`` if the package and its dependencies are to
            be marked as "failed", otherwise, ``False``
        exc (Exception): optional exception if associated with the failure
    """
    pkg_id = task.pkg_id
    suffix = ': {0}'.format(str(exc)) if exc is not None else ''
    tty.debug('Flagging {0} as failed{1}'.format(pkg_id, suffix))

    # Persist the failure marker in the database only when requested.
    self.failed[pkg_id] = (
        spack.store.db.mark_failed(task.pkg.spec) if mark else None)
    task.status = STATUS_FAILED

    # Cascade the failure through every dependent with a pending task.
    # Note the triggering exception is not propagated to dependents.
    for dep_id in task.dependents:
        dep_task = self.build_tasks.get(dep_id)
        if dep_task is None:
            tty.verbose('No build task for {0} to skip since {1} failed'
                        .format(dep_id, pkg_id))
            continue

        tty.warn('Skipping build of {0} since {1} failed'
                 .format(dep_id, pkg_id))
        # Ensure the dependent's uninstalled dependents are
        # up-to-date and their build tasks removed.
        self._update_failed(dep_task, mark)
        self._remove_task(dep_id)
def _update_installed(self, task):
    """
    Mark the task's package as installed and notify dependent tasks.

    Args:
        task (BuildTask): the build task for the installed package
    """
    task.status = STATUS_INSTALLED
    # Let every known dependent task know this dependency is done.
    self._flag_installed(task.pkg, task.dependents)
def _flag_installed(self, pkg, dependent_ids=None):
    """
    Record the package as installed and update the build tasks of all
    known dependents.

    Args:
        pkg (Package): Package that has been installed locally, externally
            or upstream
        dependent_ids (list of str or None): list of the package's
            dependent ids, or None if the dependent ids are limited to
            those maintained in the package (dependency DAG)
    """
    pkg_id = package_id(pkg)

    if pkg_id in self.installed:
        # The installation was already recorded; nothing more to do.
        return

    tty.debug('Flagging {0} as installed'.format(pkg_id))
    self.installed.add(pkg_id)

    # Fall back to the dependency DAG when no explicit list was given.
    if not dependent_ids:
        dependent_ids = get_dependent_ids(pkg.spec)

    for dep_id in set(dependent_ids):
        tty.debug('Removing {0} from {1}\'s uninstalled dependencies.'
                  .format(pkg_id, dep_id))

        dep_task = self.build_tasks.get(dep_id)
        if dep_task is None:
            tty.debug('{0} has no build task to update for {1}\'s success'
                      .format(dep_id, pkg_id))
        else:
            # Re-queue with refreshed uninstalled-dependency
            # bookkeeping (and hence a possibly lower priority).
            self._push_task(dep_task.next_attempt(self.installed))
def _init_queue(self):
    """Initialize the build queue from the list of build requests."""
    dependents_by_dep = defaultdict(set)

    tty.debug('Initializing the build queue from the build requests')
    for request in self.build_requests:
        self._add_tasks(request, dependents_by_dep)

    # Requests may share dependencies, so make sure each dependency's
    # task also knows about dependents contributed by *other* requests.
    tty.debug('Ensure all dependencies know all dependents across specs')
    for dep_id, dependents in dependents_by_dep.items():
        task = self.build_tasks.get(dep_id)
        if task is None:
            continue
        for missing_id in dependents.difference(task.dependents):
            task.add_dependent(missing_id)
def install(self):
    """
    Install the requested package(s) and/or associated dependencies.

    Pops build tasks off the priority queue, acquiring per-spec prefix
    locks and re-queueing tasks whose status is being determined by
    another process, until the queue is drained.

    Raises:
        InstallError: if installation cannot proceed or any explicitly
            requested spec fails to install
    """
    self._init_queue()

    fail_fast_err = 'Terminating after first install failure'
    single_explicit_spec = len(self.build_requests) == 1
    failed_explicits = []
    exists_errors = []

    while self.build_pq:
        task = self._pop_task()
        if task is None:
            continue

        spack.hooks.on_install_start(task.request.pkg.spec)
        install_args = task.request.install_args
        keep_prefix = install_args.get('keep_prefix')

        pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec

        tty.verbose('Processing {0}: task={1}'.format(pkg_id, task))
        # Ensure that the current spec has NO uninstalled dependencies,
        # which is assumed to be reflected directly in its priority.
        #
        # If the spec has uninstalled dependencies, then there must be
        # a bug in the code (e.g., priority queue or uninstalled
        # dependencies handling). So terminate under the assumption that
        # all subsequent tasks will have non-zero priorities or may be
        # dependencies of this task.
        if task.priority != 0:
            tty.error('Detected uninstalled dependencies for {0}: {1}'
                      .format(pkg_id, task.uninstalled_deps))
            left = [dep_id for dep_id in task.uninstalled_deps if
                    dep_id not in self.installed]
            if not left:
                tty.warn('{0} does NOT actually have any uninstalled deps'
                         ' left'.format(pkg_id))
            dep_str = 'dependencies' if task.priority > 1 else 'dependency'

            # Hook to indicate task failure, but without an exception
            spack.hooks.on_install_failure(task.request.pkg.spec)

            raise InstallError(
                'Cannot proceed with {0}: {1} uninstalled {2}: {3}'
                .format(pkg_id, task.priority, dep_str,
                        ','.join(task.uninstalled_deps)))

        # Skip the installation if the spec is not being installed locally
        # (i.e., if external or upstream) BUT flag it as installed since
        # some package likely depends on it.
        if not task.explicit:
            if _handle_external_and_upstream(pkg, False):
                self._flag_installed(pkg, task.dependents)
                continue

        # Flag a failed spec. Do not need an (install) prefix lock since
        # assume using a separate (failed) prefix lock file.
        if pkg_id in self.failed or spack.store.db.prefix_failed(spec):
            tty.warn('{0} failed to install'.format(pkg_id))
            self._update_failed(task)

            # Mark that the package failed
            # TODO: this should also be for the task.pkg, but we don't
            # model transitive yet.
            spack.hooks.on_install_failure(task.request.pkg.spec)

            if self.fail_fast:
                raise InstallError(fail_fast_err)

            continue

        # Attempt to get a write lock. If we can't get the lock then
        # another process is likely (un)installing the spec or has
        # determined the spec has already been installed (though the
        # other process may be hung).
        ltype, lock = self._ensure_locked('write', pkg)
        if lock is None:
            # Attempt to get a read lock instead. If this fails then
            # another process has a write lock so must be (un)installing
            # the spec (or that process is hung).
            ltype, lock = self._ensure_locked('read', pkg)

        # Requeue the spec if we cannot get at least a read lock so we
        # can check the status presumably established by another process
        # -- failed, installed, or uninstalled -- on the next pass.
        if lock is None:
            self._requeue_task(task)
            continue

        # Take a timestamp with the overwrite argument to allow checking
        # whether another process has already overridden the package.
        if task.request.overwrite and task.explicit:
            task.request.overwrite_time = time.time()

        # Determine state of installation artifacts and adjust accordingly.
        self._prepare_for_install(task)

        # Flag an already installed package
        if pkg_id in self.installed:
            # Downgrade to a read lock to preclude other processes from
            # uninstalling the package until we're done installing its
            # dependents.
            ltype, lock = self._ensure_locked('read', pkg)
            if lock is not None:
                self._update_installed(task)
                _print_installed_pkg(pkg.prefix)

                # It's an already installed compiler, add it to the config
                if task.compiler:
                    spack.compilers.add_compilers_to_config(
                        spack.compilers.find_compilers([pkg.spec.prefix]))

            else:
                # At this point we've failed to get a write or a read
                # lock, which means another process has taken a write
                # lock between our releasing the write and acquiring the
                # read.
                #
                # Requeue the task so we can re-check the status
                # established by the other process -- failed, installed,
                # or uninstalled -- on the next pass.
                self.installed.remove(pkg_id)
                self._requeue_task(task)
            continue

        # Having a read lock on an uninstalled pkg may mean another
        # process completed an uninstall of the software between the
        # time we failed to acquire the write lock and the time we
        # took the read lock.
        #
        # Requeue the task so we can check the status presumably
        # established by the other process -- failed, installed, or
        # uninstalled -- on the next pass.
        if ltype == 'read':
            self._requeue_task(task)
            continue

        # Proceed with the installation since we have an exclusive write
        # lock on the package.
        try:
            if pkg.spec.dag_hash() in task.request.overwrite:
                rec, _ = self._check_db(pkg.spec)
                if rec and rec.installed:
                    if rec.installation_time < task.request.overwrite_time:
                        # If it's actually overwriting, do a fs transaction
                        if os.path.exists(rec.path):
                            with fs.replace_directory_transaction(
                                    rec.path):
                                self._install_task(task)
                        else:
                            tty.debug("Missing installation to overwrite")
                            self._install_task(task)
                else:
                    # overwriting nothing
                    self._install_task(task)
            else:
                self._install_task(task)

            self._update_installed(task)

            # If we installed then we should keep the prefix
            stop_before_phase = getattr(pkg, 'stop_before_phase', None)
            last_phase = getattr(pkg, 'last_phase', None)
            keep_prefix = keep_prefix or \
                (stop_before_phase is None and last_phase is None)

        except spack.directory_layout.InstallDirectoryAlreadyExistsError \
                as exc:
            tty.debug('Install prefix for {0} exists, keeping {1} in '
                      'place.'.format(pkg.name, pkg.prefix))
            self._update_installed(task)

            # Only terminate at this point if a single build request was
            # made.
            if task.explicit and single_explicit_spec:
                spack.hooks.on_install_failure(task.request.pkg.spec)
                raise

            if task.explicit:
                exists_errors.append((pkg_id, str(exc)))

        except KeyboardInterrupt as exc:
            # The build has been terminated with a Ctrl-C so terminate
            # regardless of the number of remaining specs.
            err = 'Failed to install {0} due to {1}: {2}'
            tty.error(err.format(pkg.name, exc.__class__.__name__,
                                 str(exc)))
            spack.hooks.on_install_failure(task.request.pkg.spec)
            raise

        except (Exception, SystemExit) as exc:
            self._update_failed(task, True, exc)
            spack.hooks.on_install_failure(task.request.pkg.spec)

            # Best effort installs suppress the exception and mark the
            # package as a failure.
            if (not isinstance(exc, spack.error.SpackError) or
                    not exc.printed):
                exc.printed = True
                # SpackErrors can be printed by the build process or at
                # lower levels -- skip printing if already printed.
                # TODO: sort out this and SpackError.print_context()
                tty.error('Failed to install {0} due to {1}: {2}'
                          .format(pkg.name, exc.__class__.__name__,
                                  str(exc)))
            # Terminate if requested to do so on the first failure.
            if self.fail_fast:
                raise InstallError('{0}: {1}'
                                   .format(fail_fast_err, str(exc)))

            # Terminate at this point if the single explicit spec has
            # failed to install.
            if single_explicit_spec and task.explicit:
                raise

            # Track explicit spec id and error to summarize when done
            if task.explicit:
                failed_explicits.append((pkg_id, str(exc)))

        finally:
            # Remove the install prefix if anything went wrong during
            # install.
            if not keep_prefix:
                pkg.remove_prefix()

            # The subprocess *may* have removed the build stage. Mark it
            # not created so that the next time pkg.stage is invoked, we
            # check the filesystem for it.
            pkg.stage.created = False

        # Perform basic task cleanup for the installed spec to
        # include downgrading the write to a read lock
        self._cleanup_task(pkg)

    # Cleanup, which includes releasing all of the read locks
    self._cleanup_all_tasks()

    # Ensure we properly report if one or more explicit specs failed
    # or were not installed when should have been.
    missing = [request.pkg_id for request in self.build_requests if
               request.install_args.get('install_package') and
               request.pkg_id not in self.installed]
    if exists_errors or failed_explicits or missing:
        for pkg_id, err in exists_errors:
            tty.error('{0}: {1}'.format(pkg_id, err))

        for pkg_id, err in failed_explicits:
            tty.error('{0}: {1}'.format(pkg_id, err))

        for pkg_id in missing:
            tty.error('{0}: Package was not installed'.format(pkg_id))

        raise InstallError('Installation request failed. Refer to '
                           'reported errors for failing package(s).')
def build_process(pkg, kwargs):
    """Perform the installation/build of the package.

    This runs in a separate child process, and has its own process and
    python module space set up by build_environment.start_build_process().

    This function's return value is returned to the parent process.
    """
    fake = kwargs.get('fake', False)
    install_source = kwargs.get('install_source', False)
    keep_stage = kwargs.get('keep_stage', False)
    skip_patch = kwargs.get('skip_patch', False)
    unmodified_env = kwargs.get('unmodified_env', {})
    verbose = kwargs.get('verbose', False)

    timer = Timer()

    # Stage (and optionally patch) the source unless faking the install.
    if not fake:
        if not skip_patch:
            pkg.do_patch()
        else:
            pkg.do_stage()

    pid = '{0}: '.format(pkg.pid) if tty.show_pid() else ''
    pre = '{0}{1}:'.format(pid, pkg.name)
    pkg_id = package_id(pkg)

    tty.debug('{0} Building {1} [{2}]'
              .format(pre, pkg_id, pkg.build_system_class))

    # get verbosity from do_install() parameter or saved value
    echo = verbose
    if spack.package.PackageBase._verbose is not None:
        echo = spack.package.PackageBase._verbose

    pkg.stage.keep = keep_stage
    with pkg.stage:
        # Run the pre-install hook in the child process after
        # the directory is created.
        spack.hooks.pre_install(pkg.spec)
        if fake:
            _do_fake_install(pkg)
        else:
            source_path = pkg.stage.source_path
            if install_source and os.path.isdir(source_path):
                src_target = os.path.join(pkg.spec.prefix, 'share',
                                          pkg.name, 'src')
                tty.debug('{0} Copying source to {1}'
                          .format(pre, src_target))
                fs.install_tree(pkg.stage.source_path, src_target)

            # Do the real install in the source directory.
            with fs.working_dir(pkg.stage.source_path):
                # Save the build environment in a file before building.
                dump_environment(pkg.env_path)

                # Write the package's configure/cmake arguments to a
                # file; the first attribute that exists and succeeds
                # wins.
                for attr in ('configure_args', 'cmake_args'):
                    try:
                        configure_args = getattr(pkg, attr)()
                        configure_args = ' '.join(configure_args)

                        with open(pkg.configure_args_path, 'w') as \
                                args_file:
                            args_file.write(configure_args)

                        break
                    except Exception:
                        pass

                # cache debug settings
                debug_level = tty.debug_level()

                # Spawn a daemon that reads from a pipe and redirects
                # everything to log_path, and provide the phase for logging
                for i, (phase_name, phase_attr) in enumerate(zip(
                        pkg.phases, pkg._InstallPhase_phases)):

                    # Keep a log file for each phase
                    log_dir = os.path.dirname(pkg.log_path)
                    log_file = "spack-build-%02d-%s-out.txt" % (
                        i + 1, phase_name.lower()
                    )
                    log_file = os.path.join(log_dir, log_file)

                    try:
                        # DEBUGGING TIP - to debug this section, insert an IPython
                        # embed here, and run the sections below without log capture
                        with log_output(log_file, echo, True,
                                        env=unmodified_env) as logger:

                            with logger.force_echo():
                                inner_debug_level = tty.debug_level()
                                tty.set_debug(debug_level)
                                tty.msg("{0} Executing phase: '{1}'"
                                        .format(pre, phase_name))
                                tty.set_debug(inner_debug_level)

                            # Redirect stdout and stderr to daemon pipe
                            phase = getattr(pkg, phase_attr)
                            timer.phase(phase_name)

                            # Catch any errors to report to logging
                            phase(pkg.spec, pkg.prefix)
                            spack.hooks.on_phase_success(pkg, phase_name, log_file)

                    except BaseException:
                        combine_phase_logs(pkg.phase_log_files, pkg.log_path)
                        spack.hooks.on_phase_error(pkg, phase_name, log_file)
                        raise

                    # We assume loggers share echo True/False
                    echo = logger.echo

        # After log, we can get all output/error files from the package stage
        combine_phase_logs(pkg.phase_log_files, pkg.log_path)
        log(pkg)

    # Stop the timer and save results
    timer.stop()
    with open(pkg.times_log_path, 'w') as timelog:
        timer.write_json(timelog)

    # Run post install hooks before build stage is removed.
    spack.hooks.post_install(pkg.spec)

    build_time = timer.total - pkg._fetch_time
    tty.msg('{0} Successfully installed {1}'.format(pre, pkg_id),
            'Fetch: {0}. Build: {1}. Total: {2}.'
            .format(_hms(pkg._fetch_time), _hms(build_time),
                    _hms(timer.total)))
    _print_installed_pkg(pkg.prefix)

    # Send final status that install is successful
    spack.hooks.on_install_success(pkg.spec)

    # preserve verbosity across runs
    return echo
class BuildTask(object):
"""Class for representing the build task for a package."""
def __init__(self, pkg, request, compiler, start, attempts, status,
             installed):
    """
    Instantiate a build task for a package.

    Args:
        pkg (Package): the package to be built and installed
        request (BuildRequest or None): the associated install request
            where ``None`` can be used to indicate the package was
            explicitly requested by the user
        compiler (bool): whether task is for a bootstrap compiler
        start (int): the initial start time for the package, in seconds
        attempts (int): the number of attempts to install the package
            (NOTE: this argument is currently not consulted -- attempts
            is reset to 0 below and ``_update`` bumps it to 1)
        status (str): the installation status
        installed (list of str): the identifiers of packages that have
            been installed so far
    """

    # Ensure dealing with a package that has a concrete spec
    if not isinstance(pkg, spack.package.PackageBase):
        raise ValueError("{0} must be a package".format(str(pkg)))

    self.pkg = pkg
    if not self.pkg.spec.concrete:
        raise ValueError("{0} must have a concrete spec"
                         .format(self.pkg.name))

    # The "unique" identifier for the task's package
    self.pkg_id = package_id(self.pkg)

    # The explicit build request associated with the package
    if not isinstance(request, BuildRequest):
        raise ValueError("{0} must have a build request".format(str(pkg)))

    self.request = request

    # Initialize the status to an active state.  The status is used to
    # ensure priority queue invariants when tasks are "removed" from the
    # queue.
    if status == STATUS_REMOVED:
        msg = "Cannot create a build task for {0} with status '{1}'"
        raise InstallError(msg.format(self.pkg_id, status))

    self.status = status

    # Package is associated with a bootstrap compiler
    self.compiler = compiler

    # The initial start time for processing the spec
    self.start = start

    # Set of dependents, which needs to include the requesting package
    # to support tracking of parallel, multi-spec, environment installs.
    self.dependents = set(get_dependent_ids(self.pkg.spec))

    tty.debug(
        'Pkg id {0} has the following dependents:'.format(self.pkg_id))
    for dep_id in self.dependents:
        tty.debug('- {0}'.format(dep_id))

    # Set of dependencies
    #
    # Be consistent wrt use of dependents and dependencies.  That is,
    # if use traverse for transitive dependencies, then must remove
    # transitive dependents on failure.
    deptypes = self.request.get_deptypes(self.pkg)
    self.dependencies = set(package_id(d.package) for d in
                            self.pkg.spec.dependencies(deptype=deptypes)
                            if package_id(d.package) != self.pkg_id)

    # Handle bootstrapped compiler
    #
    # The bootstrapped compiler is not a dependency in the spec, but it is
    # a dependency of the build task.  Here we add it to self.dependencies
    compiler_spec = self.pkg.spec.compiler
    arch_spec = self.pkg.spec.architecture
    if not spack.compilers.compilers_for_spec(compiler_spec,
                                              arch_spec=arch_spec):
        # The compiler is in the queue, identify it as dependency
        dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
        dep.constrain('platform=%s' % str(arch_spec.platform))
        dep.constrain('os=%s' % str(arch_spec.os))
        dep.constrain('target=%s:' %
                      arch_spec.target.microarchitecture.family.name)
        dep.concretize()
        dep_id = package_id(dep.package)
        self.dependencies.add(dep_id)

    # List of uninstalled dependencies, which is used to establish
    # the priority of the build task.
    #
    self.uninstalled_deps = set(pkg_id for pkg_id in self.dependencies if
                                pkg_id not in installed)

    # Ensure key sequence-related properties are updated accordingly.
    self.attempts = 0
    self._update()
    # All rich comparisons delegate to ``key`` (priority, sequence) so tasks
    # order deterministically when stored in a priority queue / heap.
    def __eq__(self, other):
        """Tasks are equal when their (priority, sequence) keys match."""
        return self.key == other.key
    def __ge__(self, other):
        """Order tasks by their (priority, sequence) keys."""
        return self.key >= other.key
    def __gt__(self, other):
        """Order tasks by their (priority, sequence) keys."""
        return self.key > other.key
    def __le__(self, other):
        """Order tasks by their (priority, sequence) keys."""
        return self.key <= other.key
    def __lt__(self, other):
        """Order tasks by their (priority, sequence) keys."""
        return self.key < other.key
    def __ne__(self, other):
        """Tasks differ when their (priority, sequence) keys differ."""
        return self.key != other.key
    def __repr__(self):
        """Returns a formal representation of the build task."""
        rep = '{0}('.format(self.__class__.__name__)
        # Dump every instance attribute as attr=repr(value) pairs.
        for attr, value in self.__dict__.items():
            rep += '{0}={1}, '.format(attr, value.__repr__())
        # strip(', ') drops the trailing separator left by the loop.
        return '{0})'.format(rep.strip(', '))
    def __str__(self):
        """Returns a printable version of the build task."""
        # Only the dependency *count* is shown to keep the string short.
        dependencies = '#dependencies={0}'.format(len(self.dependencies))
        return ('priority={0}, status={1}, start={2}, {3}'
                .format(self.priority, self.status, self.start, dependencies))
def _update(self):
"""Update properties associated with a new instance of a task."""
# Number of times the task has/will be queued
self.attempts = self.attempts + 1
# Ensure the task gets a unique sequence number to preserve the
# order in which it is added.
self.sequence = next(_counter)
def add_dependent(self, pkg_id):
"""
Ensure the dependent package id is in the task's list so it will be
properly updated when this package is installed.
Args:
pkg_id (str): package identifier of the dependent package
"""
if pkg_id != self.pkg_id and pkg_id not in self.dependents:
tty.debug('Adding {0} as a dependent of {1}'
.format(pkg_id, self.pkg_id))
self.dependents.add(pkg_id)
def flag_installed(self, installed):
"""
Ensure the dependency is not considered to still be uninstalled.
Args:
installed (list of str): the identifiers of packages that have
been installed so far
"""
now_installed = self.uninstalled_deps & set(installed)
for pkg_id in now_installed:
self.uninstalled_deps.remove(pkg_id)
tty.debug('{0}: Removed {1} from uninstalled deps list: {2}'
.format(self.pkg_id, pkg_id, self.uninstalled_deps))
    @property
    def explicit(self):
        """The package was explicitly requested by the user."""
        # Only the root package of the request is explicit; dependencies
        # built for it are implicit.
        return self.pkg == self.request.pkg
    @property
    def key(self):
        """The key is the tuple (# uninstalled dependencies, sequence)."""
        # Used by the rich-comparison dunders; smaller keys sort first.
        return (self.priority, self.sequence)
def next_attempt(self, installed):
"""Create a new, updated task for the next installation attempt."""
task = copy.copy(self)
task._update()
task.start = self.start or time.time()
task.flag_installed(installed)
return task
    @property
    def priority(self):
        """The priority is based on the remaining uninstalled dependencies."""
        # Fewer uninstalled dependencies -> smaller value -> sorts earlier
        # (see the key-based comparison operators).
        return len(self.uninstalled_deps)
class BuildRequest(object):
    """Class for representing an installation request."""
    def __init__(self, pkg, install_args):
        """
        Instantiate a build request for a package.
        Args:
            pkg (Package): the package to be built and installed
            install_args (dict): the install arguments associated with ``pkg``
        """
        # Ensure dealing with a package that has a concrete spec
        if not isinstance(pkg, spack.package.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))
        self.pkg = pkg
        if not self.pkg.spec.concrete:
            raise ValueError("{0} must have a concrete spec"
                             .format(self.pkg.name))
        # Cache the package phase options with the explicit package,
        # popping the options to ensure installation of associated
        # dependencies is NOT affected by these options.
        self.pkg.stop_before_phase = install_args.pop('stop_before', None)
        self.pkg.last_phase = install_args.pop('stop_at', None)
        # Cache the package id for convenience
        self.pkg_id = package_id(pkg)
        # Save off the original install arguments plus standard defaults
        # since they apply to the requested package *and* dependencies.
        self.install_args = install_args or {}
        self._add_default_args()
        # Cache overwrite information
        self.overwrite = set(self.install_args.get('overwrite', []))
        self.overwrite_time = time.time()
        # Save off dependency package ids for quick checks since traversals
        # are not able to return full dependents for all packages across
        # environment specs.
        deptypes = self.get_deptypes(self.pkg)
        self.dependencies = set(
            package_id(d.package)
            for d in self.pkg.spec.dependencies(deptype=deptypes)
            if package_id(d.package) != self.pkg_id)
    def __repr__(self):
        """Returns a formal representation of the build request."""
        rep = '{0}('.format(self.__class__.__name__)
        for attr, value in self.__dict__.items():
            rep += '{0}={1}, '.format(attr, value.__repr__())
        # strip(', ') drops the trailing separator left by the loop.
        return '{0})'.format(rep.strip(', '))
    def __str__(self):
        """Returns a printable version of the build request."""
        return 'package={0}, install_args={1}' \
            .format(self.pkg.name, self.install_args)
    def _add_default_args(self):
        """Ensure standard install options are set to at least the default."""
        for arg, default in [('cache_only', False),
                             ('context', 'build'),  # installs *always* build
                             ('dirty', False),
                             ('fail_fast', False),
                             ('fake', False),
                             ('full_hash_match', False),
                             ('install_deps', True),
                             ('install_package', True),
                             ('install_source', False),
                             ('keep_prefix', False),
                             ('keep_stage', False),
                             ('restage', False),
                             ('skip_patch', False),
                             ('tests', False),
                             ('unsigned', False),
                             ('use_cache', True),
                             ('verbose', False), ]:
            _ = self.install_args.setdefault(arg, default)
    def get_deptypes(self, pkg):
        """Determine the required dependency types for the associated package.
        Args:
            pkg (PackageBase): explicit or implicit package being installed
        Returns:
            (tuple) required dependency type(s) for the package
        """
        deptypes = ['link', 'run']
        include_build_deps = self.install_args.get('include_build_deps')
        # Build deps are needed unless installing purely from binary cache
        # (and even then when explicitly requested).
        if not self.install_args.get('cache_only') or include_build_deps:
            deptypes.append('build')
        if self.run_tests(pkg):
            deptypes.append('test')
        return tuple(sorted(deptypes))
    def has_dependency(self, dep_id):
        """Returns ``True`` if the package id represents a known dependency
        of the requested package, ``False`` otherwise."""
        return dep_id in self.dependencies
    def run_tests(self, pkg):
        """Determine if the tests should be run for the provided packages
        Args:
            pkg (PackageBase): explicit or implicit package being installed
        Returns:
            (bool) ``True`` if they should be run; ``False`` otherwise
        """
        # ``tests`` may be True (test everything) or a collection of
        # package names to test.
        tests = self.install_args.get('tests', False)
        return tests is True or (tests and pkg.name in tests)
    @property
    def spec(self):
        """The specification associated with the package."""
        return self.pkg.spec
    def traverse_dependencies(self):
        """
        Yield any dependencies of the appropriate type(s)
        Yields:
            (Spec) The next child spec in the DAG
        """
        deptypes = self.get_deptypes(self.pkg)
        tty.debug('Processing dependencies for {0}: {1}'
                  .format(self.pkg_id, deptypes))
        # Post-order traversal of the children; each edge carries the
        # dependency spec (replaces a pointless ``lambda`` indirection).
        for dspec in self.spec.traverse_edges(
                deptype=deptypes, order='post', root=False,
                direction='children'):
            yield dspec.spec
class InstallError(spack.error.SpackError):
    """Raised when something goes wrong during install or uninstall."""
    def __init__(self, message, long_msg=None):
        # Forward both the short and optional long message to SpackError.
        super(InstallError, self).__init__(message, long_msg)
class BadInstallPhase(InstallError):
    """Raised when an install phase option is not allowed for a package."""
    def __init__(self, pkg_name, phase):
        super(BadInstallPhase, self).__init__(
            '\'{0}\' is not a valid phase for package {1}'
            .format(phase, pkg_name))
class ExternalPackageError(InstallError):
    """Raised by install() when a package is only for external use."""
class InstallLockError(InstallError):
    """Raised during install when something goes wrong with package locking."""
class UpstreamPackageError(InstallError):
    """Raised during install when something goes wrong with an upstream
    package."""
| 39.459982 | 84 | 0.600914 |
92a48888f3684ea726a941442827c5b22e80e9a9 | 140 | py | Python | cartomap/__init__.py | mrinalghosh/cartomap | 741c5916ad180b382dd1e60e5c8bb5168899c878 | [
"MIT"
] | 1 | 2020-12-09T05:39:28.000Z | 2020-12-09T05:39:28.000Z | cartomap/__init__.py | mrinalghosh/cartomap | 741c5916ad180b382dd1e60e5c8bb5168899c878 | [
"MIT"
] | null | null | null | cartomap/__init__.py | mrinalghosh/cartomap | 741c5916ad180b382dd1e60e5c8bb5168899c878 | [
"MIT"
] | null | null | null | from .geogmap import plotCartoMap # noqa: F401
from .geogmap import plotKeogram # noqa: F401
from .geogmap import plotSlice # noqa: F401
| 35 | 47 | 0.764286 |
cc0aa50c9fe0afe1b684ba1ef4fa942b75f7426b | 4,995 | py | Python | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/aio/operations/_skus_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
# Generic type variable for the deserialized response payload.
T = TypeVar('T')
# Optional custom-callback type: receives the pipeline response, the
# deserialized object, and the response headers.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
# NOTE: auto-generated by AutoRest (see the file header) — code changes here
# would be lost on regeneration, so only comments are added.
class SkusOperations:
    """SkusOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.storage.v2017_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # Alias so callers can reach the generated model classes via the group.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.StorageSkuListResult"]:
        """Lists the available SKUs supported by Microsoft.Storage for given subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either StorageSkuListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2017_06_01.models.StorageSkuListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.StorageSkuListResult"]
        # Map HTTP status codes to the exceptions raised for them; callers
        # may extend the map via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-06-01"
        accept = "application/json, text/json"
        def prepare_request(next_link=None):
            # Builds the HTTP request for the first page (next_link=None) or
            # for a continuation page (next_link set by the service).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation links are already fully formed URLs.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize('StorageSkuListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page, raising on unexpected status codes.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/skus'}  # type: ignore
| 45.825688 | 133 | 0.659459 |
642d8a5b9384d53ce0e4b02c0e32f475f65f65fa | 21,898 | py | Python | httprider/generated/base_window.py | iSWORD/http-rider | 5d9e5cc8c5166ab58f81d30d21b3ce2497bf09b9 | [
"MIT"
] | null | null | null | httprider/generated/base_window.py | iSWORD/http-rider | 5d9e5cc8c5166ab58f81d30d21b3ce2497bf09b9 | [
"MIT"
] | null | null | null | httprider/generated/base_window.py | iSWORD/http-rider | 5d9e5cc8c5166ab58f81d30d21b3ce2497bf09b9 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'resources/ui/base_window.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
# NOTE: generated by pyuic5 from base_window.ui (see file header) — manual
# code edits would be lost on regeneration, so only comments are added.
class Ui_MainWindow(object):
    """Generated UI scaffold for the HttpRider main window.

    ``setupUi`` builds the widget tree; ``retranslateUi`` installs the
    translatable strings.
    """
    def setupUi(self, MainWindow):
        """Create and lay out all widgets of the main window."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1077, 723)
        MainWindow.setUnifiedTitleAndToolBarOnMac(False)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.centralwidget)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        # Horizontal splitter: requests list | request/response editor | exchange view.
        self.splitter = QtWidgets.QSplitter(self.centralwidget)
        self.splitter.setOrientation(QtCore.Qt.Horizontal)
        self.splitter.setObjectName("splitter")
        # Left pane: list of saved API calls (drag-reorderable).
        self.lst_http_requests = ApiCallsListView(self.splitter)
        self.lst_http_requests.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.lst_http_requests.setDragDropMode(QtWidgets.QAbstractItemView.InternalMove)
        self.lst_http_requests.setDefaultDropAction(QtCore.Qt.MoveAction)
        self.lst_http_requests.setObjectName("lst_http_requests")
        # Middle pane: request editor (method, URL, tabs, tags, assertions).
        self.frame_request_response = QtWidgets.QFrame(self.splitter)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(1)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.frame_request_response.sizePolicy().hasHeightForWidth())
        self.frame_request_response.setSizePolicy(sizePolicy)
        self.frame_request_response.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_request_response.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.frame_request_response.setLineWidth(1)
        self.frame_request_response.setObjectName("frame_request_response")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.frame_request_response)
        self.verticalLayout.setContentsMargins(5, 5, 5, 5)
        self.verticalLayout.setObjectName("verticalLayout")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        # HTTP method selector + URL input + Send button row.
        self.cmb_http_method = QtWidgets.QComboBox(self.frame_request_response)
        font = QtGui.QFont()
        font.setPointSize(14)
        self.cmb_http_method.setFont(font)
        self.cmb_http_method.setObjectName("cmb_http_method")
        self.cmb_http_method.addItem("")
        self.cmb_http_method.addItem("")
        self.cmb_http_method.addItem("")
        self.cmb_http_method.addItem("")
        self.cmb_http_method.addItem("")
        self.cmb_http_method.addItem("")
        self.cmb_http_method.addItem("")
        self.horizontalLayout.addWidget(self.cmb_http_method)
        self.txt_http_url = CompletionLineEdit(self.frame_request_response)
        font = QtGui.QFont()
        font.setPointSize(14)
        self.txt_http_url.setFont(font)
        self.txt_http_url.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.txt_http_url.setObjectName("txt_http_url")
        self.horizontalLayout.addWidget(self.txt_http_url)
        self.btn_send_request = QtWidgets.QPushButton(self.frame_request_response)
        font = QtGui.QFont()
        font.setPointSize(16)
        font.setBold(False)
        font.setWeight(50)
        self.btn_send_request.setFont(font)
        self.btn_send_request.setObjectName("btn_send_request")
        self.horizontalLayout.addWidget(self.btn_send_request)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Tabs for editing request details (description/headers/params/body/mock).
        self.tabWidget = QtWidgets.QTabWidget(self.frame_request_response)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(1)
        sizePolicy.setHeightForWidth(self.tabWidget.sizePolicy().hasHeightForWidth())
        self.tabWidget.setSizePolicy(sizePolicy)
        self.tabWidget.setObjectName("tabWidget")
        self.tab_description = QtWidgets.QWidget()
        self.tab_description.setObjectName("tab_description")
        self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.tab_description)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout()
        self.verticalLayout_2.setContentsMargins(0, -1, -1, -1)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.txt_api_title = QtWidgets.QLineEdit(self.tab_description)
        self.txt_api_title.setObjectName("txt_api_title")
        self.verticalLayout_2.addWidget(self.txt_api_title)
        self.txt_api_description = QtWidgets.QPlainTextEdit(self.tab_description)
        self.txt_api_description.setObjectName("txt_api_description")
        self.verticalLayout_2.addWidget(self.txt_api_description)
        self.verticalLayout_3.addLayout(self.verticalLayout_2)
        self.tabWidget.addTab(self.tab_description, "")
        self.tab_headers = QtWidgets.QWidget()
        self.tab_headers.setObjectName("tab_headers")
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.tab_headers)
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.lst_request_headers = QtWidgets.QListWidget(self.tab_headers)
        self.lst_request_headers.setObjectName("lst_request_headers")
        self.horizontalLayout_3.addWidget(self.lst_request_headers)
        self.tabWidget.addTab(self.tab_headers, "")
        self.tab_queryparams = QtWidgets.QWidget()
        self.tab_queryparams.setObjectName("tab_queryparams")
        self.horizontalLayout_5 = QtWidgets.QHBoxLayout(self.tab_queryparams)
        self.horizontalLayout_5.setObjectName("horizontalLayout_5")
        self.lst_request_params = QtWidgets.QListWidget(self.tab_queryparams)
        self.lst_request_params.setObjectName("lst_request_params")
        self.horizontalLayout_5.addWidget(self.lst_request_params)
        self.tabWidget.addTab(self.tab_queryparams, "")
        self.tab_formparams = QtWidgets.QWidget()
        self.tab_formparams.setObjectName("tab_formparams")
        self.horizontalLayout_14 = QtWidgets.QHBoxLayout(self.tab_formparams)
        self.horizontalLayout_14.setContentsMargins(12, -1, 12, 12)
        self.horizontalLayout_14.setObjectName("horizontalLayout_14")
        self.lst_form_params = QtWidgets.QListWidget(self.tab_formparams)
        self.lst_form_params.setObjectName("lst_form_params")
        self.horizontalLayout_14.addWidget(self.lst_form_params)
        self.tabWidget.addTab(self.tab_formparams, "")
        self.tab_request_body = QtWidgets.QWidget()
        self.tab_request_body.setObjectName("tab_request_body")
        self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.tab_request_body)
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.txt_request_body = CompletionPlainTextEdit(self.tab_request_body)
        self.txt_request_body.setObjectName("txt_request_body")
        self.verticalLayout_4.addWidget(self.txt_request_body)
        self.tabWidget.addTab(self.tab_request_body, "")
        # Mocked-response tab: toggle, status code, headers and body.
        self.tab_7 = QtWidgets.QWidget()
        self.tab_7.setObjectName("tab_7")
        self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.tab_7)
        self.verticalLayout_6.setObjectName("verticalLayout_6")
        self.horizontalLayout_16 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_16.setObjectName("horizontalLayout_16")
        self.chk_mock_response_enabled = QtWidgets.QCheckBox(self.tab_7)
        self.chk_mock_response_enabled.setObjectName("chk_mock_response_enabled")
        self.horizontalLayout_16.addWidget(self.chk_mock_response_enabled)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_16.addItem(spacerItem)
        self.txt_mocked_response_code = QtWidgets.QLineEdit(self.tab_7)
        self.txt_mocked_response_code.setText("")
        self.txt_mocked_response_code.setObjectName("txt_mocked_response_code")
        self.horizontalLayout_16.addWidget(self.txt_mocked_response_code)
        self.verticalLayout_6.addLayout(self.horizontalLayout_16)
        self.lst_mocked_response_headers = QtWidgets.QListWidget(self.tab_7)
        self.lst_mocked_response_headers.setObjectName("lst_mocked_response_headers")
        self.verticalLayout_6.addWidget(self.lst_mocked_response_headers)
        self.txt_mocked_response_body = CompletionPlainTextEdit(self.tab_7)
        self.txt_mocked_response_body.setPlaceholderText("")
        self.txt_mocked_response_body.setObjectName("txt_mocked_response_body")
        self.verticalLayout_6.addWidget(self.txt_mocked_response_body)
        self.tabWidget.addTab(self.tab_7, "")
        self.verticalLayout.addWidget(self.tabWidget)
        # Tag chips row with an add-tag button.
        self.tags_layout = QtWidgets.QHBoxLayout()
        self.tags_layout.setObjectName("tags_layout")
        self.btn_add_tag = QtWidgets.QToolButton(self.frame_request_response)
        self.btn_add_tag.setObjectName("btn_add_tag")
        self.tags_layout.addWidget(self.btn_add_tag)
        self.verticalLayout.addLayout(self.tags_layout)
        self.line = QtWidgets.QFrame(self.frame_request_response)
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.verticalLayout.addWidget(self.line)
        # Assertions section: label, setup button and results list.
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.label = QtWidgets.QLabel(self.frame_request_response)
        self.label.setObjectName("label")
        self.horizontalLayout_4.addWidget(self.label)
        self.btn_open_assertions_dialog = QtWidgets.QToolButton(self.frame_request_response)
        self.btn_open_assertions_dialog.setObjectName("btn_open_assertions_dialog")
        self.horizontalLayout_4.addWidget(self.btn_open_assertions_dialog)
        self.verticalLayout.addLayout(self.horizontalLayout_4)
        self.list_assertion_results = QtWidgets.QListWidget(self.frame_request_response)
        self.list_assertion_results.setObjectName("list_assertion_results")
        self.verticalLayout.addWidget(self.list_assertion_results)
        # Right pane: captured HTTP exchange (raw request/response views).
        self.frame_exchange = QtWidgets.QFrame(self.splitter)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(1)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.frame_exchange.sizePolicy().hasHeightForWidth())
        self.frame_exchange.setSizePolicy(sizePolicy)
        self.frame_exchange.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_exchange.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_exchange.setObjectName("frame_exchange")
        self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.frame_exchange)
        self.verticalLayout_5.setContentsMargins(5, 5, 5, 5)
        self.verticalLayout_5.setObjectName("verticalLayout_5")
        self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_6.setContentsMargins(0, -1, -1, -1)
        self.horizontalLayout_6.setObjectName("horizontalLayout_6")
        self.lbl_request_time = QtWidgets.QLabel(self.frame_exchange)
        self.lbl_request_time.setObjectName("lbl_request_time")
        self.horizontalLayout_6.addWidget(self.lbl_request_time)
        self.btn_share_preview = QtWidgets.QToolButton(self.frame_exchange)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/images/share-48.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
        self.btn_share_preview.setIcon(icon)
        self.btn_share_preview.setObjectName("btn_share_preview")
        self.horizontalLayout_6.addWidget(self.btn_share_preview)
        self.verticalLayout_5.addLayout(self.horizontalLayout_6)
        # Tabs showing the outgoing request as actually sent.
        self.tabWidget_3 = QtWidgets.QTabWidget(self.frame_exchange)
        self.tabWidget_3.setObjectName("tabWidget_3")
        self.tab_8 = QtWidgets.QWidget()
        self.tab_8.setObjectName("tab_8")
        self.horizontalLayout_7 = QtWidgets.QHBoxLayout(self.tab_8)
        self.horizontalLayout_7.setObjectName("horizontalLayout_7")
        self.txt_raw_request = QtWidgets.QPlainTextEdit(self.tab_8)
        self.txt_raw_request.setReadOnly(True)
        self.txt_raw_request.setObjectName("txt_raw_request")
        self.horizontalLayout_7.addWidget(self.txt_raw_request)
        self.tabWidget_3.addTab(self.tab_8, "")
        self.tab = QtWidgets.QWidget()
        self.tab.setObjectName("tab")
        self.horizontalLayout_12 = QtWidgets.QHBoxLayout(self.tab)
        self.horizontalLayout_12.setObjectName("horizontalLayout_12")
        self.tbl_exchange_request_headers = QtWidgets.QTreeWidget(self.tab)
        self.tbl_exchange_request_headers.setObjectName("tbl_exchange_request_headers")
        self.tbl_exchange_request_headers.headerItem().setTextAlignment(0, QtCore.Qt.AlignCenter)
        self.tbl_exchange_request_headers.headerItem().setTextAlignment(1, QtCore.Qt.AlignCenter)
        self.horizontalLayout_12.addWidget(self.tbl_exchange_request_headers)
        self.tabWidget_3.addTab(self.tab, "")
        self.tab_2 = QtWidgets.QWidget()
        self.tab_2.setObjectName("tab_2")
        self.horizontalLayout_13 = QtWidgets.QHBoxLayout(self.tab_2)
        self.horizontalLayout_13.setObjectName("horizontalLayout_13")
        self.tbl_exchange_request_params = QtWidgets.QTreeWidget(self.tab_2)
        self.tbl_exchange_request_params.setObjectName("tbl_exchange_request_params")
        self.tbl_exchange_request_params.headerItem().setTextAlignment(0, QtCore.Qt.AlignCenter)
        self.tbl_exchange_request_params.headerItem().setTextAlignment(1, QtCore.Qt.AlignCenter)
        self.horizontalLayout_13.addWidget(self.tbl_exchange_request_params)
        self.tabWidget_3.addTab(self.tab_2, "")
        self.tab_6 = QtWidgets.QWidget()
        self.tab_6.setObjectName("tab_6")
        self.horizontalLayout_15 = QtWidgets.QHBoxLayout(self.tab_6)
        self.horizontalLayout_15.setObjectName("horizontalLayout_15")
        self.tbl_exchange_form_params = QtWidgets.QTreeWidget(self.tab_6)
        self.tbl_exchange_form_params.setObjectName("tbl_exchange_form_params")
        self.tbl_exchange_form_params.headerItem().setTextAlignment(0, QtCore.Qt.AlignCenter)
        self.tbl_exchange_form_params.headerItem().setTextAlignment(1, QtCore.Qt.AlignCenter)
        self.horizontalLayout_15.addWidget(self.tbl_exchange_form_params)
        self.tabWidget_3.addTab(self.tab_6, "")
        self.tab_3 = QtWidgets.QWidget()
        self.tab_3.setObjectName("tab_3")
        self.horizontalLayout_11 = QtWidgets.QHBoxLayout(self.tab_3)
        self.horizontalLayout_11.setObjectName("horizontalLayout_11")
        self.txt_exchange_request_body = QtWidgets.QTextEdit(self.tab_3)
        self.txt_exchange_request_body.setAcceptDrops(False)
        self.txt_exchange_request_body.setReadOnly(True)
        self.txt_exchange_request_body.setObjectName("txt_exchange_request_body")
        self.horizontalLayout_11.addWidget(self.txt_exchange_request_body)
        self.tabWidget_3.addTab(self.tab_3, "")
        self.verticalLayout_5.addWidget(self.tabWidget_3)
        # Tabs showing the received response.
        self.tabWidget_4 = QtWidgets.QTabWidget(self.frame_exchange)
        self.tabWidget_4.setObjectName("tabWidget_4")
        self.tab_9 = QtWidgets.QWidget()
        self.tab_9.setObjectName("tab_9")
        self.horizontalLayout_18 = QtWidgets.QHBoxLayout(self.tab_9)
        self.horizontalLayout_18.setObjectName("horizontalLayout_18")
        self.txt_raw_response = QtWidgets.QPlainTextEdit(self.tab_9)
        self.txt_raw_response.setReadOnly(True)
        self.txt_raw_response.setObjectName("txt_raw_response")
        self.horizontalLayout_18.addWidget(self.txt_raw_response)
        self.tabWidget_4.addTab(self.tab_9, "")
        self.tab_4 = QtWidgets.QWidget()
        self.tab_4.setObjectName("tab_4")
        self.horizontalLayout_10 = QtWidgets.QHBoxLayout(self.tab_4)
        self.horizontalLayout_10.setObjectName("horizontalLayout_10")
        self.tbl_response_headers = QtWidgets.QTreeWidget(self.tab_4)
        self.tbl_response_headers.setObjectName("tbl_response_headers")
        self.tbl_response_headers.headerItem().setTextAlignment(0, QtCore.Qt.AlignCenter)
        self.tbl_response_headers.headerItem().setTextAlignment(1, QtCore.Qt.AlignCenter)
        self.horizontalLayout_10.addWidget(self.tbl_response_headers)
        self.tabWidget_4.addTab(self.tab_4, "")
        self.tab_5 = QtWidgets.QWidget()
        self.tab_5.setObjectName("tab_5")
        self.horizontalLayout_9 = QtWidgets.QHBoxLayout(self.tab_5)
        self.horizontalLayout_9.setObjectName("horizontalLayout_9")
        self.txt_response_body = QtWidgets.QTextEdit(self.tab_5)
        self.txt_response_body.setAcceptDrops(False)
        self.txt_response_body.setReadOnly(True)
        self.txt_response_body.setObjectName("txt_response_body")
        self.horizontalLayout_9.addWidget(self.txt_response_body)
        self.tabWidget_4.addTab(self.tab_5, "")
        self.verticalLayout_5.addWidget(self.tabWidget_4)
        self.horizontalLayout_2.addWidget(self.splitter)
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        self.tabWidget_3.setCurrentIndex(0)
        self.tabWidget_4.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install all user-visible (translatable) strings on the widgets."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "::::"))
        self.cmb_http_method.setItemText(0, _translate("MainWindow", "GET"))
        self.cmb_http_method.setItemText(1, _translate("MainWindow", "POST"))
        self.cmb_http_method.setItemText(2, _translate("MainWindow", "PATCH"))
        self.cmb_http_method.setItemText(3, _translate("MainWindow", "PUT"))
        self.cmb_http_method.setItemText(4, _translate("MainWindow", "DELETE"))
        self.cmb_http_method.setItemText(5, _translate("MainWindow", "OPTIONS"))
        self.cmb_http_method.setItemText(6, _translate("MainWindow", "HEAD"))
        self.txt_http_url.setText(_translate("MainWindow", "https://httpbin.org/get"))
        self.btn_send_request.setText(_translate("MainWindow", "Send"))
        self.txt_api_title.setPlaceholderText(_translate("MainWindow", "Untitled request"))
        self.txt_api_description.setPlaceholderText(_translate("MainWindow", "Request description ..."))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_description), _translate("MainWindow", "Description"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_headers), _translate("MainWindow", "Headers"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_queryparams), _translate("MainWindow", "Query Params"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_formparams), _translate("MainWindow", "Form Params"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_request_body), _translate("MainWindow", "Request Body"))
        self.chk_mock_response_enabled.setText(_translate("MainWindow", "Enabled"))
        self.txt_mocked_response_code.setPlaceholderText(_translate("MainWindow", "200"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_7), _translate("MainWindow", "Mocked Response"))
        self.btn_add_tag.setText(_translate("MainWindow", "+"))
        self.label.setText(_translate("MainWindow", "Assertions"))
        self.btn_open_assertions_dialog.setText(_translate("MainWindow", "Setup"))
        self.lbl_request_time.setText(_translate("MainWindow", "request time"))
        self.btn_share_preview.setText(_translate("MainWindow", "..."))
        self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_8), _translate("MainWindow", "Raw"))
        self.tbl_exchange_request_headers.headerItem().setText(0, _translate("MainWindow", "Name"))
        self.tbl_exchange_request_headers.headerItem().setText(1, _translate("MainWindow", "Value"))
        self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab), _translate("MainWindow", "Headers"))
        self.tbl_exchange_request_params.headerItem().setText(0, _translate("MainWindow", "Name"))
        self.tbl_exchange_request_params.headerItem().setText(1, _translate("MainWindow", "Value"))
        self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_2), _translate("MainWindow", "Params"))
        self.tbl_exchange_form_params.headerItem().setText(0, _translate("MainWindow", "Name"))
        self.tbl_exchange_form_params.headerItem().setText(1, _translate("MainWindow", "Value"))
        self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_6), _translate("MainWindow", "Form"))
        self.tabWidget_3.setTabText(self.tabWidget_3.indexOf(self.tab_3), _translate("MainWindow", "Body"))
        self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_9), _translate("MainWindow", "Raw"))
        self.tbl_response_headers.headerItem().setText(0, _translate("MainWindow", "Name"))
        self.tbl_response_headers.headerItem().setText(1, _translate("MainWindow", "Value"))
        self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_4), _translate("MainWindow", "Headers"))
        self.tabWidget_4.setTabText(self.tabWidget_4.indexOf(self.tab_5), _translate("MainWindow", "Body"))
from ..widgets.api_calls_list_view import ApiCallsListView
from ..widgets.completion_line_edit import CompletionLineEdit
from ..widgets.completion_plain_text import CompletionPlainTextEdit
| 64.217009 | 122 | 0.748196 |
841bf574a3042ae0676f4ae1b61d4ea46f357f44 | 3,992 | py | Python | scripts/combine_headers.py | rpavlik/TypePack | 256f3353f458d7423f9a279fdadbb70cb07ccbea | [
"BSL-1.0"
] | null | null | null | scripts/combine_headers.py | rpavlik/TypePack | 256f3353f458d7423f9a279fdadbb70cb07ccbea | [
"BSL-1.0"
] | null | null | null | scripts/combine_headers.py | rpavlik/TypePack | 256f3353f458d7423f9a279fdadbb70cb07ccbea | [
"BSL-1.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2019 Collabora, Ltd
# SPDX-License-Identifier: BSL-1.0
# Author: Ryan Pavlik <ryan.pavlik@collabora.com>
from pathlib import Path
from HeaderMerger import HeaderMergerBase
REPO_ROOT = Path(__file__).resolve().parent.parent
INCLUDE_DIR = REPO_ROOT / 'include' / 'TypePack'
FULL_MERGE_OUTFILE = REPO_ROOT / 'generated' / 'TypePack.h'
META_HEADER = "TypePack.h"
NAMESPACE_STRING = "namespace typepack"
START_NAMESPACE = NAMESPACE_STRING + " {"
END_NAMESPACE = "} // " + NAMESPACE_STRING
HEADER_SUFFIXES = set(('.h', '.hpp'))
GENERATED_WARNING = """
NOTE: This is a generated single-file version of TypePack - do not edit directly!
Instead, edit the individual source files and regenerate this with combine_headers.py.
"""
def get_headers(base_dir, suffixes=None):
    """Yield the header files found directly inside *base_dir* (non-recursive).

    :param base_dir: ``pathlib.Path`` of the directory to scan.
    :param suffixes: optional collection of accepted file suffixes
        (e.g. ``{'.h', '.hpp'}``); defaults to the module-wide
        ``HEADER_SUFFIXES``.
    """
    if suffixes is None:
        suffixes = HEADER_SUFFIXES
    for fn in base_dir.iterdir():
        # Skip sub-directories and non-header files.
        if fn.is_file() and fn.suffix in suffixes:
            yield fn
def join_lines(lines):
    """Join *lines* with newlines and terminate the result with a newline."""
    body = '\n'.join(lines)
    return body + '\n'
def get_template_parts(include_dir):
    """Split the meta-header (META_HEADER) into template sections.

    Reads ``include_dir / META_HEADER`` line by line and returns a 3-tuple
    ``(prefix, between_includes_and_content, after_content)``:

    - ``prefix``: every line up to and including ``#pragma once``, with the
      "@brief" line rewritten for the generated file and GENERATED_WARNING
      inserted just before the "@date" line;
    - ``between_includes_and_content``: the lines after the pragma up to and
      including the namespace-opening line (START_NAMESPACE);
    - ``after_content``: just the namespace-closing line (END_NAMESPACE).

    Reading stops as soon as the namespace opening is reached; the rest of
    the meta-header is intentionally ignored.
    """
    prefix = []
    between_includes_and_content = []
    after_content = (END_NAMESPACE,)
    with open(include_dir / META_HEADER, 'r', encoding='utf-8') as f:
        # Simple two-state parse: step 1 = before/at '#pragma once',
        # step 2 = between the pragma and the namespace opening.
        step = 1
        for line in f:
            line = line.rstrip()
            if step == 1:
                # Before the includes
                if "Header including" in line:
                    # Adjust the @brief comment line
                    line = line.replace(
                        "Header including", "Generated single-file header containing")
                if "@date" in line:
                    # Stick the warning in here
                    prefix.append(GENERATED_WARNING)
                prefix.append(line)
                # looking for the pragma - the last line in this segment
                # (note: the pragma line itself was already appended above)
                if "#pragma once" in line:
                    step = 2
            elif step == 2:
                # Between the includes and the open of the namespace
                if START_NAMESPACE in line:
                    between_includes_and_content.append(START_NAMESPACE)
                    break
                between_includes_and_content.append(line)
    return (prefix, between_includes_and_content, after_content)
class TypePackMergerBase(HeaderMergerBase):
    """Shared merge logic for combining TypePack headers into one file."""

    def __init__(self, include_dir, files):
        self.include_dir = include_dir
        self.files = files
        # Names of the files being merged, for fast membership tests.
        self.known_filenames = {fn.name for fn in files}
        super().__init__()

    def get_external_include_from_match(self, match):
        """Identify external includes by checking if this file is in our group of files to merge."""
        if match.group("name") in self.known_filenames:
            return None
        # Regenerate with no extra spaces, so the set can de-duplicate nicely.
        return "#include {}".format(match.group("quoted_name"))

    def is_start_marker(self, line):
        return line == START_NAMESPACE

    def is_end_marker(self, line):
        return line == END_NAMESPACE

    def parse_all(self):
        self.parse_files(self.files)

    def write(self, fn):
        """Assemble the merged header and write it to *fn*."""
        ordered = self.get_sorted_deps()
        head, mid, tail = get_template_parts(self.include_dir)
        out = list(head)
        out += sorted(self.get_other_includes_of(ordered))
        out += mid
        out += self.get_contents_of(ordered)
        out += tail
        with open(fn, 'w', encoding='utf-8') as f:
            f.write(join_lines(out))
class TypePackFullMerger(TypePackMergerBase):
    """Merger that combines every TypePack header except the meta-header."""

    def __init__(self, include_dir):
        headers = [hdr for hdr in get_headers(include_dir)
                   if hdr.name != META_HEADER]
        super().__init__(include_dir, headers)
if __name__ == "__main__":
    # Script entry point: merge every TypePack header into the single
    # generated header at FULL_MERGE_OUTFILE.
    full_merge = TypePackFullMerger(INCLUDE_DIR)
    full_merge.parse_all()
    full_merge.write(str(FULL_MERGE_OUTFILE))
| 32.721311 | 100 | 0.636022 |
26ae28dfed1f748012f8baea0174c8783781d367 | 340 | py | Python | zemfrog/extensions/migrate.py | aprilahijriyan/zemfrog | 0a4ebc303caa8fadf89ca0cc8569de60c61f3252 | [
"MIT"
] | null | null | null | zemfrog/extensions/migrate.py | aprilahijriyan/zemfrog | 0a4ebc303caa8fadf89ca0cc8569de60c61f3252 | [
"MIT"
] | null | null | null | zemfrog/extensions/migrate.py | aprilahijriyan/zemfrog | 0a4ebc303caa8fadf89ca0cc8569de60c61f3252 | [
"MIT"
] | null | null | null | import os
from flask.app import Flask
from flask_migrate import Migrate
from ..globals import db
def init_app(app: Flask):
migrate = Migrate(db=db)
directory = os.path.join(
app.root_path, "migrations/" + os.getenv("ZEMFROG_ENV", "development").lower()
)
migrate.init_app(app, directory=directory)
| 22.666667 | 87 | 0.673529 |
614ddb13a3cf960d884b7650fee762666fd6095a | 43,241 | py | Python | sdk/azurestack/azure-mgmt-azurestack/azure/mgmt/azurestack/models/_models.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 2 | 2021-03-24T06:26:11.000Z | 2021-04-18T15:55:59.000Z | sdk/azurestack/azure-mgmt-azurestack/azure/mgmt/azurestack/models/_models.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/azurestack/azure-mgmt-azurestack/azure/mgmt/azurestack/models/_models.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class ActivationKeyResult(msrest.serialization.Model):
    """The resource containing the Azure Stack activation key.

    :param activation_key: Azure Stack activation key.
    :type activation_key: str
    """

    _attribute_map = {
        'activation_key': {'key': 'activationKey', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.activation_key = kwargs.get('activation_key')
class CloudManifestFileDeploymentData(msrest.serialization.Model):
    """Cloud specific manifest data for AzureStack deployment.

    :param external_dsms_certificates: Dsms external certificates.
    :type external_dsms_certificates: str
    :param custom_cloud_verification_key: Signing verification public key.
    :type custom_cloud_verification_key: str
    :param custom_cloud_arm_endpoint: ARM endpoint.
    :type custom_cloud_arm_endpoint: str
    :param external_dsms_endpoint: Dsms endpoint.
    :type external_dsms_endpoint: str
    """

    _attribute_map = {
        'external_dsms_certificates': {'key': 'externalDsmsCertificates', 'type': 'str'},
        'custom_cloud_verification_key': {'key': 'customCloudVerificationKey', 'type': 'str'},
        'custom_cloud_arm_endpoint': {'key': 'customEnvironmentEndpoints.customCloudArmEndpoint', 'type': 'str'},
        'external_dsms_endpoint': {'key': 'customEnvironmentEndpoints.externalDsmsEndpoint', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.external_dsms_certificates = kwargs.get('external_dsms_certificates')
        self.custom_cloud_verification_key = kwargs.get('custom_cloud_verification_key')
        self.custom_cloud_arm_endpoint = kwargs.get('custom_cloud_arm_endpoint')
        self.external_dsms_endpoint = kwargs.get('external_dsms_endpoint')
class CloudManifestFileProperties(msrest.serialization.Model):
    """Cloud specific manifest JSON properties.

    :param deployment_data: Cloud specific manifest data.
    :type deployment_data: ~azure.mgmt.azurestack.models.CloudManifestFileDeploymentData
    :param signature: Signature of the cloud specific manifest data.
    :type signature: str
    """

    _attribute_map = {
        'deployment_data': {'key': 'deploymentData', 'type': 'CloudManifestFileDeploymentData'},
        'signature': {'key': 'signature', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.deployment_data = kwargs.get('deployment_data')
        self.signature = kwargs.get('signature')
class Resource(msrest.serialization.Model):
    """Base resource object.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: ID of the resource.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Type of Resource.
    :vartype type: str
    :param etag: The entity tag used for optimistic concurrency when modifying the resource.
    :type etag: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated, read-only fields start out unset.
        self.id = self.name = self.type = None
        self.etag = kwargs.get('etag')
class CloudManifestFileResponse(Resource):
    """Cloud specific manifest GET response.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: ID of the resource.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Type of Resource.
    :vartype type: str
    :param etag: The entity tag used for optimistic concurrency when modifying the resource.
    :type etag: str
    :param properties: Cloud specific manifest data.
    :type properties: ~azure.mgmt.azurestack.models.CloudManifestFileProperties
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'CloudManifestFileProperties'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.properties = kwargs.get('properties')
class Compatibility(msrest.serialization.Model):
    """Product compatibility.

    :param is_compatible: Tells if product is compatible with current device.
    :type is_compatible: bool
    :param message: Short error message if any compatibility issues are found.
    :type message: str
    :param description: Full error message if any compatibility issues are found.
    :type description: str
    :param issues: List of all issues found.
    :type issues: list[str or ~azure.mgmt.azurestack.models.CompatibilityIssue]
    """

    _attribute_map = {
        'is_compatible': {'key': 'isCompatible', 'type': 'bool'},
        'message': {'key': 'message', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'issues': {'key': 'issues', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.is_compatible = kwargs.get('is_compatible')
        self.message = kwargs.get('message')
        self.description = kwargs.get('description')
        self.issues = kwargs.get('issues')
class CustomerSubscription(Resource):
    """Customer subscription.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: ID of the resource.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Type of Resource.
    :vartype type: str
    :param etag: The entity tag used for optimistic concurrency when modifying the resource.
    :type etag: str
    :param tenant_id: Tenant Id.
    :type tenant_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.tenant_id = kwargs.get('tenant_id')
class CustomerSubscriptionList(msrest.serialization.Model):
    """Pageable list of customer subscriptions.

    :param next_link: URI to the next page.
    :type next_link: str
    :param value: List of customer subscriptions.
    :type value: list[~azure.mgmt.azurestack.models.CustomerSubscription]
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[CustomerSubscription]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.next_link = kwargs.get('next_link')
        self.value = kwargs.get('value')
class DataDiskImage(msrest.serialization.Model):
    """Data disk image.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar lun: The LUN.
    :vartype lun: int
    :ivar source_blob_sas_uri: SAS key for source blob.
    :vartype source_blob_sas_uri: str
    """

    _validation = {
        'lun': {'readonly': True},
        'source_blob_sas_uri': {'readonly': True},
    }

    _attribute_map = {
        'lun': {'key': 'lun', 'type': 'int'},
        'source_blob_sas_uri': {'key': 'sourceBlobSasUri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated, read-only fields start out unset.
        self.lun = self.source_blob_sas_uri = None
class DeviceConfiguration(msrest.serialization.Model):
    """Device Configuration.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar device_version: Version of the device.
    :vartype device_version: str
    :ivar identity_system: Identity system of the device. Possible values include: "AzureAD",
     "ADFS".
    :vartype identity_system: str or ~azure.mgmt.azurestack.models.Category
    """

    _validation = {
        'device_version': {'readonly': True},
        'identity_system': {'readonly': True},
    }

    _attribute_map = {
        'device_version': {'key': 'deviceVersion', 'type': 'str'},
        'identity_system': {'key': 'identitySystem', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated, read-only fields start out unset.
        self.device_version = self.identity_system = None
class Display(msrest.serialization.Model):
    """Contains the localized display information for this particular operation or action.

    :param provider: The localized, friendly version of the resource provider name.
    :type provider: str
    :param resource: The localized, friendly version of the resource type related to this action or
     operation; the resource type should match the public documentation for the resource provider.
    :type resource: str
    :param operation: The localized, friendly name for the operation. Use the name as it will
     displayed to the user.
    :type operation: str
    :param description: The localized, friendly description for the operation. The description will
     be displayed to the user. It should be thorough and concise for used in both tooltips and
     detailed views.
    :type description: str
    """

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.provider = kwargs.get('provider')
        self.resource = kwargs.get('resource')
        self.operation = kwargs.get('operation')
        self.description = kwargs.get('description')
class ErrorDetails(msrest.serialization.Model):
    """The details of the error.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: Error code.
    :vartype code: str
    :ivar message: Error message indicating why the operation failed.
    :vartype message: str
    :ivar target: The target of the particular error.
    :vartype target: str
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated, read-only fields start out unset.
        self.code = self.message = self.target = None
class ErrorResponse(msrest.serialization.Model):
    """Error response indicates that the service is not able to process the incoming request. The reason is provided in the error message.

    :param error: The details of the error.
    :type error: ~azure.mgmt.azurestack.models.ErrorDetails
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'ErrorDetails'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.error = kwargs.get('error')
class ExtendedProduct(msrest.serialization.Model):
    """Extended description about the product required for installing it into Azure Stack.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar gallery_package_blob_sas_uri: The URI to the .azpkg file that provides information
     required for showing product in the gallery.
    :vartype gallery_package_blob_sas_uri: str
    :ivar product_kind: Specifies the kind of the product (virtualMachine or
     virtualMachineExtension).
    :vartype product_kind: str
    :ivar compute_role: Specifies kind of compute role included in the package. Possible values
     include: "None", "IaaS", "PaaS".
    :vartype compute_role: str or ~azure.mgmt.azurestack.models.ComputeRole
    :ivar is_system_extension: Specifies if product is a Virtual Machine Extension.
    :vartype is_system_extension: bool
    :ivar support_multiple_extensions: Indicates if specified product supports multiple extensions.
    :vartype support_multiple_extensions: bool
    :ivar version_properties_version: Specifies product version.
    :vartype version_properties_version: str
    :ivar vm_os_type: Specifies operating system used by the product. Possible values include:
     "None", "Windows", "Linux".
    :vartype vm_os_type: str or ~azure.mgmt.azurestack.models.OperatingSystem
    :ivar vm_scale_set_enabled: Indicates if virtual machine Scale Set is enabled in the specified
     product.
    :vartype vm_scale_set_enabled: bool
    :ivar uri: The URI.
    :vartype uri: str
    :ivar version: Specifies product version.
    :vartype version: str
    :ivar os_disk_image: OS disk image used by product.
    :vartype os_disk_image: ~azure.mgmt.azurestack.models.OsDiskImage
    :ivar data_disk_images: List of attached data disks.
    :vartype data_disk_images: list[~azure.mgmt.azurestack.models.DataDiskImage]
    """

    _validation = {
        'gallery_package_blob_sas_uri': {'readonly': True},
        'product_kind': {'readonly': True},
        'compute_role': {'readonly': True},
        'is_system_extension': {'readonly': True},
        'support_multiple_extensions': {'readonly': True},
        'version_properties_version': {'readonly': True},
        'vm_os_type': {'readonly': True},
        'vm_scale_set_enabled': {'readonly': True},
        'uri': {'readonly': True},
        'version': {'readonly': True},
        'os_disk_image': {'readonly': True},
        'data_disk_images': {'readonly': True},
    }

    _attribute_map = {
        'gallery_package_blob_sas_uri': {'key': 'galleryPackageBlobSasUri', 'type': 'str'},
        'product_kind': {'key': 'productKind', 'type': 'str'},
        'compute_role': {'key': 'properties.computeRole', 'type': 'str'},
        'is_system_extension': {'key': 'properties.isSystemExtension', 'type': 'bool'},
        'support_multiple_extensions': {'key': 'properties.supportMultipleExtensions', 'type': 'bool'},
        'version_properties_version': {'key': 'properties.version', 'type': 'str'},
        'vm_os_type': {'key': 'properties.vmOsType', 'type': 'str'},
        'vm_scale_set_enabled': {'key': 'properties.vmScaleSetEnabled', 'type': 'bool'},
        'uri': {'key': 'properties.sourceBlob.uri', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'os_disk_image': {'key': 'properties.osDiskImage', 'type': 'OsDiskImage'},
        'data_disk_images': {'key': 'properties.dataDiskImages', 'type': '[DataDiskImage]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are server-populated and read-only; start them out unset.
        self.gallery_package_blob_sas_uri = self.product_kind = None
        self.compute_role = self.is_system_extension = None
        self.support_multiple_extensions = self.version_properties_version = None
        self.vm_os_type = self.vm_scale_set_enabled = None
        self.uri = self.version = None
        self.os_disk_image = self.data_disk_images = None
class VirtualMachineProductProperties(msrest.serialization.Model):
    """Product information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar version: Specifies product version.
    :vartype version: str
    :ivar os_disk_image: OS disk image used by product.
    :vartype os_disk_image: ~azure.mgmt.azurestack.models.OsDiskImage
    :ivar data_disk_images: List of attached data disks.
    :vartype data_disk_images: list[~azure.mgmt.azurestack.models.DataDiskImage]
    """

    _validation = {
        'version': {'readonly': True},
        'os_disk_image': {'readonly': True},
        'data_disk_images': {'readonly': True},
    }

    _attribute_map = {
        'version': {'key': 'version', 'type': 'str'},
        'os_disk_image': {'key': 'osDiskImage', 'type': 'OsDiskImage'},
        'data_disk_images': {'key': 'dataDiskImages', 'type': '[DataDiskImage]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated, read-only fields start out unset.
        self.version = self.os_disk_image = self.data_disk_images = None
class VirtualMachineExtensionProductProperties(msrest.serialization.Model):
    """Product information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar compute_role: Specifies kind of compute role included in the package. Possible values
     include: "None", "IaaS", "PaaS".
    :vartype compute_role: str or ~azure.mgmt.azurestack.models.ComputeRole
    :ivar is_system_extension: Specifies if product is a Virtual Machine Extension.
    :vartype is_system_extension: bool
    :ivar support_multiple_extensions: Indicates if specified product supports multiple extensions.
    :vartype support_multiple_extensions: bool
    :ivar version: Specifies product version.
    :vartype version: str
    :ivar vm_os_type: Specifies operating system used by the product. Possible values include:
     "None", "Windows", "Linux".
    :vartype vm_os_type: str or ~azure.mgmt.azurestack.models.OperatingSystem
    :ivar vm_scale_set_enabled: Indicates if virtual machine Scale Set is enabled in the specified
     product.
    :vartype vm_scale_set_enabled: bool
    :ivar uri: The URI.
    :vartype uri: str
    """

    _validation = {
        'compute_role': {'readonly': True},
        'is_system_extension': {'readonly': True},
        'support_multiple_extensions': {'readonly': True},
        'version': {'readonly': True},
        'vm_os_type': {'readonly': True},
        'vm_scale_set_enabled': {'readonly': True},
        'uri': {'readonly': True},
    }

    _attribute_map = {
        'compute_role': {'key': 'computeRole', 'type': 'str'},
        'is_system_extension': {'key': 'isSystemExtension', 'type': 'bool'},
        'support_multiple_extensions': {'key': 'supportMultipleExtensions', 'type': 'bool'},
        'version': {'key': 'version', 'type': 'str'},
        'vm_os_type': {'key': 'vmOsType', 'type': 'str'},
        'vm_scale_set_enabled': {'key': 'vmScaleSetEnabled', 'type': 'bool'},
        'uri': {'key': 'sourceBlob.uri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are server-populated and read-only; start them out unset.
        self.compute_role = self.is_system_extension = None
        self.support_multiple_extensions = self.version = None
        self.vm_os_type = self.vm_scale_set_enabled = self.uri = None
class ExtendedProductProperties(VirtualMachineExtensionProductProperties, VirtualMachineProductProperties):
    """Product information.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar os_disk_image: OS disk image used by product.
    :vartype os_disk_image: ~azure.mgmt.azurestack.models.OsDiskImage
    :ivar data_disk_images: List of attached data disks.
    :vartype data_disk_images: list[~azure.mgmt.azurestack.models.DataDiskImage]
    :ivar compute_role: Specifies kind of compute role included in the package. Possible values
     include: "None", "IaaS", "PaaS".
    :vartype compute_role: str or ~azure.mgmt.azurestack.models.ComputeRole
    :ivar is_system_extension: Specifies if product is a Virtual Machine Extension.
    :vartype is_system_extension: bool
    :ivar support_multiple_extensions: Indicates if specified product supports multiple extensions.
    :vartype support_multiple_extensions: bool
    :ivar version: Specifies product version.
    :vartype version: str
    :ivar vm_os_type: Specifies operating system used by the product. Possible values include:
     "None", "Windows", "Linux".
    :vartype vm_os_type: str or ~azure.mgmt.azurestack.models.OperatingSystem
    :ivar vm_scale_set_enabled: Indicates if virtual machine Scale Set is enabled in the specified
     product.
    :vartype vm_scale_set_enabled: bool
    :ivar uri: The URI.
    :vartype uri: str
    """

    _validation = {
        'os_disk_image': {'readonly': True},
        'data_disk_images': {'readonly': True},
        'compute_role': {'readonly': True},
        'is_system_extension': {'readonly': True},
        'support_multiple_extensions': {'readonly': True},
        'version': {'readonly': True},
        'vm_os_type': {'readonly': True},
        'vm_scale_set_enabled': {'readonly': True},
        'uri': {'readonly': True},
    }

    _attribute_map = {
        'os_disk_image': {'key': 'osDiskImage', 'type': 'OsDiskImage'},
        'data_disk_images': {'key': 'dataDiskImages', 'type': '[DataDiskImage]'},
        'compute_role': {'key': 'computeRole', 'type': 'str'},
        'is_system_extension': {'key': 'isSystemExtension', 'type': 'bool'},
        'support_multiple_extensions': {'key': 'supportMultipleExtensions', 'type': 'bool'},
        'version': {'key': 'version', 'type': 'str'},
        'vm_os_type': {'key': 'vmOsType', 'type': 'str'},
        'vm_scale_set_enabled': {'key': 'vmScaleSetEnabled', 'type': 'bool'},
        'uri': {'key': 'sourceBlob.uri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # Zero-argument super() walks the same MRO as the explicit form did.
        super().__init__(**kwargs)
        # All fields are server-populated and read-only; start them out unset.
        self.os_disk_image = self.data_disk_images = None
        self.compute_role = self.is_system_extension = None
        self.support_multiple_extensions = self.version = None
        self.vm_os_type = self.vm_scale_set_enabled = self.uri = None
class IconUris(msrest.serialization.Model):
    """Links to product icons.

    :param large: URI to large icon.
    :type large: str
    :param wide: URI to wide icon.
    :type wide: str
    :param medium: URI to medium icon.
    :type medium: str
    :param small: URI to small icon.
    :type small: str
    :param hero: URI to hero icon.
    :type hero: str
    """

    _attribute_map = {
        'large': {'key': 'large', 'type': 'str'},
        'wide': {'key': 'wide', 'type': 'str'},
        'medium': {'key': 'medium', 'type': 'str'},
        'small': {'key': 'small', 'type': 'str'},
        'hero': {'key': 'hero', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.large = kwargs.get('large')
        self.wide = kwargs.get('wide')
        self.medium = kwargs.get('medium')
        self.small = kwargs.get('small')
        self.hero = kwargs.get('hero')
class MarketplaceProductLogUpdate(msrest.serialization.Model):
    """Update details for product log.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar operation: Operation to log.
    :vartype operation: str
    :ivar status: Operation status to log.
    :vartype status: str
    :ivar error: Error related to the operation.
    :vartype error: str
    :ivar details: Error details related to operation.
    :vartype details: str
    """

    _validation = {
        'operation': {'readonly': True},
        'status': {'readonly': True},
        'error': {'readonly': True},
        'details': {'readonly': True},
    }

    _attribute_map = {
        'operation': {'key': 'operation', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'error': {'key': 'error', 'type': 'str'},
        'details': {'key': 'details', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All fields are server-populated and read-only; start them out unset.
        self.operation = self.status = self.error = self.details = None
class Operation(msrest.serialization.Model):
    """Describes the supported REST operation.

    :param name: The name of the operation being performed on this particular object.
    :type name: str
    :param display: Contains the localized display information for this particular operation or
     action.
    :type display: ~azure.mgmt.azurestack.models.Display
    :param origin: The intended executor of the operation.
    :type origin: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'Display'},
        'origin': {'key': 'origin', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.name = kwargs.get('name')
        self.display = kwargs.get('display')
        self.origin = kwargs.get('origin')
class OperationList(msrest.serialization.Model):
    """List of Operations.

    :param value: Array of operations.
    :type value: list[~azure.mgmt.azurestack.models.Operation]
    :param next_link: URI to the next page of operations.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Operation]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.value = kwargs.get('value')
        self.next_link = kwargs.get('next_link')
class OsDiskImage(msrest.serialization.Model):
    """OS disk image.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar operating_system: OS operating system type. Possible values include: "None", "Windows",
     "Linux".
    :vartype operating_system: str or ~azure.mgmt.azurestack.models.OperatingSystem
    :ivar source_blob_sas_uri: SAS key for source blob.
    :vartype source_blob_sas_uri: str
    """

    _validation = {
        'operating_system': {'readonly': True},
        'source_blob_sas_uri': {'readonly': True},
    }

    _attribute_map = {
        'operating_system': {'key': 'operatingSystem', 'type': 'str'},
        'source_blob_sas_uri': {'key': 'sourceBlobSasUri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Server-populated, read-only fields start out unset.
        self.operating_system = self.source_blob_sas_uri = None
class Product(Resource):
"""Product information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: ID of the resource.
:vartype id: str
:ivar name: Name of the resource.
:vartype name: str
:ivar type: Type of Resource.
:vartype type: str
:param etag: The entity tag used for optimistic concurrency when modifying the resource.
:type etag: str
:param display_name: The display name of the product.
:type display_name: str
:param description: The description of the product.
:type description: str
:param publisher_display_name: The user-friendly name of the product publisher.
:type publisher_display_name: str
:param publisher_identifier: Publisher identifier.
:type publisher_identifier: str
:param offer: The offer representing the product.
:type offer: str
:param offer_version: The version of the product offer.
:type offer_version: str
:param sku: The product SKU.
:type sku: str
:param billing_part_number: The part number used for billing purposes.
:type billing_part_number: str
:param vm_extension_type: The type of the Virtual Machine Extension.
:type vm_extension_type: str
:param gallery_item_identity: The identifier of the gallery item corresponding to the product.
:type gallery_item_identity: str
:param icon_uris: Additional links available for this product.
:type icon_uris: ~azure.mgmt.azurestack.models.IconUris
:param links: Additional links available for this product.
:type links: list[~azure.mgmt.azurestack.models.ProductLink]
:param legal_terms: The legal terms.
:type legal_terms: str
:param privacy_policy: The privacy policy.
:type privacy_policy: str
:param payload_length: The length of product content.
:type payload_length: long
:param product_kind: The kind of the product (virtualMachine or virtualMachineExtension).
:type product_kind: str
:param product_properties: Additional properties for the product.
:type product_properties: ~azure.mgmt.azurestack.models.ProductProperties
:param compatibility: Product compatibility with current device.
:type compatibility: ~azure.mgmt.azurestack.models.Compatibility
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'display_name': {'key': 'properties.displayName', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'publisher_display_name': {'key': 'properties.publisherDisplayName', 'type': 'str'},
'publisher_identifier': {'key': 'properties.publisherIdentifier', 'type': 'str'},
'offer': {'key': 'properties.offer', 'type': 'str'},
'offer_version': {'key': 'properties.offerVersion', 'type': 'str'},
'sku': {'key': 'properties.sku', 'type': 'str'},
'billing_part_number': {'key': 'properties.billingPartNumber', 'type': 'str'},
'vm_extension_type': {'key': 'properties.vmExtensionType', 'type': 'str'},
'gallery_item_identity': {'key': 'properties.galleryItemIdentity', 'type': 'str'},
'icon_uris': {'key': 'properties.iconUris', 'type': 'IconUris'},
'links': {'key': 'properties.links', 'type': '[ProductLink]'},
'legal_terms': {'key': 'properties.legalTerms', 'type': 'str'},
'privacy_policy': {'key': 'properties.privacyPolicy', 'type': 'str'},
'payload_length': {'key': 'properties.payloadLength', 'type': 'long'},
'product_kind': {'key': 'properties.productKind', 'type': 'str'},
'product_properties': {'key': 'properties.productProperties', 'type': 'ProductProperties'},
'compatibility': {'key': 'properties.compatibility', 'type': 'Compatibility'},
}
    def __init__(
        self,
        **kwargs
    ):
        """Initialize a Product from optional keyword arguments.

        All marketplace fields are optional kwargs defaulting to None.
        The read-only fields (id, name, type — marked readonly in
        ``_validation``) are handled by the base class and never set here.
        """
        super(Product, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.description = kwargs.get('description', None)
        self.publisher_display_name = kwargs.get('publisher_display_name', None)
        self.publisher_identifier = kwargs.get('publisher_identifier', None)
        self.offer = kwargs.get('offer', None)
        self.offer_version = kwargs.get('offer_version', None)
        self.sku = kwargs.get('sku', None)
        self.billing_part_number = kwargs.get('billing_part_number', None)
        self.vm_extension_type = kwargs.get('vm_extension_type', None)
        self.gallery_item_identity = kwargs.get('gallery_item_identity', None)
        self.icon_uris = kwargs.get('icon_uris', None)
        self.links = kwargs.get('links', None)
        self.legal_terms = kwargs.get('legal_terms', None)
        self.privacy_policy = kwargs.get('privacy_policy', None)
        self.payload_length = kwargs.get('payload_length', None)
        self.product_kind = kwargs.get('product_kind', None)
        self.product_properties = kwargs.get('product_properties', None)
        self.compatibility = kwargs.get('compatibility', None)
class ProductLink(msrest.serialization.Model):
    """A link carrying additional information about a product.

    :param display_name: The description of the link.
    :type display_name: str
    :param uri: The URI corresponding to the link.
    :type uri: str
    """

    # msrest serialization metadata: attribute name -> wire key and type.
    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProductLink, self).__init__(**kwargs)
        # Both fields are optional; missing kwargs default to None.
        self.display_name = kwargs.get("display_name")
        self.uri = kwargs.get("uri")
class ProductList(msrest.serialization.Model):
    """Pageable list of products.

    :param next_link: URI to the next page.
    :type next_link: str
    :param value: List of products.
    :type value: list[~azure.mgmt.azurestack.models.Product]
    """
    # msrest serialization metadata: attribute name -> wire key and type.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[Product]'},
    }
    def __init__(
        self,
        **kwargs
    ):
        super(ProductList, self).__init__(**kwargs)
        # next_link: URI of the following results page; value: this page's items.
        self.next_link = kwargs.get('next_link', None)
        self.value = kwargs.get('value', None)
class ProductLog(msrest.serialization.Model):
    """Product action log.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Log ID.
    :vartype id: str
    :ivar product_id: Logged product ID.
    :vartype product_id: str
    :ivar subscription_id: Logged subscription ID.
    :vartype subscription_id: str
    :ivar registration_name: Logged registration name.
    :vartype registration_name: str
    :ivar resource_group_name: Logged resource group name.
    :vartype resource_group_name: str
    :ivar operation: Logged operation.
    :vartype operation: str
    :ivar start_date: Operation start datetime.
    :vartype start_date: str
    :ivar end_date: Operation end datetime.
    :vartype end_date: str
    :ivar status: Operation status.
    :vartype status: str
    :ivar error: Operation error data.
    :vartype error: str
    :ivar details: Operation error details.
    :vartype details: str
    """
    # Every field is server-populated, so all are marked read-only and
    # initialized to None in __init__ below.
    _validation = {
        'id': {'readonly': True},
        'product_id': {'readonly': True},
        'subscription_id': {'readonly': True},
        'registration_name': {'readonly': True},
        'resource_group_name': {'readonly': True},
        'operation': {'readonly': True},
        'start_date': {'readonly': True},
        'end_date': {'readonly': True},
        'status': {'readonly': True},
        'error': {'readonly': True},
        'details': {'readonly': True},
    }
    # msrest serialization metadata: attribute name -> wire key and type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'product_id': {'key': 'productId', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'registration_name': {'key': 'registrationName', 'type': 'str'},
        'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'start_date': {'key': 'startDate', 'type': 'str'},
        'end_date': {'key': 'endDate', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'error': {'key': 'error', 'type': 'str'},
        'details': {'key': 'details', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ):
        """Initialize a ProductLog; all fields are read-only (server-filled)."""
        super(ProductLog, self).__init__(**kwargs)
        self.id = None
        self.product_id = None
        self.subscription_id = None
        self.registration_name = None
        self.resource_group_name = None
        self.operation = None
        self.start_date = None
        self.end_date = None
        self.status = None
        self.error = None
        self.details = None
class ProductProperties(msrest.serialization.Model):
    """Additional properties of the product.

    :param version: The version.
    :type version: str
    """

    # msrest serialization metadata: attribute name -> wire key and type.
    _attribute_map = {
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProductProperties, self).__init__(**kwargs)
        # Optional field; defaults to None when not supplied.
        self.version = kwargs.get("version")
class TrackedResource(msrest.serialization.Model):
    """Base resource object.

    Variables are only populated by the server, and will be ignored when sending a request.
    All required parameters must be populated in order to send to Azure.

    :ivar id: ID of the resource.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Type of Resource.
    :vartype type: str
    :param location: Required. Location of the resource. Possible values include: "global".
    :type location: str or ~azure.mgmt.azurestack.models.Location
    :param tags: A set of tags. Custom tags for the resource.
    :type tags: dict[str, str]
    :param etag: The entity tag used for optimistic concurrency when modifying the resource.
    :type etag: str
    """
    # id/name/type are server-populated; location must always be supplied.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }
    # msrest serialization metadata: attribute name -> wire key and type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'etag': {'key': 'etag', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ):
        """Initialize a TrackedResource.

        Raises KeyError if the required 'location' kwarg is missing.
        """
        super(TrackedResource, self).__init__(**kwargs)
        self.id = None
        self.name = None
        self.type = None
        # Required: intentionally indexed (not .get) so a missing value fails loudly.
        self.location = kwargs['location']
        self.tags = kwargs.get('tags', None)
        self.etag = kwargs.get('etag', None)
class Registration(TrackedResource):
    """Registration information.

    Variables are only populated by the server, and will be ignored when sending a request.
    All required parameters must be populated in order to send to Azure.

    :ivar id: ID of the resource.
    :vartype id: str
    :ivar name: Name of the resource.
    :vartype name: str
    :ivar type: Type of Resource.
    :vartype type: str
    :param location: Required. Location of the resource. Possible values include: "global".
    :type location: str or ~azure.mgmt.azurestack.models.Location
    :param tags: A set of tags. Custom tags for the resource.
    :type tags: dict[str, str]
    :param etag: The entity tag used for optimistic concurrency when modifying the resource.
    :type etag: str
    :param object_id: The object identifier associated with the Azure Stack connecting to Azure.
    :type object_id: str
    :param cloud_id: The identifier of the registered Azure Stack.
    :type cloud_id: str
    :param billing_model: Specifies the billing mode for the Azure Stack registration.
    :type billing_model: str
    """
    # Inherits TrackedResource's constraints; location remains required.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }
    # msrest serialization metadata; nested properties use dotted wire keys.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'etag': {'key': 'etag', 'type': 'str'},
        'object_id': {'key': 'properties.objectId', 'type': 'str'},
        'cloud_id': {'key': 'properties.cloudId', 'type': 'str'},
        'billing_model': {'key': 'properties.billingModel', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ):
        """Initialize a Registration; base fields (incl. required location)
        are handled by TrackedResource."""
        super(Registration, self).__init__(**kwargs)
        self.object_id = kwargs.get('object_id', None)
        self.cloud_id = kwargs.get('cloud_id', None)
        self.billing_model = kwargs.get('billing_model', None)
class RegistrationList(msrest.serialization.Model):
    """Pageable list of registrations.

    :param next_link: URI to the next page.
    :type next_link: str
    :param value: List of Registrations.
    :type value: list[~azure.mgmt.azurestack.models.Registration]
    """
    # msrest serialization metadata: attribute name -> wire key and type.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'value': {'key': 'value', 'type': '[Registration]'},
    }
    def __init__(
        self,
        **kwargs
    ):
        super(RegistrationList, self).__init__(**kwargs)
        # next_link: URI of the following results page; value: this page's items.
        self.next_link = kwargs.get('next_link', None)
        self.value = kwargs.get('value', None)
class RegistrationParameter(msrest.serialization.Model):
    """Registration resource.

    All required parameters must be populated in order to send to Azure.

    :param location: Required. Location of the resource. Possible values include: "global".
    :type location: str or ~azure.mgmt.azurestack.models.Location
    :param registration_token: Required. The token identifying registered Azure Stack.
    :type registration_token: str
    """
    # Both fields are mandatory; __init__ indexes kwargs directly so a missing
    # value raises KeyError instead of silently producing an invalid payload.
    _validation = {
        'location': {'required': True},
        'registration_token': {'required': True},
    }
    # msrest serialization metadata; the token is nested under 'properties'.
    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'registration_token': {'key': 'properties.registrationToken', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ):
        """Initialize a RegistrationParameter.

        Raises KeyError when 'location' or 'registration_token' is missing.
        """
        super(RegistrationParameter, self).__init__(**kwargs)
        self.location = kwargs['location']
        self.registration_token = kwargs['registration_token']
| 35.618616 | 138 | 0.63338 |
16de149158e8edcff03da44d91a026b83823412d | 20,049 | py | Python | lib/streamlit/caching.py | domoritz/streamlit | 5e8e0ec1b46ac0b322dc48d27494be674ad238fa | [
"Apache-2.0"
] | 1 | 2020-01-28T06:48:27.000Z | 2020-01-28T06:48:27.000Z | lib/streamlit/caching.py | johnjdailey/streamlit | a178dd3ec5387a85662ae42ad0fe20f6e07390c9 | [
"Apache-2.0"
] | null | null | null | lib/streamlit/caching.py | johnjdailey/streamlit | a178dd3ec5387a85662ae42ad0fe20f6e07390c9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018-2020 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library of caching utilities."""
# Python 2/3 compatibility
from __future__ import absolute_import, division, print_function
import ast
import contextlib
import hashlib
import inspect
import os
import shutil
import struct
import textwrap
import threading
from collections import namedtuple
import streamlit as st
from streamlit.util import functools_wraps
from streamlit import config
from streamlit import file_util
from streamlit import util
from streamlit.compatibility import setup_2_3_shims
from streamlit.hashing import CodeHasher
from streamlit.hashing import Context
from streamlit.hashing import get_hash
from streamlit.logger import get_logger
setup_2_3_shims(globals())
# Warning shown (via st.warning) when a cached function calls st.* APIs;
# see maybe_show_cached_st_function_warning below.
CACHED_ST_FUNCTION_WARNING = """
Your script writes to your Streamlit app from within a cached function. This
code will only be called when we detect a cache "miss", which can lead to
unexpected results.
How to resolve this warning:
* Move the streamlit function call outside the cached function.
* Or, if you know what you're doing, use `@st.cache(suppress_st_warning=True)`
to suppress the warning.
"""
try:
    # cPickle, if available, is much faster than pickle (Python 2 only).
    # Source: https://pymotw.com/2/pickle/
    import cPickle as pickle
except ImportError:
    import pickle
LOGGER = get_logger(__name__)
class CacheError(Exception):
    """Raised when reading from or writing to the on-disk cache fails."""
    pass
class CacheKeyNotFoundError(Exception):
    """Raised when a key is absent from the memory or disk cache."""
    pass
class CachedObjectWasMutatedError(ValueError):
    """Raised when a cached value's current hash differs from the recorded one.

    The cached value is kept on the exception so callers can warn the user
    and still return it.
    """
    def __init__(self, cached_value):
        # The value as it currently exists in the in-memory cache.
        self.cached_value = cached_value
# In-memory entries store the value plus the hash recorded at write time
# (hash is None when allow_output_mutation disables mutation detection).
CacheEntry = namedtuple("CacheEntry", ["value", "hash"])
# Disk entries are pickled and only need the value.
DiskCacheEntry = namedtuple("DiskCacheEntry", ["value"])
# The in-memory cache, keyed by hex digest.
_mem_cache = {}  # Type: Dict[string, CacheEntry]
# Per-thread bookkeeping for @st.cache: a nesting counter that is incremented
# on entry to a cached function and decremented on exit, plus a suppression
# depth for the "st.* inside cached function" warning.
class ThreadLocalCacheInfo(threading.local):
    """Thread-local counters used while executing cached functions."""

    def __init__(self):
        # Suppression depth for the cached-st-function warning.
        self.suppress_st_function_warning = 0
        # How deeply this thread is nested inside cached functions.
        self.within_cached_func = 0


_cache_info = ThreadLocalCacheInfo()
@contextlib.contextmanager
def _calling_cached_function():
    """Mark the current thread as executing a user's cached function.

    The counter is read by maybe_show_cached_st_function_warning to detect
    st.* calls made from inside @st.cache-wrapped code.
    """
    _cache_info.within_cached_func += 1
    try:
        yield
    finally:
        _cache_info.within_cached_func -= 1
@contextlib.contextmanager
def suppress_cached_st_function_warning():
    """Temporarily silence the 'st.* inside cached function' warning."""
    _cache_info.suppress_st_function_warning += 1
    try:
        yield
    finally:
        _cache_info.suppress_st_function_warning -= 1
        # Suppressions must be balanced; a negative depth indicates a bug.
        assert _cache_info.suppress_st_function_warning >= 0
def _show_cached_st_function_warning(dg):
    """Publish CACHED_ST_FUNCTION_WARNING to the given DeltaGenerator."""
    # Avoid infinite recursion by suppressing additional cached
    # function warnings from within the cached function warning.
    with suppress_cached_st_function_warning():
        dg.warning(CACHED_ST_FUNCTION_WARNING)
def maybe_show_cached_st_function_warning(dg):
    """If appropriate, warn about calling st.foo inside @cache.

    DeltaGenerator's @_with_element and @_widget wrappers use this to warn
    the user when they're calling st.foo() from within a function that is
    wrapped in @st.cache.

    Parameters
    ----------
    dg : DeltaGenerator
        The DeltaGenerator to publish the warning to.
    """
    # Warn only when inside a cached function and not explicitly suppressed.
    if (
        _cache_info.within_cached_func > 0
        and _cache_info.suppress_st_function_warning <= 0
    ):
        _show_cached_st_function_warning(dg)
class _AddCopy(ast.NodeTransformer):
"""
An AST transformer that wraps function calls with copy.deepcopy.
Use this transformer if you will convert the AST back to code.
The code won't work without importing copy.
"""
def __init__(self, func_name):
self.func_name = func_name
def visit_Call(self, node):
if (
hasattr(node.func, "func")
and hasattr(node.func.func, "value")
and node.func.func.value.id == "st"
and node.func.func.attr == "cache"
):
# Wrap st.cache(func(...))().
return ast.copy_location(
ast.Call(
func=ast.Attribute(
value=ast.Name(id="copy", ctx=ast.Load()),
attr="deepcopy",
ctx=ast.Load(),
),
args=[node],
keywords=[],
),
node,
)
elif hasattr(node.func, "id") and node.func.id == self.func_name:
# Wrap func(...) where func is the cached function.
# Add caching to nested calls.
self.generic_visit(node)
return ast.copy_location(
ast.Call(
func=ast.Attribute(
value=ast.Name(id="copy", ctx=ast.Load()),
attr="deepcopy",
ctx=ast.Load(),
),
args=[node],
keywords=[],
),
node,
)
self.generic_visit(node)
return node
def _get_mutated_output_error_message():
message = textwrap.dedent(
"""
**WARNING: Cached Object Mutated**
By default, Streamlit’s cache is immutable. You received this warning
because Streamlit thinks you modified a cached object.
[Click here to see how to fix this issue.](https://docs.streamlit.io/advanced_concepts.html#advanced-caching)
"""
).strip("\n")
return message
def _read_from_mem_cache(key, allow_output_mutation, hash_funcs):
    """Look up ``key`` in the in-memory cache and return the stored value.

    Raises
    ------
    CacheKeyNotFoundError
        If nothing is stored under ``key``.
    CachedObjectWasMutatedError
        If the stored value's current hash no longer matches the hash
        recorded when it was written (i.e. the caller mutated it).
    """
    if key in _mem_cache:
        entry = _mem_cache[key]
        # With allow_output_mutation the recorded hash is None and
        # verification is skipped entirely.
        if (
            allow_output_mutation
            or get_hash(entry.value, hash_funcs=hash_funcs) == entry.hash
        ):
            LOGGER.debug("Memory cache HIT: %s", type(entry.value))
            return entry.value
        else:
            LOGGER.debug("Cache object was mutated: %s", key)
            raise CachedObjectWasMutatedError(entry.value)
    else:
        LOGGER.debug("Memory cache MISS: %s", key)
        raise CacheKeyNotFoundError("Key not found in mem cache")
def _write_to_mem_cache(key, value, allow_output_mutation, hash_funcs):
    """Store ``value`` under ``key`` in the in-memory cache.

    When ``allow_output_mutation`` is set, no hash is recorded, which
    disables mutation detection on subsequent reads.
    """
    if allow_output_mutation:
        value_hash = None
    else:
        # Record the hash so _read_from_mem_cache can detect later mutation.
        # (Renamed from `hash`, which shadowed the builtin of the same name.)
        value_hash = get_hash(value, hash_funcs=hash_funcs)
    _mem_cache[key] = CacheEntry(value=value, hash=value_hash)
def _read_from_disk_cache(key):
    """Load and unpickle the on-disk cache entry for ``key``.

    Raises CacheError on read/unpickle failure and CacheKeyNotFoundError
    when no cache file exists for the key.
    """
    path = file_util.get_streamlit_file_path("cache", "%s.pickle" % key)
    try:
        with file_util.streamlit_read(path, binary=True) as input:
            entry = pickle.load(input)
            value = entry.value
            LOGGER.debug("Disk cache HIT: %s", type(value))
    except util.Error as e:
        LOGGER.error(e)
        raise CacheError("Unable to read from cache: %s" % e)
    except (OSError, FileNotFoundError):  # OSError: Py2; FileNotFoundError: Py3
        raise CacheKeyNotFoundError("Key not found in disk cache")
    return value
def _write_to_disk_cache(key, value):
    """Pickle ``value`` into the on-disk cache under ``key``.

    On failure the partially written file is removed (so no zero-byte
    entries remain) and CacheError is raised.
    """
    path = file_util.get_streamlit_file_path("cache", "%s.pickle" % key)
    try:
        with file_util.streamlit_write(path, binary=True) as output:
            entry = DiskCacheEntry(value=value)
            pickle.dump(entry, output, pickle.HIGHEST_PROTOCOL)
    # In python 2, it's pickle struct error.
    # In python 3, it's an open error in util.
    except (util.Error, struct.error) as e:
        LOGGER.debug(e)
        # Clean up file so we don't leave zero byte files.
        try:
            os.remove(path)
        except (FileNotFoundError, IOError, OSError):
            pass
        raise CacheError("Unable to write to cache: %s" % e)
def _read_from_cache(
    key, persisted, allow_output_mutation, func_or_code, message_opts=None, hash_funcs=None
):
    """
    Read the value from the cache. Our goal is to read from memory
    if possible. If the data was mutated (hash changed), we show a
    warning. If reading from memory fails, we either read from disk
    or rerun the code.

    ``func_or_code`` and ``message_opts`` are not used by the current
    implementation but stay in the signature for existing callers.

    ``hash_funcs`` (and ``message_opts``) now default to None:
    Cache.has_changes calls this function without them, which previously
    raised TypeError for a missing positional argument.
    """
    try:
        return _read_from_mem_cache(key, allow_output_mutation, hash_funcs)
    except CachedObjectWasMutatedError as e:
        # Warn the user, but still hand back the (mutated) cached value.
        st.warning(_get_mutated_output_error_message())
        return e.cached_value
    except CacheKeyNotFoundError as e:
        if persisted:
            # Fall back to disk, repopulating the memory cache on success.
            value = _read_from_disk_cache(key)
            _write_to_mem_cache(key, value, allow_output_mutation, hash_funcs)
            return value
        raise e
def _write_to_cache(key, value, persist, allow_output_mutation, hash_funcs=None):
    """Write ``value`` to the in-memory cache and, when ``persist``, to disk.

    ``hash_funcs`` now defaults to None: Cache.has_changes calls this
    function without supplying it, which previously raised TypeError.
    """
    _write_to_mem_cache(key, value, allow_output_mutation, hash_funcs)
    if persist:
        _write_to_disk_cache(key, value)
def cache(
    func=None,
    persist=False,
    allow_output_mutation=False,
    show_spinner=True,
    suppress_st_warning=False,
    hash_funcs=None,
    ignore_hash=False,
):
    """Function decorator to memoize function executions.

    Parameters
    ----------
    func : callable
        The function to cache. Streamlit hashes the function and dependent code.
        Streamlit can only hash nested objects (e.g. `bar` in `foo.bar`) in
        Python 3.4+.
    persist : boolean
        Whether to persist the cache on disk.
    allow_output_mutation : boolean
        Streamlit normally shows a warning when return values are not mutated, as that
        can have unintended consequences. This is done by hashing the return value internally.
        If you know what you're doing and would like to override this warning, set this to True.
    show_spinner : boolean
        Enable the spinner. Default is True to show a spinner when there is
        a cache miss.
    suppress_st_warning : boolean
        Suppress warnings about calling Streamlit functions from within
        the cached function.
    hash_funcs : dict or None
        Mapping of types to hash functions. This is used to override the behavior of the hasher
        inside Streamlit's caching mechanism: when the hasher encounters an object, it will first
        check to see if its type matches a key in this dict and, if so, will use the provided
        function to generate a hash for it. See below for an example of how this can be used.
    ignore_hash : boolean
        DEPRECATED. Please use allow_output_mutation instead.
        This argument will be fully removed after 2020-03-16.

    Example
    -------
    >>> @st.cache
    ... def fetch_and_clean_data(url):
    ...     # Fetch data from URL here, and then clean it up.
    ...     return data
    ...
    >>> d1 = fetch_and_clean_data(DATA_URL_1)
    >>> # Actually executes the function, since this is the first time it was
    >>> # encountered.
    >>>
    >>> d2 = fetch_and_clean_data(DATA_URL_1)
    >>> # Does not execute the function. Just returns its previously computed
    >>> # value. This means that now the data in d1 is the same as in d2.
    >>>
    >>> d3 = fetch_and_clean_data(DATA_URL_2)
    >>> # This is a different URL, so the function executes.

    To set the `persist` parameter, use this command as follows:

    >>> @st.cache(persist=True)
    ... def fetch_and_clean_data(url):
    ...     # Fetch data from URL here, and then clean it up.
    ...     return data

    To disable hashing return values, set the `allow_output_mutation` parameter to `True`:

    >>> @st.cache(allow_output_mutation=True)
    ... def fetch_and_clean_data(url):
    ...     # Fetch data from URL here, and then clean it up.
    ...     return data

    To override the default hashing behavior, pass a mapping of type to hash function:

    >>> @st.cache(hash_funcs={MongoClient: id})
    ... def connect_to_database(url):
    ...     return MongoClient(url)
    """
    # Help users migrate to the new kwarg
    # Remove this warning after 2020-03-16.
    if ignore_hash:
        raise Exception(
            "The `ignore_hash` argument has been renamed to `allow_output_mutation`."
        )
    # Support passing the params via function decorator, e.g.
    # @st.cache(persist=True, allow_output_mutation=True)
    if func is None:
        return lambda f: cache(
            func=f,
            persist=persist,
            allow_output_mutation=allow_output_mutation,
            show_spinner=show_spinner,
            suppress_st_warning=suppress_st_warning,
            hash_funcs=hash_funcs,
        )
    @functools_wraps(func)
    def wrapped_func(*args, **kwargs):
        """This function wrapper will only call the underlying function in
        the case of a cache miss. Cached objects are stored in the cache/
        directory."""
        # Caching can be disabled globally via config; call through directly.
        if not config.get_option("client.caching"):
            LOGGER.debug("Purposefully skipping cache")
            return func(*args, **kwargs)
        name = func.__name__
        if len(args) == 0 and len(kwargs) == 0:
            message = "Running %s()." % name
        else:
            message = "Running %s(...)." % name
        def get_or_set_cache():
            # One hasher accumulates both the call arguments and the
            # function's code, so the cache key changes when either does.
            hasher = hashlib.new("md5")
            args_hasher = CodeHasher("md5", hasher, hash_funcs)
            args_hasher.update([args, kwargs])
            LOGGER.debug("Hashing arguments to %s of %i bytes.", name, args_hasher.size)
            code_hasher = CodeHasher("md5", hasher, hash_funcs)
            code_hasher.update(func)
            LOGGER.debug("Hashing function %s in %i bytes.", name, code_hasher.size)
            key = hasher.hexdigest()
            LOGGER.debug("Cache key: %s", key)
            # The caller's frame is forwarded as the (currently unused)
            # message_opts argument of _read_from_cache.
            caller_frame = inspect.currentframe().f_back
            try:
                return_value = _read_from_cache(
                    key, persist, allow_output_mutation, func, caller_frame, hash_funcs
                )
            except CacheKeyNotFoundError:
                # Cache miss: run the user function (tracking nesting so
                # st.* calls inside it can be warned about) and store the result.
                with _calling_cached_function():
                    if suppress_st_warning:
                        with suppress_cached_st_function_warning():
                            return_value = func(*args, **kwargs)
                    else:
                        return_value = func(*args, **kwargs)
                _write_to_cache(
                    key=key,
                    value=return_value,
                    persist=persist,
                    allow_output_mutation=allow_output_mutation,
                    hash_funcs=hash_funcs,
                )
            return return_value
        if show_spinner:
            with st.spinner(message):
                return get_or_set_cache()
        else:
            return get_or_set_cache()
    # Make this a well-behaved decorator by preserving important function
    # attributes.
    try:
        wrapped_func.__dict__.update(func.__dict__)
    except AttributeError:
        pass
    return wrapped_func
class Cache(dict):
    """Cache object to persist data across reruns.

    Example
    -------
    >>> c = st.Cache()
    ... if c:
    ...     # Fetch data from URL here, and then clean it up. Finally assign to c.
    ...     c.data = ...
    ...
    >>> # c.data will always be defined but the code block only runs the first time

    The only valid side effect inside the if code block are changes to c. Any
    other side effect has undefined behavior.

    In Python 3.8 and above, you can combine the assignment and if-check with an
    assignment expression (`:=`).

    >>> if c := st.Cache():
    ...     # Fetch data from URL here, and then clean it up. Finally assign to c.
    ...     c.data = ...
    """

    def __init__(self, persist=False, allow_output_mutation=False):
        # NOTE: attribute assignment on Cache routes through __setattr__
        # below, so _persist/_allow_output_mutation are stored as dict items.
        self._persist = persist
        self._allow_output_mutation = allow_output_mutation
        dict.__init__(self)

    def has_changes(self):
        """Hash the caller's code block and restore or (re)execute it.

        Returns True when the block was executed (cache miss with
        allow_output_mutation), False when cached values were restored.
        """
        current_frame = inspect.currentframe()
        caller_frame = current_frame.f_back
        current_file = inspect.getfile(current_frame)
        caller_file = inspect.getfile(caller_frame)
        real_caller_is_parent_frame = current_file == caller_file
        if real_caller_is_parent_frame:
            # Reached via __bool__/__nonzero__ below: skip that extra frame.
            caller_frame = caller_frame.f_back
        frameinfo = inspect.getframeinfo(caller_frame)
        filename, caller_lineno, _, code_context, _ = frameinfo
        code_context = code_context[0]
        context_indent = len(code_context) - len(code_context.lstrip())
        lines = []
        # TODO: Memoize open(filename, 'r') in a way that clears the memoized
        # version with each run of the user's script. Then use the memoized
        # text here, in st.echo, and other places.
        with open(filename, "r") as f:
            # Collect the indented body of the `if cache:` block that follows
            # the caller's line; stop at the first line back at (or above)
            # the `if` statement's own indentation.
            for line in f.readlines()[caller_lineno:]:
                if line.strip() == "":
                    lines.append(line)
                indent = len(line) - len(line.lstrip())
                if indent <= context_indent:
                    break
                if line.strip() and not line.lstrip().startswith("#"):
                    lines.append(line)
        while lines[-1].strip() == "":
            lines.pop()
        code_block = "".join(lines)
        program = textwrap.dedent(code_block)
        context = Context(dict(caller_frame.f_globals, **caller_frame.f_locals), {}, {})
        code = compile(program, filename, "exec")
        code_hasher = CodeHasher("md5")
        code_hasher.update(code, context)
        LOGGER.debug("Hashing block in %i bytes.", code_hasher.size)
        key = code_hasher.hexdigest()
        LOGGER.debug("Cache key: %s", key)
        try:
            # BUG FIX: the previous code omitted the required hash_funcs
            # argument (TypeError) and unpacked the single return value as a
            # tuple; _read_from_cache returns just the cached value.
            value = _read_from_cache(
                key,
                self._persist,
                self._allow_output_mutation,
                code,
                [caller_lineno + 1, caller_lineno + len(lines)],
                None,
            )
            self.update(value)
        except CacheKeyNotFoundError:
            if self._allow_output_mutation and not self._persist:
                # If we don't hash the results, we don't need to use exec and just return True.
                # This way line numbers will be correct.
                # BUG FIX: hash_funcs was previously omitted -> TypeError.
                _write_to_cache(
                    key=key,
                    value=self,
                    persist=False,
                    allow_output_mutation=True,
                    hash_funcs=None,
                )
                return True
            exec(code, caller_frame.f_globals, caller_frame.f_locals)
            # BUG FIX: hash_funcs was previously omitted -> TypeError.
            _write_to_cache(
                key=key,
                value=self,
                persist=self._persist,
                allow_output_mutation=self._allow_output_mutation,
                hash_funcs=None,
            )
        # Return False so that we have control over the execution.
        return False

    def __bool__(self):
        return self.has_changes()

    # Python 2 doesn't have __bool__
    def __nonzero__(self):
        return self.has_changes()

    def __getattr__(self, key):
        if key not in self:
            # Typo fix: previously read "atribute".
            raise AttributeError("Cache has no attribute %s" % key)
        return self.__getitem__(key)

    def __setattr__(self, key, value):
        # All attributes (including the config set in __init__) live in the
        # underlying dict, so `c.x = 1` and `c["x"]` are interchangeable.
        dict.__setitem__(self, key, value)
def clear_cache():
    """Clear the memoization cache.

    Returns
    -------
    boolean
        True if the disk cache was cleared. False otherwise (e.g. cache file
        doesn't exist on disk).
    """
    # The memory cache is always cleared; the return value only reflects
    # whether a disk cache existed and was removed.
    _clear_mem_cache()
    return _clear_disk_cache()
def get_cache_path():
    """Return the directory that holds on-disk cache entries."""
    return file_util.get_streamlit_file_path("cache")
def _clear_disk_cache():
    """Delete the whole disk cache directory.

    Returns True when the directory existed and was removed, else False.
    """
    # TODO: Only delete disk cache for functions related to the user's current
    # script.
    cache_path = get_cache_path()
    if os.path.isdir(cache_path):
        shutil.rmtree(cache_path)
        return True
    return False
def _clear_mem_cache():
    """Drop all in-memory cache entries by rebinding the module-level dict."""
    global _mem_cache
    _mem_cache = {}
| 31.672986 | 117 | 0.631004 |
042dda74c23cbc90625f811e398a6ebc68b24147 | 1,077 | py | Python | src/sagemaker/xgboost/defaults.py | eitansela/sagemaker-python-sdk | aa54102b5113b1d39bbbd4d9d341775f84641681 | [
"Apache-2.0"
] | 1 | 2021-07-22T00:23:51.000Z | 2021-07-22T00:23:51.000Z | src/sagemaker/xgboost/defaults.py | eitansela/sagemaker-python-sdk | aa54102b5113b1d39bbbd4d9d341775f84641681 | [
"Apache-2.0"
] | 24 | 2021-05-18T07:10:27.000Z | 2021-05-28T13:36:51.000Z | src/sagemaker/xgboost/defaults.py | eitansela/sagemaker-python-sdk | aa54102b5113b1d39bbbd4d9d341775f84641681 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Placeholder docstring"""
from __future__ import absolute_import
# Canonical short name of the XGBoost framework.
XGBOOST_NAME = "xgboost"
# Framework versions that must be rejected, mapped to the user-facing
# explanation of why each one is unsupported.
XGBOOST_UNSUPPORTED_VERSIONS = {
    "1.1": (
        "XGBoost 1.1 is not supported on SageMaker because XGBoost 1.1 has broken capability to "
        "run prediction when the test input has fewer features than the training data in LIBSVM "
        "inputs. This capability has been restored in XGBoost 1.2 "
        "(https://github.com/dmlc/xgboost/pull/5955). Consider using SageMaker XGBoost 1.2-1."
    ),
}
| 43.08 | 97 | 0.730734 |
5d513c2e10437019c6cf8822b0d67715e82c575d | 1,952 | py | Python | qf_lib/common/utils/dateutils/get_values_common_dates.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 198 | 2019-08-16T15:09:23.000Z | 2022-03-30T12:44:00.000Z | qf_lib/common/utils/dateutils/get_values_common_dates.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 13 | 2021-01-07T10:15:19.000Z | 2022-03-29T13:01:47.000Z | qf_lib/common/utils/dateutils/get_values_common_dates.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 29 | 2019-08-16T15:21:28.000Z | 2022-02-23T09:53:49.000Z | # Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
from qf_lib.containers.time_indexed_container import TimeIndexedContainer
def get_values_for_common_dates(*containers: "TimeIndexedContainer", remove_nans: bool = False) \
        -> List["TimeIndexedContainer"]:
    """
    Gets list/tuple of series/dataframes (possibly mixed) and finds the common dates for all of them. Then it returns
    corresponding series/dataframes as a list. All series and dataframes in the result list contain only values
    for common dates.

    Parameters
    ----------
    containers
        variable length list of arguments where each of the arguments is a TimeIndexedContainer
    remove_nans
        if True, then all incomplete rows will be removed from each provided container before finding common dates

    Returns
    -------
    List
        list composed of TimeIndexedContainers containing only values for common dates;
        an empty list when no containers are given (previously raised IndexError)
    """
    if not containers:
        return []
    if remove_nans:
        # Drop rows containing any NaN (axis 0 = the dates axis) up front, so
        # incomplete dates are excluded from the intersection.
        containers = [container.dropna(axis=0) for container in containers]
    # Intersect the date indices of all containers.
    common_dates = containers[0].index
    for container in containers[1:]:
        common_dates = common_dates.intersection(container.index)
    return [container.loc[common_dates] for container in containers]
dc52e037f5f8f1a9e3f59e662a5d476641f85204 | 2,215 | py | Python | ditto/pinboard/migrations/0007_auto_20150724_1957.py | garrettc/django-ditto | fcf15beb8f9b4d61634efd4a88064df12ee16a6f | [
"MIT"
] | 54 | 2016-08-15T17:32:41.000Z | 2022-02-27T03:32:05.000Z | ditto/pinboard/migrations/0007_auto_20150724_1957.py | garrettc/django-ditto | fcf15beb8f9b4d61634efd4a88064df12ee16a6f | [
"MIT"
] | 229 | 2015-07-23T12:50:47.000Z | 2022-03-24T10:33:20.000Z | ditto/pinboard/migrations/0007_auto_20150724_1957.py | garrettc/django-ditto | fcf15beb8f9b4d61634efd4a88064df12ee16a6f | [
"MIT"
] | 8 | 2015-09-10T17:10:35.000Z | 2022-03-25T13:05:01.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration for the pinboard app.

    Sets default ordering on Account (by username) and Bookmark (newest
    post first) and refreshes help_text/flags on timestamp and privacy
    fields. Note the help texts are bytestrings (b"...") — this migration
    predates the project's Python 3 transition; applied migrations must
    stay behavior-stable, so only comments should be edited here.
    """
    dependencies = [
        ("pinboard", "0006_auto_20150723_1322"),
    ]
    operations = [
        migrations.AlterModelOptions(
            name="account", options={"ordering": ["username"]},
        ),
        migrations.AlterModelOptions(
            name="bookmark", options={"ordering": ["-post_time"]},
        ),
        migrations.AlterField(
            model_name="account",
            name="time_created",
            field=models.DateTimeField(
                help_text=b"The time this item was created in the database.",
                auto_now_add=True,
            ),
        ),
        migrations.AlterField(
            model_name="account",
            name="time_modified",
            field=models.DateTimeField(
                help_text=b"The time this item was last saved to the database.",
                auto_now=True,
            ),
        ),
        migrations.AlterField(
            model_name="bookmark",
            name="fetch_time",
            field=models.DateTimeField(
                help_text=b"The time the item's data was last fetched, and was new or changed.",  # noqa: E501
                null=True,
                blank=True,
            ),
        ),
        migrations.AlterField(
            model_name="bookmark",
            name="is_private",
            field=models.BooleanField(
                default=False,
                help_text=b"If set, this item will not be shown on public-facing pages.",  # noqa: E501
            ),
        ),
        migrations.AlterField(
            model_name="bookmark",
            name="time_created",
            field=models.DateTimeField(
                help_text=b"The time this item was created in the database.",
                auto_now_add=True,
            ),
        ),
        migrations.AlterField(
            model_name="bookmark",
            name="time_modified",
            field=models.DateTimeField(
                help_text=b"The time this item was last saved to the database.",
                auto_now=True,
            ),
        ),
    ]
db4c255e2ead377955190e2ec229f43db67d52ce | 3,381 | py | Python | deploy_gh_pages.py | climblinne/docs | 6e404abafcefd096cf33b0ca4dc6a174023d12b9 | [
"MIT"
] | null | null | null | deploy_gh_pages.py | climblinne/docs | 6e404abafcefd096cf33b0ca4dc6a174023d12b9 | [
"MIT"
] | null | null | null | deploy_gh_pages.py | climblinne/docs | 6e404abafcefd096cf33b0ca4dc6a174023d12b9 | [
"MIT"
] | null | null | null | import json
import os
import shutil
import tempfile
def copytree(src, dst, symlinks=False, ignore=None):
    """Copy the *contents* of directory ``src`` into ``dst``.

    Unlike ``shutil.copytree``, ``dst`` is expected to already exist:
    each entry of ``src`` is copied individually (files via ``copy2``,
    sub-directories via ``shutil.copytree``).
    """
    for entry in os.listdir(src):
        source_path = os.path.join(src, entry)
        target_path = os.path.join(dst, entry)
        if not os.path.isdir(source_path):
            shutil.copy2(source_path, target_path)
        else:
            shutil.copytree(source_path, target_path, symlinks, ignore)
def call(command, ignore_error=False):
    """Run *command* through the shell.

    Raises a generic ``Exception`` on a non-zero exit status unless
    *ignore_error* is true; returns ``None`` either way.
    """
    status = os.system(command)
    if status != 0 and not ignore_error:
        raise Exception("Command failed: %s" % command)
# Entries intended to survive a wipe of the gh-pages checkout.
# NOTE(review): not referenced anywhere else in this script — looks like
# dead code left over from an earlier cleanup implementation; confirm
# before removing.
excluded_files = (".git", "CNAME", "index.html")
def config_git():
    """Set the global git committer identity used for deploy commits."""
    for option in ('user.email "lasote@gmail.com"',
                   'user.name "Luis Martinez de Bartolome"'):
        call("git config --global %s" % option)
def clean_gh_pages():
    """Prepare the gh-pages checkout and wipe the previously built docs.

    The remote fetch spec is widened first so gh-pages is available even
    on CI clones fetched with a restricted refspec.
    """
    for command in (
        'git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" 1>/dev/null',
        "git fetch origin -q",
        "git checkout gh-pages",
    ):
        call(command)
    docs_root = "en"
    if os.path.exists(docs_root):
        shutil.rmtree(docs_root)
def build_and_copy(branch, folder_name, versions_available, validate_links=False):
    """Build the Sphinx docs for *branch* and commit them on gh-pages.

    Parameters
    ----------
    branch : str
        Git branch to build (checked out and pulled first).
    folder_name : str
        Version folder published under ``en/`` (e.g. "1.7"); the master
        branch is additionally published as ``en/latest``.
    versions_available : dict
        Branch -> version mapping, dumped to ``versions.json`` so the
        built pages can render a version picker.
    validate_links : bool
        When true, also run the spelling and link checkers.
    """
    call("git checkout %s" % branch)
    call("git pull origin %s" % branch)

    # Expose the full version list to the Sphinx build.
    with open('versions.json', 'w') as f:
        f.write(json.dumps(versions_available))

    call("make html")
    if validate_links:
        call("make spelling")
        call("make linkcheck")
    call("make latexpdf")

    # Stash the build products: _build would otherwise be lost when we
    # switch to the gh-pages branch below.
    tmp_dir = tempfile.mkdtemp()
    try:
        copytree("_build/html/", tmp_dir)
        shutil.copy2("_build/latex/conan.pdf", tmp_dir)
        shutil.rmtree("_build")

        # Go to deploy branch, copy new files and commit
        call("git checkout gh-pages")
        if not os.path.exists("en"):
            os.mkdir("en")

        version_folders = ["en/%s" % folder_name]
        if branch == "master":
            version_folders.append("en/latest")

        for version_folder in version_folders:
            if os.path.exists(version_folder):
                shutil.rmtree(version_folder)
            os.mkdir(version_folder)
            copytree(tmp_dir, version_folder)
    finally:
        # BUGFIX: mkdtemp() directories are never cleaned up automatically;
        # the original leaked one temp tree per built version.
        shutil.rmtree(tmp_dir, ignore_errors=True)

    call("git add -A .")
    call("git commit --message 'committed version %s'" % folder_name, ignore_error=True)
def should_deploy():
    """Decide whether this CI job should publish the documentation.

    Only direct (non-PR) builds of the ``master`` branch that carry the
    GitHub API token are allowed to deploy; the first failed check is
    reported on stdout.
    """
    checks = (
        (os.getenv("TRAVIS_BRANCH", None) == "master",
         "Skipping deploy for not master branch"),
        (os.getenv("TRAVIS_PULL_REQUEST", "") == "false",
         "Deploy skipped, This is a PR in the main repository"),
        (bool(os.getenv("GITHUB_API_KEY")),
         "Deploy skipped, missing GITHUB_API_KEY. Is this a PR?"),
    )
    for passed, message in checks:
        if not passed:
            print(message)
            return False
    return True
def deploy():
    """Push the freshly committed gh-pages branch via a token-auth remote."""
    remote_url = ('https://%s@github.com/conan-io/docs.git > /dev/null 2>&1'
                  % os.getenv("GITHUB_API_KEY"))
    # Redirection hides the token-bearing URL from the CI log.
    call('git remote add origin-pages ' + remote_url)
    call('git push origin-pages gh-pages')
if __name__ == "__main__":
    if not should_deploy():
        # PR / non-master builds: exercise the docs build without publishing.
        call("make spelling")
        call("make html")
        call("make linkcheck")
    else:
        config_git()
        clean_gh_pages()
        # Branch -> folder name published under en/ on gh-pages.
        versions_dict = {"master": "1.8",
                         "release/1.7.4": "1.7",
                         "release/1.6.1": "1.6",
                         "release/1.5.2": "1.5",
                         "release/1.4.5": "1.4",
                         "release/1.3.3": "1.3"}
        for src_branch, target_folder in versions_dict.items():
            # Only master pays for the extra spelling/link validation.
            build_and_copy(src_branch, target_folder, versions_dict,
                           validate_links=src_branch == "master")
        deploy()
| 29.146552 | 98 | 0.60278 |
5f09283249fe35dfe844c7a9865a25b97ca1b2c1 | 10,135 | py | Python | pandas/tests/series/indexing/test_setitem.py | erictleung/pandas | ca52e3968058dce48c20070b3e9efc2ec42d7cf4 | [
"BSD-3-Clause"
] | 3 | 2021-12-13T05:53:26.000Z | 2022-03-07T01:38:02.000Z | pandas/tests/series/indexing/test_setitem.py | erictleung/pandas | ca52e3968058dce48c20070b3e9efc2ec42d7cf4 | [
"BSD-3-Clause"
] | null | null | null | pandas/tests/series/indexing/test_setitem.py | erictleung/pandas | ca52e3968058dce48c20070b3e9efc2ec42d7cf4 | [
"BSD-3-Clause"
] | 1 | 2021-12-13T05:53:20.000Z | 2021-12-13T05:53:20.000Z | from datetime import date
import numpy as np
import pytest
from pandas import (
DatetimeIndex,
MultiIndex,
NaT,
Series,
Timestamp,
date_range,
period_range,
)
import pandas._testing as tm
from pandas.core.indexing import IndexingError
from pandas.tseries.offsets import BDay
class TestSetitemDT64Values:
def test_setitem_none_nan(self):
series = Series(date_range("1/1/2000", periods=10))
series[3] = None
assert series[3] is NaT
series[3:5] = None
assert series[4] is NaT
series[5] = np.nan
assert series[5] is NaT
series[5:7] = np.nan
assert series[6] is NaT
def test_setitem_multiindex_empty_slice(self):
# https://github.com/pandas-dev/pandas/issues/35878
idx = MultiIndex.from_tuples([("a", 1), ("b", 2)])
result = Series([1, 2], index=idx)
expected = result.copy()
result.loc[[]] = 0
tm.assert_series_equal(result, expected)
def test_setitem_with_string_index(self):
# GH#23451
ser = Series([1, 2, 3], index=["Date", "b", "other"])
ser["Date"] = date.today()
assert ser.Date == date.today()
assert ser["Date"] == date.today()
def test_setitem_with_different_tz_casts_to_object(self):
# GH#24024
ser = Series(date_range("2000", periods=2, tz="US/Central"))
ser[0] = Timestamp("2000", tz="US/Eastern")
expected = Series(
[
Timestamp("2000-01-01 00:00:00-05:00", tz="US/Eastern"),
Timestamp("2000-01-02 00:00:00-06:00", tz="US/Central"),
],
dtype=object,
)
tm.assert_series_equal(ser, expected)
def test_setitem_tuple_with_datetimetz_values(self):
# GH#20441
arr = date_range("2017", periods=4, tz="US/Eastern")
index = [(0, 1), (0, 2), (0, 3), (0, 4)]
result = Series(arr, index=index)
expected = result.copy()
result[(0, 1)] = np.nan
expected.iloc[0] = np.nan
tm.assert_series_equal(result, expected)
class TestSetitemPeriodDtype:
    """NA-like assignment on Period-dtype Series."""

    @pytest.mark.parametrize("na_val", [None, np.nan])
    def test_setitem_na_period_dtype_casts_to_nat(self, na_val):
        # Scalar and slice assignment of an NA-like must both become NaT.
        per_ser = Series(period_range("2000-01-01", periods=10, freq="D"))

        per_ser[3] = na_val
        assert per_ser[3] is NaT

        per_ser[3:5] = na_val
        assert per_ser[4] is NaT
class TestSetitemScalarIndexer:
    """Scalar-key __setitem__ edge cases."""

    def test_setitem_negative_out_of_bounds(self):
        # A negative position past the start must raise, not expand.
        ser = Series(tm.rands_array(5, 10), index=tm.rands_array(10, 10))
        err_msg = "index -11 is out of bounds for axis 0 with size 10"
        with pytest.raises(IndexError, match=err_msg):
            ser[-11] = "foo"
class TestSetitemSlices:
    """Slice-key __setitem__ edge cases."""

    def test_setitem_slice_float_raises(self, datetime_series):
        # Float slice bounds are invalid positional indexers on a
        # DatetimeIndex-backed Series (``datetime_series`` is a conftest
        # fixture from the pandas test suite).
        template = (
            "cannot do slice indexing on DatetimeIndex with these indexers "
            r"\[{key}\] of type float"
        )
        with pytest.raises(TypeError, match=template.format(key=r"4\.0")):
            datetime_series[4.0:10.0] = 0

        with pytest.raises(TypeError, match=template.format(key=r"4\.5")):
            datetime_series[4.5:10.0] = 0
class TestSetitemBooleanMask:
    """Boolean-mask assignment: alignment, mask ordering and NA casting.

    The ``string_series`` / ``datetime_series`` / ``any_numeric_dtype``
    parameters are pytest fixtures supplied by the pandas test-suite
    conftest (not visible in this file).
    """

    def test_setitem_boolean(self, string_series):
        mask = string_series > string_series.median()

        # similar indexed series
        result = string_series.copy()
        result[mask] = string_series * 2
        expected = string_series * 2
        tm.assert_series_equal(result[mask], expected[mask])

        # needs alignment
        result = string_series.copy()
        result[mask] = (string_series * 2)[0:5]
        expected = (string_series * 2)[0:5].reindex_like(string_series)
        # NOTE(review): ``-mask`` is the legacy spelling of ``~mask`` for
        # boolean inversion on a Series.
        expected[-mask] = string_series[mask]
        tm.assert_series_equal(result[mask], expected[mask])

    def test_setitem_boolean_corner(self, datetime_series):
        ts = datetime_series
        # Shifting by one business day deliberately misaligns the mask's
        # index relative to the Series being assigned to.
        mask_shifted = ts.shift(1, freq=BDay()) > ts.median()

        msg = (
            r"Unalignable boolean Series provided as indexer \(index of "
            r"the boolean Series and of the indexed object do not match"
        )
        # Both plain [] and .loc[] must reject the unalignable mask.
        with pytest.raises(IndexingError, match=msg):
            ts[mask_shifted] = 1

        with pytest.raises(IndexingError, match=msg):
            ts.loc[mask_shifted] = 1

    def test_setitem_boolean_different_order(self, string_series):
        # Masks align on labels, so a reordered mask must give the same
        # result as an identically ordered one.
        ordered = string_series.sort_values()

        copy = string_series.copy()
        copy[ordered > 0] = 0

        expected = string_series.copy()
        expected[expected > 0] = 0

        tm.assert_series_equal(copy, expected)

    @pytest.mark.parametrize("func", [list, np.array, Series])
    def test_setitem_boolean_python_list(self, func):
        # GH19406: the mask container type (list / ndarray / Series)
        # must not matter for list-valued assignment.
        ser = Series([None, "b", None])
        mask = func([True, False, True])
        ser[mask] = ["a", "c"]
        expected = Series(["a", "b", "c"])
        tm.assert_series_equal(ser, expected)

    @pytest.mark.parametrize("value", [None, NaT, np.nan])
    def test_setitem_boolean_td64_values_cast_na(self, value):
        # GH#18586: every NA-like coerces to NaT on a timedelta64 Series.
        series = Series([0, 1, 2], dtype="timedelta64[ns]")
        mask = series == series[0]
        series[mask] = value
        expected = Series([NaT, 1, 2], dtype="timedelta64[ns]")
        tm.assert_series_equal(series, expected)

    def test_setitem_boolean_nullable_int_types(self, any_numeric_dtype):
        # GH: 26468
        # Masked assignment from an aligned Series, via [], via .loc, and
        # via a .loc-filtered right-hand side must all agree.
        ser = Series([5, 6, 7, 8], dtype=any_numeric_dtype)
        ser[ser > 6] = Series(range(4), dtype=any_numeric_dtype)
        expected = Series([5, 6, 2, 3], dtype=any_numeric_dtype)
        tm.assert_series_equal(ser, expected)

        ser = Series([5, 6, 7, 8], dtype=any_numeric_dtype)
        ser.loc[ser > 6] = Series(range(4), dtype=any_numeric_dtype)
        tm.assert_series_equal(ser, expected)

        ser = Series([5, 6, 7, 8], dtype=any_numeric_dtype)
        loc_ser = Series(range(4), dtype=any_numeric_dtype)
        ser.loc[ser > 6] = loc_ser.loc[loc_ser > 1]
        tm.assert_series_equal(ser, expected)
class TestSetitemViewCopySemantics:
    """__setitem__ must not mutate the index the Series was built from.

    These tests poke at pandas internals (``_values``, ``_mgr.blocks``,
    ``_data.base``) to verify the Series holds its own buffer rather
    than a view of the source DatetimeIndex.
    """

    def test_setitem_invalidates_datetime_index_freq(self):
        # GH#24096 altering a datetime64tz Series inplace invalidates the
        # `freq` attribute on the underlying DatetimeIndex
        dti = date_range("20130101", periods=3, tz="US/Eastern")
        ts = dti[1]
        ser = Series(dti)
        # The Series must hold a copy, not a view, of dti's buffer.
        assert ser._values is not dti
        assert ser._values._data.base is not dti._data._data.base
        assert dti.freq == "D"
        ser.iloc[1] = NaT
        # A NaT hole means the Series values can no longer claim a freq.
        assert ser._values.freq is None

        # check that the DatetimeIndex was not altered in place
        assert ser._values is not dti
        assert ser._values._data.base is not dti._data._data.base
        assert dti[1] == ts
        assert dti.freq == "D"

    def test_dt64tz_setitem_does_not_mutate_dti(self):
        # GH#21907, GH#24096
        dti = date_range("2016-01-01", periods=10, tz="US/Pacific")
        ts = dti[0]
        ser = Series(dti)
        # Neither the Series values nor the backing block may share a
        # buffer with the source index.
        assert ser._values is not dti
        assert ser._values._data.base is not dti._data._data.base
        assert ser._mgr.blocks[0].values is not dti
        assert ser._mgr.blocks[0].values._data.base is not dti._data._data.base

        ser[::3] = NaT
        assert ser[0] is NaT
        # The original index is untouched by the assignment.
        assert dti[0] == ts
class TestSetitemCallable:
    """Callables on either side of a Series __setitem__."""

    def test_setitem_callable_key(self):
        # GH#12533: a callable key is evaluated against the Series to
        # produce the actual label.
        ser = Series([1, 2, 3, 4], index=list("ABCD"))
        ser[lambda x: "A"] = -1

        tm.assert_series_equal(ser, Series([-1, 2, 3, 4], index=list("ABCD")))

    def test_setitem_callable_other(self):
        # GH#13299: a callable on the right-hand side is stored as an
        # object, not invoked.
        inc = lambda x: x + 1

        ser = Series([1, 2, -1, 4])
        ser[ser < 0] = inc

        tm.assert_series_equal(ser, Series([1, 2, inc, 4]))
class TestSetitemCasting:
def test_setitem_nan_casts(self):
# these induce dtype changes
expected = Series([np.nan, 3, np.nan, 5, np.nan, 7, np.nan, 9, np.nan])
ser = Series([2, 3, 4, 5, 6, 7, 8, 9, 10])
ser[::2] = np.nan
tm.assert_series_equal(ser, expected)
# gets coerced to float, right?
expected = Series([np.nan, 1, np.nan, 0])
ser = Series([True, True, False, False])
ser[::2] = np.nan
tm.assert_series_equal(ser, expected)
expected = Series([np.nan, np.nan, np.nan, np.nan, np.nan, 5, 6, 7, 8, 9])
ser = Series(np.arange(10))
ser[:5] = np.nan
tm.assert_series_equal(ser, expected)
class TestSetitemWithExpansion:
    """Assignment to a missing label grows the Series."""

    def test_setitem_empty_series(self):
        # GH#10193
        ts_key = Timestamp("2012-01-01")
        ser = Series(dtype=object)
        ser[ts_key] = 47

        tm.assert_series_equal(ser, Series(47, [ts_key]))

    def test_setitem_empty_series_datetimeindex_preserves_freq(self):
        # GH#33573: expanding an empty DatetimeIndex must keep its freq.
        ser = Series([], DatetimeIndex([], freq="D"), dtype=object)
        ts_key = Timestamp("2012-01-01")
        ser[ts_key] = 47

        expected = Series(47, DatetimeIndex([ts_key], freq="D"))
        tm.assert_series_equal(ser, expected)
        assert ser.index.freq == expected.index.freq
def test_setitem_scalar_into_readonly_backing_data():
    # GH#14359: test that you cannot mutate a read only buffer
    values = np.zeros(5)
    values.flags.writeable = False  # freeze the underlying buffer
    ser = Series(values)

    for idx in range(len(ser)):
        with pytest.raises(ValueError, match="assignment destination is read-only"):
            ser[idx] = 1
        # The frozen buffer must be untouched after each failed write.
        assert values[idx] == 0
def test_setitem_slice_into_readonly_backing_data():
    # GH#14359: test that you cannot mutate a read only buffer
    values = np.zeros(5)
    values.flags.writeable = False  # freeze the underlying buffer
    ser = Series(values)

    with pytest.raises(ValueError, match="assignment destination is read-only"):
        ser[1:3] = 1
    # No element of the frozen buffer may have been modified.
    assert not values.any()
| 33.013029 | 82 | 0.616083 |
2c1cc0df069c21b21ac0eecac1f8b7d27bdffef4 | 273,172 | py | Python | Configuration/PyReleaseValidation/python/relval_steps.py | m-sedghi/cmssw | 859df8affee372c53be79cdd2d8a5ff001eae841 | [
"Apache-2.0"
] | null | null | null | Configuration/PyReleaseValidation/python/relval_steps.py | m-sedghi/cmssw | 859df8affee372c53be79cdd2d8a5ff001eae841 | [
"Apache-2.0"
] | 1 | 2020-04-02T10:53:40.000Z | 2021-01-12T13:05:14.000Z | Configuration/PyReleaseValidation/python/relval_steps.py | m-sedghi/cmssw | 859df8affee372c53be79cdd2d8a5ff001eae841 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from .MatrixUtil import *
import six
from Configuration.HLT.autoHLT import autoHLT
from Configuration.AlCa.autoPCL import autoPCL
from .upgradeWorkflowComponents import step3_trackingOnly
# Extra cmsDriver.py options enabling multi-stream running with two
# concurrent luminosity blocks; merged into selected workflows below.
concurrentLumis = {'--nStreams': 4,
                   '--nConcurrentLumis': 2,
                   }

# step1 gensim: for run1 (8 TeV, run1 MC conditions)
step1Defaults = {'--relval' : None, # need to be explicitly set
                 '-s' : 'GEN,SIM',
                 '-n' : 10,
                 '--conditions' : 'auto:run1_mc',
                 '--beamspot' : 'Realistic8TeVCollision',
                 '--datatier' : 'GEN-SIM',
                 '--eventcontent': 'RAWSIM',
                 }
# step1 gensim: for postLS1 (run2 2016 era, 13 TeV)
step1Up2015Defaults = {'-s' : 'GEN,SIM',
                       '-n' : 10,
                       '--conditions' : 'auto:run2_mc',
                       '--beamspot' : 'Realistic25ns13TeV2016Collision',
                       '--datatier' : 'GEN-SIM',
                       '--eventcontent': 'FEVTDEBUG',
                       '--era' : 'Run2_2016'
                       }
# Each later-year default set is built by merge(), with the first dict's
# keys overriding the inherited (earlier-year) defaults.
# step1 gensim: for 2017
step1Up2017Defaults = merge ([{'--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017','--beamspot':'Realistic25ns13TeVEarly2017Collision'},step1Up2015Defaults])
# step1 gensim: for 2018
step1Up2018Defaults = merge ([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018','--beamspot':'Realistic25ns13TeVEarly2018Collision','--geometry':'DB:Extended'},step1Up2017Defaults])
# step1 gensim: for 2018 HI (heavy-ion era and PbPb beamspot)
step1Up2018HiDefaults = merge ([{'--conditions':'auto:phase1_2018_realistic_hi','--era':'Run2_2018_pp_on_AA','--beamspot':'RealisticPbPbCollision2018','--geometry':'DB:Extended'},step1Up2017Defaults])
# step1 gensim: for 2018 prod (production-style RAWSIM event content)
step1Up2018ProdDefaults = merge ([{'--eventcontent':'RAWSIM'},step1Up2018Defaults])
# step1 gensim: for 2018 HI prod
step1Up2018HiProdDefaults = merge ([{'--eventcontent':'RAWSIM'},step1Up2018HiDefaults])
# step1 gensim: for 2021 HI prod
step1Up2021HiProdDefaults = merge ([{'--conditions':'auto:phase1_2021_realistic_hi','--era':'Run3_pp_on_PbPb','--beamspot':'Run3RoundOptics25ns13TeVLowSigmaZ','--eventcontent':'RAWSIM','--geometry':'DB:Extended'},step1Up2018HiDefaults])

# Registry of every relval step, keyed by step name; populated below.
steps = Steps()
#### Production test section ####
steps['ProdMinBias']=merge([{'cfg':'MinBias_8TeV_pythia8_TuneCUETP8M1_cff','--relval':'9000,300'},step1Defaults])
steps['ProdTTbar']=merge([{'cfg':'TTbar_8TeV_TuneCUETP8M1_cfi','--relval':'9000,100'},step1Defaults])
steps['ProdQCD_Pt_3000_3500']=merge([{'cfg':'QCD_Pt_3000_3500_8TeV_TuneCUETP8M1_cfi','--relval':'9000,50'},step1Defaults])
#### data ####
Run2010ASk=[138937,138934,138924,138923,139790,139789,139788,139787,144086,144085,144084,144083,144011]
Run2010BSk=[146644,147115,147929,148822,149011,149181,149182,149291,149294,149442]
steps['MinimumBias2010A']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2010A-valskim-v6/RAW-RECO',label='2010A',location='STD',run=Run2010ASk)}
steps['MinimumBias2010B']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2010B-valskim-v2/RAW-RECO',label='2010B',run=Run2010BSk)}
steps['WZMuSkim2010A']={'INPUT':InputInfo(dataSet='/Mu/Run2010A-WZMu-Nov4Skim_v1/RAW-RECO',label='2010A',run=Run2010ASk)}
steps['WZMuSkim2010B']={'INPUT':InputInfo(dataSet='/Mu/Run2010B-WZMu-Nov4Skim_v1/RAW-RECO',label='2010B',run=Run2010BSk)}
steps['WZEGSkim2010A']={'INPUT':InputInfo(dataSet='/EG/Run2010A-WZEG-Nov4Skim_v1/RAW-RECO',label='2010A',run=Run2010ASk)}
steps['WZEGSkim2010B']={'INPUT':InputInfo(dataSet='/Electron/Run2010B-WZEG-Nov4Skim_v1/RAW-RECO',label='2010B',run=Run2010BSk)}
steps['RunCosmicsA']={'INPUT':InputInfo(dataSet='/Cosmics/Run2010A-v1/RAW',label='2010A',run=[142089],events=100000)}
Run2010B=[149011]
steps['RunMinBias2010B']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2010B-RelValRawSkim-v1/RAW',label='2010B',run=Run2010B,events=100000)}
steps['RunMu2010B']={'INPUT':InputInfo(dataSet='/Mu/Run2010B-RelValRawSkim-v1/RAW',label='2010B',run=Run2010B,events=100000)}
steps['RunElectron2010B']={'INPUT':InputInfo(dataSet='/Electron/Run2010B-RelValRawSkim-v1/RAW',label='2010B',run=Run2010B,events=100000)}
steps['RunPhoton2010B']={'INPUT':InputInfo(dataSet='/Photon/Run2010B-RelValRawSkim-v1/RAW',label='2010B',run=Run2010B,events=100000)}
steps['RunJet2010B']={'INPUT':InputInfo(dataSet='/Jet/Run2010B-RelValRawSkim-v1/RAW',label='2010B',run=Run2010B,events=100000)}
#list of run to harvest 2011A: 165121, 172802,
Run2011ASk=[165121,172802]
steps['ValSkim2011A']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2011A-ValSkim-08Nov2011-v1/RAW-RECO',ib_block='239c497e-0fae-11e1-a8b1-00221959e72f',label='2011A',location='STD',run=Run2011ASk)}
steps['WMuSkim2011A']={'INPUT':InputInfo(dataSet='/SingleMu/Run2011A-WMu-08Nov2011-v1/RAW-RECO',ib_block='388c2990-0de6-11e1-bb7e-00221959e72f',label='2011A',location='STD',run=Run2011ASk)}
steps['WElSkim2011A']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2011A-WElectron-08Nov2011-v1/RAW-RECO',ib_block='9c48c4ea-0db2-11e1-b62c-00221959e69e',label='2011A',location='STD',run=Run2011ASk)}
steps['ZMuSkim2011A']={'INPUT':InputInfo(dataSet='/DoubleMu/Run2011A-ZMu-08Nov2011-v1/RAW-RECO',label='2011A',location='STD',run=Run2011ASk)}
steps['ZElSkim2011A']={'INPUT':InputInfo(dataSet='/DoubleElectron/Run2011A-ZElectron-08Nov2011-v1/RAW-RECO',label='2011A',location='STD',run=Run2011ASk)}
steps['HighMet2011A']={'INPUT':InputInfo(dataSet='/Jet/Run2011A-HighMET-08Nov2011-v1/RAW-RECO',ib_block='3c764584-0b59-11e1-b62c-00221959e69e',label='2011A',location='STD',run=Run2011ASk)}
steps['RunCosmics2011A']={'INPUT':InputInfo(dataSet='/Cosmics/Run2011A-v1/RAW',label='2011A',run=[160960],events=100000,location='STD')}
Run2011A=[165121]
Run2017BTE=[299149]
Run2016HALP=[283383]
steps['RunMinBias2011A']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2011A-v1/RAW',label='2011A',run=Run2011A,events=100000,location='STD')}
steps['RunMu2011A']={'INPUT':InputInfo(dataSet='/SingleMu/Run2011A-v1/RAW',label='2011A',run=Run2011A,events=100000)}
steps['RunElectron2011A']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2011A-v1/RAW',label='2011A',run=Run2011A,events=100000)}
steps['RunPhoton2011A']={'INPUT':InputInfo(dataSet='/Photon/Run2011A-v1/RAW',label='2011A',run=Run2011A,events=100000)}
steps['RunJet2011A']={'INPUT':InputInfo(dataSet='/Jet/Run2011A-v1/RAW',label='2011A',run=Run2011A,events=100000)}
steps['TestEnableEcalHCAL2017B']={'INPUT':InputInfo(dataSet='/TestEnablesEcalHcal/Run2017B-v1/RAW',label='2017B',run=Run2017BTE,events=100000,location='STD')}
steps['AlCaLumiPixels2016H']={'INPUT':InputInfo(dataSet='/AlCaLumiPixels1/Run2016H-v1/RAW',label='2016H',run=Run2016HALP,events=100000,location='STD')}
Run2011B=[177719]
Run2011BSk=[177719,177790,177096,175874]
steps['RunMinBias2011B']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2011B-v1/RAW',label='2011B',run=Run2011B,events=100000,location='STD')}
steps['RunMu2011B']={'INPUT':InputInfo(dataSet='/SingleMu/Run2011B-v1/RAW',label='2011B',run=Run2011B,events=100000)}
steps['RunElectron2011B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2011B-v1/RAW',label='2011B',run=Run2011B,events=100000)}
steps['RunPhoton2011B']={'INPUT':InputInfo(dataSet='/Photon/Run2011B-v1/RAW',label='2011B',run=Run2011B,events=100000)}
steps['RunJet2011B']={'INPUT':InputInfo(dataSet='/Jet/Run2011B-v1/RAW',label='2011B',run=Run2011B,events=100000)}
steps['ValSkim2011B']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2011B-ValSkim-19Nov2011-v1/RAW-RECO',label='2011B',location='STD',run=Run2011BSk)}
steps['WMuSkim2011B']={'INPUT':InputInfo(dataSet='/SingleMu/Run2011B-WMu-19Nov2011-v1/RAW-RECO',ib_block='19110c74-1b66-11e1-a98b-003048f02c8a',label='2011B',location='STD',run=Run2011BSk)}
steps['WElSkim2011B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2011B-WElectron-19Nov2011-v1/RAW-RECO',ib_block='d75771a4-1b3f-11e1-aef4-003048f02c8a',label='2011B',location='STD',run=Run2011BSk)}
steps['ZMuSkim2011B']={'INPUT':InputInfo(dataSet='/DoubleMu/Run2011B-ZMu-19Nov2011-v1/RAW-RECO',label='2011B',location='STD',run=Run2011BSk)}
steps['ZElSkim2011B']={'INPUT':InputInfo(dataSet='/DoubleElectron/Run2011B-ZElectron-19Nov2011-v1/RAW-RECO',label='2011B',run=Run2011BSk)}
steps['HighMet2011B']={'INPUT':InputInfo(dataSet='/Jet/Run2011B-HighMET-19Nov2011-v1/RAW-RECO',label='2011B',run=Run2011BSk)}
steps['RunHI2010']={'INPUT':InputInfo(dataSet='/HIAllPhysics/HIRun2010-v1/RAW',label='hi2010',run=[152698],events=10000,location='STD')}
steps['RunHI2011']={'INPUT':InputInfo(dataSet='/HIMinBiasUPC/HIRun2011-v1/RAW',label='hi2011',run=[182124],events=10000,location='STD')}
steps['RunPA2013']={'INPUT':InputInfo(dataSet='/PAMinBiasUPC/HIRun2013-v1/RAW',label='pa2013',run=[211313],events=10000,location='STD')}
Run2015HI={263400: [[65,904]]}
steps['RunHI2015VR']={'INPUT':InputInfo(dataSet='/HITrackerVirginRaw/HIRun2015-v1/RAW',label='hi2015vr',events=10000,location='STD',ls=Run2015HI)}
#steps['RunHI2018']={'INPUT':InputInfo(dataSet='/HIMinimumBias0/Tier0_REPLAY_vocms015-v214/RAW',label='hi2018',run=[325174],events=10000,location='STD')}
#steps['RunHI2018Reduced']={'INPUT':InputInfo(dataSet='/HIMinimumBiasReducedFormat0/Tier0_REPLAY_vocms015-v214/RAW',label='hi2018reduced',run=[325174],events=10000,location='STD')}
Run2018HI={326479: [[1,23]]}
steps['RunHI2018']={'INPUT':InputInfo(dataSet='/HIHardProbes/HIRun2018A-v1/RAW',label='hi2018',events=10000,location='STD',ls=Run2018HI)}
steps['RunHI2018Reduced']={'INPUT':InputInfo(dataSet='/HIMinimumBiasReducedFormat0/HIRun2018A-v1/RAW',label='hi2018reduced',events=10000,location='STD',ls=Run2018HI)}
steps['RunHI2018AOD']={'INPUT':InputInfo(dataSet='/HIHardProbes/HIRun2018A-04Apr2019-v1/AOD',label='hi2018aod',events=10000,location='STD',ls=Run2018HI)}
Run2012A=[191226]
Run2012ASk=Run2012A+[]
steps['RunMinBias2012A']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2012A-v1/RAW',label='2012A',run=Run2012A, events=100000,location='STD')}
steps['RunTau2012A']={'INPUT':InputInfo(dataSet='/Tau/Run2012A-v1/RAW',label='2012A', run=Run2012A, events=100000,location='STD')}
steps['RunMET2012A']={'INPUT':InputInfo(dataSet='/MET/Run2012A-v1/RAW',label='2012A', run=Run2012A, events=100000,location='STD')}
steps['RunMu2012A']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012A-v1/RAW',label='2012A', run=Run2012A, events=100000,location='STD')}
steps['RunElectron2012A']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012A-v1/RAW',label='2012A', run=Run2012A, events=100000,location='STD')}
steps['RunJet2012A']={'INPUT':InputInfo(dataSet='/Jet/Run2012A-v1/RAW',label='2012A', run=Run2012A, events=100000,location='STD')}
steps['WElSkim2012A']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012A-WElectron-13Jul2012-v1/USER',label='2012A',location='STD',run=Run2012ASk)}
steps['ZMuSkim2012A']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012A-ZMu-13Jul2012-v1/RAW-RECO',label='2012A',location='STD',run=Run2012ASk)}
steps['ZElSkim2012A']={'INPUT':InputInfo(dataSet='/DoubleElectron/Run2012A-ZElectron-13Jul2012-v1/RAW-RECO',label='2012A',run=Run2012ASk)}
steps['HighMet2012A']={'INPUT':InputInfo(dataSet='/HT/Run2012A-HighMET-13Jul2012-v1/RAW-RECO',label='2012A',run=Run2012ASk)}
Run2012B=[194533]
Run2012Bsk=Run2012B+[194912,195016]
steps['RunMinBias2012B']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2012B-v1/RAW',label='2012B',run=Run2012B, events=100000,location='STD')}
steps['RunMu2012B']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012B-v1/RAW',label='2012B',location='STD',run=Run2012B)}
steps['RunPhoton2012B']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2012B-v1/RAW',ib_block='28d7fcc8-a2a0-11e1-86c7-003048caaace',label='2012B',location='STD',run=Run2012B)}
steps['RunEl2012B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012B-v1/RAW',label='2012B',location='STD',run=Run2012B)}
steps['RunJet2012B']={'INPUT':InputInfo(dataSet='/JetHT/Run2012B-v1/RAW',label='2012B',location='STD',run=Run2012B)}
steps['ZMuSkim2012B']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012B-ZMu-13Jul2012-v1/RAW-RECO',label='2012B',location='CAF',run=Run2012Bsk)}
steps['WElSkim2012B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012B-WElectron-13Jul2012-v1/USER',label='2012B',location='STD',run=Run2012Bsk)}
steps['ZElSkim2012B']={'INPUT':InputInfo(dataSet='/DoubleElectron/Run2012B-ZElectron-22Jan2013-v1/RAW-RECO',ib_block='1f13b876-69fb-11e2-a7eb-00221959e72f',label='2012B',location='STD',run=Run2012Bsk)}
Run2012C=[199812]
Run2012Csk=Run2012C+[203002]
steps['RunMinBias2012C']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2012C-v1/RAW',label='2012C',run=Run2012C, events=100000,location='STD')}
steps['RunMu2012C']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012C-v1/RAW',label='2012C',location='STD',run=Run2012C)}
steps['RunPhoton2012C']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2012C-v1/RAW',label='2012C',location='STD',run=Run2012C)}
steps['RunEl2012C']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012C-v1/RAW',label='2012C',location='STD',run=Run2012C)}
steps['RunJet2012C']={'INPUT':InputInfo(dataSet='/JetHT/Run2012C-v1/RAW',label='2012C',location='STD',run=Run2012C)}
steps['ZMuSkim2012C']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012C-ZMu-PromptSkim-v3/RAW-RECO',label='2012C',location='CAF',run=Run2012Csk)}
steps['WElSkim2012C']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012C-WElectron-PromptSkim-v3/USER',label='2012C',location='STD',run=Run2012Csk)}
steps['ZElSkim2012C']={'INPUT':InputInfo(dataSet='/DoubleElectron/Run2012C-ZElectron-22Jan2013-v1/RAW-RECO',label='2012C',location='STD',run=Run2012Csk)}
Run2012D=[208307]
Run2012Dsk=Run2012D+[207454]
steps['RunMinBias2012D']={'INPUT':InputInfo(dataSet='/MinimumBias/Run2012D-v1/RAW',label='2012D',run=Run2012D, events=100000,location='STD')}
steps['RunMu2012D']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012D-v1/RAW',label='2012D',location='STD',run=Run2012D)}
steps['RunPhoton2012D']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2012D-v1/RAW',label='2012D',location='STD',run=Run2012D)}
steps['RunEl2012D']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012D-v1/RAW',label='2012D',location='STD',run=Run2012D)}
steps['RunJet2012D']={'INPUT':InputInfo(dataSet='/JetHT/Run2012D-v1/RAW',label='2012D',location='STD',run=Run2012D)}
# the previous /SingleMu/Run2012D-ZMu-15Apr2014-v1/RAW-RECO is deprecated in DAS
steps['ZMuSkim2012D']={'INPUT':InputInfo(dataSet='/SingleMu/Run2012D-ZMu-15Apr2014-v1/RAW-RECO',label='2012D',location='STD',run=Run2012Dsk)}
# the previous /SingleElectron/Run2012D-WElectron-PromptSkim-v1/USER is deprecated in DAS
steps['WElSkim2012D']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2012D-WElectron-22Jan2013-v1/USER',label='2012D',location='STD',run=Run2012Dsk)}
steps['ZElSkim2012D']={'INPUT':InputInfo(dataSet='/DoubleElectron/Run2012D-ZElectron-22Jan2013-v1/RAW-RECO',label='2012D',location='STD',run=Run2012Dsk)}
#### run2 2015B ####
# Run2015B=[251642] # 251561 251638 251642
Run2015B=selectedLS([251251])
steps['RunHLTPhy2015B']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunDoubleEG2015B']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunDoubleMuon2015B']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunJetHT2015B']={'INPUT':InputInfo(dataSet='/JetHT/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunMET2015B']={'INPUT':InputInfo(dataSet='/MET/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunMuonEG2015B']={'INPUT':InputInfo(dataSet='/MuonEG/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunSingleEl2015B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunSingleMu2015B']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunSinglePh2015B']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
steps['RunZeroBias2015B']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2015B-v1/RAW',label='2015B',events=100000,location='STD', ls=Run2015B)}
#### run2 2015C ####
# Run2015C, 25ns: 254790 (852 LS and 65 files), 254852 (126 LS and 5 files), 254879 (178 LS and 11 files)
Run2015C=selectedLS([254790])
Run2015C_full=selectedLS([254790, 254852, 254879])
steps['RunHLTPhy2015C']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunDoubleEG2015C']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunDoubleMuon2015C']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunJetHT2015C']={'INPUT':InputInfo(dataSet='/JetHT/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunMET2015C']={'INPUT':InputInfo(dataSet='/MET/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunMuonEG2015C']={'INPUT':InputInfo(dataSet='/MuonEG/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunDoubleEGPrpt2015C']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2015C-ZElectron-PromptReco-v1/RAW-RECO',label='2015C',events=100000,location='STD', ls=Run2015C_full)}
steps['RunSingleMuPrpt2015C']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2015C-ZMu-PromptReco-v1/RAW-RECO',label='2015C',events=100000,location='STD', ls=Run2015C_full)}
steps['RunSingleEl2015C']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunSingleMu2015C']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunSinglePh2015C']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
steps['RunZeroBias2015C']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2015C-v1/RAW',label='2015C',events=100000,location='STD', ls=Run2015C)}
#### run2 2015D ####
# Run2015D, 25ns: 256677
Run2015D=selectedLS([256677])
steps['RunHLTPhy2015D']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunDoubleEG2015D']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunDoubleMuon2015D']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunJetHT2015D']={'INPUT':InputInfo(dataSet='/JetHT/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunMET2015D']={'INPUT':InputInfo(dataSet='/MET/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunMuonEG2015D']={'INPUT':InputInfo(dataSet='/MuonEG/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunDoubleEGPrpt2015D']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2015D-ZElectron-PromptReco-v3/RAW-RECO',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunSingleMuPrpt2015D']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2015D-ZMu-PromptReco-v3/RAW-RECO',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunSingleEl2015D']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunSingleMu2015D']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunSinglePh2015D']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2015D-v1/RAW',label='2015D',events=100000,location='STD', ls=Run2015D)}
steps['RunZeroBias2015D']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2015D-v1/RAW',label='2015D',events=100000,location='STD',ib_block='38d4cab6-5d5f-11e5-824b-001e67ac06a0',ls=Run2015D)}
#### run2 2016B ####
# Run2016B, 25ns: 274160
#Run2016B=selectedLS([274160],l_json=data_json2016)
# NOTE(review): the map below hard-codes run 274199 (LS 1-180) even though the
# comment above mentions 274160 -- presumably a deliberate later change.
Run2016B={274199: [[1, 180]]}
# Real-data RelVal inputs for Run2 2016B: one step per primary dataset, each capped at
# 100000 events from the listed RAW (or RAW-RECO skim) dataset within the Run2016B lumisections.
steps['RunHLTPhy2016B']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunDoubleEG2016B']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunDoubleMuon2016B']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunJetHT2016B']={'INPUT':InputInfo(dataSet='/JetHT/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunMET2016B']={'INPUT':InputInfo(dataSet='/MET/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunMuonEG2016B']={'INPUT':InputInfo(dataSet='/MuonEG/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunDoubleEGPrpt2016B']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2016B-ZElectron-PromptReco-v2/RAW-RECO',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunSingleMuPrpt2016B']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2016B-ZMu-PromptReco-v2/RAW-RECO',label='2016B',events=100000,location='STD', ls=Run2016B)}
# SingleElectron/SingleMuon use a narrower LS window (1-120) than the rest of the section.
steps['RunSingleEl2016B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls={274199: [[1, 120]]})}
steps['RunSingleMu2016B']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls={274199: [[1, 120]]})}
steps['RunSinglePh2016B']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunZeroBias2016B']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunMuOnia2016B']={'INPUT':InputInfo(dataSet='/MuOnia/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
steps['RunNoBPTX2016B']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2016B-v2/RAW',label='2016B',events=100000,location='STD', ls=Run2016B)}
# reminiAOD selection, mainly for PPS testing
steps['RunJetHT2016B_reminiaodUL']={'INPUT':InputInfo(dataSet='/JetHT/Run2016B-21Feb2020_ver2_UL2016_HIPM-v1/AOD',label='2016BrmaodUL',events=100000,location='STD', ls=Run2016B)}
#### run2 2016C ####
# Run 276092, LS 115-149 only.
Run2016C={276092: [[115, 149]]}
# Real-data RelVal inputs for Run2 2016C: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2016C lumisections.
steps['RunHLTPhy2016C']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunDoubleEG2016C']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunDoubleMuon2016C']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunJetHT2016C']={'INPUT':InputInfo(dataSet='/JetHT/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunMET2016C']={'INPUT':InputInfo(dataSet='/MET/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunMuonEG2016C']={'INPUT':InputInfo(dataSet='/MuonEG/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunSingleEl2016C']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunSingleMu2016C']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunSinglePh2016C']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunZeroBias2016C']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
steps['RunMuOnia2016C']={'INPUT':InputInfo(dataSet='/MuOnia/Run2016C-v2/RAW',label='2016C',events=100000,location='STD', ls=Run2016C)}
#### run2 2016D ####
# Run 276807, LS 66-100 only.
Run2016D={276807: [[66, 100]]}
# Real-data RelVal inputs for Run2 2016D: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2016D lumisections.
steps['RunHLTPhy2016D']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunDoubleEG2016D']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunDoubleMuon2016D']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunJetHT2016D']={'INPUT':InputInfo(dataSet='/JetHT/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunMET2016D']={'INPUT':InputInfo(dataSet='/MET/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunMuonEG2016D']={'INPUT':InputInfo(dataSet='/MuonEG/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunSingleEl2016D']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunSingleMu2016D']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunSinglePh2016D']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunZeroBias2016D']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
steps['RunMuOnia2016D']={'INPUT':InputInfo(dataSet='/MuOnia/Run2016D-v2/RAW',label='2016D',events=100000,location='STD', ls=Run2016D)}
#### run2 2016E ####
# Run 277069, LS 81-120 only.
Run2016E={277069: [[81, 120]]}
# Real-data RelVal inputs for Run2 2016E: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2016E lumisections.
steps['RunHLTPhy2016E']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunDoubleEG2016E']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunDoubleMuon2016E']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunJetHT2016E']={'INPUT':InputInfo(dataSet='/JetHT/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunMET2016E']={'INPUT':InputInfo(dataSet='/MET/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunMuonEG2016E']={'INPUT':InputInfo(dataSet='/MuonEG/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunSingleEl2016E']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunSingleMu2016E']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunSinglePh2016E']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunZeroBias2016E']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
steps['RunMuOnia2016E']={'INPUT':InputInfo(dataSet='/MuOnia/Run2016E-v2/RAW',label='2016E',events=100000,location='STD', ls=Run2016E)}
# reMiniAOD-style inputs start from AOD (rereco and UL rereco) instead of RAW.
steps['RunJetHT2016E_reminiaod']={'INPUT':InputInfo(dataSet='/JetHT/Run2016E-18Apr2017-v1/AOD',label='2016Ermaod',events=100000,location='STD', ls=Run2016E)}
steps['RunJetHT2016E_reminiaodUL']={'INPUT':InputInfo(dataSet='/JetHT/Run2016E-21Feb2020_UL2016_HIPM-v1/AOD',label='2016ErmaodUL',events=100000,location='STD', ls=Run2016E)}
#### run2 2016H ####
# Run 283877, LS 1-45 only.
Run2016H={283877: [[1, 45]]}
# Real-data RelVal inputs for Run2 2016H: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2016H lumisections.
steps['RunHLTPhy2016H']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunDoubleEG2016H']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunDoubleMuon2016H']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunJetHT2016H']={'INPUT':InputInfo(dataSet='/JetHT/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunMET2016H']={'INPUT':InputInfo(dataSet='/MET/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunMuonEG2016H']={'INPUT':InputInfo(dataSet='/MuonEG/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunSingleEl2016H']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunSingleMu2016H']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunSinglePh2016H']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunZeroBias2016H']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
steps['RunMuOnia2016H']={'INPUT':InputInfo(dataSet='/MuOnia/Run2016H-v1/RAW',label='2016H',events=100000,location='STD', ls=Run2016H)}
# reMiniAOD inputs start from AOD; the nano input starts from MINIAOD.
steps['RunJetHT2016H_reminiaod']={'INPUT':InputInfo(dataSet='/JetHT/Run2016H-18Apr2017-v1/AOD',label='2016Hrmaod',events=100000,location='STD', ls=Run2016H)}
steps['RunJetHT2016H_reminiaodUL']={'INPUT':InputInfo(dataSet='/JetHT/Run2016H-21Feb2020_UL2016-v1/AOD',label='2016HrmaodUL',events=100000,location='STD', ls=Run2016H)}
steps['RunJetHT2016H_nano']={'INPUT':InputInfo(dataSet='/JetHT/Run2016H-18Apr2017-v1/MINIAOD',label='2016Hnano',events=100000,location='STD', ls=Run2016H)}
#### run2 2017B ####
# Two LS selections: a low-pileup run (297227) for the AOD-based tests below,
# and the standard 2017B selection (297557, LS 8-167) for the RAW inputs.
Run2017BlowPU={297227: [[1, 45]]}
Run2017B={297557: [[8, 167]]}
# Real-data RelVal inputs for Run2 2017B: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2017B lumisections.
steps['RunHLTPhy2017B']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunDoubleEG2017B']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunDoubleMuon2017B']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunJetHT2017B']={'INPUT':InputInfo(dataSet='/JetHT/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunMET2017B']={'INPUT':InputInfo(dataSet='/MET/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunMuonEG2017B']={'INPUT':InputInfo(dataSet='/MuonEG/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunSingleEl2017B']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunSingleMu2017B']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunSinglePh2017B']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunZeroBias2017B']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunMuOnia2017B']={'INPUT':InputInfo(dataSet='/MuOnia/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunCharmonium2017B']={'INPUT':InputInfo(dataSet='/Charmonium/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
steps['RunNoBPTX2017B']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2017B-v1/RAW',label='2017B',events=100000,location='STD', ls=Run2017B)}
# AOD-based variants use the low-PU selection; the RAWAOD one also declares the RAW parent dataset.
steps['RunHLTPhy2017B_AOD']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017B-PromptReco-v1/AOD',label='2017Baod',events=100000,location='STD', ls=Run2017BlowPU)}
steps['RunHLTPhy2017B_AODextra']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017B-PromptReco-v1/AOD',label='2017Baodex',events=100000,location='STD', ls=Run2017BlowPU)}
steps['RunHLTPhy2017B_RAWAOD']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017B-PromptReco-v1/AOD',dataSetParent='/HLTPhysics/Run2017B-v1/RAW',label='2017Brawaod',events=100000,location='STD', ls=Run2017B)}
#### run2 2017C ####
# Run 301998, LS 1-150 only.
Run2017C={301998: [[1, 150]]}
# Real-data RelVal inputs for Run2 2017C: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2017C lumisections.
steps['RunHLTPhy2017C']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunDoubleEG2017C']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunDoubleMuon2017C']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunJetHT2017C']={'INPUT':InputInfo(dataSet='/JetHT/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunDisplacedJet2017C']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunMET2017C']={'INPUT':InputInfo(dataSet='/MET/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunMuonEG2017C']={'INPUT':InputInfo(dataSet='/MuonEG/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunSingleEl2017C']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunSingleMu2017C']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunSinglePh2017C']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunZeroBias2017C']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunMuOnia2017C']={'INPUT':InputInfo(dataSet='/MuOnia/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunCharmonium2017C']={'INPUT':InputInfo(dataSet='/Charmonium/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
steps['RunNoBPTX2017C']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2017C-v1/RAW',label='2017C',events=100000,location='STD', ls=Run2017C)}
#### run2 2017D ####
Run2017D={302663: [[1, 100]]} #AVGPU 36
# Real-data RelVal inputs for Run2 2017D: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2017D lumisections.
steps['RunHLTPhy2017D']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunDoubleEG2017D']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunDoubleMuon2017D']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunJetHT2017D']={'INPUT':InputInfo(dataSet='/JetHT/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunDisplacedJet2017D']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunMET2017D']={'INPUT':InputInfo(dataSet='/MET/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunMuonEG2017D']={'INPUT':InputInfo(dataSet='/MuonEG/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunSingleEl2017D']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunSingleMu2017D']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunSinglePh2017D']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunZeroBias2017D']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunMuOnia2017D']={'INPUT':InputInfo(dataSet='/MuOnia/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunCharmonium2017D']={'INPUT':InputInfo(dataSet='/Charmonium/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
steps['RunNoBPTX2017D']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2017D-v1/RAW',label='2017D',events=100000,location='STD', ls=Run2017D)}
#### run2 2017E ####
Run2017E={304125: [[1, 100]]} #AVGPU 46
# Real-data RelVal inputs for Run2 2017E: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2017E lumisections.
steps['RunHLTPhy2017E']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunDoubleEG2017E']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunDoubleMuon2017E']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunJetHT2017E']={'INPUT':InputInfo(dataSet='/JetHT/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunDisplacedJet2017E']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunMET2017E']={'INPUT':InputInfo(dataSet='/MET/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunMuonEG2017E']={'INPUT':InputInfo(dataSet='/MuonEG/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunSingleEl2017E']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunSingleMu2017E']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunSinglePh2017E']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunZeroBias2017E']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunMuOnia2017E']={'INPUT':InputInfo(dataSet='/MuOnia/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunCharmonium2017E']={'INPUT':InputInfo(dataSet='/Charmonium/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
steps['RunNoBPTX2017E']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2017E-v1/RAW',label='2017E',events=100000,location='STD', ls=Run2017E)}
#### run2 2017F ####
Run2017F={305064: [[2, 101]]} #AVGPU 51
# Real-data RelVal inputs for Run2 2017F: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2017F lumisections.
steps['RunHLTPhy2017F']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunDoubleEG2017F']={'INPUT':InputInfo(dataSet='/DoubleEG/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunDoubleMuon2017F']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunJetHT2017F']={'INPUT':InputInfo(dataSet='/JetHT/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunDisplacedJet2017F']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunMET2017F']={'INPUT':InputInfo(dataSet='/MET/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunMuonEG2017F']={'INPUT':InputInfo(dataSet='/MuonEG/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunSingleEl2017F']={'INPUT':InputInfo(dataSet='/SingleElectron/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunSingleMu2017F']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunSinglePh2017F']={'INPUT':InputInfo(dataSet='/SinglePhoton/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunZeroBias2017F']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunMuOnia2017F']={'INPUT':InputInfo(dataSet='/MuOnia/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunCharmonium2017F']={'INPUT':InputInfo(dataSet='/Charmonium/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunNoBPTX2017F']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2017F-v1/RAW',label='2017F',events=100000,location='STD', ls=Run2017F)}
# Express FEVT, reMiniAOD (AOD) and a 94X MINIAOD nano input (note: the last one is a 2017C sample).
steps['RunExpressPhy2017F']={'INPUT':InputInfo(dataSet='/ExpressPhysics/Run2017F-Express-v1/FEVT',label='2017F',events=100000,location='STD', ls=Run2017F)}
steps['RunJetHT2017F_reminiaod']={'INPUT':InputInfo(dataSet='/JetHT/Run2017F-17Nov2017-v1/AOD',label='2017Frmaod',events=100000,location='STD', ls=Run2017F)}
steps['RunJetHT2017F_reminiaodUL']={'INPUT':InputInfo(dataSet='/JetHT/Run2017F-09Aug2019_UL2017-v1/AOD',label='2017FrmaodUL',events=100000,location='STD', ls=Run2017F)}
steps['RunJetHT2017C_94Xv2NanoAODINPUT']={'INPUT':InputInfo(dataSet='/JetHT/CMSSW_9_4_5_cand1-94X_dataRun2_relval_v11_RelVal_rmaod_jetHT2017C-v1/MINIAOD',label='2017Cnano',events=100000,location='STD', ls=Run2017C)}
#### run2 2018A ####
Run2018A={315489: [[1, 100]]} #AVGPU 35
# Real-data RelVal inputs for Run2 2018A: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2018A lumisections.
steps['RunHLTPhy2018A']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunEGamma2018A']={'INPUT':InputInfo(dataSet='/EGamma/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunDoubleMuon2018A']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunJetHT2018A']={'INPUT':InputInfo(dataSet='/JetHT/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunMET2018A']={'INPUT':InputInfo(dataSet='/MET/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunMuonEG2018A']={'INPUT':InputInfo(dataSet='/MuonEG/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunSingleMu2018A']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunZeroBias2018A']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunMuOnia2018A']={'INPUT':InputInfo(dataSet='/MuOnia/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunNoBPTX2018A']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunDisplacedJet2018A']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
steps['RunCharmonium2018A']={'INPUT':InputInfo(dataSet='/Charmonium/Run2018A-v1/RAW',label='2018A',events=100000,location='STD', ls=Run2018A)}
#### for 90 m beta* Totem run relvals ####
RunhBStarTk={314890: [[500, 700]]} #for central tracking system
steps['RunZeroBias_hBStarTk']={'INPUT':InputInfo(dataSet='/ZeroBias/Commissioning2018-v1/RAW',label='zbhBSTk',events=100000,location='STD', ls=RunhBStarTk)}
RunhBStarRP={314276: [[1, 200]]} #for Roman Pot system
steps['RunZeroBias1_hBStarRP']={'INPUT':InputInfo(dataSet='/ZeroBias1/Commissioning2018-v1/RAW',label='zbhBSRP',events=100000,location='STD', ls=RunhBStarRP)}
#### 2018 NANOAOD from prompt reco 2018 MINIAOD ####
steps['RunJetHT2018A_nano']={'INPUT':InputInfo(dataSet='/JetHT/Run2018A-PromptReco-v1/MINIAOD',label='2018Anano',events=100000,location='STD', ls=Run2018A)}
#### 2018 NANOAOD from prompt reco 2018 MINIAOD - UL ####
steps['RunJetHT2018A_nanoUL']={'INPUT':InputInfo(dataSet='/JetHT/Run2018A-12Nov2019_UL2018-v2/MINIAOD',label='2018Anano',events=100000,location='STD', ls=Run2018A)}
#### run2 2018B ####
# Standard 2018B selection plus a dedicated LS window for the BPH parking stream.
Run2018B={317435: [[1, 100]]}
Run2018B_parkingBPH={317661: [[301, 400]]}
# Real-data RelVal inputs for Run2 2018B: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2018B lumisections.
steps['RunHLTPhy2018B']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunEGamma2018B']={'INPUT':InputInfo(dataSet='/EGamma/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunDoubleMuon2018B']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunJetHT2018B']={'INPUT':InputInfo(dataSet='/JetHT/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunMET2018B']={'INPUT':InputInfo(dataSet='/MET/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunMuonEG2018B']={'INPUT':InputInfo(dataSet='/MuonEG/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunSingleMu2018B']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunZeroBias2018B']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunMuOnia2018B']={'INPUT':InputInfo(dataSet='/MuOnia/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunNoBPTX2018B']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunDisplacedJet2018B']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunCharmonium2018B']={'INPUT':InputInfo(dataSet='/Charmonium/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B)}
steps['RunParkingBPH2018B']={'INPUT':InputInfo(dataSet='/ParkingBPH5/Run2018B-v1/RAW',label='2018B',events=100000,location='STD', ls=Run2018B_parkingBPH)}
# Same JetHT RAW input registered twice more under distinct labels,
# presumably selecting different downstream workflows (HE failure / bad-HCAL mitigation) -- confirm with the matrix definitions.
steps['RunJetHT2018BHEfail']={'INPUT':InputInfo(dataSet='/JetHT/Run2018B-v1/RAW',label='HEfail',events=100000,location='STD', ls=Run2018B)}
steps['RunJetHT2018BBadHcalMitig']={'INPUT':InputInfo(dataSet='/JetHT/Run2018B-v1/RAW',label='BadHcalMitig',events=100000,location='STD', ls=Run2018B)}
#### run2 2018C ####
# Run 319450, LS 1-100 only.
Run2018C={319450: [[1, 100]]}
# Real-data RelVal inputs for Run2 2018C: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2018C lumisections.
steps['RunHLTPhy2018C']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunEGamma2018C']={'INPUT':InputInfo(dataSet='/EGamma/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunDoubleMuon2018C']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunJetHT2018C']={'INPUT':InputInfo(dataSet='/JetHT/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunMET2018C']={'INPUT':InputInfo(dataSet='/MET/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunMuonEG2018C']={'INPUT':InputInfo(dataSet='/MuonEG/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunSingleMu2018C']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunZeroBias2018C']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunMuOnia2018C']={'INPUT':InputInfo(dataSet='/MuOnia/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunNoBPTX2018C']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunDisplacedJet2018C']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
steps['RunCharmonium2018C']={'INPUT':InputInfo(dataSet='/Charmonium/Run2018C-v1/RAW',label='2018C',events=100000,location='STD', ls=Run2018C)}
#### run2 2018D ####
# Run 320822, LS 1-100 only.
Run2018D={320822: [[1, 100]]}
# Real-data RelVal inputs for Run2 2018D: one step per primary dataset, each capped at
# 100000 events from the listed RAW dataset within the Run2018D lumisections.
steps['RunHLTPhy2018D']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunEGamma2018D']={'INPUT':InputInfo(dataSet='/EGamma/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunDoubleMuon2018D']={'INPUT':InputInfo(dataSet='/DoubleMuon/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunJetHT2018D']={'INPUT':InputInfo(dataSet='/JetHT/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunMET2018D']={'INPUT':InputInfo(dataSet='/MET/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunMuonEG2018D']={'INPUT':InputInfo(dataSet='/MuonEG/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunSingleMu2018D']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunZeroBias2018D']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunMuOnia2018D']={'INPUT':InputInfo(dataSet='/MuOnia/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunNoBPTX2018D']={'INPUT':InputInfo(dataSet='/NoBPTX/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunDisplacedJet2018D']={'INPUT':InputInfo(dataSet='/DisplacedJet/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
steps['RunCharmonium2018D']={'INPUT':InputInfo(dataSet='/Charmonium/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018D)}
# UL AOD
steps['RunJetHT2018D_reminiaodUL']={'INPUT':InputInfo(dataSet='/JetHT/Run2018D-12Nov2019_UL2018-v4/AOD',label='2018DrmaodUL',events=100000,location='STD', ls=Run2018D)}
# Highstat HLTPhysics
# High-statistics 2015D HLTPhysics selection spanning many runs.
Run2015DHS=selectedLS([258712,258713,258714,258741,258742,258745,258749,258750,259626,259637,259683,259685,259686,259721,259809,259810,259818,259820,259821,259822,259862,259890,259891])
steps['RunHLTPhy2015DHS']={'INPUT':InputInfo(dataSet='/HLTPhysics/Run2015D-v1/RAW',label='2015DHS',events=100000,location='STD', ls=Run2015DHS)}
#### run2 2015 HighLumi High Stat workflows ##
# Run2015HLHS, 25ns, run 260627, JetHT: 2.9M, SingleMuon: 5.7M, ZeroBias: 1.6M
Run2015HLHS=selectedLS([260627])
steps['RunJetHT2015HLHS']={'INPUT':InputInfo(dataSet='/JetHT/Run2015D-v1/RAW',label='2015DHLHS',events=100000,location='STD', ls=Run2015HLHS)}
steps['RunZeroBias2015HLHS']={'INPUT':InputInfo(dataSet='/ZeroBias/Run2015D-v1/RAW',label='2015DHLHS',events=100000,location='STD', ls=Run2015HLHS)}
steps['RunSingleMu2015HLHS']={'INPUT':InputInfo(dataSet='/SingleMuon/Run2015D-v1/RAW',label='2015DHLHS',events=100000,location='STD', ls=Run2015HLHS)}
#### run2 Cosmic ####
##Run 256259 @ 0T 2015C###
##Run 272133 @ 3.8T 2016B###
# Cosmics inputs select whole runs via run=[...] rather than a lumisection map.
steps['RunCosmics2015C']={'INPUT':InputInfo(dataSet='/Cosmics/Run2015C-v1/RAW',label='2015C',run=[256259],events=100000,location='STD')}
steps['RunCosmics2016B']={'INPUT':InputInfo(dataSet='/Cosmics/Run2016B-v1/RAW',label='2016B',run=[272133],events=100000,location='STD')}
### LS2 - MWGR ###
# Mid-week global runs taken during Long Shutdown 2, read from ExpressCosmics FEVT.
steps['RunCosmics2020']={'INPUT':InputInfo(dataSet='/ExpressCosmics/Commissioning2019-Express-v1/FEVT',label='2020GR0',run=[334393],events=100000,location='STD')}
steps['RunCosmics2020GEM']={'INPUT':InputInfo(dataSet='/ExpressCosmics/Commissioning2020-Express-v1/FEVT',label='2020GR4',run=[337973],events=100000,location='STD')}
#### Test of lumi section boundary crossing with run2 2018D ####
# ml1 spans two runs (one LS each); ml2 spans two LS of one run.
Run2018Dml1={320822: [[1,1]] , 320823: [[1,1]]}
Run2018Dml2={320822: [[1,2]]}
steps['RunEGamma2018Dml1']={'INPUT':InputInfo(dataSet='/EGamma/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018Dml1)}
steps['RunEGamma2018Dml2']={'INPUT':InputInfo(dataSet='/EGamma/Run2018D-v1/RAW',label='2018D',events=100000,location='STD', ls=Run2018Dml2)}
def gen(fragment,howMuch):
    """Build a GEN step config: *fragment* (cfi/cff name) as 'cfg', merged with
    the event-count dict *howMuch* (e.g. from Kby()) and step1Defaults.
    merge() is defined earlier in this file; presumably earlier entries win on
    conflicting keys -- confirm against its definition.
    """
    # step1Defaults is only read here; the former 'global' declaration was a
    # no-op (Python needs 'global' only for assignment) and has been dropped.
    return merge([{'cfg':fragment},howMuch,step1Defaults])
def gen2015(fragment,howMuch):
    """Build a GEN step config using the 2015 (13 TeV) step1 defaults.

    Same shape as gen(): {'cfg': fragment} merged with *howMuch* and
    step1Up2015Defaults.
    """
    # step1Up2015Defaults is only read; 'global' (needed only for assignment) removed.
    return merge([{'cfg':fragment},howMuch,step1Up2015Defaults])
def gen2017(fragment,howMuch):
    """Build a GEN step config using the 2017 step1 defaults.

    Same shape as gen(): {'cfg': fragment} merged with *howMuch* and
    step1Up2017Defaults.
    """
    # step1Up2017Defaults is only read; 'global' (needed only for assignment) removed.
    return merge([{'cfg':fragment},howMuch,step1Up2017Defaults])
def gen2018(fragment,howMuch):
    """Build a GEN step config using the 2018 step1 defaults.

    Same shape as gen(): {'cfg': fragment} merged with *howMuch* and
    step1Up2018Defaults.
    """
    # step1Up2018Defaults is only read; 'global' (needed only for assignment) removed.
    return merge([{'cfg':fragment},howMuch,step1Up2018Defaults])
def gen2018prod(fragment,howMuch):
    """Build a production-style 2018 GEN step config.

    Same shape as gen(): {'cfg': fragment} merged with *howMuch* and
    step1Up2018ProdDefaults.
    """
    # step1Up2018ProdDefaults is only read; 'global' (needed only for assignment) removed.
    return merge([{'cfg':fragment},howMuch,step1Up2018ProdDefaults])
def gen2018prodml(fragment,howMuch): #Prod with concurrentLumis
    """Build a production-style 2018 GEN step config with concurrent lumis.

    In addition to gen2018prod(), this shrinks each luminosity block to 5
    events so the job crosses an LS boundary even in short IB tests, and
    merges in the concurrentLumis options.
    """
    # step1Up2018ProdDefaults is only read; 'global' (needed only for assignment) removed.
    return merge([{'cfg':fragment},howMuch,{'--customise_commands': '"process.source.numberEventsInLuminosityBlock=cms.untracked.uint32(5)"'},concurrentLumis,step1Up2018ProdDefaults]) #this setting is to allow job to cross LS in IB
def gen2018hiprod(fragment, howMuch):
    """Build a production-style 2018 heavy-ion GEN step."""
    global step1Up2018HiProdDefaults
    parts = [{'cfg': fragment}, howMuch, step1Up2018HiProdDefaults]
    return merge(parts)
def gen2021hiprod(fragment, howMuch):
    """Build a production-style 2021 (Run-3) heavy-ion GEN step."""
    global step1Up2021HiProdDefaults
    parts = [{'cfg': fragment}, howMuch, step1Up2021HiProdDefaults]
    return merge(parts)
### Production test: 13 TeV equivalents
steps['ProdMinBias_13']=gen2015('MinBias_13TeV_pythia8_TuneCUETP8M1_cfi',Kby(9,100))
steps['ProdTTbar_13']=gen2015('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ProdZEE_13']=gen2015('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ProdQCD_Pt_3000_3500_13']=gen2015('QCD_Pt_3000_3500_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
##production 2017
steps['ProdTTbar_13UP17']=gen2017('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ProdMinBias_13UP17']=gen2017('MinBias_13TeV_pythia8_TuneCUETP8M1_cfi',Kby(9,100))
steps['ProdQCD_Pt_3000_3500_13UP17']=gen2017('QCD_Pt_3000_3500_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
# 8 TeV (run1) MinBias and QCD slices
steps['MinBias']=gen('MinBias_8TeV_pythia8_TuneCUETP8M1_cff',Kby(9,300))
steps['QCD_Pt_3000_3500']=gen('QCD_Pt_3000_3500_8TeV_TuneCUETP8M1_cfi',Kby(9,25))
steps['QCD_Pt_600_800']=gen('QCD_Pt_600_800_8TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['QCD_Pt_80_120']=gen('QCD_Pt_80_120_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
# 13 TeV MinBias and QCD slices
steps['MinBias_13']=gen2015('MinBias_13TeV_pythia8_TuneCUETP8M1_cfi',Kby(100,300)) # set HS to provide adequate pool for PU
steps['QCD_Pt_3000_3500_13']=gen2015('QCD_Pt_3000_3500_13TeV_TuneCUETP8M1_cfi',Kby(9,25))
steps['QCD_Pt_600_800_13']=gen2015('QCD_Pt_600_800_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['QCD_Pt_80_120_13']=gen2015('QCD_Pt_80_120_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['QCD_Pt_30_80_BCtoE_8TeV']=gen('QCD_Pt_30_80_BCtoE_8TeV_TuneCUETP8M1_cfi',Kby(9000,100))
steps['QCD_Pt_80_170_BCtoE_8TeV']=gen('QCD_Pt_80_170_BCtoE_8TeV_TuneCUETP8M1_cfi',Kby(9000,100))
# single-particle guns, run1 geometry
steps['SingleElectronPt10']=gen('SingleElectronPt10_pythia8_cfi',Kby(9,3000))
steps['SingleElectronPt35']=gen('SingleElectronPt35_pythia8_cfi',Kby(9,500))
steps['SingleElectronPt1000']=gen('SingleElectronPt1000_pythia8_cfi',Kby(9,50))
steps['SingleElectronFlatPt1To100']=gen('SingleElectronFlatPt1To100_pythia8_cfi',Mby(2,100))
steps['SingleGammaPt10']=gen('SingleGammaPt10_pythia8_cfi',Kby(9,3000))
steps['SingleGammaPt35']=gen('SingleGammaPt35_pythia8_cfi',Kby(9,500))
steps['SingleMuPt1']=gen('SingleMuPt1_pythia8_cfi',Kby(25,1000))
steps['SingleMuPt10']=gen('SingleMuPt10_pythia8_cfi',Kby(25,500))
steps['SingleMuPt100']=gen('SingleMuPt100_pythia8_cfi',Kby(9,500))
steps['SingleMuPt1000']=gen('SingleMuPt1000_pythia8_cfi',Kby(9,500))
# single-particle guns, post-LS1 (UP15) geometry
steps['SingleElectronPt10_UP15']=gen2015('SingleElectronPt10_pythia8_cfi',Kby(9,3000))
steps['SingleElectronPt35_UP15']=gen2015('SingleElectronPt35_pythia8_cfi',Kby(9,500))
steps['SingleElectronPt1000_UP15']=gen2015('SingleElectronPt1000_pythia8_cfi',Kby(9,50))
steps['SingleElectronFlatPt1To100_UP15']=gen2015('SingleElectronFlatPt1To100_pythia8_cfi',Mby(2,100))
steps['SingleGammaPt10_UP15']=gen2015('SingleGammaPt10_pythia8_cfi',Kby(9,3000))
steps['SingleGammaPt35_UP15']=gen2015('SingleGammaPt35_pythia8_cfi',Kby(9,500))
steps['SingleMuPt1_UP15']=gen2015('SingleMuPt1_pythia8_cfi',Kby(25,1000))
steps['SingleMuPt10_UP15']=gen2015('SingleMuPt10_pythia8_cfi',Kby(25,500))
steps['SingleMuPt100_UP15']=gen2015('SingleMuPt100_pythia8_cfi',Kby(9,500))
steps['SingleMuPt1000_UP15']=gen2015('SingleMuPt1000_pythia8_cfi',Kby(9,500))
steps['NuGun_UP15']=gen2015('SingleNuE10_cfi.py',Kby(9,50))
# 8 TeV physics processes
steps['TTbar']=gen('TTbar_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['TTbarLepton']=gen('TTbarLepton_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZEE']=gen('ZEE_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['Wjet_Pt_80_120']=gen('Wjet_Pt_80_120_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['Wjet_Pt_3000_3500']=gen('Wjet_Pt_3000_3500_8TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['LM1_sfts']=gen('LM1_sfts_8TeV_cfi',Kby(9,100))
steps['QCD_FlatPt_15_3000']=gen('QCDForPF_8TeV_TuneCUETP8M1_cfi',Kby(5,100))
steps['QCD_FlatPt_15_3000HS']=gen('QCDForPF_8TeV_TuneCUETP8M1_cfi',Kby(50,100))
# 13 TeV physics processes
steps['TTbar_13']=gen2015('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['TTbarLepton_13']=gen2015('TTbarLepton_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZEE_13']=gen2015('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ZEE_13_DBLMINIAOD']=gen2015('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['Wjet_Pt_80_120_13']=gen2015('Wjet_Pt_80_120_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['Wjet_Pt_3000_3500_13']=gen2015('Wjet_Pt_3000_3500_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['SMS-T1tttt_mGl-1500_mLSP-100_13']=gen2015('SMS-T1tttt_mGl-1500_mLSP-100_13TeV-pythia8_cfi',Kby(9,50))
steps['QCD_FlatPt_15_3000_13']=gen2015('QCDForPF_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['QCD_FlatPt_15_3000HS_13']=gen2015('QCDForPF_13TeV_TuneCUETP8M1_cfi',Kby(50,100))
# Z' samples (8 and 13 TeV)
steps['ZpMM_2250_8TeV']=gen('ZpMM_2250_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZpEE_2250_8TeV']=gen('ZpEE_2250_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZpTT_1500_8TeV']=gen('ZpTT_1500_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZpMM_2250_13']=gen2015('ZpMM_2250_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZpEE_2250_13']=gen2015('ZpEE_2250_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZpTT_1500_13']=gen2015('ZpTT_1500_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
# exotica / long-lived samples at 13 TeV
steps['HSCPstop_M_200_13']=gen2015('HSCPstop_M_200_TuneCUETP8M1_13TeV_pythia8_cff',Kby(9,100))
steps['RSGravitonToGaGa_13']=gen2015('RSGravitonToGammaGamma_kMpl01_M_3000_TuneCUETP8M1_13TeV_pythia8_cfi',Kby(9,100))
steps['WpToENu_M-2000_13']=gen2015('WprimeToENu_M-2000_TuneCUETP8M1_13TeV-pythia8_cff',Kby(9,100))
steps['DisplacedSUSY_stopToBottom_M_300_1000mm_13']=gen2015('DisplacedSUSY_stopToBottom_M_300_1000mm_TuneCUETP8M1_13TeV_pythia8_cff',Kby(9,100))
### 2017 wf: only the ones for premixing (for the moment)
steps['NuGun_UP17']=gen2017('SingleNuE10_cfi.py',Kby(9,50))
steps['TTbar_13UP17']=gen2017('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ProdZEE_13UP17']=gen2017('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ZEE_13UP17']=gen2017('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ZMM_13UP17']=gen2017('ZMM_13TeV_TuneCUETP8M1_cfi',Kby(18,100))
steps['ZTT_13UP17']=gen2017('ZTT_All_hadronic_13TeV_TuneCUETP8M1_cfi',Kby(9,80))
steps['H125GGgluonfusion_13UP17']=gen2017('H125GGgluonfusion_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['QQH1352T_13UP17']=gen2017('QQH1352T_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['SMS-T1tttt_mGl-1500_mLSP-100_13UP17']=gen2017('SMS-T1tttt_mGl-1500_mLSP-100_13TeV-pythia8_cfi',Kby(9,50))
### 2018 wf: only the ones for premixing (for the moment)
steps['NuGun_UP18']=gen2018('SingleNuE10_cfi.py',Kby(9,50))
steps['ProdTTbar_13UP18']=gen2018prod('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ProdTTbar_13UP18ml']=gen2018prodml('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['TTbar_13UP18']=gen2018('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ProdZEE_13UP18']=gen2018prod('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ProdZEE_13UP18ml']=gen2018prodml('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ZEE_13UP18']=gen2018('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['ProdZMM_13UP18']=gen2018prod('ZMM_13TeV_TuneCUETP8M1_cfi',Kby(18,100))
steps['ZMM_13UP18']=gen2018('ZMM_13TeV_TuneCUETP8M1_cfi',Kby(18,100))
steps['ZTT_13UP18']=gen2018('ZTT_All_hadronic_13TeV_TuneCUETP8M1_cfi',Kby(9,80))
steps['H125GGgluonfusion_13UP18']=gen2018('H125GGgluonfusion_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['QQH1352T_13UP18']=gen2018('QQH1352T_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['SMS-T1tttt_mGl-1500_mLSP-100_13UP18']=gen2018('SMS-T1tttt_mGl-1500_mLSP-100_13TeV-pythia8_cfi',Kby(9,50))
# 13TeV High Stats samples
steps['ZMM_13_HS']=gen2015('ZMM_13TeV_TuneCUETP8M1_cfi',Kby(209,100))
steps['TTbar_13_HS']=gen2015('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(100,50))
def identitySim(wf):
    """Turn a SIM workflow into an 'identity' re-simulation of itself:
    restore the random-number state at SIM, rename the process to SIM2,
    and drop TagInfo products from the input."""
    overrides = {
        '--restoreRND': 'SIM',
        '--process': 'SIM2',
        '--inputCommands': '"keep *","drop *TagInfo*_*_*_*"',
    }
    return merge([overrides, wf])
# identity re-simulation variants of already-defined workflows
steps['SingleMuPt10_UP15_ID']=identitySim(steps['SingleMuPt10_UP15'])
steps['TTbar_13_ID']=identitySim(steps['TTbar_13'])
# Release/GlobalTag tags of pre-produced datasets recycled by the INPUT steps
# below; the index noted in each trailing comment is how the list is consumed.
baseDataSetRelease=[
    'CMSSW_9_2_4-91X_mcRun1_realistic_v2-v1', # 0 run1 samples; note TTbar GENSIM has v2 (see TTbarINPUT below)
    'CMSSW_10_3_0_pre5-103X_upgrade2018_realistic_v7-v1', # 1 GEN-SIM for HI RunII, 2018
    'CMSSW_6_2_0_pre8-PRE_ST62_V8_FastSim-v1', # 2 for fastsim id test
#    'CMSSW_7_1_0_pre5-START71_V1-v2', # 3 8 TeV , for the one sample which is part of the routine relval production (RelValZmumuJets_Pt_20_300, because of -v2)
     # THIS ABOVE IS NOT USED, AT THE MOMENT
    'CMSSW_10_6_0-106X_mcRun2_asymptotic_v3-v1', # 3 - GEN-SIM input for 13 TeV 2016 workfows
    'CMSSW_7_3_0_pre1-PRE_LS172_V15_FastSim-v1', # 4 - fast sim GEN-SIM-DIGI-RAW-HLTDEBUG for id tests
    'CMSSW_10_6_0-PU25ns_106X_mcRun2_asymptotic_v3-v1', # 5 - fullSim PU 25ns UP15 premix library
    'CMSSW_10_4_0_pre1-PU50ns_103X_mcRun2_startup_v1-v1', # 6 - fullSim PU 50ns UP15 premix library
    'CMSSW_10_6_0-106X_mcRun2_asymptotic_v3_FastSim-v1', # 7 - fastSim MinBias for mixing UP16
    'CMSSW_10_6_0-PU25ns_106X_mcRun2_asymptotic_v3_FastSim-v1',# 8 - fastSim premix library UP16
    'CMSSW_10_6_0-106X_upgrade2018_realistic_v4-v1', # 9 - Run2 HI GEN-SIM for mixing
    'CMSSW_7_6_0-76X_mcRun2_asymptotic_v11-v1', # 10 - 13 TeV High Stats GEN-SIM
    'CMSSW_7_6_0_pre7-76X_mcRun2_asymptotic_v9_realBS-v1', # 11 - 13 TeV High Stats MiniBias for mixing GEN-SIM
    'CMSSW_8_1_0_pre9_Geant4102-81X_mcRun2cosmics_startup_peak_v2-v1', # 12 - GEN-SIM input for 1307 cosmics wf from 810_p2
    'CMSSW_10_6_0-106X_mc2017_realistic_v3-v1', # 13 - GENSIM input for 2017 fullSim premix workflows
    'CMSSW_10_6_0-PU25ns_106X_mc2017_realistic_v3-v1', # 14 - fullSim PU 25ns UP17 premix library
    'CMSSW_10_6_0-106X_mc2017_realistic_v3_FastSim-v1', # 15 - fastSim MinBias for mixing UP17
    'CMSSW_10_6_0-PU25ns_106X_mc2017_realistic_v3_FastSim-v1',# 16 - fastSim premix library UP17
    'CMSSW_10_6_0-PU25ns_106X_upgrade2018_realistic_v4-v1', # 17 - fullSim PU 25ns UP18 premix library
    'CMSSW_10_6_0-106X_upgrade2018_realistic_v4-v1', # 18 - GENSIM input for 2018 fullSim premix workflows
    'CMSSW_10_6_0-106X_upgrade2018_realistic_v4_FastSim-v1', # 19 - fastSim MinBias for mixing UP18
    'CMSSW_10_6_0-PU25ns_106X_upgrade2018_realistic_v4_FastSim-v1',# 20 - fastSim premix library UP18
    'CMSSW_10_6_0-106X_mc2017_realistic_v3-v1', # 21 - GEN-SIM inputs for LHE-GEN-SIM 2017 workflows
    'CMSSW_10_6_0-106X_upgrade2018_realistic_v4-v1', # 22 - GEN-SIM inputs for LHE-GEN-SIM 2018 workflows
    'CMSSW_11_2_0_pre8-112X_mcRun3_2021_realistic_HI_v11-v1', #23 - Run3 HI GEN-SIM for mixing
    'CMSSW_11_2_0_pre8-PU_112X_upgrade2018_realistic_v4-v1', # 24 - 2018 Run-Dependent premix library
]
# note: INPUT commands to be added once GEN-SIM w/ 13TeV+PostLS1Geo will be available
# run1 GEN-SIM recycling (dataset release taken from baseDataSetRelease[0])
steps['MinBiasINPUT']={'INPUT':InputInfo(dataSet='/RelValMinBias/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')} #was [0]
steps['QCD_Pt_3000_3500INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_3000_3500/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['QCD_Pt_600_800INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_600_800/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['QCD_Pt_80_120INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_80_120/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleElectronPt10INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronPt10/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleElectronPt1000INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronPt1000/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleElectronPt35INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronPt35/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleGammaPt10INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleGammaPt10/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleGammaPt35INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleGammaPt35/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleMuPt1INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt1/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleMuPt10INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt10/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
# identity-test inputs use split=1 so event ordering is reproducible
steps['SingleMuPt10_UP15IDINPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt10_UP15/%s/GEN-SIM-DIGI-RAW-HLTDEBUG'%(baseDataSetRelease[3],),location='STD',split=1)}
steps['SingleMuPt10_UP15FSIDINPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt10/%s/GEN-SIM-DIGI-RECO'%(baseDataSetRelease[4],),location='STD',split=1)}
steps['SingleMuPt100INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt100/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['SingleMuPt1000INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt1000/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
# TTbar GEN-SIM is a -v2 dataset: rewrite the trailing '1' of the release tag to '2'
steps['TTbarINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar/%s/GEN-SIM'%((baseDataSetRelease[0].rstrip('1')+'2'),),location='STD')}
steps['TTbar_13IDINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM-DIGI-RAW-HLTDEBUG'%(baseDataSetRelease[3],),location='STD',split=1)}
steps['TTbar_13FSIDINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM-DIGI-RECO'%(baseDataSetRelease[4],),location='STD',split=1)}
steps['TTbarLeptonINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbarLepton/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['OldTTbarINPUT']={'INPUT':InputInfo(dataSet='/RelValProdTTbar/CMSSW_5_0_0_pre6-START50_V5-v1/GEN-SIM-RECO',location='STD')}
steps['OldGenSimINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar/CMSSW_4_4_2-START44_V7-v1/GEN-SIM-DIGI-RAW-HLTDEBUG',location='STD')}
steps['Wjet_Pt_80_120INPUT']={'INPUT':InputInfo(dataSet='/RelValWjet_Pt_80_120/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['Wjet_Pt_3000_3500INPUT']={'INPUT':InputInfo(dataSet='/RelValWjet_Pt_3000_3500/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['LM1_sftsINPUT']={'INPUT':InputInfo(dataSet='/RelValLM1_sfts/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['QCD_FlatPt_15_3000INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_FlatPt_15_3000/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['QCD_FlatPt_15_3000HSINPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_FlatPt_15_3000HS/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['TTbar__DIGIPU1INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar/CMSSW_5_2_2-PU_START52_V4_special_120326-v1/GEN-SIM-DIGI-RAW-HLTDEBUG',location='STD')}
# INPUT command for reminiAOD wf on 80X relval input
steps['ProdZEE_13_reminiaodINPUT']={'INPUT':InputInfo(dataSet='/RelValProdZEE_13_pmx25ns/CMSSW_8_0_21-PUpmx25ns_80X_mcRun2_asymptotic_2016_TrancheIV_v6_Tr4GT_v6-v1/AODSIM',label='rmaod',location='STD')}
# INPUT command for reminiAOD wf on 94X relval input
steps['TTbar_13_94XreminiaodINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/CMSSW_9_4_0-94X_mc2017_realistic_v10-v1/GEN-SIM-RECO',label='rmaod',location='STD')}
# INPUT commands for reminiAOD wfs on UL-like relval inputs
steps['TTbar_13_reminiaod2016UL_preVFP_INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13UP16/CMSSW_10_6_12-PU25ns_106X_mcRun2_asymptotic_preVFP_v8_hltul16_preVFP-v1/AODSIM',label='rmaod',location='STD')}
steps['TTbar_13_reminiaod2016UL_postVFP_INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13UP16/CMSSW_10_6_12-PU25ns_106X_mcRun2_asymptotic_v13_hltul16_postVFP-v1/AODSIM',label='rmaod',location='STD')}
# FIXME: replace with AODSIM (more appropriate)
steps['TTbar_13_reminiaod2017UL_INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/CMSSW_10_6_4-PUpmx25ns_106X_mc2017_realistic_v6_rsb-v1/GEN-SIM-RECO',label='rmaod',location='STD')}
steps['TTbar_13_reminiaod2018UL_INPUT']={'INPUT':InputInfo(dataSet='/RelValProdTTbar_13_pmx25ns/CMSSW_10_6_4-PUpmx25ns_106X_upgrade2018_realistic_v9-v1/AODSIM',label='rmaod',location='STD')}
# INPUT command for reminiAOD wfs on PbPb relval inputs
steps['HydjetQ_reminiaodPbPb2018_INPUT']={'INPUT':InputInfo(dataSet='/RelValHydjetQ_B12_5020GeV_2018_ppReco/CMSSW_10_3_3-PU_103X_upgrade2018_realistic_HI_v11-v1/GEN-SIM-RECO',label='rmaod',location='STD')}
#input for a NANOAOD from MINIAOD workflow
steps['ZEE_13_80XNanoAODINPUT']={'INPUT':InputInfo(dataSet='/RelValZEE_13/CMSSW_8_0_21-PU25ns_80X_mcRun2_asymptotic_2016_TrancheIV_v6_Tr4GT_v6-v1/MINIAODSIM',label='nanoaod80X',location='STD')}
steps['TTbar_13_94Xv1NanoAODINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/CMSSW_9_4_0_pre3-PU25ns_94X_mc2017_realistic_v4-v1/MINIAODSIM',label='nanoaod94X',location='STD')}
steps['TTbar_13_94Xv2NanoAODINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/CMSSW_9_4_5_cand1-94X_mc2017_realistic_v14_PU_RelVal_rmaod-v1/MINIAODSIM',label='nanoaod94Xv2',location='STD')}
steps['TTbar_13_106Xv1NanoAODINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/CMSSW_10_6_14-PU25ns_106X_mc2017_realistic_v7-v1/MINIAODSIM',label='nanoaod106X',location='STD')}
# 13 TeV recycle GEN-SIM input
steps['MinBias_13INPUT']={'INPUT':InputInfo(dataSet='/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['QCD_Pt_3000_3500_13INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_3000_3500_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['QCD_Pt_600_800_13INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_600_800_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['QCD_Pt_80_120_13INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_80_120_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['QCD_Pt_80_120_13_HIINPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_80_120_13_HI/%s/GEN-SIM'%(baseDataSetRelease[1],),location='STD')}
steps['TTbar_13INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['TTbarLepton_13INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbarLepton_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZEE_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZEE_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
#steps['ZEE_13_DBLMINIAODINPUT']={'INPUT':InputInfo(dataSet='/RelValZEE_13_DBLMINIAOD/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['Wjet_Pt_80_120_13INPUT']={'INPUT':InputInfo(dataSet='/RelValWjet_Pt_80_120_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['Wjet_Pt_3000_3500_13INPUT']={'INPUT':InputInfo(dataSet='/RelValWjet_Pt_3000_3500_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SMS-T1tttt_mGl-1500_mLSP-100_13INPUT']={'INPUT':InputInfo(dataSet='/RelValSMS-T1tttt_mGl-1500_mLSP-100_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['QCD_FlatPt_15_3000_13INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_FlatPt_15_3000_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['QCD_FlatPt_15_3000HS_13INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_FlatPt_15_3000HS_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZpMM_2250_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZpMM_2250_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZpEE_2250_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZpEE_2250_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZpTT_1500_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZpTT_1500_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['MinBiasHS_13INPUT']={'INPUT':InputInfo(dataSet='/RelValMinBiasHS_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['Higgs200ChargedTaus_13INPUT']={'INPUT':InputInfo(dataSet='/RelValHiggs200ChargedTaus_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['Upsilon1SToMuMu_13INPUT']={'INPUT':InputInfo(dataSet='/RelValUpsilon1SToMuMu_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['JpsiMuMu_Pt-8INPUT']={'INPUT':InputInfo(dataSet='/RelValJpsiMuMu_Pt-8/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
# new BPH relvals produced for the first time in 810_pre9
steps['BsToMuMu_13INPUT']={'INPUT':InputInfo(dataSet='/RelValBsToMuMu_13/CMSSW_8_1_0_pre9-81X_mcRun2_asymptotic_v2-v1/GEN-SIM',location='STD')}
steps['BdToMuMu_13INPUT']={'INPUT':InputInfo(dataSet='/RelValBdToMuMu_13/CMSSW_8_1_0_pre9-81X_mcRun2_asymptotic_v2-v1/GEN-SIM',location='STD')}
steps['BuToJpsiK_13INPUT']={'INPUT':InputInfo(dataSet='/RelValBuToJpsiK_13/CMSSW_8_1_0_pre9-81X_mcRun2_asymptotic_v2-v1/GEN-SIM',location='STD')}
steps['BsToJpsiPhi_13INPUT']={'INPUT':InputInfo(dataSet='/RelValBsToJpsiPhi_13/CMSSW_8_1_0_pre9-81X_mcRun2_asymptotic_v2-v1/GEN-SIM',location='STD')}
###
steps['PhiToMuMu_13INPUT']={'INPUT':InputInfo(dataSet='/RelValPhiToMuMu_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['EtaBToJpsiJpsi_13INPUT']={'INPUT':InputInfo(dataSet='/RelValEtaBToJpsiJpsi_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['BuMixing_13INPUT']={'INPUT':InputInfo(dataSet='/RelValBuMixing_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['WE_13INPUT']={'INPUT':InputInfo(dataSet='/RelValWE_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['WM_13INPUT']={'INPUT':InputInfo(dataSet='/RelValWM_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['WpM_13INPUT']={'INPUT':InputInfo(dataSet='/RelValWpM_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZMM_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZMM_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZEEMM_13_HIINPUT']={'INPUT':InputInfo(dataSet='/RelValZEEMM_13_HI/%s/GEN-SIM'%(baseDataSetRelease[1],),location='STD')}
steps['ZpMM_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZpMM_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ZTT_13INPUT']={'INPUT':InputInfo(dataSet='/RelValZTT_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['H125GGgluonfusion_13INPUT']={'INPUT':InputInfo(dataSet='/RelValH125GGgluonfusion_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['PhotonJets_Pt_10_13INPUT']={'INPUT':InputInfo(dataSet='/RelValPhotonJets_Pt_10_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['PhotonJets_Pt_10_13_HIINPUT']={'INPUT':InputInfo(dataSet='/RelValPhotonJets_Pt_10_13_HI/%s/GEN-SIM'%(baseDataSetRelease[1],),location='STD')}
steps['QQH1352T_13INPUT']={'INPUT':InputInfo(dataSet='/RelValQQH1352T_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['ADDMonoJet_d3MD3_13INPUT']={'INPUT':InputInfo(dataSet='/RelValADDMonoJet_d3MD3_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['RSKKGluon_m3000GeV_13INPUT']={'INPUT':InputInfo(dataSet='/RelValRSKKGluon_m3000GeV_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
# BPH GEN steps generated fresh (not recycled)
steps['PhiToMuMu_13']=gen2015('PYTHIA8_PhiToMuMu_TuneCUETP8M1_13TeV_cff',Kby(100,1100))
steps['EtaBToJpsiJpsi_13']=gen2015('EtaBToJpsiJpsi_forSTEAM_TuneCUEP8M1_13TeV_cfi',Kby(9,100))
steps['BuMixing_13']=gen2015('BuMixing_BMuonFilter_forSTEAM_13TeV_TuneCUETP8M1_cfi',Kby(900,10000))
steps['Cosmics_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValCosmics_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['CosmicsSPLoose_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValCosmicsSPLoose_UP15/%s/GEN-SIM'%(baseDataSetRelease[12],),location='STD')}
steps['BeamHalo_13INPUT']={'INPUT':InputInfo(dataSet='/RelValBeamHalo_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['HSCPstop_M_200_13INPUT']={'INPUT':InputInfo(dataSet='/RelValHSCPstop_M_200_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['RSGravitonToGaGa_13INPUT']={'INPUT':InputInfo(dataSet='/RelValRSGravitonToGaGa_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['WpToENu_M-2000_13INPUT']={'INPUT':InputInfo(dataSet='/RelValWpToENu_M-2000_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['DisplacedSUSY_stopToBottom_M_300_1000mm_13INPUT']={'INPUT':InputInfo(dataSet='/RelValDisplacedSUSY_stopToBottom_M_300_1000mm_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
# particle guns with postLS1 geometry recycle GEN-SIM input
steps['SingleElectronPt10_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronPt10_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleElectronPt35_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronPt35_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleElectronPt1000_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronPt1000_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleElectronFlatPt1To100_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleElectronFlatPt1To100_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleGammaPt10_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleGammaPt10_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleGammaPt35_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleGammaPt35_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleMuPt1_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt1_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleMuPt10_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt10_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleMuPt100_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt100_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['SingleMuPt1000_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValSingleMuPt1000_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
steps['NuGun_UP15INPUT']={'INPUT':InputInfo(dataSet='/RelValNuGun_UP15/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
# INPUT commands for 2017 wf
steps['TTbar_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['ZEE_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValZEE_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['ZMM_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValZMM_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['ZTT_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValZTT_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['H125GGgluonfusion_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValH125GGgluonfusion_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['QQH1352T_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValQQH1352T_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['NuGun_UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValNuGun/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
steps['SMS-T1tttt_mGl-1500_mLSP-100_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValSMS-T1tttt_mGl-1500_mLSP-100_13/%s/GEN-SIM'%(baseDataSetRelease[13],),location='STD')}
# INPUT commands for 2018 wf
steps['TTbar_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['TTbar_13UP18HEfailINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['TTbar_13UP18BadHcalMitigINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13/%s/GEN-SIM'%(baseDataSetRelease[18],),label='Mitig',location='STD')}
steps['ZEE_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValZEE_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['ZMM_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValZMM_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['ZTT_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValZTT_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['H125GGgluonfusion_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValH125GGgluonfusion_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['QQH1352T_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValQQH1352T_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['NuGun_UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValNuGun/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
steps['SMS-T1tttt_mGl-1500_mLSP-100_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValSMS-T1tttt_mGl-1500_mLSP-100_13/%s/GEN-SIM'%(baseDataSetRelease[18],),location='STD')}
#input for fast sim workflows to be added - TODO
#input for 13 TeV High Stats samples
steps['ZMM_13_HSINPUT']={'INPUT':InputInfo(dataSet='/RelValZMM_13_HS/%s/GEN-SIM'%(baseDataSetRelease[10],),location='STD')}
steps['TTbar_13_HSINPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar_13_HS/%s/GEN-SIM'%(baseDataSetRelease[10],),location='STD')}
## high stat step1
# Common options for the ECAL/HCAL calibration-style single-particle steps below.
ecalHcal={
    '-s':'GEN,SIM,DIGI,DIGI2RAW,RAW2DIGI,L1Reco,RECO,EI',
    '--datatier':'GEN-SIM-DIGI-RAW-RECO',
    #'--geometry':'ECALHCAL',
    '--eventcontent':'FEVTDEBUG',
    '--customise':'Validation/Configuration/ECALHCAL.customise,SimGeneral/MixingModule/fullMixCustomize_cff.setCrossingFrameOn',
    '--beamspot':'NoSmear'}
steps['SingleElectronE120EHCAL']=merge([{'cfg':'SingleElectronE120EHCAL_pythia8_cfi'},ecalHcal,Kby(25,250),step1Defaults])
steps['SinglePiE50HCAL']=merge([{'cfg':'SinglePiE50HCAL_pythia8_cfi'},ecalHcal,Kby(25,250),step1Defaults])
# high-statistics gun and QCD steps
steps['MinBiasHS']=gen('MinBias_8TeV_pythia8_TuneCUETP8M1_cff',Kby(25,300))
steps['InclusiveppMuX']=gen('InclusiveppMuX_8TeV_TuneCUETP8M1_cfi',Mby(11,45000))
steps['SingleElectronFlatPt5To100']=gen('SingleElectronFlatPt5To100_pythia8_cfi',Kby(25,250))
steps['SinglePiPt1']=gen('SinglePiPt1_pythia8_cfi',Kby(25,250))
steps['SingleMuPt1HS']=gen('SingleMuPt1_pythia8_cfi',Kby(25,1000))
steps['ZPrime5000Dijet']=gen('ZPrime5000JJ_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['SinglePi0E10']=gen('SinglePi0E10_pythia8_cfi',Kby(25,100))
steps['SinglePiPt10']=gen('SinglePiPt10_pythia8_cfi',Kby(25,250))
steps['SingleGammaFlatPt10To100']=gen('SingleGammaFlatPt10To100_pythia8_cfi',Kby(25,250))
steps['SingleTauPt50Pythia']=gen('SingleTaupt_50_pythia8_cfi',Kby(25,100))
steps['SinglePiPt100']=gen('SinglePiPt100_pythia8_cfi',Kby(25,250))
def genS(fragment, howMuch):
    """Like gen(), but for startup conditions: the stCond settings are
    merged in between the fragment and the event-count settings."""
    global step1Defaults, stCond
    pieces = [{'cfg': fragment}, stCond, howMuch, step1Defaults]
    return merge(pieces)
# 8 TeV samples generated with startup conditions (genS)
steps['Higgs200ChargedTaus']=genS('H200ChargedTaus_Tauola_8TeV_cfi',Kby(9,100))
steps['JpsiMM']=genS('JpsiMM_8TeV_TuneCUETP8M1_cfi',Kby(66,1000))
steps['WE']=genS('WE_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['WM']=genS('WM_8TeV_TuneCUETP8M1_cfi',Kby(9,200))
steps['WpM']=genS('WpM_8TeV_TuneCUETP8M1_cfi',Kby(9,200))
steps['ZMM']=genS('ZMM_8TeV_TuneCUETP8M1_cfi',Kby(18,300))
steps['ZpMM']=genS('ZpMM_8TeV_TuneCUETP8M1_cfi',Kby(9,200))
# 13 TeV electroweak / BPH samples
steps['Higgs200ChargedTaus_13']=gen2015('H200ChargedTaus_Tauola_13TeV_cfi',Kby(9,100))
steps['Upsilon1SToMuMu_13']=gen2015('Upsilon1SToMuMu_forSTEAM_13TeV_TuneCUETP8M1_cfi',Kby(17,190))
steps['BsToMuMu_13']=gen2015('BsToMuMu_13TeV_SoftQCDnonD_TuneCUEP8M1_cfi.py',Kby(21000,150000))
steps['JpsiMuMu_Pt-8']=gen2015('JpsiMuMu_Pt-8_forSTEAM_13TeV_TuneCUETP8M1_cfi',Kby(3100,100000))
steps['BdToMuMu_13']=gen2015('BdToMuMu_13TeV_SoftQCDnonD_TuneCUEP8M1_cfi',Kby(6000,60000))
steps['BuToJpsiK_13']=gen2015('BuToJpsiK_13TeV_SoftQCDnonD_TuneCUEP8M1_cfi',Kby(16000,160000))
steps['BsToJpsiPhi_13']=gen2015('BsToJpsiPhi_13TeV_SoftQCDnonD_TuneCUEP8M1_cfi',Kby(78000,400000))
steps['WE_13']=gen2015('WE_13TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['WM_13']=gen2015('WM_13TeV_TuneCUETP8M1_cfi',Kby(9,200))
steps['WpM_13']=gen2015('WpM_13TeV_TuneCUETP8M1_cfi',Kby(9,200))
steps['ZMM_13']=gen2015('ZMM_13TeV_TuneCUETP8M1_cfi',Kby(18,100))
steps['ZEEMM_13']=gen2015('ZEEMM_13TeV_TuneCUETP8M1_cfi',Kby(18,300))
steps['ZpMM_13']=gen2015('ZpMM_13TeV_TuneCUETP8M1_cfi',Kby(9,200))
# Higgs / photon samples, 8 and 13 TeV
steps['ZTT']=genS('ZTT_All_hadronic_8TeV_TuneCUETP8M1_cfi',Kby(9,150))
steps['H130GGgluonfusion']=genS('H130GGgluonfusion_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['PhotonJets_Pt_10']=genS('PhotonJet_Pt_10_8TeV_TuneCUETP8M1_cfi',Kby(9,150))
steps['QQH1352T']=genS('QQH1352T_8TeV_TuneCUETP8M1_cfi',Kby(9,100))
steps['ZTT_13']=gen2015('ZTT_All_hadronic_13TeV_TuneCUETP8M1_cfi',Kby(9,80))
steps['H125GGgluonfusion_13']=gen2015('H125GGgluonfusion_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
steps['PhotonJets_Pt_10_13']=gen2015('PhotonJet_Pt_10_13TeV_TuneCUETP8M1_cfi',Kby(9,150))
steps['QQH1352T_13']=gen2015('QQH1352T_13TeV_TuneCUETP8M1_cfi',Kby(9,50))
#steps['ZmumuJets_Pt_20_300']=gen('ZmumuJets_Pt_20_300_GEN_8TeV_TuneCUETP8M1_cfg',Kby(25,100))
steps['ADDMonoJet_d3MD3']=genS('ADDMonoJet_8TeV_d3MD3_TuneCUETP8M1_cfi',Kby(9,100))
steps['ADDMonoJet_d3MD3_13']=gen2015('ADDMonoJet_13TeV_d3MD3_TuneCUETP8M1_cfi',Kby(9,100))
steps['RSKKGluon_m3000GeV_13']=gen2015('RSKKGluon_m3000GeV_13TeV_TuneCUETP8M1_cff',Kby(9,100))
# run1 GEN-SIM recycling for the startup-conditions samples above
steps['MinBias2INPUT']={'INPUT':InputInfo(dataSet='/RelValMinBias/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['Higgs200ChargedTausINPUT']={'INPUT':InputInfo(dataSet='/RelValHiggs200ChargedTaus/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['QCD_Pt_3000_3500_2INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_3000_3500/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['QCD_Pt_80_120_2INPUT']={'INPUT':InputInfo(dataSet='/RelValQCD_Pt_80_120/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['JpsiMMINPUT']={'INPUT':InputInfo(dataSet='/RelValJpsiMM/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['TTbar2INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['WEINPUT']={'INPUT':InputInfo(dataSet='/RelValWE/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['WMINPUT']={'INPUT':InputInfo(dataSet='/RelValWM/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZEEINPUT']={'INPUT':InputInfo(dataSet='/RelValZEE/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZMMINPUT']={'INPUT':InputInfo(dataSet='/RelValZMM/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZTTINPUT']={'INPUT':InputInfo(dataSet='/RelValZTT/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['H130GGgluonfusionINPUT']={'INPUT':InputInfo(dataSet='/RelValH130GGgluonfusion/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['PhotonJets_Pt_10INPUT']={'INPUT':InputInfo(dataSet='/RelValPhotonJets_Pt_10/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
#steps['QQH1352TINPUT']={'INPUT':InputInfo(dataSet='/RelValQQH1352T/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')} #temporary comment out
steps['ADDMonoJet_d3MD3INPUT']={'INPUT':InputInfo(dataSet='/RelValADDMonoJet_d3MD3/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['WpMINPUT']={'INPUT':InputInfo(dataSet='/RelValWpM/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZpMMINPUT']={'INPUT':InputInfo(dataSet='/RelValZpMM/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZpMM_2250_8TeVINPUT']={'INPUT':InputInfo(dataSet='/RelValZpMM_2250_8TeV/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZpEE_2250_8TeVINPUT']={'INPUT':InputInfo(dataSet='/RelValZpEE_2250_8TeV/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['ZpTT_1500_8TeVINPUT']={'INPUT':InputInfo(dataSet='/RelValZpTT_1500_8TeV/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
# Cosmic-ray and beam-halo workflows; each merge() layers scenario-specific
# overrides ('--scenario':'cosmics', era/conditions per data-taking period)
# on top of step1Defaults / step1Up2015Defaults.
steps['Cosmics']=merge([{'cfg':'UndergroundCosmicMu_cfi.py','-n':'500','--scenario':'cosmics'},Kby(666,100000),step1Defaults])
steps['CosmicsSPLoose']=merge([{'cfg':'UndergroundCosmicSPLooseMu_cfi.py','-n':'2000','--scenario':'cosmics'},Kby(5000,100000),step1Defaults])
steps['CosmicsSPLoose_UP15']=merge([{'cfg':'UndergroundCosmicSPLooseMu_cfi.py','-n':'2000','--conditions':'auto:run2_mc_cosmics','--scenario':'cosmics'},Kby(5000,500000),step1Up2015Defaults])
steps['Cosmics_UP16']=merge([{'cfg':'UndergroundCosmicMu_cfi.py','-n':'500','--conditions':'auto:run2_mc_cosmics','--scenario':'cosmics','--era':'Run2_2016'},Kby(666,100000),step1Defaults])
steps['Cosmics_UP17']=merge([{'cfg':'UndergroundCosmicMu_cfi.py','-n':'500','--conditions':'auto:phase1_2017_cosmics','--scenario':'cosmics','--era':'Run2_2017'},Kby(666,100000),step1Defaults])
steps['Cosmics_UP18']=merge([{'cfg':'UndergroundCosmicMu_cfi.py','-n':'500','--conditions':'auto:phase1_2018_cosmics','--scenario':'cosmics','--era':'Run2_2018'},Kby(666,100000),step1Defaults])
steps['Cosmics_UP21']=merge([{'cfg':'UndergroundCosmicMu_cfi.py','-n':'500','--conditions':'auto:phase1_2021_cosmics','--scenario':'cosmics','--era':'Run3'},Kby(666,100000),step1Defaults])
# Zero-magnetic-field variant derived from the UP21 cosmics step above.
steps['Cosmics_UP21_0T']=merge([{'--magField':'0T','--conditions':'auto:phase1_2021_cosmics_0T'},steps['Cosmics_UP21']])
steps['CosmicsSPLoose_UP17']=merge([{'cfg':'UndergroundCosmicSPLooseMu_cfi.py','-n':'2000','--conditions':'auto:phase1_2017_cosmics','--scenario':'cosmics','--era':'Run2_2017'},Kby(5000,500000),step1Up2015Defaults])
steps['CosmicsSPLoose_UP18']=merge([{'cfg':'UndergroundCosmicSPLooseMu_cfi.py','-n':'2000','--conditions':'auto:phase1_2018_cosmics','--scenario':'cosmics','--era':'Run2_2018'},Kby(5000,500000),step1Up2015Defaults])
steps['BeamHalo']=merge([{'cfg':'BeamHalo_cfi.py','--scenario':'cosmics'},Kby(9,100),step1Defaults])
steps['BeamHalo_13']=merge([{'cfg':'BeamHalo_13TeV_cfi.py','--scenario':'cosmics'},Kby(9,100),step1Up2015Defaults])
steps['BeamHalo_UP18']=merge([{'cfg':'BeamHalo_13TeV_cfi.py','-n':'500','--conditions':'auto:phase1_2018_cosmics','--scenario':'cosmics','--era':'Run2_2018','--beamspot':'Realistic25ns13TeVEarly2018Collision'},Kby(666,100000),step1Defaults])
steps['BeamHalo_UP21']=merge([{'cfg':'BeamHalo_13TeV_cfi.py','-n':'500','--conditions':'auto:phase1_2021_cosmics','--scenario':'cosmics','--era':'Run3','--beamspot':'Realistic25ns13TeVEarly2017Collision'},Kby(666,100000),step1Defaults])
# GF re-introduce INPUT command once GEN-SIM will be available
# steps['CosmicsINPUT']={'INPUT':InputInfo(dataSet='/RelValCosmics/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
steps['BeamHaloINPUT']={'INPUT':InputInfo(dataSet='/RelValBeamHalo/%s/GEN-SIM'%(baseDataSetRelease[0],),location='STD')}
# Additional 8 TeV generator steps; the *HS variants re-use an existing step
# with higher statistics (Kby(25,100) override).
steps['QCD_Pt_50_80']=genS('QCD_Pt_50_80_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['QCD_Pt_15_20']=genS('QCD_Pt_15_20_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['ZTTHS']=merge([Kby(25,100),steps['ZTT']])
steps['QQH120Inv']=genS('QQH120Inv_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['TTbar2HS']=merge([Kby(25,100),steps['TTbar']])
steps['JpsiMM_Pt_20_inf']=genS('JpsiMM_Pt_20_inf_8TeV_TuneCUETP8M1_cfi',Kby(70,280))
steps['QCD_Pt_120_170']=genS('QCD_Pt_120_170_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['H165WW2L']=genS('H165WW2L_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['UpsMM']=genS('UpsMM_8TeV_TuneCUETP8M1_cfi',Kby(56250,225))
steps['RSGrav']=genS('RS750_quarks_and_leptons_8TeV_TuneCUETP8M1_cff',Kby(25,100))
steps['QCD_Pt_80_120_2HS']=merge([Kby(25,100),steps['QCD_Pt_80_120']])
steps['bJpsiX']=genS('bJpsiX_8TeV_TuneCUETP8M1_cfi',Mby(325,1300000))
steps['QCD_Pt_30_50']=genS('QCD_Pt_30_50_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['H200ZZ4L']=genS('H200ZZ4L_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['LM9p']=genS('LM9p_8TeV_cff',Kby(25,100))
steps['QCD_Pt_20_30']=genS('QCD_Pt_20_30_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
steps['QCD_Pt_170_230']=genS('QCD_Pt_170_230_8TeV_TuneCUETP8M1_cfi',Kby(25,100))
## pPb tests
step1PPbDefaults={'--beamspot':'Realistic8TeVCollision'}
steps['AMPT_PPb_5020GeV_MinimumBias']=merge([{'-n':10},step1PPbDefaults,genS('AMPT_PPb_5020GeV_MinimumBias_cfi',Kby(9,100))])
## pPb Run2
step1PPbDefaultsUp15={'--beamspot':'RealisticPPbBoost8TeV2016Collision','--conditions':'auto:run2_mc_pa','--eventcontent':'RAWSIM', '--era':'Run2_2016_pA'}
steps['EPOS_PPb_8160GeV_MinimumBias']=merge([{'-n':10},step1PPbDefaultsUp15,gen2015('ReggeGribovPartonMC_EposLHC_4080_4080GeV_pPb_cfi',Kby(9,100))])
## heavy ions tests
# '--relval' overrides for heavy-ion statistics (events,events-per-job).
U2000by1={'--relval': '2000,1'}
U80by1={'--relval': '80,1'}
# Conditions/era presets per heavy-ion data-taking period; the *_ppReco
# variants use the pp-style reconstruction eras.
hiAlca2011 = {'--conditions':'auto:run1_mc_hi'}
hiAlca2015 = {'--conditions':'auto:run2_mc_hi', '--era':'Run2_HI'}
hiAlca2017 = {'--conditions':'auto:phase1_2017_realistic', '--era':'Run2_2017_pp_on_XeXe'}
hiAlca2018 = {'--conditions':'auto:phase1_2018_realistic_hi', '--era':'Run2_2018'}
hiAlca2018_ppReco = {'--conditions':'auto:phase1_2018_realistic_hi', '--era':'Run2_2018_pp_on_AA'}
hiAlca2021_ppReco = {'--conditions':'auto:phase1_2021_realistic_hi', '--era':'Run3_pp_on_PbPb'}
hiDefaults2011=merge([hiAlca2011,{'--scenario':'HeavyIons','-n':2}])
hiDefaults2015=merge([hiAlca2015,{'--scenario':'HeavyIons','-n':2}])
hiDefaults2017=merge([hiAlca2017,{'-n':2}])
hiDefaults2018=merge([hiAlca2018,{'--scenario':'HeavyIons','-n':2}])
hiDefaults2018_ppReco=merge([hiAlca2018_ppReco,{'-n':2}])
hiDefaults2021_ppReco=merge([hiAlca2021_ppReco,{'-n':2}])
steps['HydjetQ_B12_5020GeV_2011']=merge([{'-n':1,'--beamspot':'RealisticHI2011Collision'},hiDefaults2011,genS('Hydjet_Quenched_B12_5020GeV_cfi',U2000by1)])
steps['HydjetQ_B12_5020GeV_2015']=merge([{'-n':1,'--beamspot':'RealisticHICollisionFixZ2015'},hiDefaults2015,genS('Hydjet_Quenched_B12_5020GeV_cfi',U2000by1)])
steps['HydjetQ_MinBias_XeXe_5442GeV_2017']=merge([{'-n':1},hiDefaults2017,gen2017('Hydjet_Quenched_MinBias_XeXe_5442GeV_cfi',U2000by1)])
steps['HydjetQ_B12_5020GeV_2018']=merge([{'-n':1},hiDefaults2018,gen2018prod('Hydjet_Quenched_B12_5020GeV_cfi',U2000by1)])
steps['HydjetQ_B12_5020GeV_2018_ppReco']=merge([{'-n':1},hiDefaults2018_ppReco,gen2018hiprod('Hydjet_Quenched_B12_5020GeV_cfi',U2000by1)])
# '_ml' variant layers the concurrentLumis overrides on the ppReco step.
steps['HydjetQ_B12_5020GeV_2018_ppReco_ml']=merge([concurrentLumis,steps['HydjetQ_B12_5020GeV_2018_ppReco']])
steps['HydjetQ_B12_5020GeV_2021_ppReco']=merge([{'-n':1},hiDefaults2021_ppReco,gen2021hiprod('Hydjet_Quenched_B12_5020GeV_cfi',U2000by1)])
steps['QCD_Pt_80_120_13_HI']=merge([hiDefaults2018_ppReco,gen2018hiprod('QCD_Pt_80_120_13TeV_TuneCUETP8M1_cfi',Kby(9,150))])
steps['PhotonJets_Pt_10_13_HI']=merge([hiDefaults2018_ppReco,gen2018hiprod('PhotonJet_Pt_10_13TeV_TuneCUETP8M1_cfi',Kby(9,150))])
steps['ZEEMM_13_HI']=merge([hiDefaults2018_ppReco,gen2018hiprod('ZEEMM_13TeV_TuneCUETP8M1_cfi',Kby(18,300))])
steps['QCD_Pt_80_120_14_HI_2021']=merge([hiDefaults2021_ppReco,gen2021hiprod('QCD_Pt_80_120_14TeV_TuneCP5_cfi',Kby(9,150))])
steps['PhotonJets_Pt_10_14_HI_2021']=merge([hiDefaults2021_ppReco,gen2021hiprod('PhotonJet_Pt_10_14TeV_TuneCP5_cfi',Kby(9,150))])
steps['ZMM_14_HI_2021']=merge([hiDefaults2021_ppReco,gen2021hiprod('ZMM_14TeV_TuneCP5_cfi',Kby(18,300))])
steps['ZEE_14_HI_2021']=merge([hiDefaults2021_ppReco,gen2021hiprod('ZEE_14TeV_TuneCP5_cfi',Kby(18,300))])
## pp reference tests
ppRefAlca2017 = {'--conditions':'auto:phase1_2017_realistic', '--era':'Run2_2017_ppRef'}
ppRefDefaults2017=merge([ppRefAlca2017,{'-n':2}])
steps['QCD_Pt_80_120_13_PPREF']=merge([ppRefDefaults2017,gen2017('QCD_Pt_80_120_13TeV_TuneCUETP8M1_cfi',Kby(9,150))])
#### fastsim section ####
## not foreseen to do things in two steps GEN-SIM then FASTSIM->end: maybe later
# The commented '-s' lines preserve the HLT-including variants of these defaults.
#step1FastDefaults =merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,HLT:@fake,VALIDATION:@standardValidation,DQM:@standardDQM',
# Baseline FastSim chain (8 TeV): full GEN-through-DQM sequence in one step.
step1FastDefaults =merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,VALIDATION:@standardValidation,DQM:@standardDQMFS',
                          '--fast':'',
                          '--beamspot' : 'Realistic8TeVCollision',
                          '--eventcontent':'FEVTDEBUGHLT,DQM',
                          '--datatier':'GEN-SIM-DIGI-RECO,DQMIO',
                          '--relval':'27000,3000'},
                         step1Defaults])
#step1FastUpg2015Defaults =merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,HLT:@relval2016,VALIDATION:@standardValidation,DQM:@standardDQM',
# FastSim defaults for 2016 (run2_mc conditions, Run2_2016 era).
step1FastUpg2015Defaults =merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,VALIDATION:@standardValidation,DQM:@standardDQMFS',
                                 '--fast':'',
                                 '--conditions' :'auto:run2_mc',
                                 '--beamspot' : 'Realistic25ns13TeV2016Collision',
                                 '--era' :'Run2_2016',
                                 '--eventcontent':'FEVTDEBUGHLT,DQM',
                                 '--datatier':'GEN-SIM-DIGI-RECO,DQMIO',
                                 '--relval':'27000,3000'},
                                step1Defaults])
# Truncated chain (GEN,SIM,RECOBEFMIX) producing FASTPU event content for
# use as pileup-mixing input.
step1FastPUNewMixing =merge([{'-s':'GEN,SIM,RECOBEFMIX',
                              '--eventcontent':'FASTPU',
                              '--datatier':'GEN-SIM-RECO'},
                             step1FastUpg2015Defaults])
step1FastUpg2015_trackingOnlyValidation = merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,RECO,VALIDATION:@trackingOnlyValidation'},
                                                 step1FastUpg2015Defaults])
# FastSim defaults for 2017 (phase1_2017_realistic conditions, Run2_2017_FastSim era).
step1FastUpg2017Defaults =merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,VALIDATION:@standardValidation,DQM:@standardDQMFS',
                                 '--fast':'',
                                 '--conditions' :'auto:phase1_2017_realistic',
                                 '--beamspot' : 'Realistic25ns13TeVEarly2017Collision',
                                 '--era' :'Run2_2017_FastSim',
                                 '--eventcontent':'FEVTDEBUGHLT,DQM',
                                 '--datatier':'GEN-SIM-DIGI-RECO,DQMIO',
                                 '--relval':'27000,3000'},
                                step1Defaults])
step1FastPU17NewMixing =merge([{'-s':'GEN,SIM,RECOBEFMIX',
                                '--eventcontent':'FASTPU',
                                '--datatier':'GEN-SIM-RECO'},
                               step1FastUpg2017Defaults])
step1FastUpg2017_trackingOnlyValidation = merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,RECO,VALIDATION:@trackingOnlyValidation'},
                                                 step1FastUpg2017Defaults])
# FastSim defaults for 2018 (phase1_2018_realistic conditions, Run2_2018_FastSim era).
step1FastUpg2018Defaults =merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,VALIDATION:@standardValidation,DQM:@standardDQMFS',
                                 '--fast':'',
                                 '--conditions' :'auto:phase1_2018_realistic',
                                 '--beamspot' :'Realistic25ns13TeVEarly2018Collision',
                                 '--era' :'Run2_2018_FastSim',
                                 '--eventcontent':'FEVTDEBUGHLT,DQM',
                                 '--datatier':'GEN-SIM-DIGI-RECO,DQMIO',
                                 '--relval':'27000,3000'},
                                step1Defaults])
step1FastPU18NewMixing =merge([{'-s':'GEN,SIM,RECOBEFMIX',
                                '--eventcontent':'FASTPU',
                                '--datatier':'GEN-SIM-RECO'},
                               step1FastUpg2018Defaults])
step1FastUpg2018_trackingOnlyValidation = merge([{'-s':'GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,RECO,VALIDATION:@trackingOnlyValidation'},
                                                 step1FastUpg2018Defaults])
#step1FastDefaults
# FastSim sample definitions built on step1FastDefaults (8 TeV).
steps['TTbarFS']=merge([{'cfg':'TTbar_8TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastDefaults])
steps['SingleMuPt1FS']=merge([{'cfg':'SingleMuPt1_pythia8_cfi'},step1FastDefaults])
steps['SingleMuPt10FS']=merge([{'cfg':'SingleMuPt10_pythia8_cfi'},step1FastDefaults])
steps['SingleMuPt100FS']=merge([{'cfg':'SingleMuPt100_pythia8_cfi'},step1FastDefaults])
steps['SinglePiPt1FS']=merge([{'cfg':'SinglePiPt1_pythia8_cfi'},step1FastDefaults])
steps['SinglePiPt10FS']=merge([{'cfg':'SinglePiPt10_pythia8_cfi'},step1FastDefaults])
steps['SinglePiPt100FS']=merge([{'cfg':'SinglePiPt100_pythia8_cfi'},step1FastDefaults])
steps['ZEEFS']=merge([{'cfg':'ZEE_8TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastDefaults])
steps['ZTTFS']=merge([{'cfg':'ZTT_Tauola_OneLepton_OtherHadrons_8TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastDefaults])
steps['QCDFlatPt153000FS']=merge([{'cfg':'QCDForPF_8TeV_TuneCUETP8M1_cfi'},Kby(27,2000),step1FastDefaults])
steps['QCD_Pt_80_120FS']=merge([{'cfg':'QCD_Pt_80_120_8TeV_TuneCUETP8M1_cfi'},Kby(100,500),stCond,step1FastDefaults])
steps['QCD_Pt_3000_3500FS']=merge([{'cfg':'QCD_Pt_3000_3500_8TeV_TuneCUETP8M1_cfi'},Kby(100,500),stCond,step1FastDefaults])
steps['H130GGgluonfusionFS']=merge([{'cfg':'H130GGgluonfusion_8TeV_TuneCUETP8M1_cfi'},step1FastDefaults])
# NOTE(review): step name says 'Pt10To10' but the cfg is '...Pt10To100...' —
# looks like a typo in the key; renaming could break external references, so kept as-is.
steps['SingleGammaFlatPt10To10FS']=merge([{'cfg':'SingleGammaFlatPt10To100_pythia8_cfi'},Kby(100,500),step1FastDefaults])
#step1FastUpg2015Defaults
# FastSim sample definitions built on step1FastUpg2015Defaults (13 TeV, 2016).
steps['TTbarFS_13']=merge([{'cfg':'TTbar_13TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastUpg2015Defaults])
steps['TTbarFS_13_trackingOnlyValidation']=merge([{'cfg':'TTbar_13TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastUpg2015_trackingOnlyValidation])
steps['SMS-T1tttt_mGl-1500_mLSP-100FS_13']=merge([{'cfg':'SMS-T1tttt_mGl-1500_mLSP-100_13TeV-pythia8_cfi'},Kby(100,1000),step1FastUpg2015Defaults])
steps['NuGunFS_UP15']=merge([{'cfg':'SingleNuE10_cfi'},Kby(100,1000),step1FastUpg2015Defaults])
steps['ZEEFS_13']=merge([{'cfg':'ZEE_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2015Defaults])
steps['ZTTFS_13']=merge([{'cfg':'ZTT_All_hadronic_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2015Defaults])
steps['ZMMFS_13']=merge([{'cfg':'ZMM_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2015Defaults])
steps['QCDFlatPt153000FS_13']=merge([{'cfg':'QCDForPF_13TeV_TuneCUETP8M1_cfi'},Kby(27,2000),step1FastUpg2015Defaults])
steps['QCD_Pt_80_120FS_13']=merge([{'cfg':'QCD_Pt_80_120_13TeV_TuneCUETP8M1_cfi'},Kby(100,500),step1FastUpg2015Defaults])
steps['QCD_Pt_3000_3500FS_13']=merge([{'cfg':'QCD_Pt_3000_3500_13TeV_TuneCUETP8M1_cfi'},Kby(100,500),step1FastUpg2015Defaults])
steps['H125GGgluonfusionFS_13']=merge([{'cfg':'H125GGgluonfusion_13TeV_TuneCUETP8M1_cfi'},step1FastUpg2015Defaults])
steps['SingleMuPt10FS_UP15']=merge([{'cfg':'SingleMuPt10_pythia8_cfi'},step1FastUpg2015Defaults])
steps['SingleMuPt100FS_UP15']=merge([{'cfg':'SingleMuPt100_pythia8_cfi'},step1FastUpg2015Defaults])
### FastSim: produce sample of minbias events for PU mixing
steps['MinBiasFS_13_ForMixing']=merge([{'cfg':'MinBias_13TeV_pythia8_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastPUNewMixing])
#step1FastUpg2017Defaults
# FastSim sample definitions built on step1FastUpg2017Defaults (13 TeV, 2017).
steps['TTbarFS_13_UP17']=merge([{'cfg':'TTbar_13TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastUpg2017Defaults])
steps['TTbarFS_13_trackingOnlyValidation_UP17']=merge([{'cfg':'TTbar_13TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastUpg2017_trackingOnlyValidation])
steps['SMS-T1tttt_mGl-1500_mLSP-100FS_13_UP17']=merge([{'cfg':'SMS-T1tttt_mGl-1500_mLSP-100_13TeV-pythia8_cfi'},Kby(100,1000),step1FastUpg2017Defaults])
steps['ZEEFS_13_UP17']=merge([{'cfg':'ZEE_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2017Defaults])
steps['ZTTFS_13_UP17']=merge([{'cfg':'ZTT_All_hadronic_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2017Defaults])
steps['ZMMFS_13_UP17']=merge([{'cfg':'ZMM_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2017Defaults])
steps['QCDFlatPt153000FS_13_UP17']=merge([{'cfg':'QCDForPF_13TeV_TuneCUETP8M1_cfi'},Kby(27,2000),step1FastUpg2017Defaults])
steps['QCD_Pt_80_120FS_13_UP17']=merge([{'cfg':'QCD_Pt_80_120_13TeV_TuneCUETP8M1_cfi'},Kby(100,500),step1FastUpg2017Defaults])
steps['H125GGgluonfusionFS_13_UP17']=merge([{'cfg':'H125GGgluonfusion_13TeV_TuneCUETP8M1_cfi'},step1FastUpg2017Defaults])
steps['SingleMuPt10FS_UP17']=merge([{'cfg':'SingleMuPt10_pythia8_cfi'},step1FastUpg2017Defaults])
steps['SingleMuPt100FS_UP17']=merge([{'cfg':'SingleMuPt100_pythia8_cfi'},step1FastUpg2017Defaults])
### FastSim: produce sample of minbias events for PU mixing
steps['MinBiasFS_13_UP17_ForMixing']=merge([{'cfg':'MinBias_13TeV_pythia8_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastPU17NewMixing])
#step1FastUpg2018Defaults
# FastSim sample definitions built on step1FastUpg2018Defaults (13 TeV, 2018).
steps['TTbarFS_13_UP18']=merge([{'cfg':'TTbar_13TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastUpg2018Defaults])
steps['TTbarFS_13_trackingOnlyValidation_UP18']=merge([{'cfg':'TTbar_13TeV_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastUpg2018_trackingOnlyValidation])
steps['SMS-T1tttt_mGl-1500_mLSP-100FS_13_UP18']=merge([{'cfg':'SMS-T1tttt_mGl-1500_mLSP-100_13TeV-pythia8_cfi'},Kby(100,1000),step1FastUpg2018Defaults])
steps['ZEEFS_13_UP18']=merge([{'cfg':'ZEE_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2018Defaults])
steps['ZTTFS_13_UP18']=merge([{'cfg':'ZTT_All_hadronic_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2018Defaults])
steps['ZMMFS_13_UP18']=merge([{'cfg':'ZMM_13TeV_TuneCUETP8M1_cfi'},Kby(100,2000),step1FastUpg2018Defaults])
steps['QCDFlatPt153000FS_13_UP18']=merge([{'cfg':'QCDForPF_13TeV_TuneCUETP8M1_cfi'},Kby(27,2000),step1FastUpg2018Defaults])
steps['QCD_Pt_80_120FS_13_UP18']=merge([{'cfg':'QCD_Pt_80_120_13TeV_TuneCUETP8M1_cfi'},Kby(100,500),step1FastUpg2018Defaults])
steps['H125GGgluonfusionFS_13_UP18']=merge([{'cfg':'H125GGgluonfusion_13TeV_TuneCUETP8M1_cfi'},step1FastUpg2018Defaults])
steps['SingleMuPt10FS_UP18']=merge([{'cfg':'SingleMuPt10_pythia8_cfi'},step1FastUpg2018Defaults])
steps['SingleMuPt100FS_UP18']=merge([{'cfg':'SingleMuPt100_pythia8_cfi'},step1FastUpg2018Defaults])
### FastSim: produce sample of minbias events for PU mixing, 2018
steps['MinBiasFS_13_UP18_ForMixing']=merge([{'cfg':'MinBias_13TeV_pythia8_TuneCUETP8M1_cfi'},Kby(100,1000),step1FastPU18NewMixing])
### FastSim: template to produce signal and overlay with minbias events
# Pileup overlay templates: '--pileup_input' points at the matching
# MinBiasFS*_ForMixing dataset, versioned through baseDataSetRelease.
PUFS25={'--pileup':'AVE_35_BX_25ns',
        '--pileup_input':'das:/RelValMinBiasFS_13_ForMixing/%s/GEN-SIM-RECO'%(baseDataSetRelease[7],)}
FS_UP15_PU25_OVERLAY = merge([PUFS25,Kby(100,500),steps['TTbarFS_13']] )
### FastSim: template to produce signal and overlay with minbias events #PU50
PUFSAVE50={'--pileup':'AVE_50_BX_25ns',
           '--pileup_input':'das:/RelValMinBiasFS_13_UP17_ForMixing/%s/GEN-SIM-RECO'%(baseDataSetRelease[15],)}
FS_UP17_PU50_OVERLAY = merge([PUFSAVE50,Kby(100,500),steps['TTbarFS_13_UP17']] )
### FastSim: template to produce signal and overlay with minbias events #PU50, 2018
PUFSAVE50UP18={'--pileup':'AVE_50_BX_25ns',
               '--pileup_input':'das:/RelValMinBiasFS_13_UP18_ForMixing/%s/GEN-SIM-RECO'%(baseDataSetRelease[19],)}
FS_UP18_PU50_OVERLAY = merge([PUFSAVE50UP18,Kby(100,500),steps['TTbarFS_13_UP18']] )
### FastSim: produce sample of premixed minbias events
# Premix stage1 (UP15): neutrino-gun events digitized with the premix_stage1
# process modifier, producing PREMIX-tier pileup input.
steps["FS_PREMIXUP15_PU25"] = merge([
    {"cfg":"SingleNuE10_cfi",
     "--fast":"",
     "--conditions":"auto:run2_mc",
     "-s":"GEN,SIM,RECOBEFMIX,DIGI",
     "--eventcontent":"PREMIX",
     "--datatier":"PREMIX",
     "--procModifiers":"premix_stage1",
     "--era":"Run2_2016",
    },
    PUFS25,Kby(100,500)])
### Fastsim: template to produce signal and overlay it with premixed minbias events
FS_PREMIXUP15_PU25_OVERLAY = merge([
#    {"-s" : "GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,L1Reco,RECO,HLT:@relval2016,VALIDATION",
    {"-s" : "GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,L1Reco,RECO,VALIDATION",
     "--datamix" : "PreMix",
     "--procModifiers": "premix_stage2",
     "--pileup_input" : "dbs:/RelValFS_PREMIXUP15_PU25/%s/PREMIX"%(baseDataSetRelease[8],),
    },
    Kby(100,500),step1FastUpg2015Defaults])
# For combined premixing stage1+stage2 workflow
# Variant reading the stage1 output from a local file instead of DBS.
FS_PREMIXUP15_PU25_LOCAL_OVERLAY = merge([
    {"--pileup_input": "file:step1.root"
    },
    FS_PREMIXUP15_PU25_OVERLAY
])
### FastSim: processes exercised in the FastSim validation workflows
fs_proclist = ['ZEE_13', 'TTbar_13', 'H125GGgluonfusion_13', 'ZTT_13', 'ZMM_13',
               'NuGun_UP15', 'QCD_FlatPt_15_3000HS_13', 'SMS-T1tttt_mGl-1500_mLSP-100_13']
### FastSim: signal samples overlaid with premixed minbias events
### (one step per process, plus a *_PRMXLOCAL* variant reading local pileup input)
for proc in fs_proclist:
    steps["FS_%s_PRMXUP15_PU25" % proc] = merge([FS_PREMIXUP15_PU25_OVERLAY, {"cfg": steps[proc]["cfg"]}])
    steps["FS_%s_PRMXLOCALUP15_PU25" % proc] = merge([FS_PREMIXUP15_PU25_LOCAL_OVERLAY, {"cfg": steps[proc]["cfg"]}])
### FastSim: signal samples overlaid with (non-premixed) minbias events
for proc in fs_proclist:
    steps["FS_%s_UP15_PU25" % proc] = merge([{"cfg": steps[proc]["cfg"]}, FS_UP15_PU25_OVERLAY])
### FastSim: produce sample of premixed minbias events UP17
# Premix stage1 (UP17): same recipe as UP15 with 2017 conditions/era.
steps["FS_PREMIXUP17_PU50"] = merge([
    {"cfg":"SingleNuE10_cfi",
     "--fast":"",
     "--conditions":"auto:phase1_2017_realistic",
     "-s":"GEN,SIM,RECOBEFMIX,DIGI",
     "--eventcontent":"PREMIX",
     "--datatier":"PREMIX",
     "--procModifiers":"premix_stage1",
     "--era":"Run2_2017_FastSim",
    },
    PUFSAVE50,Kby(100,500)])
### Fastsim: template to produce signal and overlay it with premixed minbias events
FS_PREMIXUP17_PU50_OVERLAY = merge([
#    {"-s" : "GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,L1Reco,RECO,HLT:@relval2016,VALIDATION",
    {"-s" : "GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,L1Reco,RECO,VALIDATION",
     "--datamix" : "PreMix",
     "--procModifiers": "premix_stage2",
     "--pileup_input" : "dbs:/RelValFS_PREMIXUP17_PU50/%s/PREMIX"%(baseDataSetRelease[16],),
    },
    Kby(100,500),step1FastUpg2017Defaults])
# For combined premixing stage1+stage2 workflow
# Variant reading the stage1 output from a local file instead of DBS.
FS_PREMIXUP17_PU50_LOCAL_OVERLAY = merge([
    {"--pileup_input": "file:step1.root"
    },
    FS_PREMIXUP17_PU50_OVERLAY
])
### FastSim: processes exercised in the FastSim validation workflows (2017)
fs_proclist = ['ZEE_13', 'TTbar_13', 'H125GGgluonfusion_13', 'ZTT_13', 'ZMM_13',
               'NuGun_UP17', 'QCD_FlatPt_15_3000HS_13', 'SMS-T1tttt_mGl-1500_mLSP-100_13']
### FastSim: signal samples overlaid with premixed minbias events
### (one step per process, plus a *_PRMXLOCAL* variant reading local pileup input)
for proc in fs_proclist:
    steps["FS_%s_PRMXUP17_PU50" % proc] = merge([FS_PREMIXUP17_PU50_OVERLAY, {"cfg": steps[proc]["cfg"]}])
    steps["FS_%s_PRMXLOCALUP17_PU50" % proc] = merge([FS_PREMIXUP17_PU50_LOCAL_OVERLAY, {"cfg": steps[proc]["cfg"]}])
### FastSim: signal samples overlaid with (non-premixed) minbias events
for proc in fs_proclist:
    steps["FS_%s_UP17_PU50" % proc] = merge([{"cfg": steps[proc]["cfg"]}, FS_UP17_PU50_OVERLAY])
###end UP17
###
### FastSim: produce sample of premixed minbias events UP18
# Premix stage1 (UP18): same recipe as UP15/UP17 with 2018 conditions/era.
steps["FS_PREMIXUP18_PU50"] = merge([
    {"cfg":"SingleNuE10_cfi",
     "--fast":"",
     "--conditions":"auto:phase1_2018_realistic",
     "-s":"GEN,SIM,RECOBEFMIX,DIGI",
     "--eventcontent":"PREMIX",
     "--datatier":"PREMIX",
     "--procModifiers":"premix_stage1",
     "--era":"Run2_2018_FastSim",
    },
    PUFSAVE50UP18,Kby(100,500)])
### Fastsim: template to produce signal and overlay it with premixed minbias events
FS_PREMIXUP18_PU50_OVERLAY = merge([
#    {"-s" : "GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,L1Reco,RECO,HLT:@relval2016,VALIDATION",
    {"-s" : "GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,L1Reco,RECO,VALIDATION",
     "--datamix" : "PreMix",
     "--procModifiers": "premix_stage2",
     "--pileup_input" : "dbs:/RelValFS_PREMIXUP18_PU50/%s/PREMIX"%(baseDataSetRelease[20],),
    },
    Kby(100,500),step1FastUpg2018Defaults])
# For combined premixing stage1+stage2 workflow
# Variant reading the stage1 output from a local file instead of DBS.
FS_PREMIXUP18_PU50_LOCAL_OVERLAY = merge([
    {"--pileup_input": "file:step1.root"
    },
    FS_PREMIXUP18_PU50_OVERLAY
])
### FastSim: processes exercised in the FastSim validation workflows (2018)
fs_proclist = ['ZEE_13', 'TTbar_13', 'H125GGgluonfusion_13', 'ZTT_13', 'ZMM_13',
               'NuGun_UP18', 'QCD_FlatPt_15_3000HS_13', 'SMS-T1tttt_mGl-1500_mLSP-100_13']
### FastSim: signal samples overlaid with premixed minbias events
### (one step per process, plus a *_PRMXLOCAL* variant reading local pileup input)
for proc in fs_proclist:
    steps["FS_%s_PRMXUP18_PU50" % proc] = merge([FS_PREMIXUP18_PU50_OVERLAY, {"cfg": steps[proc]["cfg"]}])
    steps["FS_%s_PRMXLOCALUP18_PU50" % proc] = merge([FS_PREMIXUP18_PU50_LOCAL_OVERLAY, {"cfg": steps[proc]["cfg"]}])
### FastSim: signal samples overlaid with (non-premixed) minbias events
for proc in fs_proclist:
    steps["FS_%s_UP18_PU50" % proc] = merge([{"cfg": steps[proc]["cfg"]}, FS_UP18_PU50_OVERLAY])
###end UP18
###
# TTbarSFS: GEN,SIM-only FastSim TTbar sample (GEN-SIM tier).
steps['TTbarSFS']=merge([{'cfg':'TTbar_8TeV_TuneCUETP8M1_cfi'},
                        {'-s':'GEN,SIM',
                         '--eventcontent':'FEVTDEBUG',
                         '--datatier':'GEN-SIM',
                         '--fast':''},
                        step1Defaults])
# TTbarSFSA: FastSim TTbar through RECO/validation; the commented '-s' line
# preserves the HLT-including variant.
steps['TTbarSFSA']=merge([{'cfg':'TTbar_8TeV_TuneCUETP8M1_cfi',
#                           '-s':'GEN,SIM,RECO,EI,HLT:@fake,VALIDATION',
                           '-s':'GEN,SIM,RECO,EI,VALIDATION',
                           '--fast':''},
                          step1FastDefaults])
def identityFS(wf):
    """Return a copy of workflow dict *wf* overridden for an identity re-run.

    The overrides restore the random-number state of the HLT step and rename
    the process to HLT2, so the workflow reprocesses identical events; the
    '--inputCommands' value drops *TagInfo* products from the input.
    """
    identity_overrides = {'--restoreRND': 'HLT',
                          '--process': 'HLT2',
                          '--hltProcess': 'HLT2',
                          '--inputCommands': '"keep *","drop *TagInfo*_*_*_*"'}
    return merge([identity_overrides, wf])
# Identity-test variants derived from existing FastSim steps via identityFS.
steps['SingleMuPt10FS_UP15_ID']=identityFS(steps['SingleMuPt10FS_UP15'])
steps['TTbarFS_13_ID']=identityFS(steps['TTbarFS_13'])
## GENERATORS
# Generator-validation defaults: GEN plus genvalid validation, DQM output.
step1GenDefaults=merge([{'-s':'GEN,VALIDATION:genvalid',
                         '--relval':'250000,5000',
                         '--eventcontent':'RAWSIM,DQM',
                         '--datatier':'GEN,DQMIO',
                         '--conditions':'auto:run2_mc'
                         },
                        step1Defaults])
# Hadronizer variant: GEN-SIM datatier, reduced statistics.
step1HadronizerDefaults=merge([{'--datatier':'GEN-SIM,DQMIO',
                                '--relval':'200000,5000'
                                },step1GenDefaults])
# LHE-only variant: produces LHE event content at the GEN tier.
step1LHEDefaults=merge([{'-s':'LHE',
                         '--relval':'200000,5000',
                         '--eventcontent':'LHE',
                         '--datatier':'GEN',
                         '--conditions':'auto:run2_mc'
                         },
                        step1Defaults])
# LHE production steps (full config paths under Configuration/Generator).
steps['DYToll01234Jets_5f_LO_MLM_Madgraph_LHE_13TeV']=genvalid('Configuration/Generator/python/DYToll01234Jets_5f_LO_MLM_Madgraph_LHE_13TeV_cff.py',step1LHEDefaults)
steps['DYToll012Jets_5f_LO_MLM_Madgraph_LHE_13TeV']=genvalid('Configuration/Generator/python/DYToll012Jets_5f_LO_MLM_Madgraph_LHE_13TeV_cff.py',step1LHEDefaults)
steps['TTbar012Jets_5f_NLO_FXFX_Madgraph_LHE_13TeV']=genvalid('Configuration/Generator/python/TTbar012Jets_5f_NLO_FXFX_Madgraph_LHE_13TeV_cff.py',step1LHEDefaults)
steps['TTbar_Pow_LHE_13TeV']=genvalid('Configuration/Generator/python/TTbar_Pow_LHE_13TeV_cff.py',step1LHEDefaults)
steps['DYToll012Jets_5f_NLO_FXFX_Madgraph_LHE_13TeV']=genvalid('Configuration/Generator/python/DYToll012Jets_5f_NLO_FXFX_Madgraph_LHE_13TeV_cff.py',step1LHEDefaults)
steps['WTolNu01234Jets_5f_LO_MLM_Madgraph_LHE_13TeV']=genvalid('Configuration/Generator/python/WTolNu01234Jets_5f_LO_MLM_Madgraph_LHE_13TeV_cff.py',step1LHEDefaults)
steps['WTolNu012Jets_5f_LO_MLM_Madgraph_LHE_13TeV']=genvalid('Configuration/Generator/python/WTolNu012Jets_5f_LO_MLM_Madgraph_LHE_13TeV_cff.py',step1LHEDefaults)
steps['GGToH_Pow_LHE_13TeV']=genvalid('Configuration/Generator/python/GGToH_Pow_LHE_13TeV_cff.py',step1LHEDefaults)
steps['VHToH_Pow_LHE_13TeV']=genvalid('Configuration/Generator/python/VHToH_Pow_LHE_13TeV_cff.py',step1LHEDefaults)
steps['VBFToH_Pow_JHU4l_LHE_13TeV']=genvalid('Configuration/Generator/python/VBFToH_Pow_JHU4l_LHE_13TeV_cff.py',step1LHEDefaults)
steps['BulkG_M1200_narrow_2L2Q_LHE_13TeV']=genvalid('Configuration/Generator/python/BulkG_M1200_narrow_2L2Q_LHE_13TeV_cff.py',step1LHEDefaults)
# all 6 workflows with root step 'DYToll01234Jets_5f_LO_MLM_Madgraph_LHE_13TeV' will recycle the same dataset, from wf [512] of generator set
# steps['DYToll01234Jets_5f_LO_MLM_Madgraph_LHE_13TeVINPUT']={'INPUT':InputInfo(dataSet='/DYToll01234Jets_5f_LO_MLM_Madgraph_LHE_13TeV_py8/CMSSW_7_4_0_pre0-MCRUN2_73_V5-v1/GEN',location='STD')}
# Pure generator validation steps (pythia6/pythia8/herwig7).
steps['MinBias_TuneZ2star_13TeV_pythia6']=genvalid('MinBias_TuneZ2star_13TeV_pythia6_cff',step1GenDefaults)
steps['QCD_Pt-30_TuneZ2star_13TeV_pythia6']=genvalid('QCD_Pt_30_TuneZ2star_13TeV_pythia6_cff',step1GenDefaults)
steps['MinBias_13TeV_pythia8']=genvalid('MinBias_13TeV_pythia8_cff',step1GenDefaults)
steps['QCD_Pt-30_13TeV_pythia8']=genvalid('QCD_Pt_30_13TeV_pythia8_cff',step1GenDefaults)
steps['DYToLL_M-50_13TeV_pythia8']=genvalid('DYToLL_M-50_13TeV_pythia8_cff',step1GenDefaults)
steps['WToLNu_13TeV_pythia8']=genvalid('WToLNu_13TeV_pythia8_cff',step1GenDefaults)
steps['SoftQCDDiffractive_13TeV_pythia8']=genvalid('SoftQCDDiffractive_13TeV_pythia8_cff',step1GenDefaults)
steps['SoftQCDnonDiffractive_13TeV_pythia8']=genvalid('SoftQCDnonDiffractive_13TeV_pythia8_cff',step1GenDefaults)
steps['SoftQCDelastic_13TeV_pythia8']=genvalid('SoftQCDelastic_13TeV_pythia8_cff',step1GenDefaults)
steps['SoftQCDinelastic_13TeV_pythia8']=genvalid('SoftQCDinelastic_13TeV_pythia8_cff',step1GenDefaults)
steps['QCD_Pt-30_13TeV_aMCatNLO_herwig7']=genvalid('Herwig7_Matchbox_aMCatNLO_Herwig_ppTojj_cff',step1GenDefaults)
steps['ZprimeToll_M3000_13TeV_pythia8']=genvalid('ZprimeToll_M3000_13TeV_pythia8_cff',step1GenDefaults)
steps['WprimeTolNu_M3000_13TeV_pythia8']=genvalid('WprimeTolNu_M3000_13TeV_pythia8_cff',step1GenDefaults)
# Generator Hadronization (Hadronization of LHE)
# Steps taking an external LHE dataset via the dataSet= keyword.
steps['WJetsLNu_13TeV_madgraph-pythia8']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_cff',step1GenDefaults,dataSet='/WJetsToLNu_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_cff',step1HadronizerDefaults)
steps['GGToH_13TeV_pythia8']=genvalid('GGToHtautau_13TeV_pythia8_cff',step1GenDefaults)
steps['WJetsLNutaupinu_13TeV_madgraph-pythia8']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_taupinu_cff',step1GenDefaults,dataSet='/WJetsToLNu_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_taupinu']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_taupinu_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_taupinu']=genvalid('Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_taupinu_cff',step1HadronizerDefaults)
steps['GGToHtaupinu_13TeV_pythia8']=genvalid('GGToHtautau_13TeV_pythia8_taupinu_cff',step1GenDefaults)
# NOTE(review): the three taurhonu cfg names below carry a '_cff.py' suffix
# unlike their siblings' bare '_cff' — presumably accepted by genvalid; confirm.
steps['WJetsLNutaurhonu_13TeV_madgraph-pythia8']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_taurhonu_cff.py',step1GenDefaults,dataSet='/WJetsToLNu_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_taurhonu']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_taurhonu_cff.py',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_taurhonu']=genvalid('Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_taurhonu_cff.py',step1HadronizerDefaults)
steps['GGToHtaurhonu_13TeV_pythia8']=genvalid('GGToHtautau_13TeV_pythia8_taurhonu_cff',step1GenDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8_evtgen']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8_evtgen_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8_evtgen']=genvalid('Hadronizer_TuneCP5_13TeV_aMCatNLO_FXFX_5f_max2j_max1p_LHE_pythia8_evtgen_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_aMCatNLO_FXFX_5f_max2j_max0p_LHE_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_aMCatNLO_FXFX_5f_max2j_max0p_LHE_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_aMCatNLO_FXFX_5f_max2j_max0p_LHE_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_aMCatNLO_FXFX_5f_max2j_max0p_LHE_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Hgg_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Hgg_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Hgg_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_Hgg_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Httpinu_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Httpinu_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Httpinu_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_Httpinu_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Httrhonu_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Httrhonu_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Httrhonu_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_Httrhonu_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Htt_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Htt_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Htt_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_Htt_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Htt_powhegEmissionVeto_pythia8_tauola']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Htt_powhegEmissionVeto_pythia8_tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Htt_powhegEmissionVeto_pythia8_tauola']=genvalid('Hadronizer_TuneCP5_13TeV_Htt_powhegEmissionVeto_pythia8_tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Httpinu_powhegEmissionVeto_pythia8_tauola']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Httpinu_powhegEmissionVeto_pythia8_tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Httpinu_powhegEmissionVeto_pythia8_tauola']=genvalid('Hadronizer_TuneCP5_13TeV_Httpinu_powhegEmissionVeto_pythia8_tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_Httrhonu_powhegEmissionVeto_pythia8_tauola']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_Httrhonu_powhegEmissionVeto_pythia8_tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_Httrhonu_powhegEmissionVeto_pythia8_tauola']=genvalid('Hadronizer_TuneCP5_13TeV_Httrhonu_powhegEmissionVeto_pythia8_tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_powhegEmissionVeto_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_powhegEmissionVeto_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto2p_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto2p_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_powhegEmissionVeto2p_pythia8']=genvalid('Hadronizer_TuneCP5_13TeV_powhegEmissionVeto2p_pythia8_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCUETP8M1_Mad_pythia8']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_generic_LHE_pythia8_cff',step1HadronizerDefaults)
# Generator External Decays
steps['TT_13TeV_pythia8-evtgen']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_EvtGen_cff',step1GenDefaults,dataSet='/TTJets_MSDecaysCKM_central_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola']=genvalid('Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_cff',step1HadronizerDefaults)
steps['WToLNu_13TeV_pythia8-tauola']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_Tauola_cff',step1GenDefaults,dataSet='/WJetsToLNu_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['GGToH_13TeV_pythia8-tauola']=genvalid('GGToHtautau_13TeV_pythia8_Tauola_cff',step1GenDefaults)
steps['WToLNutaupinu_13TeV_pythia8-tauola']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_Tauola_taupinu_cff',step1GenDefaults,dataSet='/WJetsToLNu_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taupinu']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taupinu_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taupinu']=genvalid('Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taupinu_cff',step1HadronizerDefaults)
steps['GGToHtaupinu_13TeV_pythia8-tauola']=genvalid('GGToHtautau_13TeV_pythia8_Tauola_taupinu_cff',step1GenDefaults)
steps['WToLNutaurhonu_13TeV_pythia8-tauola']=genvalid('Hadronizer_MgmMatchTuneCUETP8M1_13TeV_madgraph_pythia8_Tauola_taurhonu_cff',step1GenDefaults,dataSet='/WJetsToLNu_13TeV-madgraph/Fall13wmLHE-START62_V1-v1/GEN')
steps['Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taurhonu']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taurhonu_cff',step1HadronizerDefaults)
steps['Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taurhonu']=genvalid('Hadronizer_TuneCP5_13TeV_MLM_5f_max4j_LHE_pythia8_Tauola_taurhonu_cff',step1HadronizerDefaults)
steps['GGToHtaurhonu_13TeV_pythia8-tauola']=genvalid('GGToHtautau_13TeV_pythia8_Tauola_taurhonu_cff',step1GenDefaults)
# normal fullSim workflows using gridpack LHE generator
# LHE-GEN-SIM step
step1LHEGenSimDefault = { '--relval':'9000,50',
'-s':'LHE,GEN,SIM',
'-n' : 10,
'--conditions' : 'auto:run2_mc',
'--beamspot' : 'Realistic25ns13TeV2016Collision',
'--datatier' : 'GEN-SIM,LHE',
'--eventcontent': 'FEVTDEBUG,LHE',
'--era' : 'Run2_2016',
}
# LHE-GEN with DQM
step1LHEGenDQM = merge([{'-s':'LHE,GEN,VALIDATION:genvalid','--datatier' : 'LHE,GEN,DQMIO','--eventcontent': 'LHE,RAWSIM,DQM'},step1LHEDefaults])
def lhegensim(fragment,howMuch):
    """Build an LHE-GEN-SIM relval step with the 2016 defaults.

    fragment -- generator configuration fragment to run ('cfg' entry)
    howMuch  -- event-count options (e.g. Kby(9,50)) merged into the step
    """
    # No 'global' declaration needed: step1LHEGenSimDefault is only read here,
    # never rebound.
    return merge([{'cfg':fragment},howMuch,step1LHEGenSimDefault])
# LHE-GEN-SIM step for 2017
# 2017 conditions/era/beamspot overrides layered on top of the 2016 defaults.
step1LHEGenSimUp2017Default = merge ([{'--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017','--beamspot':'Realistic25ns13TeVEarly2017Collision','--geometry':'DB:Extended'},step1LHEGenSimDefault])
def lhegensim2017(fragment,howMuch):
    """Build an LHE-GEN-SIM relval step with the 2017 defaults.

    fragment -- generator configuration fragment to run ('cfg' entry)
    howMuch  -- event-count options (e.g. Kby(9,100)) merged into the step
    """
    # No 'global' declaration needed: step1LHEGenSimUp2017Default is only read
    # here, never rebound.
    return merge([{'cfg':fragment},howMuch,step1LHEGenSimUp2017Default])
# LHE-GEN-SIM step for 2018
# 2018 conditions/era/beamspot overrides layered on top of the 2016 defaults.
step1LHEGenSimUp2018Default = merge ([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018','--beamspot':'Realistic25ns13TeVEarly2018Collision','--geometry':'DB:Extended'},step1LHEGenSimDefault])
def lhegensim2018(fragment,howMuch):
    """Build an LHE-GEN-SIM relval step with the 2018 defaults.

    fragment -- generator configuration fragment to run ('cfg' entry)
    howMuch  -- event-count options (e.g. Kby(9,100)) merged into the step
    """
    # No 'global' declaration needed: step1LHEGenSimUp2018Default is only read
    # here, never rebound.
    return merge([{'cfg':fragment},howMuch,step1LHEGenSimUp2018Default])
# Run-Dependent MC
def gen2018RD(fragment,howMuch):
    """Build a 2018 GEN step for run-dependent MC.

    The customise_commands force short (5-event) luminosity blocks and pull
    the run-dependent EcalLaserAPDPNRatios tag from Frontier via the
    GlobalTag.toGet override.

    fragment -- generator configuration fragment to run ('cfg' entry)
    howMuch  -- event-count options (e.g. Kby(10,50)) merged into the step
    """
    # No 'global' declaration needed: step1Up2018Defaults is only read here,
    # never rebound.
    return merge([{'cfg':fragment},howMuch,{'--customise_commands': "\"process.source.numberEventsInLuminosityBlock=cms.untracked.uint32(5) \\n process.GlobalTag.toGet = cms.VPSet( cms.PSet( record = cms.string('EcalLaserAPDPNRatiosRcd'), tag = cms.string('EcalLaserAPDPNRatios_Run_Dep_MC'), connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS') ) ) \""},step1Up2018Defaults])
steps['ZEE_13UP18_RD']=gen2018RD('ZEE_13TeV_TuneCUETP8M1_cfi',Kby(10,50))
steps['ZMM_13UP18_RD']=gen2018RD('ZMM_13TeV_TuneCUETP8M1_cfi',Kby(10,50))
steps['TTbar_13UP18_RD']=gen2018RD('TTbar_13TeV_TuneCUETP8M1_cfi',Kby(10,50))
steps['TTbar012Jets_NLO_Mad_py8_Evt_13']=lhegensim('Configuration/Generator/python/TTbar012Jets_5f_NLO_FXFX_Madgraph_LHE_13TeV_cfi.py',Kby(9,50))
steps['GluGluHToZZTo4L_M125_Pow_py8_Evt_13']=lhegensim('Configuration/Generator/python/GGHZZ4L_JHUGen_Pow_NNPDF30_LHE_13TeV_cfi.py', Kby(9,50))
steps['VBFHToZZTo4Nu_M125_Pow_py8_Evt_13']=lhegensim('Configuration/Generator/python/VBFHZZ4Nu_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,50))
steps['VBFHToBB_M125_Pow_py8_Evt_13']=lhegensim('Configuration/Generator/python/VBFHbb_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,50))
steps['GluGluHToZZTo4L_M125_Pow_py8_Evt_13UP17']=lhegensim2017('Configuration/Generator/python/GGHZZ4L_JHUGen_Pow_NNPDF30_LHE_13TeV_cfi.py', Kby(9,100))
steps['VBFHToZZTo4Nu_M125_Pow_py8_Evt_13UP17']=lhegensim2017('Configuration/Generator/python/VBFHZZ4Nu_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,100))
steps['VBFHToBB_M125_Pow_py8_Evt_13UP17']=lhegensim2017('Configuration/Generator/python/VBFHbb_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,100))
steps['GluGluHToZZTo4L_M125_Pow_py8_Evt_13UP18']=lhegensim2018('Configuration/Generator/python/GGHZZ4L_JHUGen_Pow_NNPDF30_LHE_13TeV_cfi.py', Kby(9,100))
steps['VBFHToZZTo4Nu_M125_Pow_py8_Evt_13UP18']=lhegensim2018('Configuration/Generator/python/VBFHZZ4Nu_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,100))
steps['VBFHToBB_M125_Pow_py8_Evt_13UP18']=lhegensim2018('Configuration/Generator/python/VBFHbb_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,100))
#GEN-SIM inputs for LHE-GEN-SIM workflows
#steps['TTbar012Jets_NLO_Mad_py8_Evt_13INPUT']={'INPUT':InputInfo(dataSet='/RelValTTbar012Jets_NLO_Mad_py8_Evt_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
#steps['GluGluHToZZTo4L_M125_Pow_py8_Evt_13INPUT']={'INPUT':InputInfo(dataSet='/RelValGluGluHToZZTo4L_M125_Pow_py8_Evt_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
#steps['VBFHToZZTo4Nu_M125_Pow_py8_Evt_13INPUT']={'INPUT':InputInfo(dataSet='/RelValVBFHToZZTo4Nu_M125_Pow_py8_Evt_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
#steps['VBFHToBB_M125_Pow_py8_Evt_13INPUT']={'INPUT':InputInfo(dataSet='/RelValVBFHToBB_M125_Pow_py8_Evt_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
#steps['VBFHToZZTo4Nu_M125_Pow_py8_Evt_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValVBFHToZZTo4Nu_M125_Pow_py8_Evt_13UP17/%s/GEN-SIM'%(baseDataSetRelease[21],),location='STD')}
#steps['VBFHToBB_M125_Pow_py8_Evt_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValVBFHToBB_M125_Pow_py8_Evt_13UP17/%s/GEN-SIM'%(baseDataSetRelease[21],),location='STD')}
#steps['VBFHToZZTo4Nu_M125_Pow_py8_Evt_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValVBFHToZZTo4Nu_M125_Pow_py8_Evt_13UP18/%s/GEN-SIM'%(baseDataSetRelease[22],),location='STD')}
#steps['VBFHToBB_M125_Pow_py8_Evt_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValVBFHToBB_M125_Pow_py8_Evt_13UP18/%s/GEN-SIM'%(baseDataSetRelease[22],),location='STD')}
# normal fullSim workflows using pLHE-GEN-SIM 2016 by default
# pLHE step
# '-s NONE' : the pLHE step only copies a pre-produced LHE article ('--filein lhe:<id>').
step1LHENormal = {'--relval' : '10000,50',
                  '--mc' : '',
                  '-s' : 'NONE',
                  '--conditions' : 'auto:run2_mc',
                  '--datatier' : 'LHE',
                  '--eventcontent': 'LHE',
                  '--era' : 'Run2_2016',
                  '-n' : 10,
                  }
# followed by GEN-SIM step
step1GENNormal = {'--relval' : '10000,50',
                  '-s' : 'GEN,SIM',
                  '--conditions' : 'auto:run2_mc',
                  '--beamspot' : 'Realistic25ns13TeV2016Collision',
                  '--eventcontent': 'FEVTDEBUG',
                  '--datatier' : 'GEN-SIM',
                  '--era' : 'Run2_2016',
                  }
# pLHE-GEN-SIM step for 2017 conditions
step1LHENormal2017Default = merge ([{'--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017','--beamspot':'Realistic25ns13TeVEarly2017Collision','--geometry':'DB:Extended'},step1LHENormal])
step1GENNormal2017Default = merge ([{'--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017','--beamspot':'Realistic25ns13TeVEarly2017Collision','--geometry':'DB:Extended'},step1GENNormal])
# pLHE-GEN-SIM step for 2018 conditions
step1LHENormal2018Default = merge ([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018','--beamspot':'Realistic25ns13TeVEarly2018Collision','--geometry':'DB:Extended'},step1LHENormal])
step1GENNormal2018Default = merge ([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018','--beamspot':'Realistic25ns13TeVEarly2018Collision','--geometry':'DB:Extended'},step1GENNormal])
# pLHE and GEN-SIM steps 2016 workflows
# NOTE(review): 'lhe:18334' references a pre-generated LHE article id — confirm it is still published.
steps['GluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13']=merge ([{'--filein':'lhe:18334'},step1LHENormal])
steps['Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_2p_HToGG_M125_13']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_2p_HToGG_M125_LHE_py8_cff',step1GENNormal)
# pLHE and GEN-SIM steps 2017 workflows
steps['GluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13UP17']=merge ([{'--filein':'lhe:18334'},step1LHENormal2017Default])
steps['Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_2p_HToGG_M125_13UP17']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_2p_HToGG_M125_LHE_py8_cff',step1GENNormal2017Default)
# pLHE and GEN-SIM steps 2018 workflows
steps['GluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13UP18']=merge ([{'--filein':'lhe:18334'},step1LHENormal2018Default])
steps['Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_2p_HToGG_M125_13UP18']=genvalid('Hadronizer_TuneCUETP8M1_13TeV_powhegEmissionVeto_2p_HToGG_M125_LHE_py8_cff',step1GENNormal2018Default)
#GEN-SIM inputs for pLHE-GEN-SIM workflows 2016,2017,2018
#steps['GluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13INPUT']={'INPUT':InputInfo(dataSet='/RelValGluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13/%s/GEN-SIM'%(baseDataSetRelease[3],),location='STD')}
#steps['GluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13UP17INPUT']={'INPUT':InputInfo(dataSet='/RelValGluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13UP17/%s/GEN-SIM'%(baseDataSetRelease[21],),location='STD')}
#steps['GluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13UP18INPUT']={'INPUT':InputInfo(dataSet='/RelValGluGluHToGG_M125_Pow_MINLO_NNLOPS_py8_13UP18/%s/GEN-SIM'%(baseDataSetRelease[22],),location='STD')}
#Sherpa
steps['sherpa_ZtoEE_0j_BlackHat_13TeV_MASTER']=genvalid('sherpa_ZtoEE_0j_BlackHat_13TeV_MASTER_cff',step1GenDefaults)
steps['sherpa_ZtoLL_2j_MEPSatNLO_13TeV_MASTER']=genvalid('sherpa_ZtoLL_2j_MEPSatNLO_13TeV_MASTER_cff',step1GenDefaults)
steps['sherpa_ttbar_2j_MENLOPS_13TeV_MASTER']=genvalid('sherpa_ttbar_2j_MENLOPS_13TeV_MASTER_cff',step1GenDefaults)
#Herwig7
steps['TTbar_13TeV_Pow_herwig7']=genvalid('Configuration/Generator/python/TT_13TeV_Pow_Herwig7_cff',step1LHEGenDQM)
# Heavy Ion
steps['ReggeGribovPartonMC_EposLHC_5TeV_pPb']=genvalid('GeneratorInterface/ReggeGribovPartonMCInterface/ReggeGribovPartonMC_EposLHC_5TeV_pPb_cfi',step1GenDefaults)
# B-physics
steps['BuToKstarJPsiToMuMu_forSTEAM_13TeV']=genvalid('BuToKstarJPsiToMuMu_forSTEAM_13TeV_cfi',step1GenDefaults)
steps['Upsilon4swithBuToKstarJPsiToMuMu_forSTEAM_13TeV_TuneCUETP8M1']=genvalid('Upsilon4swithBuToKstarJPsiToMuMu_forSTEAM_13TeV_TuneCUETP8M1_cfi',step1GenDefaults)
steps['Upsilon4sBaBarExample_BpBm_Dstarpipi_D0Kpi_nonres_forSTEAM_13TeV_TuneCUETP8M1']=genvalid('Upsilon4sBaBarExample_BpBm_Dstarpipi_D0Kpi_nonres_forSTEAM_13TeV_TuneCUETP8M1_cfi',step1GenDefaults)
steps['LambdaBToLambdaMuMuToPPiMuMu_forSTEAM_13TeV_TuneCUETP8M1']=genvalid('LambdaBToLambdaMuMuToPPiMuMu_forSTEAM_13TeV_TuneCUETP8M1_cfi',step1GenDefaults)
steps['BsToMuMu_forSTEAM_13TeV']=genvalid('BsToMuMu_forSTEAM_13TeV_cfi',step1GenDefaults)
# Workflows for multiple concurrent lumi blocks
def lhegensim2018ml(fragment, howMuch):
    """LHE-GEN-SIM relval step (2018 defaults) exercising multiple concurrent
    luminosity blocks."""
    # Force short (5-event) lumi sections so several blocks are in flight at once.
    lumi_override = {'--customise_commands': '"process.source.numberEventsInLuminosityBlock=cms.untracked.uint32(5)"'}
    return merge([{'cfg': fragment}, howMuch, lumi_override, concurrentLumis, step1LHEGenSimUp2018Default])
steps['GluGluHToZZTo4L_M125_Pow_py8_Evt_13UP18ml']=lhegensim2018ml('Configuration/Generator/python/GGHZZ4L_JHUGen_Pow_NNPDF30_LHE_13TeV_cfi.py',Kby(9,50))
# sometimes v1 won't be used - override it here - the dictionary key is gen fragment + '_' + geometry
# value: the dataset version number (as a string) to substitute into the dataset path
overrideFragments={'H125GGgluonfusion_13UP18INPUT':'2'}
import re
# Rewrite the '/vN/' version segment of each overridden step's dataset path.
for key in overrideFragments:
    # Snapshot the original entries before touching steps[key]: the previous
    # version replaced steps[key] inside the inner loop while still reading
    # from it, which only worked because each entry dict happened to contain
    # a single 'INPUT' key.
    original = steps[key]
    for inI in original:
        DSold = original[inI].dataSet
        # bump e.g. '.../v1/...' to '.../v2/...' per overrideFragments[key]
        DS = re.sub(r'v[0-9]*/', 'v' + overrideFragments[key] + '/', DSold.rstrip())
        steps[key] = {'INPUT': InputInfo(dataSet=DS, location='STD')}
#PU for FullSim
# '--pileup_input' is a DAS dataset path; the release part is picked from baseDataSetRelease.
PU={'-n':10,'--pileup':'default','--pileup_input':'das:/RelValMinBias/%s/GEN-SIM'%(baseDataSetRelease[0],)}
# pu2 can be removed
PU2={'-n':10,'--pileup':'default','--pileup_input':'das:/RelValMinBias/%s/GEN-SIM'%(baseDataSetRelease[0],)}
PU25={'-n':10,'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[3],)}
PU50={'-n':10,'--pileup':'AVE_35_BX_50ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[3],)}
PUHI={'-n':10,'--pileup_input':'das:/RelValHydjetQ_B12_5020GeV_2018/%s/GEN-SIM'%(baseDataSetRelease[9])}
PUHI2021={'-n':10,'--pileup_input':'das:/RelValHydjetQ_B12_5020GeV_2021_ppReco/%s/GEN-SIM'%(baseDataSetRelease[23])}
PU25UP17={'-n':10,'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[13],)}
PU25UP18={'-n':10,'--pileup':'AVE_50_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[18],)}
#PU for FastSim
# FS_PU_INPUT_13TEV = "file:/afs/cern.ch/work/l/lveldere/minbias.root" # placeholder for relval to be produced with wf 135.8
PUFS={'--pileup':'GEN_2012_Summer_50ns_PoissonOOTPU'}
# PUFS2={'--pileup':'2012_Startup_50ns_PoissonOOTPU'} # not used anywhere
PUFSAVE10={'--pileup':'GEN_AVE_10_BX_25ns'} # temporary: one or a few releases as back-up
PUFSAVE20={'--pileup':'GEN_AVE_20_BX_25ns'} # temporary: one or a few releases as back-up
PUFSAVE35={'--pileup':'GEN_AVE_35_BX_25ns'}
PUFSAVE10_DRMIX_ITO={'--pileup':'AVE_10_BX_25ns','--pileup_input':'das:/RelValMinBiasFS_13_ForMixing/%s/GEN-SIM-RECO'%(baseDataSetRelease[7],),'--era':'Run2_25ns','--customise':'FastSimulation/Configuration/Customs.disableOOTPU'}
PUFSAVE35_DRMIX_ITO={'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBiasFS_13_ForMixing/%s/GEN-SIM-RECO'%(baseDataSetRelease[7],),'--era':'Run2_25ns','--customise':'FastSimulation/Configuration/Customs.disableOOTPU'}
PUFS25={'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBiasFS_13_ForMixing/%s/GEN-SIM-RECO'%(baseDataSetRelease[7],)}
#pu25 for high stats workflows
PU25HS={'-n':10,'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[11],)}
#
# FastSim workflows with pileup overlaid on the corresponding no-PU steps.
steps['TTbarFSPU']=merge([PUFS,Kby(100,500),steps['TTbarFS']] )
steps['FS_TTbar_13_PUAVE10']=merge([PUFSAVE10,Kby(100,500),steps['TTbarFS_13']] ) # temporary: one or a few releases as back-up
steps['FS_TTbar_13_PUAVE20']=merge([PUFSAVE20,Kby(100,500),steps['TTbarFS_13']] ) # temporary: one or a few releases as back-up
steps['FS_TTbar_13_PUAVE35']=merge([PUFSAVE35,Kby(100,500),steps['TTbarFS_13']] )
steps['FS_TTbar_13_PU25']=merge([PUFS25,Kby(100,500),steps['TTbarFS_13']] ) # needs the placeholder
steps['FS_NuGun_UP15_PU25']=merge([PUFS25,Kby(100,500),steps['NuGunFS_UP15']] ) # needs the placeholder
steps['FS_SMS-T1tttt_mGl-1500_mLSP-100_13_PU25']=merge([PUFS25,Kby(100,500),steps['SMS-T1tttt_mGl-1500_mLSP-100FS_13']] )
# NOTE(review): 'FS__PU25' duplicates 'FS_NuGun_UP15_PU25' under an odd name — confirm which key the matrix actually references.
steps['FS__PU25']=merge([PUFS25,Kby(100,500),steps['NuGunFS_UP15']] ) # needs the placeholder
steps['FS_TTbar_13_PUAVE10_DRMIX_ITO']=merge([PUFSAVE10_DRMIX_ITO,Kby(100,500),steps['TTbarFS_13']] ) # needs the placeholder
steps['FS_TTbar_13_PUAVE35_DRMIX_ITO']=merge([PUFSAVE35_DRMIX_ITO,Kby(100,500),steps['TTbarFS_13']] ) # needs the placeholder
# step2
# step2 = digitization + L1 emulation + RAW packing + HLT.
step2Defaults = { '-s' : 'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@fake',
                  '--datatier' : 'GEN-SIM-DIGI-RAW-HLTDEBUG',
                  '--eventcontent': 'FEVTDEBUGHLT',
                  '--conditions' : 'auto:run1_mc',
                  }
#for 2015
step2Upg2015Defaults = {'-s' :'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2016',
                        '--conditions' :'auto:run2_mc',
                        '--datatier' :'GEN-SIM-DIGI-RAW-HLTDEBUG',
                        '--eventcontent':'FEVTDEBUGHLT',
                        '--era' :'Run2_2016',
                        '-n' :'10',
                        }
step2Upg2015Defaults50ns = merge([{'-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval50ns','--conditions':'auto:run2_mc_50ns','--era':'Run2_50ns'},step2Upg2015Defaults])
step2Upg2015DefaultsAPVSimu = merge([{'--era': 'Run2_2016_HIPM'},step2Upg2015Defaults])
#for 2017
step2Upg2017Defaults = {'-s' :'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2017',
                        '--conditions' :'auto:phase1_2017_realistic',
                        '--datatier' :'GEN-SIM-DIGI-RAW-HLTDEBUG',
                        '--eventcontent':'FEVTDEBUGHLT',
                        '--era' :'Run2_2017',
                        '-n' :'10',
                        '--geometry' :'DB:Extended',
                        }
#for 2018
step2Upg2018Defaults = {'-s' :'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2018',
                        '--conditions' :'auto:phase1_2018_realistic',
                        '--datatier' :'GEN-SIM-DIGI-RAW-HLTDEBUG',
                        '--eventcontent':'FEVTDEBUGHLT',
                        '--era' :'Run2_2018',
                        '-n' :'10',
                        '--geometry' :'DB:Extended',
                        }
steps['DIGIUP15']=merge([step2Upg2015Defaults])
steps['DIGIUP15APVSimu']=merge([{'--era': 'Run2_2016_HIPM'},step2Upg2015Defaults])
# PROD1 variants: production-style DIGI (no pdigi_valid) with slimmed RAWSIM output.
steps['DIGIUP15PROD1']=merge([{'-s':'DIGI,L1,DIGI2RAW,HLT:@relval2016','--eventcontent':'RAWSIM','--datatier':'GEN-SIM-RAW'},step2Upg2015Defaults])
steps['DIGIUP15_PU25']=merge([PU25,step2Upg2015Defaults])
steps['DIGIUP15APVSimu_PU25']=merge([PU25,step2Upg2015DefaultsAPVSimu])
steps['DIGIUP15_PU50']=merge([PU50,step2Upg2015Defaults50ns])
steps['DIGIUP17']=merge([step2Upg2017Defaults])
steps['DIGIUP18']=merge([step2Upg2018Defaults])
steps['DIGIUP18ml']=merge([concurrentLumis,step2Upg2018Defaults])
steps['DIGIUP17PROD1']=merge([{'-s':'DIGI,L1,DIGI2RAW,HLT:@relval2017','--eventcontent':'RAWSIM','--datatier':'GEN-SIM-RAW'},step2Upg2017Defaults])
steps['DIGIUP18PROD1']=merge([{'-s':'DIGI,L1,DIGI2RAW,HLT:@relval2018','--eventcontent':'RAWSIM','--datatier':'GEN-SIM-RAW'},step2Upg2018Defaults])
steps['DIGIUP18PROD1ml']=merge([concurrentLumis,steps['DIGIUP18PROD1']])
steps['DIGIUP17_PU25']=merge([PU25UP17,step2Upg2017Defaults])
steps['DIGIUP18_PU25']=merge([PU25UP18,step2Upg2018Defaults])
# for Run2 PPb MC workflows
steps['DIGIUP15_PPb']=merge([{'-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:PIon','--conditions':'auto:run2_mc_pa', '--era':'Run2_2016_pA'}, steps['DIGIUP15']])
# PU25 for high stats workflows
steps['DIGIUP15_PU25HS']=merge([PU25HS,step2Upg2015Defaults])
steps['DIGIPROD1']=merge([{'-s':'DIGI,L1,DIGI2RAW,HLT:@fake','--eventcontent':'RAWSIM','--datatier':'GEN-SIM-RAW'},step2Defaults])
steps['DIGI']=merge([step2Defaults])
#steps['DIGI2']=merge([stCond,step2Defaults])
steps['DIGICOS']=merge([{'--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW'},stCond,step2Defaults])
steps['DIGIHAL']=merge([{'--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW'},step2Upg2015Defaults])
steps['DIGICOS_UP15']=merge([{'--conditions':'auto:run2_mc_cosmics','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW'},step2Upg2015Defaults])
steps['DIGICOS_UP16']=merge([{'--conditions':'auto:run2_mc_cosmics','-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2016','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW', '--era' : 'Run2_2016'},step2Upg2015Defaults])
steps['DIGICOS_UP17']=merge([{'--conditions':'auto:phase1_2017_cosmics','-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2017','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW', '--era' : 'Run2_2017'},step2Upg2015Defaults])
steps['DIGICOS_UP18']=merge([{'--conditions':'auto:phase1_2018_cosmics','-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2018','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW', '--era' : 'Run2_2018'},step2Upg2015Defaults])
steps['DIGICOS_UP21']=merge([{'--conditions':'auto:phase1_2021_cosmics','-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2017','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW', '--era' : 'Run3'},step2Upg2015Defaults])
steps['DIGICOS_UP21_0T']=merge([{'--magField':'0T','--conditions':'auto:phase1_2021_cosmics_0T'},steps['DIGICOS_UP21']])
# PEAK variants switch the strip tracker APVs to peak readout mode via customise_commands.
steps['DIGICOSPEAK_UP17']=merge([{'--conditions':'auto:phase1_2017_cosmics_peak','-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2017','--customise_commands': '"process.mix.digitizers.strip.APVpeakmode=cms.bool(True)"','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW', '--era' : 'Run2_2017'},step2Upg2015Defaults])
steps['DIGICOSPEAK_UP18']=merge([{'--conditions':'auto:phase1_2018_cosmics_peak','-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2018','--customise_commands': '"process.mix.digitizers.strip.APVpeakmode=cms.bool(True)"','--scenario':'cosmics','--eventcontent':'FEVTDEBUG','--datatier':'GEN-SIM-DIGI-RAW', '--era' : 'Run2_2018'},step2Upg2015Defaults])
steps['DIGIPU1']=merge([PU,step2Defaults])
steps['DIGIPU2']=merge([PU2,step2Defaults])
steps['REDIGIPU']=merge([{'-s':'reGEN,reDIGI,L1,DIGI2RAW,HLT:@fake'},steps['DIGIPU1']])
steps['DIGIUP15_ID']=merge([{'--restoreRND':'HLT','--process':'HLT2'},steps['DIGIUP15']])
steps['RESIM']=merge([{'-s':'reGEN,reSIM','-n':10},steps['DIGI']])
#steps['RESIMDIGI']=merge([{'-s':'reGEN,reSIM,DIGI,L1,DIGI2RAW,HLT:@fake,RAW2DIGI,L1Reco','-n':10,'--restoreRNDSeeds':'','--process':'HLT'},steps['DIGI']])
# Heavy-ion DIGI steps; 'HiMixNoPU'/'HiMix' select the HI event-mixing mode.
steps['DIGIHI2021PPRECO']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake2'}, hiDefaults2021_ppReco, {'--pileup':'HiMixNoPU'}, step2Upg2015Defaults])
steps['DIGIHI2018PPRECO']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:HIon'}, hiDefaults2018_ppReco, {'--pileup':'HiMixNoPU'}, step2Upg2015Defaults])
steps['DIGIHI2018PPRECOml']=merge([concurrentLumis,steps['DIGIHI2018PPRECO']])
steps['DIGIHI2018']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake2'}, hiDefaults2018, {'--pileup':'HiMixNoPU'}, step2Upg2015Defaults])
steps['DIGIHI2017']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake2'}, hiDefaults2017, step2Upg2015Defaults])
steps['DIGIHI2015']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake'}, hiDefaults2015, {'--pileup':'HiMixNoPU'}, step2Upg2015Defaults])
steps['DIGIHI2011']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake'}, hiDefaults2011, {'--pileup':'HiMixNoPU'}, step2Defaults])
steps['DIGIHI2021MIX']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake2', '-n':2}, hiDefaults2021_ppReco, {'--pileup':'HiMix'}, PUHI2021, step2Upg2015Defaults])
steps['DIGIHIMIX']=merge([{'-s':'DIGI:pdigi_hi,L1,DIGI2RAW,HLT:@fake2', '-n':2}, hiDefaults2018_ppReco, {'--pileup':'HiMix'}, PUHI, step2Upg2015Defaults])
steps['DIGIPPREF2017']=merge([{'-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@fake2'}, ppRefDefaults2017, step2Upg2015Defaults])
# PRE-MIXING : https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideSimulation#Pre_Mixing_Instructions
premixUp2015Defaults = {
'--evt_type' : 'SingleNuE10_cfi',
'-s' : 'GEN,SIM,DIGI:pdigi_valid',
'-n' : '10',
'--conditions' : 'auto:run2_mc', # 25ns GT; dedicated dict for 50ns
'--datatier' : 'PREMIX',
'--eventcontent': 'PREMIX',
'--procModifiers':'premix_stage1',
'--era' : 'Run2_2016' # temporary replacement for premix; to be brought back to customisePostLS1 *EDIT - This comment possibly no longer relevant with switch to eras
}
premixUp2015Defaults50ns = merge([{'--conditions':'auto:run2_mc_50ns'},
{'--era':'Run2_50ns'},
premixUp2015Defaults])
premixUp2017Defaults = merge([{'--conditions':'auto:phase1_2017_realistic'},
{'--era':'Run2_2017'},
premixUp2015Defaults])
premixUp2018Defaults = merge([{'--conditions':'auto:phase1_2018_realistic'},
{'--era':'Run2_2018'},
premixUp2015Defaults])
steps['PREMIXUP15_PU25']=merge([PU25,Kby(100,100),premixUp2015Defaults])
steps['PREMIXUP15_PU50']=merge([PU50,Kby(100,100),premixUp2015Defaults50ns])
steps['PREMIXUP17_PU25']=merge([PU25UP17,Kby(100,100),premixUp2017Defaults])
steps['PREMIXUP18_PU25']=merge([PU25UP18,Kby(100,100),premixUp2018Defaults])
# Stage-2 premixing (2016 eras): re-digitize the signal while overlaying the
# premixed pileup library through the DATAMIX step; 'premix_stage2' activates
# the PreMix mixing workers.
digiPremixUp2015Defaults25ns = {
    '--conditions'   : 'auto:run2_mc',
    '-s'             : 'DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,HLT:@relval2016',
    '--pileup_input' : 'das:/RelValPREMIXUP15_PU25/%s/PREMIX'%baseDataSetRelease[5],
    '--eventcontent' : 'FEVTDEBUGHLT',
    '--datatier'     : 'GEN-SIM-DIGI-RAW-HLTDEBUG',
    '--datamix'      : 'PreMix',
    '--procModifiers': 'premix_stage2',
    '--era'          : 'Run2_2016'
}
from .upgradeWorkflowComponents import digiPremixLocalPileup
# 'Local pileup' variant: presumably takes the premix library from locally
# produced files instead of the DAS dataset — confirm in upgradeWorkflowComponents.
digiPremixLocalPileupUp2015Defaults25ns = merge([digiPremixLocalPileup,
                                                 digiPremixUp2015Defaults25ns])
digiPremixUp2015Defaults50ns=merge([{'-s':'DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,HLT:@relval50ns'},
                                    {'--conditions':'auto:run2_mc_50ns'},
                                    {'--pileup_input' : 'das:/RelValPREMIXUP15_PU50/%s/PREMIX'%baseDataSetRelease[6]},
                                    {'--era' : 'Run2_50ns'},
                                    digiPremixUp2015Defaults25ns])
# Stage-2 premixing, 2017 detector setup.
digiPremixUp2017Defaults25ns = {
    '--conditions'   : 'auto:phase1_2017_realistic',
    '-s'             : 'DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,HLT:@relval2017',
    '--pileup_input' : 'das:/RelValPREMIXUP17_PU25/%s/PREMIX'%baseDataSetRelease[14],
    '--eventcontent' : 'FEVTDEBUGHLT',
    '--datatier'     : 'GEN-SIM-DIGI-RAW-HLTDEBUG',
    '--datamix'      : 'PreMix',
    '--procModifiers': 'premix_stage2',
    '--era'          : 'Run2_2017'
}
digiPremixLocalPileupUp2017Defaults25ns = merge([digiPremixLocalPileup,
                                                 digiPremixUp2017Defaults25ns])
# Stage-2 premixing, 2018 detector setup.
digiPremixUp2018Defaults25ns = {
    '--conditions'   : 'auto:phase1_2018_realistic',
    '-s'             : 'DIGI:pdigi_valid,DATAMIX,L1,DIGI2RAW,HLT:@relval2018',
    '--pileup_input' : 'das:/RelValPREMIXUP18_PU25/%s/PREMIX'%baseDataSetRelease[17],
    '--eventcontent' : 'FEVTDEBUGHLT',
    '--datatier'     : 'GEN-SIM-DIGI-RAW-HLTDEBUG',
    '--datamix'      : 'PreMix',
    '--procModifiers': 'premix_stage2',
    '--era'          : 'Run2_2018'
}
digiPremixLocalPileupUp2018Defaults25ns = merge([digiPremixLocalPileup,
                                                 digiPremixUp2018Defaults25ns])
# Concrete stage-2 premix DIGI step definitions.
# APVSimu variants switch to the Run2_2016_HIPM era; LOCAL variants use the
# local-pileup input configuration defined above.
steps['DIGIPRMXUP15_PU25']=merge([digiPremixUp2015Defaults25ns])
steps['DIGIPRMXUP15APVSimu_PU25']=merge([{'--era': 'Run2_2016_HIPM'},digiPremixUp2015Defaults25ns])
steps['DIGIPRMXLOCALUP15_PU25']=merge([digiPremixLocalPileupUp2015Defaults25ns])
steps['DIGIPRMXLOCALUP15APVSimu_PU25']=merge([{'--era': 'Run2_2016_HIPM'},digiPremixLocalPileupUp2015Defaults25ns])
steps['DIGIPRMXUP15_PU50']=merge([digiPremixUp2015Defaults50ns])
steps['DIGIPRMXUP17_PU25']=merge([digiPremixUp2017Defaults25ns])
steps['DIGIPRMXLOCALUP17_PU25']=merge([digiPremixLocalPileupUp2017Defaults25ns])
steps['DIGIPRMXUP18_PU25']=merge([digiPremixUp2018Defaults25ns])
steps['DIGIPRMXLOCALUP18_PU25']=merge([digiPremixLocalPileupUp2018Defaults25ns])
# Run-Dependent MC: DIGI step; 17 is a dummy test; 2018 with 2000 lumis
# The --customise_commands payload (injected python, hence the escaped
# quoting) enables time-dependent ECAL laser corrections in the mixing.
steps['DIGIPRMXUP17_PU25_RD']=merge([digiPremixUp2017Defaults25ns, { '--customise_commands':"\"process.EcalLaserCorrectionServiceMC = cms.ESProducer('EcalLaserCorrectionServiceMC') \\n process.GlobalTag.toGet = cms.VPSet( cms.PSet( record = cms.string('EcalLaserAPDPNRatiosMCRcd'), tag = cms.string('EcalLaserAPDPNRatios_UL_2017_mc'), connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS') ) ) \\n process.mixData.workers.ecal.timeDependent=True\"" } ])
digiPremixRD2018 = {
    '--pileup_input':'das:/RelValPREMIXUP18_PU25/%s/PREMIX'%baseDataSetRelease[24]
}
steps['DIGIPRMXUP18_PU25_RD']=merge([digiPremixRD2018, {'--customise_commands':"\"process.EcalLaserCorrectionServiceMC = cms.ESProducer('EcalLaserCorrectionServiceMC') \\n process.GlobalTag.toGet = cms.VPSet( cms.PSet( record = cms.string('EcalLaserAPDPNRatiosMCRcd'), tag = cms.string('EcalLaserAPDPNRatios_Run_Dep_MC_first_IOV'), connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS') ), cms.PSet( record = cms.string('EcalLaserAPDPNRatiosRcd'), tag = cms.string('EcalLaserAPDPNRatios_Run_Dep_MC'), connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS') ) ) \\n process.mixData.workers.ecal.timeDependent=True \\n process.source.firstLuminosityBlockForEachRun = cms.untracked.VLuminosityBlockID(*[cms.LuminosityBlockID(x,y) for x,y in ((315257, 1), (316082, 222), (316720, 445), (317527, 668), (320917, 890), (321414, 1112), (321973, 1334), (322492, 1556), (324245, 1779))]) \""}, digiPremixUp2018Defaults25ns])
# Production-style stage-2 premix steps: same mixing chain but writing the
# compact PREMIXRAW event content instead of FEVTDEBUGHLT (2018 uses
# GEN-SIM-RAW and the DB geometry).
premixProd25ns = {'-s'             : 'DIGI,DATAMIX,L1,DIGI2RAW,HLT:@relval2016',
                  '--eventcontent' : 'PREMIXRAW',
                  '--datatier'     : 'PREMIXRAW'}
premixProd50ns = merge([{'-s':'DIGI,DATAMIX,L1,DIGI2RAW,HLT:@relval50ns'},premixProd25ns])
premixProd25ns2017 = merge([{'-s':'DIGI,DATAMIX,L1,DIGI2RAW,HLT:@relval2017'},premixProd25ns])
premixProd25ns2018 = merge([{'-s':'DIGI,DATAMIX,L1,DIGI2RAW,HLT:@relval2018','--datatier':'GEN-SIM-RAW','--geometry':'DB:Extended'},premixProd25ns])
steps['DIGIPRMXUP15_PROD_PU25']=merge([premixProd25ns,digiPremixUp2015Defaults25ns])
steps['DIGIPRMXUP15_PROD_PU50']=merge([premixProd50ns,digiPremixUp2015Defaults50ns])
steps['DIGIPRMXUP17_PROD_PU25']=merge([premixProd25ns2017,digiPremixUp2017Defaults25ns])
steps['DIGIPRMXUP18_PROD_PU25']=merge([premixProd25ns2018,digiPremixUp2018Defaults25ns])
# Baseline data re-reconstruction options (Run-1 conditions): RECO + MINIAOD
# + DQM with standard ALCA producers. Serves as the base dict for the many
# era-specific RECOD* variants below.
dataReco={'--conditions':'auto:run1_data',
          '-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM',
          '--datatier':'RECO,MINIAOD,DQMIO',
          '--eventcontent':'RECO,MINIAOD,DQM',
          '--data':'',
          '--process':'reRECO',
          '--scenario':'pp',
          }
# Variant with calorimeter-calibration ALCA streams instead of PAT/miniAOD.
dataRecoAlCaCalo=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalCalZElectron+EcalCalWElectron+EcalUncalZElectron+EcalUncalWElectron+EcalTrg+HcalCalIsoTrk,DQM'}, dataReco])
# Re-run HLT on data ('HLTD'): repack the L1 record and run the (fake) HLT
# menu, keeping full FEVTDEBUGHLT output. The SKIM variants drop the
# products of earlier RECO passes from the input.
hltKey='fake'
menu = autoHLT[hltKey]
steps['HLTD']=merge([{'--process':'reHLT',
                      '-s':'L1REPACK,HLT:@%s'%hltKey,
                      '--conditions':'auto:run1_hlt_%s'%menu,
                      '--data':'',
                      '--eventcontent': 'FEVTDEBUGHLT',
                      '--datatier': 'FEVTDEBUGHLT',
#                      '--output':'\'[{"e":"RAW","t":"RAW","o":["drop FEDRawDataCollection_rawDataCollector__LHC"]}]\'',
                      },])
steps['HLTDSKIM']=merge([{'--inputCommands':'"keep *","drop *_*_*_RECO"'},steps['HLTD']])
steps['HLTDSKIM2']=merge([{'--inputCommands':'"keep *","drop *_*_*_RECO","drop *_*_*_reRECO"'},steps['HLTD']])
steps['RECOD']=merge([{'--scenario':'pp',},dataReco])
steps['RECODR1']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias,DQM:@standardDQMFakeHLT+@miniAODDQM'},dataReco])
steps['RECODAlCaCalo']=merge([{'--scenario':'pp',},dataRecoAlCaCalo])
# HLT re-running on Run-2 data, one step per year with the matching relval
# HLT menu and L1 repacking mode.
hltKey50ns='relval50ns'
menuR2_50ns = autoHLT[hltKey50ns]
steps['HLTDR2_50ns']=merge( [ {'-s':'L1REPACK,HLT:@%s'%hltKey50ns,},{'--conditions':'auto:run2_hlt_relval'},{'--era' : 'Run2_50ns'},steps['HLTD'] ] )
hltKey25ns='relval25ns'
menuR2_25ns = autoHLT[hltKey25ns]
steps['HLTDR2_25ns']=merge( [ {'-s':'L1REPACK:GT2,HLT:@%s'%hltKey25ns,},{'--conditions':'auto:run2_hlt_relval'},{'--era' : 'Run2_25ns'},steps['HLTD'] ] )
hltKey2016='relval2016'
menuR2_2016 = autoHLT[hltKey2016]
steps['HLTDR2_2016']=merge( [ {'-s':'L1REPACK:Full,HLT:@%s'%hltKey2016,},{'--conditions':'auto:run2_hlt_relval'},{'--era' : 'Run2_2016'},steps['HLTD'] ] )
hltKey2017='relval2017'
steps['HLTDR2_2017']=merge( [ {'-s':'L1REPACK:Full,HLT:@%s'%hltKey2017,},{'--conditions':'auto:run2_hlt_relval'},{'--era' : 'Run2_2017'},steps['HLTD'] ] )
hltKey2018='relval2018'
steps['HLTDR2_2018']=merge( [ {'-s':'L1REPACK:Full,HLT:@%s'%hltKey2018,},{'--conditions':'auto:run2_hlt_relval'},{'--era' : 'Run2_2018'},steps['HLTD'] ] )
# special setting for lumi section boundary crossing in RunEGamma2018Dml
steps['HLTDR2_2018ml']=merge( [ {'--customise_commands':'"process.source.skipEvents=cms.untracked.uint32(7000)"'},concurrentLumis,steps['HLTDR2_2018'] ] )
steps['HLTDR2_2018_hBStar']=merge( [ {'--era' : 'Run2_2018_highBetaStar'},steps['HLTDR2_2018'] ] )
steps['HLTDR2_2018_BadHcalMitig']=merge( [ {'--era' : 'Run2_2018,pf_badHcalMitigation'},steps['HLTDR2_2018'] ] )
# use --era
steps['RECODR2_50ns']=merge([{'--scenario':'pp','--conditions':'auto:run2_data_relval','--era':'Run2_50ns',},dataReco])
steps['RECODR2_25ns']=merge([{'--scenario':'pp','--conditions':'auto:run2_data_relval','--era':'Run2_25ns','--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_25ns'},dataReco])
steps['RECODR2_2016']=merge([{'--scenario':'pp','--conditions':'auto:run2_data_relval','--era':'Run2_2016','--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2016'},dataReco])
steps['RECODR2_2017']=merge([{'--scenario':'pp','--conditions':'auto:run2_data_relval','--era':'Run2_2017','--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2017'},dataReco])
steps['RECODR2_2018']=merge([{'--scenario':'pp','--conditions':'auto:run2_data_relval','--era':'Run2_2018','--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2018'},dataReco])
steps['RECODR2AlCaEle']=merge([{'--scenario':'pp','--conditions':'auto:run2_data_relval','--customise':'Configuration/DataProcessing/RecoTLR.customisePromptRun2',},dataRecoAlCaCalo])
steps['RECODSplit']=steps['RECOD'] # finer job splitting
# Same reco steps but dropping earlier RECO products from the input.
steps['RECOSKIMALCA']=merge([{'--inputCommands':'"keep *","drop *_*_*_RECO"'
                              },steps['RECOD']])
steps['RECOSKIMALCAR1']=merge([{'--inputCommands':'"keep *","drop *_*_*_RECO"'
                                },steps['RECODR1']])
# Heavy-ion data: repack and reconstruction steps.
# NOTE: RECOHID11 is cloned from RECOHID10 *before* RECOHID10 is extended
# in place below to also produce repacked RAW, so the two differ by more
# than just '--repacked'.
steps['REPACKHID']=merge([{'--scenario':'HeavyIons',
                           '-s':'RAW2DIGI,REPACK',
                           '--datatier':'RAW',
                           '--eventcontent':'REPACKRAW'},
                          steps['RECOD']])
steps['RECOHID10']=merge([{'--scenario':'HeavyIons',
                           '-s':'RAW2DIGI,L1Reco,RECO,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBiasHI+TkAlUpsilonMuMuHI+TkAlZMuMuHI+TkAlMuonIsolatedHI+TkAlJpsiMuMuHI+HcalCalMinBias,DQM:@standardDQMHIFakeHLT',
                           '--datatier':'RECO,DQMIO',
                           '--eventcontent':'RECO,DQM','-n':30},
                          steps['RECOD']])
steps['RECOHID11']=merge([{'--repacked':''},
                          steps['RECOHID10']])
steps['RECOHID10']['-s']+=',REPACK'
steps['RECOHID10']['--datatier']+=',RAW'
steps['RECOHID10']['--eventcontent']+=',REPACKRAW'
# SiStrip hybrid zero-suppression chain for 2015 HI virgin-raw data:
# first emulate hybrid ZS and repack (HYBRIDRepackHI2015VR), then run on
# the hybrid payload and repack to standard ZS (HYBRIDZSHI2015).
steps['HYBRIDRepackHI2015VR']={'--eventcontent':'RAW',
                               '--datatier':'RAW',
                               '--conditions':'auto:run2_hlt_hi',
                               '--step':'RAW2DIGI,REPACK:DigiToHybridRawRepack',
                               '--scenario':'HeavyIons',
                               '--data':'',
                               '--era':'Run2_HI',
                               '--customise':'RecoLocalTracker/SiStripZeroSuppression/customiseHybrid.addHybridEmulationBeforeRepack',
                               '--processName':'EMULATEHYBRID',
                               '--outputCommands':'"drop FEDRawDataCollection_*__HLT","drop FEDRawDataCollection_*__LHC"',
                               #'--customise_commands':'\'process.RAWoutput.outputCommands.append(\"drop *_*_*_HLT*\")\'',
                               '-n':100
}
steps['HYBRIDZSHI2015']=merge([{'--step': 'RAW2DIGI,REPACK:DigiToRawRepack',
                                '--processName':'REHLT',
                                '--customise': 'RecoLocalTracker/SiStripZeroSuppression/customiseHybrid.runOnHybridZS,RecoLocalTracker/SiStripZeroSuppression/customiseHybrid.repackZSHybrid',
                                '--customise_commands':'\'from Configuration.Applications.ConfigBuilder import MassReplaceInputTag; MassReplaceInputTag(process, new="hybridRawDataRepacker")\'' ,
                                },steps['HYBRIDRepackHI2015VR']])
steps['RECOHID15']=merge([{ '--conditions':'auto:run2_data',
                            '--era':'Run2_HI'
                            },steps['RECOHID11']])
# 2018 PbPb data reconstruction (pp-style reco on AA collisions) and the
# corresponding standalone miniAOD re-processing step.
steps['RECOHID18']=merge([{ '--scenario':'pp',
                            '--conditions':'auto:run2_data_promptlike_hi',
                            '-s':'RAW2DIGI,L1Reco,RECO,ALCA:SiStripCalZeroBias+SiPixelCalZeroBias,SKIM:PbPbEMu+PbPbZEE+PbPbZMM+PbPbZMu,EI,DQM:@commonFakeHLT+@standardDQMFakeHLT',
                            '--datatier':'AOD,DQMIO',
                            '--eventcontent':'AOD,DQM',
                            '--era':'Run2_2018_pp_on_AA',
                            '--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2018_pp_on_AA',
                            '-n':'10'
                            },steps['RECOHID15']])
steps['REMINIAODHID18']={ '--scenario':'pp',
                          '--conditions':'auto:run2_data_promptlike_hi',
                          '-s':'PAT,DQM:@miniAODDQM',
                          '--datatier':'MINIAOD,DQMIO',
                          '--eventcontent':'MINIAOD,DQM',
                          '--era':'Run2_2018_pp_on_AA',
                          '--procModifiers':'run2_miniAOD_pp_on_AA_103X',
                          '--data':'',
                          '--processName':'PAT',
                          '-n':'100'
}
# Tier-0-like configurations: prompt reconstruction (TIER0) and the express
# processing variants with their ALCA producers.
steps['TIER0']=merge([{'--customise':'Configuration/DataProcessing/RecoTLR.customisePrompt',
                       '-s':'RAW2DIGI,L1Reco,RECO,EI,ALCAPRODUCER:@allForPrompt,DQM:@allForPrompt,ENDJOB',
                       '--datatier':'RECO,AOD,ALCARECO,DQMIO',
                       '--eventcontent':'RECO,AOD,ALCARECO,DQM',
                       '--process':'RECO'
                       },dataReco])
steps['TIER0EXP']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,ALCAPRODUCER:@allForExpress+AlCaPCCZeroBiasFromRECO+AlCaPCCRandomFromRECO,DQM:@express,ENDJOB',
                          '--datatier':'ALCARECO,DQMIO',
                          '--eventcontent':'ALCARECO,DQM',
                          '--customise':'Configuration/DataProcessing/RecoTLR.customiseExpress',
                          },steps['TIER0']])
steps['TIER0EXPRUN2']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,ALCAPRODUCER:@allForExpress+AlCaPCCZeroBiasFromRECO+AlCaPCCRandomFromRECO,DQM:@express,ENDJOB',
                              '--process':'RECO',
                              '--datatier':'ALCARECO,DQMIO',
                              '--eventcontent':'ALCARECO,DQM',
                              '--customise':'Configuration/DataProcessing/RecoTLR.customiseExpress',
                              '--era':'Run2_2017',
                              '--conditions':'auto:run2_data'
                              },steps['TIER0']])
# Express processing for heavy-ion data and the ECAL test-pulse ALCA-only
# express step.
steps['TIER0EXPHI']={ '--conditions':'auto:run1_data',
                      '-s':'RAW2DIGI,L1Reco,RECO,ALCAPRODUCER:@allForExpressHI,DQM,ENDJOB',
                      '--datatier':'ALCARECO,DQMIO',
                      '--eventcontent':'ALCARECO,DQM',
                      '--data':'',
                      '--process':'RECO',
                      '--scenario':'HeavyIons',
                      '--customise':'Configuration/DataProcessing/RecoTLR.customiseExpressHI',
                      '--repacked':'',
                      '-n':'10'
}
steps['TIER0EXPTE']={'-s': 'ALCAPRODUCER:EcalTestPulsesRaw',
                     '--conditions': 'auto:run2_data',
                     '--datatier':'ALCARECO',
                     '--eventcontent':'ALCARECO',
                     '--data': '',
                     '--process': 'RECO',
                     '--scenario': 'pp',
                     #'--customise':'Configuration/DataProcessing/RecoTLR.customiseExpress',
}
# Luminosity pixel-cluster-counting (PCC) PCL chain: express/prompt ALCA
# producers, the ALCA splitting step, and the harvesting step.
steps['TIER0EXPLP']={'-s': 'ALCAPRODUCER:AlCaPCCRandom',
                     '--conditions': 'auto:run2_data',
                     '--datatier':'ALCARECO',
                     '--eventcontent':'ALCARECO',
                     '--data': '',
                     '--scenario': 'pp',
                     '--process': 'RECO',
                     # '--customise':'Configuration/DataProcessing/RecoTLR.customiseExpress',
}
steps['TIER0PROMPTLP']={'-s': 'ALCAPRODUCER:AlCaPCCZeroBias+RawPCCProducer',
                        '--conditions': 'auto:run2_data',
                        '--datatier':'ALCARECO',
                        '--eventcontent':'ALCARECO',
                        '--data': '',
                        '--scenario': 'pp',
                        '--process': 'RECO',
                        '--filein': 'filelist:step1_dasquery.log'
                        # '--customise':'Configuration/DataProcessing/RecoTLR.customiseExpress',
}
steps['ALCAEXPLP']={'-s':'ALCAOUTPUT:AlCaPCCRandom,ALCA:PromptCalibProdLumiPCC',
                    '--conditions':'auto:run2_data',
                    '--datatier':'ALCARECO',
                    '--eventcontent':'ALCARECO',
                    '--triggerResultsProcess': 'RECO'}
steps['ALCAHARVLP']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdLumiPCC']),
                     '--conditions':'auto:run2_data',
                     '--scenario':'pp',
                     '--datatier':'DQM',
                     '--eventcontent': 'DQM',
                     '--data':'',
                     '--filein':'file:PromptCalibProdLumiPCC.root'}
# Express streams for the high-precision beam-spot (tracking-only reco) and
# for the SiPixel zero-bias calibration.
steps['TIER0EXPHPBS']={'-s':'RAW2DIGI,L1Reco,RECO:reconstruction_trackingOnly,ALCAPRODUCER:TkAlMinBias,DQM:DQMOfflineTracking,ENDJOB',
                       '--process':'RECO',
                       '--scenario': 'pp',
                       '--era':'Run2_2017',
                       '--conditions':'auto:run2_data',
                       '--data': '',
                       '--datatier':'ALCARECO,DQMIO',
                       '--eventcontent':'ALCARECO,DQM',
                       '--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2017_express_trackingOnly',
}
steps['TIER0RAWSIPIXELCAL']={'-s':'RAW2DIGI,L1Reco,RECO,EI,ALCAPRODUCER:SiPixelCalZeroBias,DQM:@express,ENDJOB',
                             '--process':'RECO',
                             '--scenario': 'pp',
                             '--era':'Run2_2017',
                             '--conditions':'auto:run2_data',
                             '--data': '',
                             '--datatier':'ALCARECO,DQMIO',
                             '--eventcontent':'ALCARECO,DQM',
                             '--customise':'Configuration/DataProcessing/RecoTLR.customiseExpress',
}
steps['TIER0EXPSIPIXELCAL']={'-s':'RAW2DIGI,L1Reco,ALCAPRODUCER:SiPixelCalZeroBias,ENDJOB',
                             '--process':'ALCARECO',
                             '--scenario': 'pp',
                             '--era':'Run2_2017',
                             '--conditions':'auto:run2_data',
                             '--data': '',
                             '--datatier':'ALCARECO',
                             '--eventcontent':'ALCARECO',
}
# ALCA splitting steps feeding the Prompt Calibration Loop (beam spot and
# SiPixel); trigger results are taken from the upstream RECO process.
steps['ALCASPLITHPBS']={'-s':'ALCAOUTPUT:TkAlMinBias,ALCA:PromptCalibProdBeamSpotHP+PromptCalibProdBeamSpotHPLowPU',
                        '--scenario':'pp',
                        '--data':'',
                        '--era':'Run2_2017',
                        '--datatier':'ALCARECO',
                        '--eventcontent':'ALCARECO',
                        '--conditions':'auto:run2_data',
                        '--triggerResultsProcess':'RECO',
}
steps['ALCASPLITSIPIXELCAL']={'-s':'ALCAOUTPUT:SiPixelCalZeroBias,ALCA:PromptCalibProdSiPixel',
                              '--scenario':'pp',
                              '--data':'',
                              '--era':'Run2_2017',
                              '--datatier':'ALCARECO',
                              '--eventcontent':'ALCARECO',
                              '--conditions':'auto:run2_data',
                              '--triggerResultsProcess':'RECO',
                              #'--filein':'file:step2.root'
}
# PCL harvesting steps, reading the PromptCalibProd* files written by the
# splitting steps above.
steps['ALCAHARVDHPBS']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdBeamSpotHP']),
                        #'--conditions':'auto:run2_data_promptlike',
                        '--conditions':'auto:run3_data_express', # to be replaced with line above once run2_data_promptlike will contain DropBoxMetadata
                        '--scenario':'pp',
                        '--data':'',
                        '--era':'Run2_2017',
                        '--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2017_harvesting_trackingOnly',
                        '--filein':'file:PromptCalibProdBeamSpotHP.root'}
steps['ALCAHARVDHPBSLOWPU']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdBeamSpotHPLowPU']),
                             #'--conditions':'auto:run2_data_promptlike',
                             '--conditions':'auto:run3_data_express', # to be replaced with line above once run2_data_promptlike will contain DropBoxMetadata
                             '--scenario':'pp',
                             '--data':'',
                             '--era':'Run2_2017',
                             '--customise':'Configuration/DataProcessing/RecoTLR.customisePostEra_Run2_2017_harvesting_trackingOnly',
                             '--filein':'file:PromptCalibProdBeamSpotHPLowPU.root'}
steps['ALCAHARVDSIPIXELCAL']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiPixel']),
                              '--conditions':'auto:run3_data_express',
                              '--scenario':'pp',
                              '--data':'',
                              '--era':'Run2_2017',
                              '--filein':'file:PromptCalibProdSiPixel.root'}
steps['ALCAHARVDSIPIXELCALRUN1']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiPixel']),
                                  '--conditions':'auto:run1_data',
                                  '--scenario':'pp',
                                  '--data':'',
                                  '--filein':'file:PromptCalibProdSiPixel.root'}
# Cosmics data reconstruction, with Run-2/Run-3 conditions variants.
steps['RECOCOSD']=merge([{'--scenario':'cosmics',
                          '-s':'RAW2DIGI,L1Reco,RECO,DQM',
                          '--datatier':'RECO,DQMIO', # no miniAOD for cosmics
                          '--eventcontent':'RECO,DQM',
                          '--customise':'Configuration/DataProcessing/RecoTLR.customiseCosmicData'
                          },dataReco])
steps['RECOCOSDRUN2']=merge([{'--conditions':'auto:run2_data','--era':'Run2_2016'},steps['RECOCOSD']])
steps['RECOCOSDRUN3']=merge([{'--conditions':'auto:run3_data_promptlike','--era':'Run3'},steps['RECOCOSD']])
steps['RECOCOSDEXPRUN3']=merge([{'--conditions':'auto:run3_data_express','--era':'Run3'},steps['RECOCOSD']])
# step1 gensim for HI mixing
# Shared defaults for 2018 GEN-SIM steps that embed a signal fragment into
# the heavy-ion mixing workflow ('HiMixGEN' pileup, matched HI beamspot).
step1Up2018HiMixDefaults = merge ([{'--beamspot':'MatchHI', '--pileup':'HiMixGEN', '--scenario':'HeavyIons'},hiDefaults2018_ppReco,PUHI,step1Up2018HiProdDefaults])
def gen2018HiMix(fragment,howMuch):
    """Build a 2018 heavy-ion mixing GEN step config for *fragment*.

    *howMuch* is an event-count option dict (e.g. from Kby) layered on top
    of the shared step1Up2018HiMixDefaults.
    """
    # Reading the module-level defaults needs no `global` declaration.
    overrides = {'cfg': fragment}
    return merge([overrides, howMuch, step1Up2018HiMixDefaults])
step1Up2021HiMixDefaults = merge ([{'--beamspot':'MatchHI', '--pileup':'HiMixGEN', '--scenario':'HeavyIons'},hiDefaults2021_ppReco,PUHI2021,step1Up2021HiProdDefaults])
def gen2021HiMix(fragment,howMuch):
    """Build a 2021 heavy-ion mixing GEN step config for *fragment*.

    *howMuch* is an event-count option dict (e.g. from Kby) layered on top
    of the shared step1Up2021HiMixDefaults.
    """
    # Reading the module-level defaults needs no `global` declaration.
    overrides = {'cfg': fragment}
    return merge([overrides, howMuch, step1Up2021HiMixDefaults])
# Pyquen signal fragments embedded via the HI mixing chains defined above
# (2018 and 2021 variants of the same three fragments).
steps['Pyquen_GammaJet_pt20_2760GeV']=gen2018HiMix('Pyquen_GammaJet_pt20_2760GeV_cfi',Kby(9,100))
steps['Pyquen_DiJet_pt80to120_2760GeV']=gen2018HiMix('Pyquen_DiJet_pt80to120_2760GeV_cfi',Kby(9,100))
steps['Pyquen_ZeemumuJets_pt10_2760GeV']=gen2018HiMix('Pyquen_ZeemumuJets_pt10_2760GeV_cfi',Kby(9,100))
steps['Pyquen_GammaJet_pt20_2760GeV_2021']=gen2021HiMix('Pyquen_GammaJet_pt20_2760GeV_cfi',Kby(9,100))
steps['Pyquen_DiJet_pt80to120_2760GeV_2021']=gen2021HiMix('Pyquen_DiJet_pt80to120_2760GeV_cfi',Kby(9,100))
steps['Pyquen_ZeemumuJets_pt10_2760GeV_2021']=gen2021HiMix('Pyquen_ZeemumuJets_pt10_2760GeV_cfi',Kby(9,100))
# step3
# MC reconstruction defaults (Run-1 conditions): RECO + miniAOD +
# validation + DQM.
step3Defaults = {
    '-s'            : 'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
    '--conditions'  : 'auto:run1_mc',
    '--no_exec'     : '',
    '--datatier'    : 'GEN-SIM-RECO,MINIAODSIM,DQMIO',
    '--eventcontent': 'RECOSIM,MINIAODSIM,DQM',
}
step3DefaultsAlCaCalo=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:EcalCalZElectron+EcalCalWElectron+EcalUncalZElectron+EcalUncalWElectron+HcalCalIsoTrk,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM'},step3Defaults])
steps['DIGIPU']=merge([{'--process':'REDIGI'},steps['DIGIPU1']])
#for 2015
# Run-2 MC reconstruction defaults plus small option dicts (procModifiers /
# era tweaks) that are merged into them to build step variants.
step3Up2015Defaults = {
    #'-s':'RAW2DIGI,L1Reco,RECO,EI,VALIDATION,DQM',
    '-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
    '--conditions':'auto:run2_mc',
    '-n':'10',
    '--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
    '--eventcontent':'RECOSIM,MINIAODSIM,DQM',
    '--era' : 'Run2_2016'
}
step3Up2015Defaults50ns = merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM','--conditions':'auto:run2_mc_50ns','--era':'Run2_50ns'},step3Up2015Defaults])
step3Up2015DefaultsAlCaCalo = merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,ALCA:EcalCalZElectron+EcalCalWElectron+EcalUncalZElectron+EcalUncalWElectron+EcalTrg+HcalCalIsoTrk,VALIDATION:@standardValidationNoHLT,DQM:@standardDQMFakeHLT'},step3Up2015Defaults])
step3Up2015DefaultsAlCaCalo50ns = merge([{'--conditions':'auto:run2_mc_50ns','--era':'Run2_50ns'},step3Up2015DefaultsAlCaCalo])
step3Up2015Hal = {'-s'           :'RAW2DIGI,L1Reco,RECO,EI,VALIDATION,DQM',
                  '--conditions' :'auto:run2_mc',
                  '--datatier'   :'GEN-SIM-RECO,DQMIO',
                  '--eventcontent':'RECOSIM,DQM',
                  '-n'           :'10',
                  '--era'        :'Run2_2016'
}
step3_pixelNtupleFit = {
    '--procModifiers': 'pixelNtupleFit',
}
step3_gpu = {
    '--procModifiers': 'gpu',
}
step3_trackingLowPU = {
    '--era': 'Run2_2016_trackingLowPU'
}
step3_HIPM = {
    '--era': 'Run2_2016_HIPM'
}
step3Up2015Defaults_trackingOnly = merge([step3_trackingOnly, step3Up2015Defaults])
steps['RECOUP15']=merge([step3Up2015Defaults]) # todo: remove UP from label
steps['RECOUP15_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},step3Up2015Defaults]) # todo: remove UP from label
steps['RECOUP15_L1TMuDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},step3Up2015Defaults]) # todo: remove UP from label
steps['RECOUP15AlCaCalo']=merge([step3Up2015DefaultsAlCaCalo]) # todo: remove UP from label
steps['RECOUP15_trackingOnly']=merge([step3Up2015Defaults_trackingOnly]) # todo: remove UP from label
steps['RECOUP15_trackingLowPU']=merge([step3_trackingLowPU, step3Up2015Defaults]) # todo: remove UP from label
steps['RECOUP15_trackingOnlyLowPU']=merge([step3_trackingLowPU, step3Up2015Defaults_trackingOnly]) # todo: remove UP from label
steps['RECOUP15_HIPM']=merge([step3_HIPM,step3Up2015Defaults]) # todo: remove UP from label
steps['RECOUP17']=merge([{'--conditions':'auto:phase1_2017_realistic','--era' : 'Run2_2017','--geometry' : 'DB:Extended'},steps['RECOUP15']])
steps['RECOUP17_PU25']=merge([PU25UP17,steps['RECOUP17']])
steps['RECOUP18']=merge([{'--conditions':'auto:phase1_2018_realistic','--era' : 'Run2_2018','--geometry' : 'DB:Extended'},steps['RECOUP15']])
steps['RECOUP18ml']=merge([concurrentLumis,steps['RECOUP18']])
steps['RECOUP18_PU25']=merge([PU25UP18,steps['RECOUP18']])
# for Run1 PPb data workflow
steps['RECO_PPbData']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,ALCA:TkAlMinBias+TkAlMuonIsolatedPA+TkAlUpsilonMuMuPA+TkAlZMuMuPA,SKIM:PAZMM+PAZEE+PAMinBias,EI,DQM','--scenario':'pp','--conditions':'auto:run1_data','--era':'Run1_pA','--datatier':'AOD,DQMIO','--eventcontent':'AOD,DQM'}, dataReco])
# for Run2 PPb MC workflow
steps['RECOUP15_PPb']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,ALCA:TkAlMinBias+TkAlMuonIsolatedPA+TkAlUpsilonMuMuPA+TkAlZMuMuPA,EI,VALIDATION,DQM','--conditions':'auto:run2_mc_pa','--era':'Run2_2016_pA','--datatier':'AODSIM,DQMIO','--eventcontent':'AODSIM,DQM'}, steps['RECOUP15']])
#steps['RECOUP15PROD1']=merge([{ '-s' : 'RAW2DIGI,L1Reco,RECO,EI,DQM:DQMOfflinePOGMC', '--datatier' : 'AODSIM,DQMIO', '--eventcontent' : 'AODSIM,DQM'},step3Up2015Defaults])
# --- Data RECO steps over re-run HLT -------------------------------------
# These set --hltProcess to 'reHLT', i.e. the input was produced with HLT
# re-run under that process name.  'menu' is defined earlier in the file and
# selects the run1 data conditions flavour via string interpolation.
steps['RECODreHLT']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run1_data_%s'%menu},steps['RECOD']])
steps['RECODR1reHLT']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run1_data_%s'%menu},steps['RECODR1']])
steps['RECODR1reHLT2']=merge([{'--process':'reRECO2'},steps['RECODR1reHLT']])
steps['RECODreHLTAlCaCalo']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run1_data_%s'%menu},steps['RECODAlCaCalo']])
# Run 2 flavours: 50ns/25ns bunch spacing, per-year (2016/2017/2018), with
# '_ZB' zero-bias, skim-specific and AlCa-specific step sequences.
steps['RECODR2_25nsreHLT']=merge([{'--hltProcess':'reHLT'},steps['RECODR2_25ns']])
steps['RECODR2_25nsreHLT_ZB']=merge([{'--hltProcess':'reHLT','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM'},steps['RECODR2_25ns']])
steps['RECODR2_50nsreHLT']=merge([{'--hltProcess':'reHLT'},steps['RECODR2_50ns']])
steps['RECODR2_50nsreHLT_ZB']=merge([{'--hltProcess':'reHLT','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM'},steps['RECODR2_50ns']])
steps['RECODR2_2016reHLT']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval'},steps['RECODR2_2016']])
steps['RECODR2_2016reHLT_ZB']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM'},steps['RECODR2_2016']])
steps['RECODR2_2016reHLT_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_L1TMuDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2017reHLT']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval'},steps['RECODR2_2017']])
steps['RECODR2reHLTAlCaEle']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval'},steps['RECODR2AlCaEle']])
steps['RECODR2reHLTAlCaTkCosmics']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval','-s':'RAW2DIGI,L1Reco,RECO,SKIM:EXONoBPTXSkim,EI,PAT,ALCA:TkAlCosmicsInCollisions,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016']])
steps['RECODR2_2017reHLTAlCaTkCosmics']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval','-s':'RAW2DIGI,L1Reco,RECO,SKIM:EXONoBPTXSkim,EI,PAT,ALCA:TkAlCosmicsInCollisions,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017']])
steps['RECODR2_2017reHLTSiPixelCalZeroBias']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiPixelCalZeroBias,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017']])
steps['RECODR2_2018reHLTAlCaTkCosmics']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval','-s':'RAW2DIGI,L1Reco,RECO,SKIM:EXONoBPTXSkim,EI,PAT,ALCA:TkAlCosmicsInCollisions,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018']])
# Per-primary-dataset skim variants (2016): each overrides only the '-s'
# sequence, inheriting --hltProcess/--conditions from the base reHLT step.
steps['RECODR2_2016reHLT_skimSingleMu']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:ZMu+MuTau,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_skimDoubleEG']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:ZElectron,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_skimMuonEG']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:TopMuEG,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_skimJetHT']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:JetHTJetPlusHOFilter,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_skimMET']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:HighMET+EXOMONOPOLE,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_skimSinglePh']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:SinglePhotonJetPlusHOFilter+EXOMONOPOLE,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_skimMuOnia']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:BPHSkim,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
# 2017 skim variants
steps['RECODR2_2017reHLT_skimSingleMu']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGSkim+ZMu+MuTau,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimDoubleEG']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:ZElectron,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimMuonEG']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:TopMuEG,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimJetHT']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:JetHTJetPlusHOFilter,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimDisplacedJet']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:EXODisplacedJet,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimMET']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:HighMET+EXOMONOPOLE,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimSinglePh']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:SinglePhotonJetPlusHOFilter+EXOMONOPOLE,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimMuOnia']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:BPHSkim,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_skimCharmonium']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGJPsiSkim+BPHSkim,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
# 2018 base + skim variants (note extra SiStripCalSmallBiasScan in ALCA)
steps['RECODR2_2018reHLT']=merge([{'--hltProcess':'reHLT','--conditions':'auto:run2_data_relval'},steps['RECODR2_2018']])
steps['RECODR2_2018reHLT_skimSingleMu']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGSkim+ZMu+MuTau,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimEGamma_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:ZElectron+SinglePhotonJetPlusHOFilter+EXOMONOPOLE,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimMuonEG']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:TopMuEG,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimJetHT']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:JetHTJetPlusHOFilter,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimDisplacedJet']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:EXODisplacedJet,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimMET']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:HighMET+EXOMONOPOLE,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimMuOnia']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:BPHSkim,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimCharmonium']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGJPsiSkim+BPHSkim,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimParkingBPH']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:SkimBPark,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+SiStripCalSmallBiasScan+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM','--era':'Run2_2018,bParking'},steps['RECODR2_2018reHLT']])
# Derive "_HIPM" flavours of the Run 2 data reHLT steps: every clone keeps
# the base step's full configuration, only its --era gains a "_HIPM" suffix.
_hipm_bases = (
    'RECODR2_50nsreHLT', 'RECODR2_50nsreHLT_ZB',
    'RECODR2_25nsreHLT', 'RECODR2_25nsreHLT_ZB',
    'RECODR2_2016reHLT', 'RECODR2_2016reHLT_ZB',
    'RECODR2_2016reHLT_L1TEgDQM', 'RECODR2_2016reHLT_L1TMuDQM',
    'RECODR2_2016reHLT_skimDoubleEG', 'RECODR2_2016reHLT_skimJetHT', 'RECODR2_2016reHLT_skimMET',
    'RECODR2_2016reHLT_skimMuonEG', 'RECODR2_2016reHLT_skimSingleMu',
    'RECODR2_2016reHLT_skimSinglePh', 'RECODR2_2016reHLT_skimMuOnia',
    'RECODR2reHLTAlCaTkCosmics',
)
for _base in _hipm_bases:
    # Era override must come first so merge() lets it win over the base step.
    _era_override = {'--era': steps[_base]['--era'] + "_HIPM"}
    steps[_base + "_HIPM"] = merge([_era_override, steps[_base]])
# RECO step with offline GT
# --- Prompt / Offline global-tag variants --------------------------------
# Same reHLT steps as above, but with the offline conditions 'auto:run2_data'
# (or special conditions, e.g. HEfail), plus a few luminosity / Patatrack /
# detector-only ('pixelTrackingOnly', 'ECALOnly', 'HCALOnly') flavours.
steps['RECODR2_2016reHLT_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_ZBPrompt']=merge([{'--conditions':'auto:run2_data','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM'},steps['RECODR2_2016reHLT']])
steps['RECODR2_2016reHLT_Prompt_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECODR2_2016reHLT_Prompt']])
steps['RECODR2_2016reHLT_skimDoubleEG_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimDoubleEG']])
steps['RECODR2_2016reHLT_skimJetHT_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimJetHT']])
steps['RECODR2_2016reHLT_skimMET_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimMET']])
steps['RECODR2_2016reHLT_skimMuonEG_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimMuonEG']])
steps['RECODR2_2016reHLT_skimSingleMu_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimSingleMu']])
steps['RECODR2_2016reHLT_skimSinglePh_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimSinglePh']])
steps['RECODR2_2016reHLT_skimMuOnia_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2016reHLT_skimMuOnia']])
steps['RECODR2_2016reHLT_Prompt_Lumi']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@lumi'},steps['RECODR2_2016reHLT_Prompt']])
steps['RECODR2_2016reHLT_Prompt_Lumi_L1TMuDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@lumi+@L1TMuon'},steps['RECODR2_2016reHLT_Prompt']])
# 2017 Prompt variants
steps['RECODR2_2017reHLT_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_ZBPrompt']=merge([{'--conditions':'auto:run2_data','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM'},steps['RECODR2_2017reHLT']])
steps['RECODR2_2017reHLT_Prompt_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECODR2_2017reHLT_Prompt']])
steps['RECODR2_2017reHLT_skimDoubleEG_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimDoubleEG']])
steps['RECODR2_2017reHLT_skimJetHT_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimJetHT']])
steps['RECODR2_2017reHLT_skimDisplacedJet_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimDisplacedJet']])
steps['RECODR2_2017reHLT_skimMET_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimMET']])
steps['RECODR2_2017reHLT_skimMuonEG_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimMuonEG']])
steps['RECODR2_2017reHLT_skimSingleMu_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimSingleMu']])
steps['RECODR2_2017reHLT_skimSinglePh_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimSinglePh']])
steps['RECODR2_2017reHLT_skimMuOnia_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimMuOnia']])
steps['RECODR2_2017reHLT_skimCharmonium_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLT_skimCharmonium']])
steps['RECODR2_2017reHLT_skimSingleMu_Prompt_Lumi']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGSkim+ZMu+MuTau,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@lumi+@L1TMuon'},steps['RECODR2_2017reHLT_skimSingleMu_Prompt']])
steps['RECODR2_2017reHLTAlCaTkCosmics_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLTAlCaTkCosmics']])
steps['RECODR2_2017reHLTSiPixelCalZeroBias_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2017reHLTSiPixelCalZeroBias']])
# 2018 Prompt/Offline variants, including Patatrack pixel/ECAL/HCAL-only
# reconstruction steps (CPU and GPU flavours via step3_gpu overrides)
steps['RECODR2_2018reHLT_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_ZBPrompt']=merge([{'--conditions':'auto:run2_data','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBias+@ExtraHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_Prompt_pixelTrackingOnly']=merge([{'-s': 'RAW2DIGI:RawToDigi_pixelOnly,RECO:reconstruction_pixelTrackingOnly,DQM:@pixelTrackingOnlyDQM'},steps['RECODR2_2018reHLT_Prompt']])
steps['RECODR2_2018reHLT_Patatrack_PixelOnlyCPU']=merge([step3_pixelNtupleFit, steps['RECODR2_2018reHLT_Prompt_pixelTrackingOnly']])
steps['RECODR2_2018reHLT_Patatrack_PixelOnlyGPU']=merge([step3_gpu, steps['RECODR2_2018reHLT_Prompt_pixelTrackingOnly']])
steps['RECODR2_2018reHLT_ECALOnlyCPU']=merge([{'-s': 'RAW2DIGI:RawToDigi_ecalOnly,RECO:reconstruction_ecalOnly,DQM:@ecalOnly'},steps['RECODR2_2018reHLT_Prompt']])
steps['RECODR2_2018reHLT_ECALOnlyGPU']=merge([step3_gpu, steps['RECODR2_2018reHLT_ECALOnlyCPU']])
steps['RECODR2_2018reHLT_HCALOnlyCPU']=merge([{'-s': 'RAW2DIGI:RawToDigi_hcalOnly,RECO:reconstruction_hcalOnly,DQM:@hcalOnly+@hcal2Only'},steps['RECODR2_2018reHLT_Prompt']])
steps['RECODR2_2018reHLT_HCALOnlyGPU']=merge([step3_gpu, steps['RECODR2_2018reHLT_HCALOnlyCPU']])
steps['RECODR2_2018reHLT_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_ZBOffline']=merge([{'--conditions':'auto:run2_data','-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,ALCA:SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@rerecoZeroBias+@ExtraHLT+@miniAODDQM'},steps['RECODR2_2018reHLT']])
steps['RECODR2_2018reHLT_skimEGamma_Prompt_L1TEgDQM']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimEGamma_L1TEgDQM']])
steps['RECODR2_2018reHLT_skimEGamma_Prompt_L1TEgDQMml']=merge([concurrentLumis,steps['RECODR2_2018reHLT_skimEGamma_Prompt_L1TEgDQM']])
steps['RECODR2_2018reHLT_skimEGamma_Offline_L1TEgDQM']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimEGamma_L1TEgDQM']])
steps['RECODR2_2018reHLT_skimJetHT_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimJetHT']])
steps['RECODR2_2018reHLT_skimJetHT_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimJetHT']])
steps['RECODR2_2018reHLT_skimDisplacedJet_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimDisplacedJet']])
steps['RECODR2_2018reHLT_skimDisplacedJet_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimDisplacedJet']])
steps['RECODR2_2018reHLT_skimMET_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimMET']])
steps['RECODR2_2018reHLT_skimMET_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimMET']])
steps['RECODR2_2018reHLT_skimMuonEG_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimMuonEG']])
steps['RECODR2_2018reHLT_skimMuonEG_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimMuonEG']])
steps['RECODR2_2018reHLT_skimSingleMu_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimSingleMu']])
steps['RECODR2_2018reHLT_skimSingleMu_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimSingleMu']])
steps['RECODR2_2018reHLT_skimMuOnia_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimMuOnia']])
steps['RECODR2_2018reHLT_skimMuOnia_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimMuOnia']])
steps['RECODR2_2018reHLT_skimCharmonium_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimCharmonium']])
steps['RECODR2_2018reHLT_skimCharmonium_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimCharmonium']])
steps['RECODR2_2018reHLT_skimParkingBPH_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimParkingBPH']])
steps['RECODR2_2018reHLT_skimParkingBPH_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLT_skimParkingBPH']])
steps['RECODR2_2018reHLT_skimSingleMu_Prompt_Lumi']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGSkim+ZMu+MuTau,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@lumi+@L1TMuon'},steps['RECODR2_2018reHLT_skimSingleMu_Prompt']])
steps['RECODR2_2018reHLT_skimSingleMu_Offline_Lumi']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,SKIM:MuonPOGSkim+ZMu+MuTau,EI,PAT,ALCA:SiPixelCalSingleMuon+SiStripCalZeroBias+SiStripCalMinBias+TkAlMinBias+EcalESAlign,DQM:@standardDQMFakeHLT+@miniAODDQM+@lumi+@L1TMuon'},steps['RECODR2_2018reHLT_skimSingleMu_Offline']])
steps['RECODR2_2018reHLTAlCaTkCosmics_Prompt']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLTAlCaTkCosmics']])
steps['RECODR2_2018reHLTAlCaTkCosmics_Offline']=merge([{'--conditions':'auto:run2_data'},steps['RECODR2_2018reHLTAlCaTkCosmics']])
# High-beta-star and HE-failure special-era/conditions clones
steps['RECODR2_2018reHLT_Prompt_hBStar']=merge([{'--era':'Run2_2018_highBetaStar'},steps['RECODR2_2018reHLT_Prompt']])
steps['RECODR2_2018reHLT_Offline_hBStar']=merge([{'--era':'Run2_2018_highBetaStar'},steps['RECODR2_2018reHLT_Offline']])
steps['RECODR2_2018reHLT_skimJetHT_Prompt_HEfail']=merge([{'--conditions':'auto:run2_data_HEfail'},steps['RECODR2_2018reHLT_skimJetHT']])
steps['RECODR2_2018reHLT_skimJetHT_Prompt_BadHcalMitig']=merge([{'--conditions':'auto:run2_data_HEfail','--era':'Run2_2018,pf_badHcalMitigation'},steps['RECODR2_2018reHLT_skimJetHT']])
# --- Base MC RECO, production, cosmics, minimal and PU variants ----------
steps['RECO']=merge([step3Defaults])
steps['RECOAlCaCalo']=merge([step3DefaultsAlCaCalo])
# RECODEBUG event content for debugging workflows
steps['RECODBG']=merge([{'--eventcontent':'RECODEBUG,MINIAODSIM,DQM'},steps['RECO']])
steps['RECOPROD1']=merge([{ '-s' : 'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT', '--datatier' : 'AODSIM,MINIAODSIM', '--eventcontent' : 'AODSIM,MINIAODSIM'},step3Defaults])
#steps['RECOPRODUP15']=merge([{ '-s':'RAW2DIGI,L1Reco,RECO,EI,DQM:DQMOfflinePOGMC','--datatier':'AODSIM,DQMIO','--eventcontent':'AODSIM,DQM'},step3Up2015Defaults])
steps['RECOPRODUP15']=merge([{ '-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT','--datatier':'AODSIM,MINIAODSIM','--eventcontent':'AODSIM,MINIAODSIM'},step3Up2015Defaults])
## for 2017 PROD
steps['RECOPRODUP17']=merge([{ '--era' :'Run2_2017','--conditions': 'auto:phase1_2017_realistic'},steps['RECOPRODUP15']])
## for 2018 PROD
steps['RECOPRODUP18']=merge([{ '-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI','--era' :'Run2_2018','--conditions': 'auto:phase1_2018_realistic','--datatier':'AODSIM','--eventcontent':'AODSIM'},step3Up2015Defaults])
steps['RECOPRODUP18ml']=merge([concurrentLumis,steps['RECOPRODUP18']])
steps['RECOPRODUP18bParking']=merge([{ '-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI','--era' :'Run2_2018,bParking','--conditions': 'auto:phase1_2018_realistic','--datatier':'AODSIM','--eventcontent':'AODSIM'},step3Up2015Defaults])
##
# Cosmics-scenario RECO steps for the various run periods (UP15..UP21);
# '_0T' variant switches off the magnetic field.
steps['RECOCOS']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,DQM','--scenario':'cosmics','--datatier':'GEN-SIM-RECO,DQMIO','--eventcontent':'RECOSIM,DQM'},stCond,step3Defaults])
steps['RECOHAL']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,DQM','--scenario':'cosmics'},step3Up2015Hal])
steps['RECOCOS_UP15']=merge([{'--conditions':'auto:run2_mc_cosmics','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics'},step3Up2015Hal])
steps['RECOCOS_UP16']=merge([{'--conditions':'auto:run2_mc_cosmics','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics','--era':'Run2_2016'},step3Up2015Hal])
steps['RECOCOS_UP17']=merge([{'--conditions':'auto:phase1_2017_cosmics','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics','--era':'Run2_2017'},step3Up2015Hal])
steps['RECOCOS_UP18']=merge([{'--conditions':'auto:phase1_2018_cosmics','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics','--era':'Run2_2018'},step3Up2015Hal])
steps['RECOCOS_UP21']=merge([{'--conditions':'auto:phase1_2021_cosmics','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics','--era':'Run3'},step3Up2015Hal])
steps['RECOCOS_UP21_0T']=merge([{'--magField':'0T','--conditions':'auto:phase1_2021_cosmics_0T'},steps['RECOCOS_UP21']])
steps['RECOCOSPEAK_UP17']=merge([{'--conditions':'auto:phase1_2017_cosmics_peak','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics','--era':'Run2_2017'},step3Up2015Hal])
steps['RECOCOSPEAK_UP18']=merge([{'--conditions':'auto:phase1_2018_cosmics_peak','-s':'RAW2DIGI,L1Reco,RECO,ALCA:MuAlGlobalCosmics,DQM','--scenario':'cosmics','--era':'Run2_2018'},step3Up2015Hal])
# Minimal / AOD-only / DQM-focused and pileup (PU) RECO variants
steps['RECOMIN']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,ALCA:SiStripCalZeroBias+SiStripCalMinBias,VALIDATION,DQM'},stCond,step3Defaults])
steps['RECOMINUP15']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,ALCA:SiStripCalZeroBias+SiStripCalMinBias,VALIDATION,DQM'},step3Up2015Defaults])
steps['RECOAODUP15']=merge([{'--datatier':'AODSIM,MINIAODSIM,DQMIO','--eventcontent':'AODSIM,MINIAODSIM,DQM'},step3Up2015Defaults])
steps['RECODDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,DQM:@common+@muon+@hcal+@jetmet+@ecal'},steps['RECOD']])
steps['RECOPU1']=merge([PU,steps['RECO']])
steps['RECOPU2']=merge([PU2,steps['RECO']])
steps['RECOUP15_PU25']=merge([PU25,step3Up2015Defaults])
steps['RECOUP15_PU25_HIPM']=merge([step3_HIPM,steps['RECOUP15_PU25']])
steps['RECOUP15_PU25_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECOUP15_PU25']])
steps['RECOUP15_PU25_L1TMuDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECOUP15_PU25']])
steps['RECOUP15_PU50']=merge([PU50,step3Up2015Defaults50ns])
steps['RECOUP15_PU50_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECOUP15_PU50']])
steps['RECOUP15_PU50_L1TMuDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECOUP15_PU50']])
# for PU25 High stats workflows
steps['RECOUP15_PU25HS']=merge([PU25HS,step3Up2015Defaults])
# mask away - to be removed once we'll migrate the matrix to be fully unscheduled for RECO step
#steps['RECOmAOD']=merge([step3DefaultsUnsch])
#steps['RECOmAODUP15']=merge([step3Up2015DefaultsUnsch])
# for premixing: no --pileup_input for replay; GEN-SIM only available for in-time event, from FEVTDEBUGHLT previous step
# --- Premixing stage-2 RECO steps ----------------------------------------
# 'premix_stage2' procModifier enables reading premixed pileup; per-year
# clones only swap --conditions/--era on top of the UP15 defaults.
steps['RECOPRMXUP15_PU25']=merge([
    {'--era':'Run2_2016','--procModifiers':'premix_stage2'}, # temporary replacement for premix; to be brought back to customisePostLS1; DataMixer customize for rerouting inputs to mixed data.
    step3Up2015Defaults])
steps['RECOPRMXUP15_HIPM_PU25']=merge([step3_HIPM,steps['RECOPRMXUP15_PU25']])
steps['RECOPRMXUP15_PU50']=merge([
    {'--era':'Run2_50ns','--procModifiers':'premix_stage2'},
    step3Up2015Defaults50ns])
steps['RECOPRMXUP17_PU25']=merge([
    {'--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017','--procModifiers':'premix_stage2'},
    step3Up2015Defaults])
steps['RECOPRMXUP18_PU25']=merge([
    {'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018','--procModifiers':'premix_stage2'},
    step3Up2015Defaults])
steps['RECOPRMXUP18_PU25_L1TEgDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'},steps['RECOPRMXUP18_PU25']])
steps['RECOPRMXUP18_PU25_L1TMuDQM']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'},steps['RECOPRMXUP18_PU25']])
#Run-Dependent RECO
# '--customise_commands' payload injects run-dependent ECAL laser correction
# tags into the GlobalTag; note the doubly-escaped quotes/newlines, the string
# is passed through to cmsDriver verbatim.
step_RECO18_RD = {'--customise_commands':"\"process.EcalLaserCorrectionServiceMC = cms.ESProducer('EcalLaserCorrectionServiceMC') \\n process.GlobalTag.toGet = cms.VPSet( cms.PSet( record = cms.string('EcalLaserAPDPNRatiosMCRcd'), tag = cms.string('EcalLaserAPDPNRatios_Run_Dep_MC_first_IOV'), connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS') ), cms.PSet( record = cms.string('EcalLaserAPDPNRatiosRcd'), tag = cms.string('EcalLaserAPDPNRatios_Run_Dep_MC'), connect = cms.string('frontier://FrontierProd/CMS_CONDITIONS') ) ) \\n \""}
steps['RECOPRMXUP18_PU25_RD']=merge([step_RECO18_RD, steps['RECOPRMXUP18_PU25']])
steps['RECOPRMXUP18_PU25_L1TEgDQM_RD']=merge([step_RECO18_RD, steps['RECOPRMXUP18_PU25_L1TEgDQM']])
steps['RECOPRMXUP18_PU25_L1TMuDQM_RD']=merge([step_RECO18_RD, steps['RECOPRMXUP18_PU25_L1TMuDQM']])
# Production-style premix RECO (AODSIM/MINIAODSIM output tiers)
recoPremixUp15prod = merge([
    #{'-s':'RAW2DIGI,L1Reco,RECO,EI'}, # tmp
    {'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,DQM:DQMOfflinePOGMC'},
    {'--datatier' : 'AODSIM,MINIAODSIM,DQMIO'},
    {'--eventcontent' : 'AODSIM,MINIAODSIM,DQMIO'},
    {'--era':'Run2_2016'}, # temporary replacement for premix; to be brought back to customisePostLS1
    step3Up2015Defaults])
steps['RECOPRMXUP15PROD_PU25']=merge([
    recoPremixUp15prod])
steps['RECOPRMXUP15PROD_PU50']=merge([
    {'--conditions':'auto:run2_mc_50ns'},
    {'--era':'Run2_50ns'},
    recoPremixUp15prod])
recoPremixUp17prod = merge([
    {'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT'},
    {'--datatier' : 'AODSIM,MINIAODSIM'},
    {'--eventcontent' : 'AODSIM,MINIAODSIM'},
    {'--era':'Run2_2017'},
    {'--conditions':'auto:phase1_2017_realistic'},
    step3Up2015Defaults])
steps['RECOPRMXUP17PROD_PU25']=merge([recoPremixUp17prod])
steps['RECOPRMXUP18PROD_PU25']=merge([{'--era':'Run2_2018','--conditions':'auto:phase1_2018_realistic','--procModifiers':'premix_stage2'},recoPremixUp17prod])
steps['RECOPUDBG']=merge([{'--eventcontent':'RECODEBUG,MINIAODSIM,DQM'},steps['RECOPU1']])
steps['RERECOPU1']=merge([{'--hltProcess':'REDIGI'},steps['RECOPU1']])
steps['RECOUP15_ID']=merge([{'--hltProcess':'HLT2'},steps['RECOUP15']])
# --- Heavy-ion (HI) RECO / miniAOD / ALCARECO steps ----------------------
# hiDefaults* dicts (defined earlier in the file) supply the HI conditions;
# 'MB' flavours add the genJetSubEvent procModifier.
steps['RECOHI2021PPRECO']=merge([hiDefaults2021_ppReco,{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
                                 '--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
                                 '--eventcontent':'RECOSIM,MINIAODSIM,DQM',
                                 },step3Up2015Defaults])
steps['RECOHI2021PPRECOMB']=merge([hiDefaults2021_ppReco,{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
                                 '--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
                                 '--eventcontent':'RECOSIM,MINIAODSIM,DQM',
                                 '--era':'Run3_pp_on_PbPb',
                                 '--procModifiers':'genJetSubEvent',
                                 },step3Up2015Defaults])
steps['ALCARECOHI2021PPRECO']=merge([hiDefaults2021_ppReco,{'-s':'ALCA:TkAlMinBias+SiStripCalMinBias',
                                 '--datatier':'ALCARECO',
                                 '--eventcontent':'ALCARECO'
                                 }])
steps['RECOHI2018PPRECO']=merge([hiDefaults2018_ppReco,{'-s':'RAW2DIGI,L1Reco,RECO,ALCA:SiStripCalZeroBias+SiPixelCalZeroBias,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
                                 '--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO,ALCARECO',
                                 '--eventcontent':'RECOSIM,MINIAODSIM,DQM,ALCARECO',
                                 },step3Up2015Defaults])
steps['RECOHI2018PPRECOMB']=merge([hiDefaults2018_ppReco,{'-s':'RAW2DIGI,L1Reco,RECO,ALCA:SiStripCalZeroBias+SiPixelCalZeroBias,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
                                 '--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO,ALCARECO',
                                 '--eventcontent':'RECOSIM,MINIAODSIM,DQM,ALCARECO',
                                 '--era':'Run2_2018_pp_on_AA',
                                 '--procModifiers':'genJetSubEvent',
                                 },step3Up2015Defaults])
steps['RECOHI2018PPRECOml']=merge([concurrentLumis,steps['RECOHI2018PPRECOMB']])
# re-miniAOD steps: PAT-only sequence starting from existing RECO
steps['REMINIAODHI2018PPRECO']=merge([{'-s':'PAT,VALIDATION:@miniAODValidation,DQM:@miniAODDQM',
                                 '--datatier':'MINIAODSIM,DQMIO',
                                 '--eventcontent':'MINIAODSIM,DQM',
                                 '-n':100,
                                 '--era':'Run2_2018_pp_on_AA',
                                 '--procModifiers':'run2_miniAOD_pp_on_AA_103X',
                                 },hiDefaults2018_ppReco,step3Up2015Defaults])
steps['REMINIAODHI2018PPRECOMB']=merge([{'-s':'PAT,VALIDATION:@miniAODValidation,DQM:@miniAODDQM',
                                 '--datatier':'MINIAODSIM,DQMIO',
                                 '--eventcontent':'MINIAODSIM,DQM',
                                 '-n':100,
                                 '--era':'Run2_2018_pp_on_AA',
                                 '--procModifiers':'genJetSubEvent,run2_miniAOD_pp_on_AA_103X',
                                 },hiDefaults2018_ppReco,step3Up2015Defaults])
steps['ALCARECOHI2018PPRECO']=merge([hiDefaults2018_ppReco,{'-s':'ALCA:TkAlMinBias+SiStripCalMinBias',
                                 '--datatier':'ALCARECO',
                                 '--eventcontent':'ALCARECO'
                                 }])
steps['ALCARECOHI2018PPRECOml']=merge([concurrentLumis,steps['ALCARECOHI2018PPRECO']])
# Per-year HI RECO steps and the 2017 pp-reference step
steps['RECOHI2018']=merge([hiDefaults2018,{'-s':'RAW2DIGI,L1Reco,RECO,VALIDATION,DQM'},step3Up2015Defaults])
steps['RECOHI2017']=merge([hiDefaults2017,{'-s':'RAW2DIGI,L1Reco,RECO,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM'},step3Up2015Defaults])
steps['RECOHI2015']=merge([hiDefaults2015,{'-s':'RAW2DIGI,L1Reco,RECO,VALIDATION,DQM'},step3Up2015Defaults])
steps['RECOHI2011']=merge([hiDefaults2011,{'-s':'RAW2DIGI,L1Reco,RECO,VALIDATION,DQM'},step3Defaults])
steps['RECOPPREF2017']=merge([ppRefDefaults2017,step3Up2015Defaults])
steps['RECOHID11St3']=merge([{
                    '--process':'ZStoRECO'},
                    steps['RECOHID11']])
steps['RECOHIR10D11']=merge([{'--filein':'file:step2_inREPACKRAW.root',
                    '--filtername':'reRECO'},
                    steps['RECOHID11St3']])
#steps['RECOFS']=merge([{'--fast':'',
#                        '-s':'RECO,EI,HLT:@fake,VALIDATION'},
#                        steps['RECO']])
#add this line when testing from an input file that is not strictly GEN-SIM
#addForAll(step3,{'--hltProcess':'DIGI'})
steps['ALCACOSD']={'--conditions':'auto:run1_data',
'--datatier':'ALCARECO',
'--eventcontent':'ALCARECO',
'--scenario':'cosmics',
'-s':'ALCA:TkAlCosmics0T+MuAlGlobalCosmics+HcalCalHOCosmics+DQM'
}
steps['ALCACOSDRUN2']=merge([{'--conditions':'auto:run2_data','--era':'Run2_2016','-s':'ALCA:SiPixelCalCosmics+TkAlCosmics0T+SiStripCalCosmics+MuAlGlobalCosmics+HcalCalHOCosmics+DQM'},steps['ALCACOSD']])
steps['ALCACOSDRUN3']=merge([{'--conditions':'auto:run3_data_promptlike','--era':'Run3','-s':'ALCA:SiPixelCalCosmics+TkAlCosmics0T+SiStripCalCosmics+MuAlGlobalCosmics+HcalCalHOCosmics+DQM'},steps['ALCACOSD']])
steps['ALCACOSDEXPRUN3']=merge([{'--conditions':'auto:run3_data_express','--era':'Run3','-s':'ALCA:SiPixelCalCosmics+TkAlCosmics0T+SiStripCalCosmics+MuAlGlobalCosmics+HcalCalHOCosmics+DQM'},steps['ALCACOSD']])
steps['ALCAPROMPT']={'-s':'ALCA:PromptCalibProd',
'--filein':'file:TkAlMinBias.root',
'--conditions':'auto:run1_data',
'--datatier':'ALCARECO',
'--eventcontent':'ALCARECO'}
steps['ALCAEXP']={'-s':'ALCAOUTPUT:SiStripCalZeroBias+TkAlMinBias+Hotline+LumiPixelsMinBias+AlCaPCCZeroBiasFromRECO+AlCaPCCRandomFromRECO,ALCA:PromptCalibProd+PromptCalibProdSiStrip+PromptCalibProdSiStripGains+PromptCalibProdSiStripGainsAAG+PromptCalibProdSiPixelAli+PromptCalibProdSiPixel',
'--conditions':'auto:run1_data',
'--datatier':'ALCARECO',
'--eventcontent':'ALCARECO',
'--triggerResultsProcess': 'RECO'}
steps['ALCAEXPRUN2']={'-s':'ALCAOUTPUT:SiStripCalZeroBias+TkAlMinBias+LumiPixelsMinBias+AlCaPCCZeroBiasFromRECO+AlCaPCCRandomFromRECO+SiPixelCalZeroBias,ALCA:PromptCalibProd+PromptCalibProdSiStrip+PromptCalibProdSiStripGains+PromptCalibProdSiStripGainsAAG+PromptCalibProdSiPixelAli+PromptCalibProdSiPixel',
'--conditions':'auto:run2_data',
'--datatier':'ALCARECO',
'--eventcontent':'ALCARECO',
'--triggerResultsProcess': 'RECO'}
steps['ALCAEXPHI']=merge([{'-s':'ALCA:PromptCalibProd+PromptCalibProdSiStrip+PromptCalibProdSiStripGains+PromptCalibProdSiStripGainsAAG',
'--scenario':'HeavyIons'},steps['ALCAEXP']])
steps['ALCAEXPTE']={'-s':'ALCA:PromptCalibProdEcalPedestals',
'--conditions':'auto:run2_data_relval',
'--datatier':'ALCARECO',
'--eventcontent':'ALCARECO',
'--triggerResultsProcess': 'RECO'}
# step4
step4Defaults = { '-s' : 'ALCA:TkAlMuonIsolated+TkAlMinBias+EcalCalZElectron+EcalCalWElectron+HcalCalIsoTrk+MuAlCalIsolatedMu+MuAlZMuMu+MuAlOverlaps',
'-n' : 1000,
'--conditions' : 'auto:run1_mc',
'--datatier' : 'ALCARECO',
'--eventcontent': 'ALCARECO',
}
step4Up2015Defaults = {
'-s' : 'ALCA:TkAlMuonIsolated+TkAlMinBias+MuAlOverlaps+EcalESAlign+EcalTrg',
'-n' : 1000,
'--conditions' : 'auto:run2_mc',
'--era' : 'Run2_2016',
'--datatier' : 'ALCARECO',
'--eventcontent': 'ALCARECO',
}
steps['RERECOPU']=steps['RERECOPU1']
steps['ALCATT']=merge([{'--filein':'file:step3.root','-s':'ALCA:TkAlMuonIsolated+TkAlMinBias+MuAlCalIsolatedMu+MuAlZMuMu+MuAlOverlaps'},step4Defaults])
steps['ALCATTUP15']=merge([{'--filein':'file:step3.root'},step4Up2015Defaults])
steps['ALCAMIN']=merge([{'-s':'ALCA:TkAlMinBias','--filein':'file:step3.root'},stCond,step4Defaults])
steps['ALCAMINUP15']=merge([{'-s':'ALCA:TkAlMinBias','--filein':'file:step3.root'},step4Up2015Defaults])
steps['ALCACOS']=merge([{'-s':'ALCA:TkAlCosmics0T+MuAlGlobalCosmics+HcalCalHOCosmics'},stCond,step4Defaults])
steps['ALCABH']=merge([{'-s':'ALCA:TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo'},stCond,step4Defaults])
steps['ALCABH_UP18']=merge([{'--conditions':'auto:phase1_2018_cosmics','-s':'ALCA:TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo','--era':'Run2_2018'},step4Up2015Defaults])
steps['ALCABH_UP21']=merge([{'--conditions':'auto:phase1_2021_cosmics','-s':'ALCA:TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo','--era':'Run3'},step4Up2015Defaults])
steps['ALCAHAL']=merge([{'-s':'ALCA:TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo'},step4Up2015Defaults])
steps['ALCACOS_UP15']=merge([{'--conditions':'auto:run2_mc_cosmics','-s':'ALCA:TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo'},step4Up2015Defaults])
steps['ALCACOS_UP16']=merge([{'--conditions':'auto:run2_mc_cosmics','-s':'ALCA:TkAlCosmics0T+SiStripCalCosmics+SiPixelCalCosmics+TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo','--era':'Run2_2016'},step4Up2015Defaults])
steps['ALCACOS_UP17']=merge([{'--conditions':'auto:phase1_2017_cosmics','-s':'ALCA:TkAlCosmics0T+SiStripCalCosmics+SiPixelCalCosmics+TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo','--era':'Run2_2017'},step4Up2015Defaults])
steps['ALCACOS_UP18']=merge([{'--conditions':'auto:phase1_2018_cosmics','-s':'ALCA:TkAlCosmics0T+SiStripCalCosmics+SiPixelCalCosmics+TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo','--era':'Run2_2018'},step4Up2015Defaults])
steps['ALCACOS_UP21']=merge([{'--conditions':'auto:phase1_2021_cosmics','-s':'ALCA:TkAlCosmics0T+SiStripCalCosmics+SiPixelCalCosmics+TkAlBeamHalo+MuAlBeamHaloOverlaps+MuAlBeamHalo','--era':'Run3'},step4Up2015Defaults])
steps['ALCACOS_UP21_0T']=merge([{'--magField':'0T','--conditions':'auto:phase1_2021_cosmics_0T'},steps['ALCACOS_UP21']])
steps['ALCAHARVD']={'-s':'ALCAHARVEST:BeamSpotByRun+BeamSpotByLumi+SiStripQuality',
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProd.root'}
steps['ALCAHARVD1']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProd']),
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProd.root'}
steps['ALCAHARVD1HI']=merge([{'--scenario':'HeavyIons'},steps['ALCAHARVD1']])
steps['ALCAHARVD2']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiStrip']),
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProdSiStrip.root'}
steps['ALCAHARVD2HI']=merge([{'--scenario':'HeavyIons'},steps['ALCAHARVD2']])
steps['ALCAHARVD3']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiStripGains']),
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProdSiStripGains.root'}
steps['ALCAHARVD3HI']=merge([{'--scenario':'HeavyIons'},steps['ALCAHARVD3']])
steps['ALCAHARVD4']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiPixelAli']),
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProdSiPixelAli.root'}
steps['ALCAHARVD4HI']=merge([{'--scenario':'HeavyIons'},steps['ALCAHARVD4']])
steps['ALCAHARVD5']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiStripGainsAAG']),
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProdSiStripGainsAAG.root'}
steps['ALCAHARVD6']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdSiPixel']),
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProdSiPixel.root'}
steps['ALCAHARVD5HI']=merge([{'--scenario':'HeavyIons'},steps['ALCAHARVD5']])
steps['ALCAHARVDTE']={'-s':'ALCAHARVEST:%s'%(autoPCL['PromptCalibProdEcalPedestals']),
'--conditions':'auto:run2_data',
'--scenario':'pp',
'--data':'',
'--filein':'file:PromptCalibProdEcalPedestals.root'}
steps['RECOHISt4']=steps['RECOHI2015']
steps['RECOHI2021MIX']=merge([steps['RECOHI2021PPRECO'],{'--pileup':'HiMix','--pileup_input':'das:/RelValHydjetQ_B12_5020GeV_2021_ppReco/%s/GEN-SIM'%(baseDataSetRelease[23])}])
steps['RECOHIMIX']=merge([steps['RECOHI2018PPRECO'],{'--pileup':'HiMix','--pileup_input':'das:/RelValHydjetQ_B12_5020GeV_2018/%s/GEN-SIM'%(baseDataSetRelease[9])}])
steps['ALCANZS']=merge([{'-s':'ALCA:HcalCalMinBias','--mc':''},step4Defaults])
steps['HARVESTGEN']={'-s':'HARVESTING:genHarvesting',
'--conditions':'auto:run2_mc',
'--mc':'',
'--era' :'Run2_2016',
'--filetype':'DQM',
'--filein':'file:step1_inDQM.root'
}
steps['HARVESTGEN2']=merge([{'--filein':'file:step2_inDQM.root'},steps['HARVESTGEN']])
#data
steps['HARVESTD']={'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM',
'--conditions':'auto:run1_data',
'--data':'',
'--filetype':'DQM',
'--scenario':'pp'}
steps['HARVESTDR1']={'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM',
'--conditions':'auto:run1_data',
'--data':'',
'--filetype':'DQM',
'--scenario':'pp'}
steps['HARVESTDreHLT'] = merge([ {'--conditions':'auto:run1_data_%s'%menu}, steps['HARVESTD'] ])
steps['HARVESTDR1reHLT'] = merge([ {'--conditions':'auto:run1_data_%s'%menu}, steps['HARVESTDR1'] ])
steps['HARVESTDR2'] = merge([ {'--conditions':'auto:run2_data_relval'}, steps['HARVESTD'] ])
steps['HARVESTDR2ZB'] = merge([ {'--conditions':'auto:run2_data_relval','-s':'HARVESTING:@rerecoZeroBiasFakeHLT+@miniAODDQM'}, steps['HARVESTD'] ])
steps['HARVEST2016'] = merge([ {'--era':'Run2_2016'}, steps['HARVESTDR2'] ])
steps['HARVEST2016_L1TEgDQM'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'}, steps['HARVEST2016'] ])
steps['HARVEST2016_L1TMuDQM'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'}, steps['HARVEST2016'] ])
# HARVESTING steps for 2017/2018 data.
# NOTE(fix): the original dict literals repeated the '--conditions' key
# ('auto:run2_data_relval' followed by 'auto:run2_data'); in a Python dict
# literal the last duplicate key silently wins, so the first entry was dead.
# Only the effective 'auto:run2_data' value is kept here (behavior unchanged).
steps['HARVEST2017'] = merge([ {'--era':'Run2_2017','--conditions':'auto:run2_data'}, steps['HARVESTD'] ])
# NOTE(fix): the original '-s' value carried an accidental leading space
# (' HARVESTING:...') absent from every sibling ZB step — dropped here.
steps['HARVEST2017ZB'] = merge([ {'--era':'Run2_2017','--conditions':'auto:run2_data','-s':'HARVESTING:@rerecoZeroBiasFakeHLT+@miniAODDQM'}, steps['HARVESTD'] ])
steps['HARVEST2017_L1TEgDQM'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'}, steps['HARVEST2017'] ])
steps['HARVEST2017_L1TMuDQM'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'}, steps['HARVEST2017'] ])
steps['HARVEST2018'] = merge([ {'--era':'Run2_2018','--conditions':'auto:run2_data'}, steps['HARVESTD'] ])
steps['HARVEST2018ZB'] = merge([ {'--era':'Run2_2018','--conditions':'auto:run2_data','-s':'HARVESTING:@rerecoZeroBiasFakeHLT+@miniAODDQM'}, steps['HARVESTD'] ])
steps['HARVEST2018_Prompt'] = merge([ {'--era':'Run2_2018','--conditions':'auto:run2_data'}, steps['HARVESTD'] ])
steps['HARVEST2018_PromptZB'] = merge([ {'--era':'Run2_2018','--conditions':'auto:run2_data','-s':'HARVESTING:@rerecoZeroBiasFakeHLT+@miniAODDQM'}, steps['HARVESTD'] ])
steps['HARVEST2018_L1TEgDQM'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'}, steps['HARVEST2018'] ])
steps['HARVEST2018_L1TEgDQM_Prompt'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TEgamma'}, steps['HARVEST2018_Prompt'] ])
steps['HARVEST2018_L1TMuDQM'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'}, steps['HARVEST2018'] ])
steps['HARVEST2018_L1TMuDQM_Prompt'] = merge([ {'-s':'HARVESTING:@standardDQMFakeHLT+@miniAODDQM+@L1TMuon'}, steps['HARVEST2018_Prompt'] ])
steps['HARVEST2018_pixelTrackingOnly'] = merge([ {'-s':'HARVESTING:@pixelTrackingOnlyDQM'}, steps['HARVEST2018'] ])
steps['HARVEST2018_ECALOnly'] = merge([ {'-s':'HARVESTING:@ecal'}, steps['HARVEST2018'] ])
steps['HARVEST2018_HCALOnly'] = merge([ {'-s':'HARVESTING:@hcalOnly'}, steps['HARVEST2018'] ])
steps['HARVEST2018_hBStar'] = merge([ {'--era' : 'Run2_2018_highBetaStar'}, steps['HARVEST2018'] ])
steps['HARVEST2018_HEfail'] = merge([ {'--conditions':'auto:run2_data_HEfail'}, steps['HARVEST2018'] ])
steps['HARVEST2018_BadHcalMitig'] = merge([ {'--era' : 'Run2_2018,pf_badHcalMitigation','--conditions':'auto:run2_data_HEfail'}, steps['HARVEST2018'] ])
steps['HARVEST2018_L1TEgDQM_MULTIRUN'] = merge([ {
'--customise':"Configuration/StandardSequences/DQMSaverAtJobEnd_cff",
# hardcode the input files since we need multiple, from each of the RECO steps.
'--filein':"file:step6_inDQM.root,file:step3_inDQM.root",
}, steps['HARVEST2018_L1TEgDQM'] ])
steps['DQMHLTonAOD_2017']={
'-s':'DQM:offlineHLTSourceOnAOD', ### DQM-only workflow on AOD input: for HLT
'--conditions':'auto:run2_data',
'--data':'',
'--eventcontent':'DQM',
'--datatier':'DQMIO',
'--era':'Run2_2017',
'--fileout':'DQMHLTonAOD.root',
'--procModifiers':'tau_readOldDiscriminatorFormat',
}
steps['DQMHLTonAODextra_2017'] = merge([ {'-s':'DQM:offlineHLTSourceOnAODextra'}, steps['DQMHLTonAOD_2017'] ])
steps['DQMHLTonRAWAOD_2017']={
'--process':'reHLT',
'-s':'HLT:@relval2017,DQM:offlineHLTSourceOnAOD',
'--conditions':'auto:run2_data',
'--data':'',
'--eventcontent':'DQM',
'--datatier':'DQMIO',
'--era':'Run2_2017',
'--secondfilein':'filelist:step1_dasparentquery.log',
'--customise_commands':'"process.HLTAnalyzerEndpath.remove(process.hltL1TGlobalSummary)"',
'--fileout':'DQMHLTonAOD.root',
'--procModifiers':'tau_readOldDiscriminatorFormat',
}
steps['HARVESTDQMHLTonAOD_2017'] = merge([ {'--filein':'file:DQMHLTonAOD.root','-s':'HARVESTING:hltOfflineDQMClient'}, steps['HARVEST2017'] ]) ### Harvesting step for the DQM-only workflow
steps['HARVESTDDQM']=merge([{'-s':'HARVESTING:@common+@muon+@hcal+@jetmet+@ecal'},steps['HARVESTD']])
steps['HARVESTDfst2']=merge([{'--filein':'file:step2_inDQM.root'},steps['HARVESTDR1']])
steps['HARVESTDC']={'-s':'HARVESTING:dqmHarvestingFakeHLT',
'--conditions':'auto:run1_data',
'--filetype':'DQM',
'--data':'',
'--filein':'file:step2_inDQM.root',
'--scenario':'cosmics'}
steps['HARVESTDCRUN2']=merge([{'--conditions':'auto:run2_data','--era':'Run2_2016'},steps['HARVESTDC']])
steps['HARVESTDCRUN3']=merge([{'--conditions':'auto:run3_data_promptlike','--era':'Run3'},steps['HARVESTDC']])
steps['HARVESTDCEXPRUN3']=merge([{'--conditions':'auto:run3_data_express','--era':'Run3'},steps['HARVESTDC']])
steps['HARVESTDR2_REMINIAOD_data2016']=merge([{'--data':'', '-s':'HARVESTING:@miniAODDQM','--era':'Run2_2016,run2_miniAOD_80XLegacy'},steps['HARVESTDR2']])
steps['HARVESTDR2_REMINIAOD_data2016_HIPM']=merge([{'--era':'Run2_2016_HIPM,run2_miniAOD_80XLegacy'},steps['HARVESTDR2_REMINIAOD_data2016']])
steps['HARVESTDR2_REMINIAOD_data2016UL']=merge([{'--era':'Run2_2016', '--procModifiers':'run2_miniAOD_UL_preSummer20'},steps['HARVESTDR2_REMINIAOD_data2016']])
steps['HARVESTDR2_REMINIAOD_data2016UL_HIPM']=merge([{'--era':'Run2_2016_HIPM'},steps['HARVESTDR2_REMINIAOD_data2016UL']])
steps['HARVEST2017_REMINIAOD_data2017']=merge([{'--era':'Run2_2017,run2_miniAOD_94XFall17'},steps['HARVESTDR2_REMINIAOD_data2016']])
steps['HARVEST2017_REMINIAOD_data2017UL']=merge([{'--era':'Run2_2017'},steps['HARVESTDR2_REMINIAOD_data2016UL']])
steps['HARVEST2018_REMINIAOD_data2018UL']=merge([{'--era':'Run2_2018'},steps['HARVESTDR2_REMINIAOD_data2016UL']])
steps['HARVESTDHI']={'-s':'HARVESTING:dqmHarvestingFakeHLT',
'--conditions':'auto:run1_data',
'--filetype':'DQM',
'--data':'',
'--scenario':'HeavyIons'}
steps['HARVESTDHI15']=merge([{ '--conditions':'auto:run2_data',
'--era':'Run2_HI',
'-n':-1},steps['HARVESTDHI']])
steps['HARVESTDHI18']=merge([{ '--era':'Run2_2018_pp_on_AA',
'--scenario':'pp' },steps['HARVESTDHI15']])
steps['HARVESTHI18MINIAOD']=merge([{'-s':'HARVESTING:@miniAODDQM',
'--filein':'file:step2_inDQM.root',
'--data':'',
'--era' : 'Run2_2018_pp_on_AA',
'--filetype':'DQM',
'-n':100},hiDefaults2018_ppReco])
#MC
steps['HARVEST']={'-s':'HARVESTING:validationHarvestingNoHLT+dqmHarvestingFakeHLT',
'--conditions':'auto:run1_mc',
'--mc':'',
'--filetype':'DQM',
'--scenario':'pp'}
steps['HARVESTCOS']={'-s':'HARVESTING:dqmHarvestingFakeHLT',
'--conditions':'auto:run1_mc',
'--mc':'',
'--filein':'file:step3_inDQM.root',
'--filetype':'DQM',
'--scenario':'cosmics'}
# Cosmics MC harvesting step (2016 era).
# NOTE(fix): the original dict listed '--filein' twice (the second occurrence
# was even flagged "# unnnecessary" in-source); duplicate keys in a Python dict
# literal collapse silently, so the redundant entry is removed — no behavior change.
steps['HARVESTHAL']={'-s'          :'HARVESTING:dqmHarvestingFakeHLT',
                     '--conditions':'auto:run2_mc',
                     '--mc'        :'',
                     '--filein'    :'file:step3_inDQM.root',
                     '--scenario'  :'cosmics',
                     '--filetype':'DQM',
                     '--era' : 'Run2_2016',
                     }
# Per-era cosmics MC harvesting steps.
# NOTE(fix): each of these dicts originally listed '--filein' twice with the
# same value (second occurrence flagged "# unnnecessary" in-source). Duplicate
# keys collapse silently in a dict literal, so the redundant entries are
# removed here with no behavior change. The steps differ only in conditions,
# era, and (for UP21) the harvesting sequence without the FakeHLT suffix.
steps['HARVESTCOS_UP15']={'-s'          :'HARVESTING:dqmHarvestingFakeHLT',
                          '--conditions':'auto:run2_mc_cosmics',
                          '--mc'        :'',
                          '--filein'    :'file:step3_inDQM.root',
                          '--scenario'  :'cosmics',
                          '--filetype':'DQM',
                          '--era' : 'Run2_2016',
                          }
steps['HARVESTCOS_UP16']={'-s'          :'HARVESTING:dqmHarvestingFakeHLT',
                          '--conditions':'auto:run2_mc_cosmics',
                          '--mc'        :'',
                          '--filein'    :'file:step3_inDQM.root',
                          '--scenario'  :'cosmics',
                          '--filetype':'DQM',
                          '--era' : 'Run2_2016'
                          }
steps['HARVESTCOS_UP17']={'-s'          :'HARVESTING:dqmHarvestingFakeHLT',
                          '--conditions':'auto:phase1_2017_cosmics',
                          '--mc'        :'',
                          '--filein'    :'file:step3_inDQM.root',
                          '--scenario'  :'cosmics',
                          '--filetype':'DQM',
                          '--era' : 'Run2_2017'
                          }
steps['HARVESTCOS_UP18']={'-s'          :'HARVESTING:dqmHarvestingFakeHLT',
                          '--conditions':'auto:phase1_2018_cosmics',
                          '--mc'        :'',
                          '--filein'    :'file:step3_inDQM.root',
                          '--scenario'  :'cosmics',
                          '--filetype':'DQM',
                          '--era' : 'Run2_2018'
                          }
steps['HARVESTCOS_UP21']={'-s'          :'HARVESTING:dqmHarvesting',
                          '--conditions':'auto:phase1_2021_cosmics',
                          '--mc'        :'',
                          '--filein'    :'file:step3_inDQM.root',
                          '--scenario'  :'cosmics',
                          '--filetype':'DQM',
                          '--era' : 'Run3'
                          }
steps['HARVESTCOS_UP21_0T']=merge([{'--magField':'0T','--conditions':'auto:phase1_2021_cosmics_0T'},steps['HARVESTCOS_UP21']])
steps['HARVESTFS']={'-s':'HARVESTING:validationHarvestingFS',
'--conditions':'auto:run1_mc',
'--mc':'',
'--fast':'',
'--filetype':'DQM',
'--scenario':'pp'}
steps['HARVESTHI2021PPRECO']=merge([hiDefaults2021_ppReco,{'-s':'HARVESTING:validationHarvestingNoHLT+dqmHarvestingFakeHLT',
'--filein':'file:step3_inDQM.root',
'--mc':'',
'--era' : 'Run3_pp_on_PbPb',
'--filetype':'DQM'}])
steps['HARVESTHI2018PPRECO']=merge([hiDefaults2018_ppReco,{'-s':'HARVESTING:validationHarvestingNoHLT+dqmHarvestingFakeHLT',
'--filein':'file:step3_inDQM.root',
'--mc':'',
'--era' : 'Run2_2018_pp_on_AA',
'--filetype':'DQM'}])
steps['HARVESTHI2018PPRECOMINIAOD']=merge([{'-s':'HARVESTING:@miniAODValidation+@miniAODDQM',
'--filein':'file:step2_inDQM.root',
'--mc':'',
'--era' : 'Run2_2018_pp_on_AA',
'--filetype':'DQM',
'-n':100},hiDefaults2018_ppReco])
steps['HARVESTHI2018']=merge([hiDefaults2018,{'-s':'HARVESTING:validationHarvesting+dqmHarvestingFakeHLT',
'--mc':'',
'--era' : 'Run2_2017',
'--filetype':'DQM',
'--scenario':'HeavyIons'}])
steps['HARVESTHI2017']=merge([hiDefaults2017,{'-s':'HARVESTING:validationHarvestingNoHLT+dqmHarvestingFakeHLT',
'--mc':'',
'--era' : 'Run2_2017_pp_on_XeXe',
'--filetype':'DQM'}])
steps['HARVESTHI2015']=merge([hiDefaults2015,{'-s':'HARVESTING:validationHarvesting+dqmHarvestingFakeHLT',
'--mc':'',
'--era' : 'Run2_2016,Run2_HI',
'--filetype':'DQM',
'--scenario':'HeavyIons'}])
steps['HARVESTHI2011']=merge([hiDefaults2011,{'-s':'HARVESTING:validationHarvesting+dqmHarvestingFakeHLT',
'--mc':'',
'--filetype':'DQM'}])
steps['HARVESTPPREF2017']=merge([ppRefDefaults2017,{'-s':'HARVESTING:validationHarvestingNoHLT+dqmHarvestingFakeHLT',
'--mc':'',
'--era' : 'Run2_2017_ppRef',
'--filetype':'DQM'}])
steps['HARVESTUP15']={
# '-s':'HARVESTING:validationHarvesting+dqmHarvesting', # todo: remove UP from label
'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM', # todo: remove UP from label
'--conditions':'auto:run2_mc',
'--mc':'',
'--era' : 'Run2_2016',
'--filetype':'DQM',
}
steps['HARVESTUP15_L1TEgDQM']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM+@L1TEgamma'},steps['HARVESTUP15']])
steps['HARVESTUP15_L1TMuDQM']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM+@L1TMuon'},steps['HARVESTUP15']])
steps['HARVESTMINUP15']=merge([{'-s':'HARVESTING:validationHarvestingNoHLT+dqmHarvestingFakeHLT'},steps['HARVESTUP15']])
steps['HARVESTUP15_PU25']=steps['HARVESTUP15']
steps['HARVESTUP15_PU25_L1TEgDQM']=steps['HARVESTUP15_L1TEgDQM']
steps['HARVESTUP15_PU25_L1TMuDQM']=steps['HARVESTUP15_L1TMuDQM']
steps['HARVESTUP15_PU50']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM','--era' : 'Run2_50ns'},steps['HARVESTUP15']])
steps['HARVESTUP15_PU50_L1TEgDQM']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM+@L1TEgamma'},steps['HARVESTUP15_PU50']])
steps['HARVESTUP15_PU50_L1TMuDQM']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM+@L1TMuon'},steps['HARVESTUP15_PU50']])
steps['HARVESTUP15_trackingOnly']=merge([{'-s': 'HARVESTING:@trackingOnlyValidation+@trackingOnlyDQM'}, steps['HARVESTUP15']])
steps['HARVESTUP17']=merge([{'--conditions':'auto:phase1_2017_realistic','--era' : 'Run2_2017','--geometry' : 'DB:Extended'},steps['HARVESTUP15']])
steps['HARVESTUP18']=merge([{'--conditions':'auto:phase1_2018_realistic','--era' : 'Run2_2018','--geometry' : 'DB:Extended'},steps['HARVESTUP15']])
steps['HARVESTUP18_L1TEgDQM']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM+@L1TEgamma'},steps['HARVESTUP18']])
steps['HARVESTUP18_L1TMuDQM']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM+@L1TMuon'},steps['HARVESTUP18']])
steps['HARVESTUP17_PU25']=steps['HARVESTUP17']
steps['HARVESTUP18_PU25']=steps['HARVESTUP18']
steps['HARVESTUP18_PU25_L1TEgDQM']=steps['HARVESTUP18_L1TEgDQM']
steps['HARVESTUP18_PU25_L1TMuDQM']=steps['HARVESTUP18_L1TMuDQM']
#Run-Dependent harvesting; using MultiRun harvesting + force RunNumber=1 for GUI MC
# Common overrides for run-dependent MC harvesting: harvest once at job end
# ('--harvesting':'AtJobEnd') and pin the DQM saver's run number to 1 so the
# output appears under a single MC run in the DQM GUI.
step_harvest_MRH_RD = {'--harvesting':'AtJobEnd','--customise_commands':'"process.dqmSaver.forceRunNumber = 1"'}
# Run-dependent variants of the 2018 PU harvesting steps (base steps defined above).
steps['HARVESTUP18_PU25_RD']=merge([step_harvest_MRH_RD,steps['HARVESTUP18']])
steps['HARVESTUP18_PU25_L1TEgDQM_RD']=merge([step_harvest_MRH_RD,steps['HARVESTUP18_L1TEgDQM']])
steps['HARVESTUP18_PU25_L1TMuDQM_RD']=merge([step_harvest_MRH_RD,steps['HARVESTUP18_L1TMuDQM']])
steps['HARVESTDR2_REMINIAOD_mc2016']=merge([{'-s':'HARVESTING:@miniAODValidation+@miniAODDQM','--era':'Run2_2016,run2_miniAOD_80XLegacy'},steps['HARVESTUP15']])
steps['HARVESTUP17_REMINIAOD_mc2017']=merge([{'-s':'HARVESTING:@miniAODValidation+@miniAODDQM','--era':'Run2_2017,run2_miniAOD_94XFall17'},steps['HARVESTUP17']])
# UL setup uses proc modifiers
steps['HARVESTDR2_REMINIAOD_mc2016UL_preVFP']=merge([{'--conditions':'auto:run2_mc_pre_vfp','--era':'Run2_2016_HIPM','--procModifiers':'run2_miniAOD_UL_preSummer20'},
steps['HARVESTDR2_REMINIAOD_mc2016']])
steps['HARVESTDR2_REMINIAOD_mc2016UL_postVFP']=merge([{'--conditions':'auto:run2_mc','--era':'Run2_2016'},steps['HARVESTDR2_REMINIAOD_mc2016UL_preVFP']])
steps['HARVESTUP17_REMINIAOD_mc2017UL']=merge([{'--era':'Run2_2017','--procModifiers':'run2_miniAOD_UL_preSummer20'},steps['HARVESTUP17_REMINIAOD_mc2017']])
steps['HARVESTUP18_REMINIAOD_mc2018UL']=merge([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018'},steps['HARVESTUP17_REMINIAOD_mc2017UL']])
# for Run1 PPb data workflow
steps['HARVEST_PPbData']=merge([{'--conditions':'auto:run1_data','-s':'HARVESTING:dqmHarvestingFakeHLT','--scenario':'pp','--era':'Run1_pA' }, steps['HARVESTDHI']])
# for Run2 PPb MC workflow
steps['HARVESTUP15_PPb']=merge([{'--conditions':'auto:run2_mc_pa','--era':'Run2_2016_pA'}, steps['HARVESTMINUP15']])
# unSchHarvestOverrides={'-s':'HARVESTING:@standardValidation+@standardDQM+@ExtraHLT+@miniAODValidation+@miniAODDQM'}
# steps['HARVESTmAODUP15']=merge([unSchHarvestOverrides,steps['HARVESTUP15']])
steps['HARVESTUP15FS']={'-s':'HARVESTING:validationHarvesting',
'--conditions':'auto:run2_mc',
'--fast':'',
'--mc':'',
'--era':'Run2_2016',
'--filetype':'DQM',
'--scenario':'pp'}
steps['HARVESTUP15FS_trackingOnly']=merge([{'-s': 'HARVESTING:@trackingOnlyValidation+@trackingOnlyDQM'}, steps['HARVESTUP15FS']])
steps['HARVESTUP17FS']=merge([{'--conditions':'auto:phase1_2017_realistic','--era' : 'Run2_2017_FastSim'},steps['HARVESTUP15FS']])
steps['HARVESTUP17FS_trackingOnly']=merge([{'-s': 'HARVESTING:@trackingOnlyValidation+@trackingOnlyDQM'}, steps['HARVESTUP17FS']])
steps['HARVESTUP18FS']=merge([{'--conditions':'auto:phase1_2018_realistic','--era' : 'Run2_2018_FastSim'},steps['HARVESTUP15FS']])
steps['HARVESTUP18FS_trackingOnly']=merge([{'-s': 'HARVESTING:@trackingOnlyValidation+@trackingOnlyDQM'}, steps['HARVESTUP18FS']])
steps['ALCASPLIT']={'-s':'ALCAOUTPUT:@allForPrompt',
'--conditions':'auto:run1_data',
'--scenario':'pp',
'--data':'',
'--triggerResultsProcess':'RECO',
'--filein':'file:step2_inALCARECO.root'}
steps['SKIMD']={'-s':'SKIM:all',
'--conditions':'auto:run1_data',
'--data':'',
'--scenario':'pp',
'--filein':'file:step2.root',
'--secondfilein':'filelist:step1_dasquery.log'}
steps['SKIMDreHLT'] = merge([ {'--conditions':'auto:run1_data_%s'%menu,'--filein':'file:step3.root'}, steps['SKIMD'] ])
steps['SKIMCOSD']={'-s':'SKIM:all',
'--conditions':'auto:run1_data',
'--data':'',
'--scenario':'cosmics',
'--filein':'file:step2.root',
'--secondfilein':'filelist:step1_dasquery.log'}
steps['RECOFROMRECO']=merge([{'-s':'RECO,EI',
'--filtername':'RECOfromRECO',
'--process':'reRECO',
'--datatier':'AODSIM',
'--eventcontent':'AODSIM',
'--procModifiers':'recoFromReco',
},
stCond,step3Defaults])
steps['RECOFROMRECOSt2']=steps['RECOFROMRECO']
steps['COPYPASTE']={'-s':'NONE',
'--conditions':'auto:run1_mc',
'--output':'\'[{"t":"RAW","e":"ALL"}]\'',
'--customise_commands':'"process.ALLRAWoutput.fastCloning=cms.untracked.bool(False)"'}
#miniaod
stepMiniAODDefaults = { '-s' : 'PAT',
'--era' : 'Run2_2016',
'-n' : '100'
}
stepMiniAODDataUP15 = merge([{'--conditions' : 'auto:run1_data',
'--data' : '',
'--datatier' : 'MINIAOD',
'--eventcontent' : 'MINIAOD',
'--filein' :'file:step3.root'
},stepMiniAODDefaults])
steps['REMINIAOD_data2016'] = merge([{'-s' : 'PAT,DQM:@miniAODDQM',
'--process' : 'PAT',
'--era' : 'Run2_2016,run2_miniAOD_80XLegacy',
'--conditions' : 'auto:run2_data_relval',
'--data' : '',
'--scenario' : 'pp',
'--eventcontent' : 'MINIAOD,DQM',
'--datatier' : 'MINIAOD,DQMIO'
},stepMiniAODDefaults])
steps['REMINIAOD_data2016_HIPM'] = merge([{'--era' : 'Run2_2016_HIPM,run2_miniAOD_80XLegacy'},steps['REMINIAOD_data2016']])
steps['REMINIAOD_data2016UL'] = merge([{'--era' : 'Run2_2016', '--procModifiers' : 'run2_miniAOD_UL_preSummer20'},steps['REMINIAOD_data2016']])
steps['REMINIAOD_data2016UL_HIPM'] = merge([{'--era' : 'Run2_2016_HIPM'},steps['REMINIAOD_data2016UL']])
stepReMiniAODData17 = merge([{'--era' : 'Run2_2017,run2_miniAOD_94XFall17'},steps['REMINIAOD_data2016']])
steps['REMINIAOD_data2017'] = stepReMiniAODData17
steps['REMINIAOD_data2017UL'] = merge([{'--era' : 'Run2_2017'},steps['REMINIAOD_data2016UL']])
steps['REMINIAOD_data2018UL'] = merge([{'--era' : 'Run2_2018'},steps['REMINIAOD_data2016UL']])
# Not sure whether the customisations are in the dict as "--customise" or "--era" so try to
# remove both. Currently premixing uses "--customise" and everything else uses "--era".
# remove() is expected to raise when the requested key is absent. Catch
# Exception explicitly rather than with the original bare 'except:', which
# would also swallow KeyboardInterrupt/SystemExit. Fallback behavior is
# unchanged: if '--era' is not present, strip '--customise' instead.
try:
    stepMiniAODData = remove(stepMiniAODDataUP15,'--era')
except Exception:
    stepMiniAODData = remove(stepMiniAODDataUP15,'--customise')
stepMiniAODMC = merge([{'--conditions' : 'auto:run2_mc',
'--mc' : '',
'--era' : 'Run2_2016',
'--datatier' : 'MINIAODSIM',
'--eventcontent' : 'MINIAODSIM',
'--filein' :'file:step3.root'
},stepMiniAODDefaults])
steps['REMINIAOD_mc2016'] = merge([{'-s' : 'PAT,VALIDATION:@miniAODValidation,DQM:@miniAODDQM',
'--process' : 'PAT',
'--era' : 'Run2_2016,run2_miniAOD_80XLegacy',
'--conditions' : 'auto:run2_mc',
'--mc' : '',
'--scenario' : 'pp',
'--eventcontent' : 'MINIAODSIM,DQM',
'--datatier' : 'MINIAODSIM,DQMIO'
},stepMiniAODDefaults])
steps['REMINIAOD_mc2017'] =merge([{'--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017,run2_miniAOD_94XFall17'},steps['REMINIAOD_mc2016']])
# UL uses procModifiers:
steps['REMINIAOD_mc2016UL_preVFP']=merge([{'--conditions':'auto:run2_mc_pre_vfp','--era':'Run2_2016_HIPM','--procModifiers':'run2_miniAOD_UL_preSummer20'},steps['REMINIAOD_mc2016']])
steps['REMINIAOD_mc2016UL_postVFP']=merge([{'--conditions':'auto:run2_mc','--era':'Run2_2016'},steps['REMINIAOD_mc2016UL_preVFP']])
steps['REMINIAOD_mc2017UL']=merge([{'--era':'Run2_2017','--procModifiers':'run2_miniAOD_UL_preSummer20'},steps['REMINIAOD_mc2017']])
steps['REMINIAOD_mc2018UL']=merge([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018','--procModifiers':'run2_miniAOD_UL_preSummer20'},steps['REMINIAOD_mc2017']])
#steps['MINIAODDATA'] =merge([stepMiniAODData])
#steps['MINIAODDreHLT'] =merge([{'--conditions':'auto:run1_data_%s'%menu},stepMiniAODData])
#steps['MINIAODDATAs2'] =merge([{'--filein':'file:step2.root'},stepMiniAODData])
#MiniAOD 2016
steps['MINIAODMCUP15'] =merge([stepMiniAODMC])
#steps['MINIAODMCUP1550'] =merge([{'--conditions':'auto:run2_mc_50ns','--era':'Run2_50ns'},stepMiniAODMC])
#steps['MINIAODMCUP15HI'] =merge([{'--conditions':'auto:run2_mc_hi','--era':'Run2_HI'},stepMiniAODMC])
steps['MINIAODMCUP15FS'] =merge([{'--filein':'file:step1.root','--fast':''},stepMiniAODMC])
steps['MINIAODMCUP15FS50'] =merge([{'--conditions':'auto:run2_mc_50ns','--era':'Run2_50ns'},steps['MINIAODMCUP15FS']])
steps['DBLMINIAODMCUP15NODQM'] = merge([{'--conditions':'auto:run2_mc',
'-s':'PAT',
'--datatier' : 'MINIAODSIM',
'--eventcontent':'MINIAOD',},stepMiniAODMC])
#MiniAOD 2017
steps['MINIAODMCUP17FS'] =merge([{'--filein':'file:step1.root','--fast':'','--conditions':'auto:phase1_2017_realistic','--era':'Run2_2017_FastSim'},stepMiniAODMC])
#MiniAOD 2018
steps['MINIAODMCUP18'] =merge([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018'},stepMiniAODMC])
steps['MINIAODMCUP18bParking'] =merge([{'--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018,bParking'},stepMiniAODMC])
steps['MINIAODMCUP18FS'] =merge([{'--filein':'file:step1.root','--fast':'','--conditions':'auto:phase1_2018_realistic','--era':'Run2_2018_FastSim'},stepMiniAODMC])
steps['MINIAODMCUP18ml'] =merge([concurrentLumis,steps['MINIAODMCUP18']])
stepNanoAODDefaults = { '-s': 'NANO,DQM:@nanoAODDQM', '-n': 1000 }
stepNanoGenDefaults = { '-s': 'NANOGEN,DQM:@nanogenDQM', '-n': 1000 }
stepNanoAODData = merge([{ '--data':'', '--eventcontent' : 'NANOAOD,DQM' ,'--datatier': 'NANOAOD,DQMIO' }, stepNanoAODDefaults ])
stepNanoAODMC = merge([{ '--mc':'' , '--eventcontent' : 'NANOAODSIM,DQM','--datatier': 'NANOAODSIM,DQMIO' }, stepNanoAODDefaults ])
stepNanoEDMData = merge([{ '--data':'', '--eventcontent' : 'NANOEDMAOD,DQM' ,'--datatier': 'NANOAOD,DQMIO' }, stepNanoAODDefaults ])
stepNanoEDMMC = merge([{ '--mc':'' , '--eventcontent' : 'NANOEDMAODSIM,DQM','--datatier': 'NANOAODSIM,DQMIO' }, stepNanoAODDefaults ])
stepNanoEDMMCProd = merge([{ '--mc':'', '-s': 'NANO', '--eventcontent' : 'NANOEDMAODSIM','--datatier': 'NANOAODSIM' }, stepNanoAODDefaults ])
stepNanoGen = merge([{ '--mc':'' , '--eventcontent' : 'NANOAODGEN,DQM','--datatier': 'NANOAODSIM,DQMIO' }, stepNanoGenDefaults ])
steps['NANOGENFromGen'] = merge([{'--conditions': 'auto:run2_mc', '--customise' : 'PhysicsTools/NanoAOD/nanogen_cff.customizeNanoGEN'}, stepNanoGen ])
steps['NANOGENFromMini'] = merge([{'--conditions': 'auto:run2_mc'}, stepNanoGen ])
steps['NANOAOD2016'] = merge([{'--conditions': 'auto:run2_data_relval', '--era': 'Run2_2016'}, stepNanoAODData ])
steps['NANOAOD2017'] = merge([{'--conditions': 'auto:run2_data_relval', '--era': 'Run2_2017'}, stepNanoAODData ])
steps['NANOAOD2016_80X'] = merge([{'--era': 'Run2_2016,run2_miniAOD_80XLegacy'}, steps['NANOAOD2016'] ])
steps['NANOAOD2017_94XMiniAODv1'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv1'}, steps['NANOAOD2017'] ])
steps['NANOAOD2017_94XMiniAODv2'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv2'}, steps['NANOAOD2017'] ])
steps['NANOAODMC2016'] = merge([{'--conditions': 'auto:run2_mc', '--era': 'Run2_2016'}, stepNanoAODMC ])
steps['NANOAODMC2017'] = merge([{'--conditions': 'auto:phase1_2017_realistic', '--era': 'Run2_2017'}, stepNanoAODMC ])
steps['NANOAODMC2016_80X'] = merge([{'--era': 'Run2_2016,run2_miniAOD_80XLegacy'}, steps['NANOAODMC2016'] ])
steps['NANOAODMC2017_94XMiniAODv1'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv1'}, steps['NANOAODMC2017'] ])
steps['NANOAODMC2017_94XMiniAODv2'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv2'}, steps['NANOAODMC2017'] ])
steps['NANOAODMC2017_106XMiniAODv1'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_106Xv1'}, steps['NANOAODMC2017'] ])
steps['NANOEDMMC2017'] = merge([{'--conditions': 'auto:phase1_2017_realistic', '--era': 'Run2_2017'}, stepNanoEDMMC ])
steps['NANOEDMMC2017_94XMiniAODv1'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv1'}, steps['NANOEDMMC2017'] ])
steps['NANOEDMMC2017_106XMiniAODv1'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_106Xv1'}, steps['NANOEDMMC2017'] ])
steps['NANOEDMMC2017_94XMiniAODv2'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv2'}, steps['NANOEDMMC2017'] ])
steps['NANOEDMMC2016_80X'] = merge([{'--conditions': 'auto:run2_mc', '--era': 'Run2_2016,run2_miniAOD_80XLegacy'}, steps['NANOEDMMC2017'] ])
steps['NANOEDMMC2018_PROD'] = merge([{'--conditions': 'auto:phase1_2018_realistic', '--era': 'Run2_2018', '--filein':'file:step3_inMINIAODSIM.root'}, stepNanoEDMMCProd ])
steps['NANOUP15'] = merge([{ '--conditions':'auto:run2_mc', '--era':'Run2_2016','-n':'10', '--filein':'file:step3_inMINIAODSIM.root','--nThreads':'2'}, stepNanoEDMMCProd ])
steps['NANOUP15_PU25']=steps['NANOUP15']
steps['NANOUP17'] = merge([{'--conditions':'auto:phase1_2017_realistic','--era': 'Run2_2017','-n':'10' ,'--filein':'file:step3_inMINIAODSIM.root', '--geometry':'DB:Extended', '--nThreads':'2'}, stepNanoEDMMCProd])
steps['NANOUP15Had']=merge([{'--filein':'file:step4_inMINIAODSIM.root'},steps['NANOUP15']])
steps['NANOUP15MC_PU25_JME']=merge([{'--customise':'PhysicsTools/NanoAOD/custom_jme_cff.PrepJMECustomNanoAOD_MC'},steps['NANOUP15']])
steps['NANOUP15Data_PU25_JME']=merge([{'--customise':'PhysicsTools/NanoAOD/custom_jme_cff.PrepJMECustomNanoAOD_Data','--data':''},steps['NANOUP15']])
steps['NANOUP17Had']=merge([{'--filein':'file:step4_inMINIAODSIM.root'},steps['NANOUP17']])
steps['NANOUP18'] = merge([{'--conditions': 'auto:phase1_2018_realistic', '--era': 'Run2_2018','-n':'10', '--filein':'file:step3_inMINIAODSIM.root', '--nThreads':'2'}, stepNanoEDMMCProd ])
steps['NANOUP18Had']=merge([{'--filein':'file:step4_inMINIAODSIM.root'},steps['NANOUP18']])
steps['NANOPRODUP18']=merge([{'--filein':'file:step4.root'},steps['NANOUP18']])
steps['NANOUP18ml']=merge([concurrentLumis,steps['NANOUP18']])
steps['NANOPRODUP18ml']=merge([{'--filein':'file:step4.root'},steps['NANOUP18ml']])
steps['NANOEDM2017'] = merge([{'--conditions': 'auto:run2_data_relval', '--era': 'Run2_2017'}, stepNanoEDMData ])
steps['NANOEDM2017_94XMiniAODv1'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv1'}, steps['NANOEDM2017'] ])
steps['NANOEDM2017_94XMiniAODv2'] = merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv2'}, steps['NANOEDM2017'] ])
steps['NANOEDM2016_80X'] = merge([{'--era': 'Run2_2016,run2_miniAOD_80XLegacy'}, steps['NANOEDM2017'] ])
steps['NANOEDM2018'] = merge([ {'--conditions': 'auto:run2_data', '--era': 'Run2_2018'}, stepNanoEDMData ])
steps['NANOEDM2018_102Xv1'] = merge([ {'--era': 'Run2_2018,run2_nanoAOD_102Xv1'}, steps['NANOEDM2018'] ])
steps['NANOEDM2018_106Xv1'] = merge([ {'--era': 'Run2_2018,run2_nanoAOD_106Xv1'}, steps['NANOEDM2018'] ])
steps['HARVESTNANOAODMC2017']=merge([{'-s':'HARVESTING:@nanoAODDQM','--conditions': 'auto:phase1_2017_realistic','--era': 'Run2_2017'},steps['HARVESTUP15']])
steps['HARVESTNANOAODMC2017_94XMiniAODv1']=merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv1'},steps['HARVESTNANOAODMC2017']])
steps['HARVESTNANOAODMC2017_94XMiniAODv2']=merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv2'},steps['HARVESTNANOAODMC2017']])
steps['HARVESTNANOAODMC2016_80X']=merge([{'--conditions': 'auto:run2_mc','--era': 'Run2_2016,run2_miniAOD_80XLegacy'},steps['HARVESTNANOAODMC2017']])
steps['HARVESTNANOAODMC2017_106XMiniAODv1']=merge([{'--era': 'Run2_2017,run2_nanoAOD_106Xv1'},steps['HARVESTNANOAODMC2017']])
steps['HARVESTNANOAOD2017']=merge([{'--data':'','-s':'HARVESTING:@nanoAODDQM','--conditions':'auto:run2_data_relval','--era':'Run2_2017'},steps['HARVESTDR2']])
steps['HARVESTNANOAOD2017_94XMiniAODv1']=merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv1'},steps['HARVESTNANOAOD2017']])
steps['HARVESTNANOAOD2017_94XMiniAODv2']=merge([{'--era': 'Run2_2017,run2_nanoAOD_94XMiniAODv2'},steps['HARVESTNANOAOD2017']])
steps['HARVESTNANOAOD2016_80X']=merge([{'--era': 'Run2_2016,run2_miniAOD_80XLegacy'},steps['HARVESTNANOAOD2017']])
steps['HARVESTNANOAOD2018']=merge([{'--conditions': 'auto:run2_data', '--era':'Run2_2018'}, steps['HARVESTNANOAOD2017']])
steps['HARVESTNANOAOD2018_102Xv1']=merge([{'--era':'Run2_2018,run2_nanoAOD_102Xv1'}, steps['HARVESTNANOAOD2018']])
steps['HARVESTNANOAOD2018_106Xv1']=merge([{'--era':'Run2_2018,run2_nanoAOD_106Xv1'}, steps['HARVESTNANOAOD2018']])
steps['NANOMERGE'] = { '-s': 'ENDJOB', '-n': 1000 , '--eventcontent' : 'NANOAODSIM','--datatier': 'NANOAODSIM', '--conditions': 'auto:run2_mc' }
steps['HEfail'] = {'--conditions':'auto:phase1_2018_realistic_HEfail',
'-n':'10',
'--pileup':'AVE_50_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[18]),
'--era' : 'Run2_2018',
'--geometry' : 'DB:Extended'
}
steps['DigiFullHEfail']=merge([{'-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2018','--datatier':'GEN-SIM-DIGI-RAW','--eventcontent':'FEVTDEBUGHLT'}, steps['HEfail']])
steps['RecoFullHEfail']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
'--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
'--eventcontent':'RECOSIM,MINIAODSIM,DQM',
}, steps['HEfail']])
steps['HARVESTFullHEfail']=merge([{'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM','--mc':'','--scenario' : 'pp','--filetype':'DQM'}, steps['HEfail']])
steps['NanoFullHEfail']={'-s':'NANO',
'--conditions':'auto:phase1_2018_realistic_HEfail',
'-n':'10',
'--era' : 'Run2_2018',
'--geometry' : 'DB:Extended',
'--datatier':'NANOAODSIM',
'--eventcontent':'NANOEDMAODSIM',
'--filein':'file:step3_inMINIAODSIM.root'}
steps['DigiFullBadHcalMitig']=merge([{'--era' : 'Run2_2018,pf_badHcalMitigation'}, steps['DigiFullHEfail']])
steps['RecoFullBadHcalMitig']=merge([{'--era' : 'Run2_2018,pf_badHcalMitigation'}, steps['RecoFullHEfail']])
steps['HARVESTFullBadHcalMitig']=merge([{'--era' : 'Run2_2018,pf_badHcalMitigation'}, steps['HARVESTFullHEfail']])
steps['NanoFullBadHcalMitig']=merge([{'--era' : 'Run2_2018,pf_badHcalMitigation'}, steps['NanoFullHEfail']])
#################################################################################
####From this line till the end of the file :
####UPGRADE WORKFLOWS IN PREPARATION - Gaelle's sandbox -
#####Accessible only through the option --what upgrade
#####(unless specifically added to relval_2026.py)
#####Transparent for any of the standard workflows
#### list of worflows defined (not necessarly running though): runTheMatrix.py --what upgrade -n
####
###
#################################################################################
from Configuration.PyReleaseValidation.upgradeWorkflowComponents import *
# imported from above, only non-empty values should be provided here
defaultDataSets['2017']='CMSSW_11_2_0_pre8-112X_mc2017_realistic_v3-v'
defaultDataSets['2017Design']='CMSSW_11_2_0_pre8-112X_mc2017_design_v1-v'
defaultDataSets['2018']='CMSSW_11_2_0_pre8-112X_upgrade2018_realistic_v4-v'
defaultDataSets['2018Design']='CMSSW_11_2_0_pre8-112X_upgrade2018_design_v3-v'
defaultDataSets['2021']='CMSSW_11_2_0_pre8-112X_mcRun3_2021_realistic_v10-v'
defaultDataSets['2021Design']='CMSSW_11_2_0_pre8-112X_mcRun3_2021_design_v10-v'
defaultDataSets['2023']='CMSSW_11_2_0_pre8-112X_mcRun3_2023_realistic_v10-v'
defaultDataSets['2024']='CMSSW_11_2_0_pre8-112X_mcRun3_2024_realistic_v10-v'
defaultDataSets['2026D49']='CMSSW_11_2_0_pre8-112X_mcRun4_realistic_v3_2026D49noPU-v'
puDataSets = {}
for key, value in defaultDataSets.items(): puDataSets[key+'PU'] = value
defaultDataSets.update(puDataSets)
# sometimes v1 won't be used - override it here - the dictionary key is gen fragment + '_' + geometry
versionOverrides={'BuMixing_BMuonFilter_forSTEAM_13TeV_TuneCUETP8M1_2017':'2','HSCPstop_M_200_TuneCUETP8M1_13TeV_pythia8_2017':'2','RSGravitonToGammaGamma_kMpl01_M_3000_TuneCUETP8M1_13TeV_pythia8_2017':'2','WprimeToENu_M-2000_TuneCUETP8M1_13TeV-pythia8_2017':'2','DisplacedSUSY_stopToBottom_M_300_1000mm_TuneCUETP8M1_13TeV_pythia8_2017':'2','TenE_E_0_200_pythia8_2017':'2','TenE_E_0_200_pythia8_2017PU':'2', 'TenTau_E_15_500_pythia8_2018':'2','PhotonJet_Pt_10_13TeV_TuneCUETP8M1_2018':'2','Wjet_Pt_80_120_13TeV_TuneCUETP8M1_2018':'2'}
baseDataSetReleaseBetter={}
for gen in upgradeFragments:
for ds in defaultDataSets:
key=gen[:-4]+'_'+ds
version='1'
if key in versionOverrides:
version = versionOverrides[key]
baseDataSetReleaseBetter[key]=defaultDataSets[ds]+version
PUDataSets={}
for ds in defaultDataSets:
key='MinBias_14TeV_pythia8_TuneCP5'+'_'+ds
name=baseDataSetReleaseBetter[key]
if '2017' in ds:
PUDataSets[ds]={'-n':10,'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(name,)}
elif '2018' in ds:
PUDataSets[ds]={'-n':10,'--pileup':'AVE_50_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(name,)}
elif '2021' in ds:
PUDataSets[ds]={'-n':10,'--pileup':'Run3_Flat55To75_PoissonOOTPU','--pileup_input':'das:/RelValMinBias_14TeV/%s/GEN-SIM'%(name,)}
elif 'postLS2' in ds:
PUDataSets[ds]={'-n':10,'--pileup':'AVE_50_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(name,)}
elif '2026' in ds:
PUDataSets[ds]={'-n':10,'--pileup':'AVE_200_BX_25ns','--pileup_input':'das:/RelValMinBias_14TeV/%s/GEN-SIM'%(name,)}
else:
PUDataSets[ds]={'-n':10,'--pileup':'AVE_35_BX_25ns','--pileup_input':'das:/RelValMinBias_14TeV/%s/GEN-SIM'%(name,)}
#PUDataSets[ds]={'-n':10,'--pileup':'AVE_50_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(name,)}
#PUDataSets[ds]={'-n':10,'--pileup':'AVE_70_BX_25ns','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(name,)}
upgradeStepDict={}
for specialType,specialWF in six.iteritems(upgradeWFs):
specialWF.init(upgradeStepDict)
# just make all combinations - yes, some will be nonsense.. but then these are not used unless specified above
# collapse upgradeKeys using list comprehension
for year,k in [(year,k) for year in upgradeKeys for k in upgradeKeys[year]]:
k2=k
if 'PU' in k[-2:]:
k2=k[:-2]
geom=upgradeProperties[year][k]['Geom']
gt=upgradeProperties[year][k]['GT']
hltversion=upgradeProperties[year][k].get('HLTmenu')
beamspot=upgradeProperties[year][k].get('BeamSpot', None)
# setup baseline steps
upgradeStepDict['GenSim'][k]= {'-s' : 'GEN,SIM',
'-n' : 10,
'--conditions' : gt,
'--beamspot' : 'Realistic25ns13TeVEarly2017Collision',
'--datatier' : 'GEN-SIM',
'--eventcontent': 'FEVTDEBUG',
'--geometry' : geom
}
if beamspot is not None: upgradeStepDict['GenSim'][k]['--beamspot']=beamspot
upgradeStepDict['GenSimHLBeamSpot'][k]= {'-s' : 'GEN,SIM',
'-n' : 10,
'--conditions' : gt,
'--beamspot' : 'HLLHC',
'--datatier' : 'GEN-SIM',
'--eventcontent': 'FEVTDEBUG',
'--geometry' : geom
}
upgradeStepDict['GenSimHLBeamSpot14'][k]= {'-s' : 'GEN,SIM',
'-n' : 10,
'--conditions' : gt,
'--beamspot' : 'HLLHC14TeV',
'--datatier' : 'GEN-SIM',
'--eventcontent': 'FEVTDEBUG',
'--geometry' : geom
}
upgradeStepDict['Digi'][k] = {'-s':'DIGI:pdigi_valid,L1,DIGI2RAW,HLT:%s'%(hltversion),
'--conditions':gt,
'--datatier':'GEN-SIM-DIGI-RAW',
'-n':'10',
'--eventcontent':'FEVTDEBUGHLT',
'--geometry' : geom
}
# Adding Track trigger step in step2
upgradeStepDict['DigiTrigger'][k] = {'-s':'DIGI:pdigi_valid,L1TrackTrigger,L1,DIGI2RAW,HLT:%s'%(hltversion),
'--conditions':gt,
'--datatier':'GEN-SIM-DIGI-RAW',
'-n':'10',
'--eventcontent':'FEVTDEBUGHLT',
'--geometry' : geom
}
upgradeStepDict['Reco'][k] = {'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidation+@miniAODValidation,DQM:@standardDQM+@ExtraHLT+@miniAODDQM',
'--conditions':gt,
'--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
'-n':'10',
'--eventcontent':'RECOSIM,MINIAODSIM,DQM',
'--geometry' : geom
}
upgradeStepDict['RecoFakeHLT'][k] = {'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidationNoHLT+@miniAODValidation,DQM:@standardDQMFakeHLT+@miniAODDQM',
'--conditions':gt,
'--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
'-n':'10',
'--eventcontent':'RECOSIM,MINIAODSIM,DQM',
'--geometry' : geom
}
upgradeStepDict['RecoGlobal'][k] = {'-s':'RAW2DIGI,L1Reco,RECO,RECOSIM,PAT,VALIDATION:@phase2Validation+@miniAODValidation,DQM:@phase2+@miniAODDQM',
'--conditions':gt,
'--datatier':'GEN-SIM-RECO,MINIAODSIM,DQMIO',
'-n':'10',
'--eventcontent':'FEVTDEBUGHLT,MINIAODSIM,DQM',
'--geometry' : geom
}
upgradeStepDict['RecoLocal'][k] = {'-s':'RAW2DIGI,L1Reco,RECO:localreco',
'--conditions':gt,
'--datatier':'GEN-SIM-RECO',
'-n':'10',
'--eventcontent':'FEVTDEBUGHLT',
'--geometry' : geom
}
upgradeStepDict['MiniAOD'][k] = {'-s':'PAT',
'--conditions':gt,
'--datatier':'MINIAODSIM',
'-n':'10',
'--eventcontent':'MINIAODSIM',
'--geometry' : geom
}
upgradeStepDict['HARVEST'][k]={'-s':'HARVESTING:@standardValidation+@standardDQM+@ExtraHLT+@miniAODValidation+@miniAODDQM',
'--conditions':gt,
'--mc':'',
'--geometry' : geom,
'--scenario' : 'pp',
'--filetype':'DQM',
}
upgradeStepDict['HARVESTFakeHLT'][k]={'-s':'HARVESTING:@standardValidationNoHLT+@standardDQMFakeHLT+@miniAODValidation+@miniAODDQM',
'--conditions':gt,
'--mc':'',
'--geometry' : geom,
'--scenario' : 'pp',
'--filetype':'DQM',
}
upgradeStepDict['HARVESTGlobal'][k] = merge([{'-s': 'HARVESTING:@phase2Validation+@phase2+@miniAODValidation+@miniAODDQM'}, upgradeStepDict['HARVEST'][k]])
upgradeStepDict['ALCA'][k] = {'-s':'ALCA:SiPixelCalSingleMuon+TkAlMuonIsolated+TkAlMinBias+MuAlOverlaps+EcalESAlign+TkAlZMuMu+HcalCalHBHEMuonFilter+TkAlUpsilonMuMu+TkAlJpsiMuMu+SiStripCalMinBias',
'--conditions':gt,
'--datatier':'ALCARECO',
'-n':'10',
'--eventcontent':'ALCARECO',
'--geometry' : geom,
'--filein':'file:step3.root'
}
upgradeStepDict['FastSim'][k]={'-s':'GEN,SIM,RECO,VALIDATION',
'--eventcontent':'FEVTDEBUGHLT,DQM',
'--datatier':'GEN-SIM-DIGI-RECO,DQMIO',
'--conditions':gt,
'--fast':'',
'--geometry' : geom,
'--relval':'27000,3000'}
upgradeStepDict['HARVESTFast'][k]={'-s':'HARVESTING:validationHarvesting',
'--conditions':gt,
'--mc':'',
'--geometry' : geom,
'--scenario' : 'pp'
}
upgradeStepDict['Nano'][k] = {'-s':'NANO',
'--conditions':gt,
'--datatier':'NANOAODSIM',
'-n':'10',
'--eventcontent':'NANOEDMAODSIM',
'--filein':'file:step3_inMINIAODSIM.root',
'--geometry' : geom
}
# setup baseline and variations
for specialType,specialWF in six.iteritems(upgradeWFs):
specialWF.setup(upgradeStepDict, k, upgradeProperties[year][k])
# setup PU
if k2 in PUDataSets:
for specialType,specialWF in six.iteritems(upgradeWFs):
for step in specialWF.PU:
stepName = specialWF.getStepName(step)
stepNamePU = specialWF.getStepNamePU(step)
if k not in upgradeStepDict[stepName] or upgradeStepDict[stepName][k] is None:
upgradeStepDict[stepNamePU][k] = None
else:
upgradeStepDict[stepNamePU][k]=merge([PUDataSets[k2],upgradeStepDict[stepName][k]])
# in case special WF has PU-specific changes: apply *after* basic PU step is created
specialWF.setupPU(upgradeStepDict, k, upgradeProperties[year][k])
for step in upgradeStepDict.keys():
# we need to do this for each fragment
if 'Sim' in step or 'Premix' in step:
for frag,info in six.iteritems(upgradeFragments):
howMuch=info.howMuch
for key in [key for year in upgradeKeys for key in upgradeKeys[year]]:
k=frag[:-4]+'_'+key+'_'+step
if step in upgradeStepDict and key in upgradeStepDict[step]:
if upgradeStepDict[step][key] is None:
steps[k]=None
elif 'Premix' in step:
# Include premixing stage1 only for SingleNu, use special step name
if not 'SingleNu' in frag:
continue
stepKey = 'PREMIX_'+key+'_'+step
howMuch = Kby(100,100)
steps[stepKey]=merge([ {'--evt_type':frag},howMuch,upgradeStepDict[step][key]])
else:
steps[k]=merge([ {'cfg':frag},howMuch,upgradeStepDict[step][key]])
#get inputs in case of -i...but no need to specify in great detail
#however, there can be a conflict of beam spots but this is lost in the dataset name
#so please be careful
s=frag[:-4]+'_'+key
# exclude upgradeKeys without input dataset, and special WFs that disable reuse
istep = step+preventReuseKeyword
if 'FastSim' not in k and s+'INPUT' not in steps and s in baseDataSetReleaseBetter and defaultDataSets[key] != '' and \
(istep not in upgradeStepDict or key not in upgradeStepDict[istep] or upgradeStepDict[istep][key] is not None):
steps[k+'INPUT']={'INPUT':InputInfo(dataSet='/RelVal'+info.dataset+'/%s/GEN-SIM'%(baseDataSetReleaseBetter[s],),location='STD')}
else:
for key in [key for year in upgradeKeys for key in upgradeKeys[year]]:
k=step+'_'+key
if step in upgradeStepDict and key in upgradeStepDict[step]:
if upgradeStepDict[step][key] is None:
steps[k]=None
else:
steps[k]=merge([upgradeStepDict[step][key]])
| 78.34012 | 937 | 0.70005 |
d8c49909f143e1631d152917c6aaf740ddb9beee | 3,835 | py | Python | Game/Universe.py | NoNotCar/SpaceX | 803390fdf0ed3deee18d8f0d101ae575cf9b55e1 | [
"MIT"
] | null | null | null | Game/Universe.py | NoNotCar/SpaceX | 803390fdf0ed3deee18d8f0d101ae575cf9b55e1 | [
"MIT"
] | null | null | null | Game/Universe.py | NoNotCar/SpaceX | 803390fdf0ed3deee18d8f0d101ae575cf9b55e1 | [
"MIT"
] | null | null | null | from .Player import Player
from . import Area
from Lib import Img,Vector
from . import Generators,Boxes,Registry,Gamemodes,Research
from Engine import Items
from Objects import Transport,Special
from Objects.Machines import Production,Basic
from random import shuffle
import pickle,pygame
Registry.add_recipe({"Iron":2},Items.resources["Gear"])
Registry.add_process_recipe("Smelting",("Iron",5),(Items.resources["Steel"],1),200)
Registry.add_recipe({"Steel":5},Items.resources["Girder"])
Registry.add_recipe({"Iron":1,"Wire":2},Items.resources["Circuit"])
Registry.add_recipe({"Copper":1},(Items.resources["Wire"],3))
#Research.add_recipesearch({"ChaosCrystal":3,"Stone":10},(Items.resources["ChaosCrystal"],4),[1],10)
AREA_SIZE=100
class PSector(object):
def __init__(self,p,pos):
self.planet=p
self.pos=pos
def out_warp(self,area,pos,d):
return self.planet.out_warp(pos,d,self.pos)
class Planet(object):
def __init__(self,bands):
self.surface={}
self.bands=bands
self.bound=len(bands)
def update(self,events):
for a in list(self.surface.values()):
a.update(events)
def out_warp(self,pos,d,spos):
tpos=spos+d
if abs(tpos.y)<self.bound:
blength=self.bound-abs(tpos.y)-1
tpos=Vector.VectorX(min(max(tpos.x,-blength),blength) if d.y else (tpos.x+blength)%(blength*2+1)-blength,tpos.y)
return Vector.Coordinate(self[tpos],(pos+d)%AREA_SIZE)
def __getitem__(self, item):
if item not in self.surface:
self.surface[item]=Area.LargeArea(Vector.VectorX(AREA_SIZE,AREA_SIZE),self.bands[abs(item.y)](),PSector(self,item))
return self.surface[item]
class Universe(object):
AUTOSAVE_INTERVAL=3600
t=0
def __init__(self,js,ssz,gm):
self.planet=Planet([Generators.Islands, Generators.IceCap])
self.players=[Player(None,j) for j in js]
self.gm=gm
self.gm.setup(self.planet,self.players)
for p in self.players:
p.ssz=ssz
self.gm.starting_inv(p.inv)
def update(self,events):
for e in events:
if e.type==pygame.KEYDOWN and e.key==pygame.K_F1:
self.save()
self.t+=1
if self.t==self.AUTOSAVE_INTERVAL:
self.t=0
self.save()
self.planet.update(events)
def saved(self):
return self,Research.current_research,Research.done,Research.rprogs,Items.chaos_slots
def save(self):
with open(Img.np(Img.loc+"autosave.sav"),"wb") as f:
pickle.dump(self.saved(),f)
def reload(self,js,ssz):
for n,p in enumerate(self.players):
p.ssz=ssz
try:
p.j=js[n]
p.col=js[n].col
except IndexError:
p.coords.area.dobj(p,p.coords.pos)
self.players.remove(p)
p.gui=None
for x,j in enumerate(js[n+1:]):
np=Player(None,j)
np.spawn=self.players[min((x+n+1)%2,len(self.players)-1)].spawn
np.respawn()
np.team=(x+n+1)%2
self.players.append(np)
def re_search(r):
return None if r is None else [re for re in Research.all_researches if re.name==(r if isinstance(r,str) else r.name)][0]
def load(file):
Vector.special_mode=True
with open(Img.np(Img.loc + "%s.sav" % file),"rb") as f:
comp=pickle.load(f)
Vector.special_mode=False
universe = comp[0]
for k, v in comp[2].items():
for r in v:
Research.current_research[k] = re_search(r)
Research.on_complete(k)
for k,v in comp[1].items():
Research.current_research[k] = re_search(v)
Research.rprogs = comp[3]
Items.chaos_slots=comp[4]
return universe
| 38.737374 | 127 | 0.61721 |
21004514930074b11f0dd01a835fd2f4a55787f5 | 141,759 | py | Python | pypy/interpreter/astcompiler/ast.py | Qointum/pypy | c0ed88efbc135a75a535f4534ca1f3baf0bf39d8 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | pypy/interpreter/astcompiler/ast.py | Qointum/pypy | c0ed88efbc135a75a535f4534ca1f3baf0bf39d8 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | pypy/interpreter/astcompiler/ast.py | Qointum/pypy | c0ed88efbc135a75a535f4534ca1f3baf0bf39d8 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | # Generated by tools/asdl_py.py
from rpython.tool.pairtype import extendabletype
from rpython.tool.sourcetools import func_with_new_name
from pypy.interpreter import typedef
from pypy.interpreter.baseobjspace import W_Root
from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.gateway import interp2app
def raise_attriberr(space, w_obj, name):
raise oefmt(space.w_AttributeError,
"'%T' object has no attribute '%s'", w_obj, name)
def check_string(space, w_obj):
if not (space.isinstance_w(w_obj, space.w_str) or
space.isinstance_w(w_obj, space.w_unicode)):
raise OperationError(space.w_TypeError, space.wrap(
'AST string must be of type str or unicode'))
return w_obj
def get_field(space, w_node, name, optional):
w_obj = w_node.getdictvalue(space, name)
if w_obj is None:
if not optional:
raise oefmt(space.w_TypeError,
"required field \"%s\" missing from %T", name, w_node)
w_obj = space.w_None
return w_obj
class AST(object):
__metaclass__ = extendabletype
def walkabout(self, visitor):
raise AssertionError("walkabout() implementation not provided")
def mutate_over(self, visitor):
raise AssertionError("mutate_over() implementation not provided")
class NodeVisitorNotImplemented(Exception):
pass
class _FieldsWrapper(W_Root):
"Hack around the fact we can't store tuples on a TypeDef."
def __init__(self, fields):
self.fields = fields
def __spacebind__(self, space):
return space.newtuple([space.wrap(field) for field in self.fields])
class W_AST(W_Root):
w_dict = None
def getdict(self, space):
if self.w_dict is None:
self.w_dict = space.newdict(instance=True)
return self.w_dict
def reduce_w(self, space):
w_dict = self.w_dict
if w_dict is None:
w_dict = space.newdict()
w_type = space.type(self)
w_fields = space.getattr(w_type, space.wrap("_fields"))
for w_name in space.fixedview(w_fields):
try:
space.setitem(w_dict, w_name,
space.getattr(self, w_name))
except OperationError:
pass
w_attrs = space.findattr(w_type, space.wrap("_attributes"))
if w_attrs:
for w_name in space.fixedview(w_attrs):
try:
space.setitem(w_dict, w_name,
space.getattr(self, w_name))
except OperationError:
pass
return space.newtuple([space.type(self),
space.newtuple([]),
w_dict])
def setstate_w(self, space, w_state):
for w_name in space.unpackiterable(w_state):
space.setattr(self, w_name,
space.getitem(w_state, w_name))
def W_AST_new(space, w_type, __args__):
node = space.allocate_instance(W_AST, w_type)
return space.wrap(node)
def W_AST_init(space, w_self, __args__):
args_w, kwargs_w = __args__.unpack()
fields_w = space.fixedview(space.getattr(space.type(w_self),
space.wrap("_fields")))
num_fields = len(fields_w) if fields_w else 0
if args_w and len(args_w) != num_fields:
if num_fields == 0:
raise oefmt(space.w_TypeError,
"%T constructor takes 0 positional arguments", w_self)
elif num_fields == 1:
raise oefmt(space.w_TypeError,
"%T constructor takes either 0 or %d positional argument", w_self, num_fields)
else:
raise oefmt(space.w_TypeError,
"%T constructor takes either 0 or %d positional arguments", w_self, num_fields)
if args_w:
for i, w_field in enumerate(fields_w):
space.setattr(w_self, w_field, args_w[i])
for field, w_value in kwargs_w.iteritems():
space.setattr(w_self, space.wrap(field), w_value)
W_AST.typedef = typedef.TypeDef("_ast.AST",
_fields=_FieldsWrapper([]),
_attributes=_FieldsWrapper([]),
__reduce__=interp2app(W_AST.reduce_w),
__setstate__=interp2app(W_AST.setstate_w),
__dict__ = typedef.GetSetProperty(typedef.descr_get_dict,
typedef.descr_set_dict, cls=W_AST),
__new__=interp2app(W_AST_new),
__init__=interp2app(W_AST_init),
)
class State:
AST_TYPES = []
@classmethod
def ast_type(cls, name, base, fields, attributes=None):
cls.AST_TYPES.append((name, base, fields, attributes))
def __init__(self, space):
self.w_AST = space.gettypeobject(W_AST.typedef)
for (name, base, fields, attributes) in self.AST_TYPES:
self.make_new_type(space, name, base, fields, attributes)
def make_new_type(self, space, name, base, fields, attributes):
w_base = getattr(self, 'w_%s' % base)
w_dict = space.newdict()
space.setitem_str(w_dict, '__module__', space.wrap('_ast'))
if fields is not None:
space.setitem_str(w_dict, "_fields",
space.newtuple([space.wrap(f) for f in fields]))
if attributes is not None:
space.setitem_str(w_dict, "_attributes",
space.newtuple([space.wrap(a) for a in attributes]))
w_type = space.call_function(
space.w_type,
space.wrap(name), space.newtuple([w_base]), w_dict)
setattr(self, 'w_%s' % name, w_type)
def get(space):
return space.fromcache(State)
class mod(AST):
@staticmethod
def from_object(space, w_node):
if space.is_w(w_node, space.w_None):
return None
if space.isinstance_w(w_node, get(space).w_Module):
return Module.from_object(space, w_node)
if space.isinstance_w(w_node, get(space).w_Interactive):
return Interactive.from_object(space, w_node)
if space.isinstance_w(w_node, get(space).w_Expression):
return Expression.from_object(space, w_node)
if space.isinstance_w(w_node, get(space).w_Suite):
return Suite.from_object(space, w_node)
raise oefmt(space.w_TypeError,
"Expected mod node, got %T", w_node)
State.ast_type('mod', 'AST', None, [])
class Module(mod):
def __init__(self, body):
self.body = body
def walkabout(self, visitor):
visitor.visit_Module(self)
def mutate_over(self, visitor):
if self.body:
visitor._mutate_sequence(self.body)
return visitor.visit_Module(self)
def to_object(self, space):
w_node = space.call_function(get(space).w_Module)
if self.body is None:
body_w = []
else:
body_w = [node.to_object(space) for node in self.body] # stmt
w_body = space.newlist(body_w)
space.setattr(w_node, space.wrap('body'), w_body)
return w_node
@staticmethod
def from_object(space, w_node):
w_body = get_field(space, w_node, 'body', False)
body_w = space.unpackiterable(w_body)
_body = [stmt.from_object(space, w_item) for w_item in body_w]
return Module(_body)
State.ast_type('Module', 'mod', ['body'])
class Interactive(mod):
def __init__(self, body):
self.body = body
def walkabout(self, visitor):
visitor.visit_Interactive(self)
def mutate_over(self, visitor):
if self.body:
visitor._mutate_sequence(self.body)
return visitor.visit_Interactive(self)
def to_object(self, space):
w_node = space.call_function(get(space).w_Interactive)
if self.body is None:
body_w = []
else:
body_w = [node.to_object(space) for node in self.body] # stmt
w_body = space.newlist(body_w)
space.setattr(w_node, space.wrap('body'), w_body)
return w_node
@staticmethod
def from_object(space, w_node):
w_body = get_field(space, w_node, 'body', False)
body_w = space.unpackiterable(w_body)
_body = [stmt.from_object(space, w_item) for w_item in body_w]
return Interactive(_body)
State.ast_type('Interactive', 'mod', ['body'])
class Expression(mod):
def __init__(self, body):
self.body = body
def walkabout(self, visitor):
visitor.visit_Expression(self)
def mutate_over(self, visitor):
self.body = self.body.mutate_over(visitor)
return visitor.visit_Expression(self)
def to_object(self, space):
w_node = space.call_function(get(space).w_Expression)
w_body = self.body.to_object(space) # expr
space.setattr(w_node, space.wrap('body'), w_body)
return w_node
@staticmethod
def from_object(space, w_node):
w_body = get_field(space, w_node, 'body', False)
_body = expr.from_object(space, w_body)
return Expression(_body)
State.ast_type('Expression', 'mod', ['body'])
class Suite(mod):
def __init__(self, body):
self.body = body
def walkabout(self, visitor):
visitor.visit_Suite(self)
def mutate_over(self, visitor):
if self.body:
visitor._mutate_sequence(self.body)
return visitor.visit_Suite(self)
def to_object(self, space):
w_node = space.call_function(get(space).w_Suite)
if self.body is None:
body_w = []
else:
body_w = [node.to_object(space) for node in self.body] # stmt
w_body = space.newlist(body_w)
space.setattr(w_node, space.wrap('body'), w_body)
return w_node
@staticmethod
def from_object(space, w_node):
w_body = get_field(space, w_node, 'body', False)
body_w = space.unpackiterable(w_body)
_body = [stmt.from_object(space, w_item) for w_item in body_w]
return Suite(_body)
State.ast_type('Suite', 'mod', ['body'])
class stmt(AST):
    """Abstract base class for statement nodes.

    Carries the source position (``lineno``, ``col_offset``) shared by all
    statements; concrete subclasses chain to ``stmt.__init__``.
    """
    def __init__(self, lineno, col_offset):
        self.lineno = lineno
        self.col_offset = col_offset
    @staticmethod
    def from_object(space, w_node):
        """Dispatch unwrapping to the concrete stmt subclass.

        Tests the app-level object against each registered statement type in
        turn; ``None`` maps to ``None``; anything else raises TypeError.
        """
        if space.is_w(w_node, space.w_None):
            return None
        if space.isinstance_w(w_node, get(space).w_FunctionDef):
            return FunctionDef.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_ClassDef):
            return ClassDef.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Return):
            return Return.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Delete):
            return Delete.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Assign):
            return Assign.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_AugAssign):
            return AugAssign.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_For):
            return For.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_While):
            return While.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_If):
            return If.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_With):
            return With.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Raise):
            return Raise.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_TryExcept):
            return TryExcept.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_TryFinally):
            return TryFinally.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Assert):
            return Assert.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Import):
            return Import.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_ImportFrom):
            return ImportFrom.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Global):
            return Global.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Nonlocal):
            return Nonlocal.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Expr):
            return Expr.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Pass):
            return Pass.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Break):
            return Break.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Continue):
            return Continue.from_object(space, w_node)
        raise oefmt(space.w_TypeError,
                "Expected stmt node, got %T", w_node)
State.ast_type('stmt', 'AST', None, ['lineno', 'col_offset'])
class FunctionDef(stmt):
    """``FunctionDef`` statement; fields: name (identifier), args (arguments),
    body (stmt list), decorator_list (expr list), returns (optional expr).
    """
    def __init__(self, name, args, body, decorator_list, returns, lineno, col_offset):
        self.name = name
        self.args = args
        self.body = body
        self.decorator_list = decorator_list
        self.returns = returns
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_FunctionDef(self)
    def mutate_over(self, visitor):
        # Mutate children in field order before visiting this node itself.
        self.args = self.args.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.decorator_list:
            visitor._mutate_sequence(self.decorator_list)
        if self.returns:
            self.returns = self.returns.mutate_over(visitor)
        return visitor.visit_FunctionDef(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_FunctionDef)
        w_name = space.wrap(self.name.decode('utf-8')) # identifier
        space.setattr(w_node, space.wrap('name'), w_name)
        w_args = self.args.to_object(space) # arguments
        space.setattr(w_node, space.wrap('args'), w_args)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.decorator_list is None:
            decorator_list_w = []
        else:
            decorator_list_w = [node.to_object(space) for node in self.decorator_list] # expr
        w_decorator_list = space.newlist(decorator_list_w)
        space.setattr(w_node, space.wrap('decorator_list'), w_decorator_list)
        # Optional field: exposed as None app-level when absent.
        w_returns = self.returns.to_object(space) if self.returns is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('returns'), w_returns)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_name = get_field(space, w_node, 'name', False)
        w_args = get_field(space, w_node, 'args', False)
        w_body = get_field(space, w_node, 'body', False)
        w_decorator_list = get_field(space, w_node, 'decorator_list', False)
        w_returns = get_field(space, w_node, 'returns', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _name = space.identifier_w(w_name)
        _args = arguments.from_object(space, w_args)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        decorator_list_w = space.unpackiterable(w_decorator_list)
        _decorator_list = [expr.from_object(space, w_item) for w_item in decorator_list_w]
        _returns = expr.from_object(space, w_returns) if w_returns is not None else None
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return FunctionDef(_name, _args, _body, _decorator_list, _returns, _lineno, _col_offset)
State.ast_type('FunctionDef', 'stmt', ['name', 'args', 'body', 'decorator_list', 'returns'])
class ClassDef(stmt):
    """``ClassDef`` statement; fields: name, bases, keywords, starargs,
    kwargs, body, decorator_list (starargs/kwargs are optional exprs —
    this AST predates their removal from the grammar).
    """
    def __init__(self, name, bases, keywords, starargs, kwargs, body, decorator_list, lineno, col_offset):
        self.name = name
        self.bases = bases
        self.keywords = keywords
        self.starargs = starargs
        self.kwargs = kwargs
        self.body = body
        self.decorator_list = decorator_list
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_ClassDef(self)
    def mutate_over(self, visitor):
        # Mutate children in field order before visiting this node itself.
        if self.bases:
            visitor._mutate_sequence(self.bases)
        if self.keywords:
            visitor._mutate_sequence(self.keywords)
        if self.starargs:
            self.starargs = self.starargs.mutate_over(visitor)
        if self.kwargs:
            self.kwargs = self.kwargs.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.decorator_list:
            visitor._mutate_sequence(self.decorator_list)
        return visitor.visit_ClassDef(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_ClassDef)
        w_name = space.wrap(self.name.decode('utf-8')) # identifier
        space.setattr(w_node, space.wrap('name'), w_name)
        if self.bases is None:
            bases_w = []
        else:
            bases_w = [node.to_object(space) for node in self.bases] # expr
        w_bases = space.newlist(bases_w)
        space.setattr(w_node, space.wrap('bases'), w_bases)
        if self.keywords is None:
            keywords_w = []
        else:
            keywords_w = [node.to_object(space) for node in self.keywords] # keyword
        w_keywords = space.newlist(keywords_w)
        space.setattr(w_node, space.wrap('keywords'), w_keywords)
        w_starargs = self.starargs.to_object(space) if self.starargs is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('starargs'), w_starargs)
        w_kwargs = self.kwargs.to_object(space) if self.kwargs is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('kwargs'), w_kwargs)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.decorator_list is None:
            decorator_list_w = []
        else:
            decorator_list_w = [node.to_object(space) for node in self.decorator_list] # expr
        w_decorator_list = space.newlist(decorator_list_w)
        space.setattr(w_node, space.wrap('decorator_list'), w_decorator_list)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_name = get_field(space, w_node, 'name', False)
        w_bases = get_field(space, w_node, 'bases', False)
        w_keywords = get_field(space, w_node, 'keywords', False)
        w_starargs = get_field(space, w_node, 'starargs', True)
        w_kwargs = get_field(space, w_node, 'kwargs', True)
        w_body = get_field(space, w_node, 'body', False)
        w_decorator_list = get_field(space, w_node, 'decorator_list', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _name = space.identifier_w(w_name)
        bases_w = space.unpackiterable(w_bases)
        _bases = [expr.from_object(space, w_item) for w_item in bases_w]
        keywords_w = space.unpackiterable(w_keywords)
        _keywords = [keyword.from_object(space, w_item) for w_item in keywords_w]
        _starargs = expr.from_object(space, w_starargs) if w_starargs is not None else None
        _kwargs = expr.from_object(space, w_kwargs) if w_kwargs is not None else None
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        decorator_list_w = space.unpackiterable(w_decorator_list)
        _decorator_list = [expr.from_object(space, w_item) for w_item in decorator_list_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return ClassDef(_name, _bases, _keywords, _starargs, _kwargs, _body, _decorator_list, _lineno, _col_offset)
State.ast_type('ClassDef', 'stmt', ['name', 'bases', 'keywords', 'starargs', 'kwargs', 'body', 'decorator_list'])
class Return(stmt):
    """``Return`` statement; field: ``value`` (optional expr)."""
    def __init__(self, value, lineno, col_offset):
        self.value = value
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Return(self)
    def mutate_over(self, visitor):
        # value is optional; skip mutation when absent.
        if self.value:
            self.value = self.value.mutate_over(visitor)
        return visitor.visit_Return(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_Return)
        w_value = self.value.to_object(space) if self.value is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_value = get_field(space, w_node, 'value', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _value = expr.from_object(space, w_value) if w_value is not None else None
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Return(_value, _lineno, _col_offset)
State.ast_type('Return', 'stmt', ['value'])
class Delete(stmt):
    """``Delete`` statement; field: ``targets`` (expr list)."""
    def __init__(self, targets, lineno, col_offset):
        self.targets = targets
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Delete(self)
    def mutate_over(self, visitor):
        if self.targets:
            visitor._mutate_sequence(self.targets)
        return visitor.visit_Delete(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_Delete)
        # A None targets list is exposed app-level as an empty list.
        if self.targets is None:
            targets_w = []
        else:
            targets_w = [node.to_object(space) for node in self.targets] # expr
        w_targets = space.newlist(targets_w)
        space.setattr(w_node, space.wrap('targets'), w_targets)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_targets = get_field(space, w_node, 'targets', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        targets_w = space.unpackiterable(w_targets)
        _targets = [expr.from_object(space, w_item) for w_item in targets_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Delete(_targets, _lineno, _col_offset)
State.ast_type('Delete', 'stmt', ['targets'])
class Assign(stmt):
    """``Assign`` statement; fields: ``targets`` (expr list), ``value`` (expr)."""
    def __init__(self, targets, value, lineno, col_offset):
        self.targets = targets
        self.value = value
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Assign(self)
    def mutate_over(self, visitor):
        if self.targets:
            visitor._mutate_sequence(self.targets)
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_Assign(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_Assign)
        if self.targets is None:
            targets_w = []
        else:
            targets_w = [node.to_object(space) for node in self.targets] # expr
        w_targets = space.newlist(targets_w)
        space.setattr(w_node, space.wrap('targets'), w_targets)
        w_value = self.value.to_object(space) # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_targets = get_field(space, w_node, 'targets', False)
        w_value = get_field(space, w_node, 'value', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        targets_w = space.unpackiterable(w_targets)
        _targets = [expr.from_object(space, w_item) for w_item in targets_w]
        _value = expr.from_object(space, w_value)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Assign(_targets, _value, _lineno, _col_offset)
State.ast_type('Assign', 'stmt', ['targets', 'value'])
class AugAssign(stmt):
    """``AugAssign`` statement (``x += y`` etc.); fields: ``target`` (expr),
    ``op`` (operator enum, stored 1-based), ``value`` (expr).
    """
    def __init__(self, target, op, value, lineno, col_offset):
        self.target = target
        self.op = op
        self.value = value
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_AugAssign(self)
    def mutate_over(self, visitor):
        self.target = self.target.mutate_over(visitor)
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_AugAssign(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_AugAssign)
        w_target = self.target.to_object(space) # expr
        space.setattr(w_node, space.wrap('target'), w_target)
        # op is a 1-based enum index into the operator class table.
        w_op = operator_to_class[self.op - 1]().to_object(space) # operator
        space.setattr(w_node, space.wrap('op'), w_op)
        w_value = self.value.to_object(space) # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_target = get_field(space, w_node, 'target', False)
        w_op = get_field(space, w_node, 'op', False)
        w_value = get_field(space, w_node, 'value', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _target = expr.from_object(space, w_target)
        _op = operator.from_object(space, w_op)
        _value = expr.from_object(space, w_value)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return AugAssign(_target, _op, _value, _lineno, _col_offset)
State.ast_type('AugAssign', 'stmt', ['target', 'op', 'value'])
class For(stmt):
    """``For`` statement; fields: ``target`` (expr), ``iter`` (expr),
    ``body`` (stmt list), ``orelse`` (stmt list).
    """
    def __init__(self, target, iter, body, orelse, lineno, col_offset):
        self.target = target
        self.iter = iter
        self.body = body
        self.orelse = orelse
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_For(self)
    def mutate_over(self, visitor):
        self.target = self.target.mutate_over(visitor)
        self.iter = self.iter.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.orelse:
            visitor._mutate_sequence(self.orelse)
        return visitor.visit_For(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_For)
        w_target = self.target.to_object(space) # expr
        space.setattr(w_node, space.wrap('target'), w_target)
        w_iter = self.iter.to_object(space) # expr
        space.setattr(w_node, space.wrap('iter'), w_iter)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.orelse is None:
            orelse_w = []
        else:
            orelse_w = [node.to_object(space) for node in self.orelse] # stmt
        w_orelse = space.newlist(orelse_w)
        space.setattr(w_node, space.wrap('orelse'), w_orelse)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_target = get_field(space, w_node, 'target', False)
        w_iter = get_field(space, w_node, 'iter', False)
        w_body = get_field(space, w_node, 'body', False)
        w_orelse = get_field(space, w_node, 'orelse', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _target = expr.from_object(space, w_target)
        _iter = expr.from_object(space, w_iter)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        orelse_w = space.unpackiterable(w_orelse)
        _orelse = [stmt.from_object(space, w_item) for w_item in orelse_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return For(_target, _iter, _body, _orelse, _lineno, _col_offset)
State.ast_type('For', 'stmt', ['target', 'iter', 'body', 'orelse'])
class While(stmt):
    """``While`` statement; fields: ``test`` (expr), ``body`` (stmt list),
    ``orelse`` (stmt list).
    """
    def __init__(self, test, body, orelse, lineno, col_offset):
        self.test = test
        self.body = body
        self.orelse = orelse
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_While(self)
    def mutate_over(self, visitor):
        self.test = self.test.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.orelse:
            visitor._mutate_sequence(self.orelse)
        return visitor.visit_While(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_While)
        w_test = self.test.to_object(space) # expr
        space.setattr(w_node, space.wrap('test'), w_test)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.orelse is None:
            orelse_w = []
        else:
            orelse_w = [node.to_object(space) for node in self.orelse] # stmt
        w_orelse = space.newlist(orelse_w)
        space.setattr(w_node, space.wrap('orelse'), w_orelse)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_test = get_field(space, w_node, 'test', False)
        w_body = get_field(space, w_node, 'body', False)
        w_orelse = get_field(space, w_node, 'orelse', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _test = expr.from_object(space, w_test)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        orelse_w = space.unpackiterable(w_orelse)
        _orelse = [stmt.from_object(space, w_item) for w_item in orelse_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return While(_test, _body, _orelse, _lineno, _col_offset)
State.ast_type('While', 'stmt', ['test', 'body', 'orelse'])
class If(stmt):
    """``If`` statement; fields: ``test`` (expr), ``body`` (stmt list),
    ``orelse`` (stmt list; elif chains nest as a single If in orelse).
    """
    def __init__(self, test, body, orelse, lineno, col_offset):
        self.test = test
        self.body = body
        self.orelse = orelse
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_If(self)
    def mutate_over(self, visitor):
        self.test = self.test.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.orelse:
            visitor._mutate_sequence(self.orelse)
        return visitor.visit_If(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_If)
        w_test = self.test.to_object(space) # expr
        space.setattr(w_node, space.wrap('test'), w_test)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.orelse is None:
            orelse_w = []
        else:
            orelse_w = [node.to_object(space) for node in self.orelse] # stmt
        w_orelse = space.newlist(orelse_w)
        space.setattr(w_node, space.wrap('orelse'), w_orelse)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_test = get_field(space, w_node, 'test', False)
        w_body = get_field(space, w_node, 'body', False)
        w_orelse = get_field(space, w_node, 'orelse', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _test = expr.from_object(space, w_test)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        orelse_w = space.unpackiterable(w_orelse)
        _orelse = [stmt.from_object(space, w_item) for w_item in orelse_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return If(_test, _body, _orelse, _lineno, _col_offset)
State.ast_type('If', 'stmt', ['test', 'body', 'orelse'])
class With(stmt):
    """``With`` statement (single-item form); fields: ``context_expr`` (expr),
    ``optional_vars`` (optional expr), ``body`` (stmt list).
    """
    def __init__(self, context_expr, optional_vars, body, lineno, col_offset):
        self.context_expr = context_expr
        self.optional_vars = optional_vars
        self.body = body
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_With(self)
    def mutate_over(self, visitor):
        self.context_expr = self.context_expr.mutate_over(visitor)
        if self.optional_vars:
            self.optional_vars = self.optional_vars.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        return visitor.visit_With(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_With)
        w_context_expr = self.context_expr.to_object(space) # expr
        space.setattr(w_node, space.wrap('context_expr'), w_context_expr)
        w_optional_vars = self.optional_vars.to_object(space) if self.optional_vars is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('optional_vars'), w_optional_vars)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_context_expr = get_field(space, w_node, 'context_expr', False)
        w_optional_vars = get_field(space, w_node, 'optional_vars', True)
        w_body = get_field(space, w_node, 'body', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _context_expr = expr.from_object(space, w_context_expr)
        _optional_vars = expr.from_object(space, w_optional_vars) if w_optional_vars is not None else None
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return With(_context_expr, _optional_vars, _body, _lineno, _col_offset)
State.ast_type('With', 'stmt', ['context_expr', 'optional_vars', 'body'])
class Raise(stmt):
    """``Raise`` statement; fields: ``exc`` (optional expr),
    ``cause`` (optional expr, the ``from`` clause).
    """
    def __init__(self, exc, cause, lineno, col_offset):
        self.exc = exc
        self.cause = cause
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Raise(self)
    def mutate_over(self, visitor):
        if self.exc:
            self.exc = self.exc.mutate_over(visitor)
        if self.cause:
            self.cause = self.cause.mutate_over(visitor)
        return visitor.visit_Raise(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_Raise)
        w_exc = self.exc.to_object(space) if self.exc is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('exc'), w_exc)
        w_cause = self.cause.to_object(space) if self.cause is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('cause'), w_cause)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_exc = get_field(space, w_node, 'exc', True)
        w_cause = get_field(space, w_node, 'cause', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _exc = expr.from_object(space, w_exc) if w_exc is not None else None
        _cause = expr.from_object(space, w_cause) if w_cause is not None else None
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Raise(_exc, _cause, _lineno, _col_offset)
State.ast_type('Raise', 'stmt', ['exc', 'cause'])
class TryExcept(stmt):
    """``TryExcept`` statement; fields: ``body`` (stmt list),
    ``handlers`` (excepthandler list), ``orelse`` (stmt list).
    """
    def __init__(self, body, handlers, orelse, lineno, col_offset):
        self.body = body
        self.handlers = handlers
        self.orelse = orelse
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_TryExcept(self)
    def mutate_over(self, visitor):
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.handlers:
            visitor._mutate_sequence(self.handlers)
        if self.orelse:
            visitor._mutate_sequence(self.orelse)
        return visitor.visit_TryExcept(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_TryExcept)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.handlers is None:
            handlers_w = []
        else:
            handlers_w = [node.to_object(space) for node in self.handlers] # excepthandler
        w_handlers = space.newlist(handlers_w)
        space.setattr(w_node, space.wrap('handlers'), w_handlers)
        if self.orelse is None:
            orelse_w = []
        else:
            orelse_w = [node.to_object(space) for node in self.orelse] # stmt
        w_orelse = space.newlist(orelse_w)
        space.setattr(w_node, space.wrap('orelse'), w_orelse)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_body = get_field(space, w_node, 'body', False)
        w_handlers = get_field(space, w_node, 'handlers', False)
        w_orelse = get_field(space, w_node, 'orelse', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        handlers_w = space.unpackiterable(w_handlers)
        _handlers = [excepthandler.from_object(space, w_item) for w_item in handlers_w]
        orelse_w = space.unpackiterable(w_orelse)
        _orelse = [stmt.from_object(space, w_item) for w_item in orelse_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return TryExcept(_body, _handlers, _orelse, _lineno, _col_offset)
State.ast_type('TryExcept', 'stmt', ['body', 'handlers', 'orelse'])
class TryFinally(stmt):
    """``TryFinally`` statement; fields: ``body`` (stmt list),
    ``finalbody`` (stmt list).
    """
    def __init__(self, body, finalbody, lineno, col_offset):
        self.body = body
        self.finalbody = finalbody
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_TryFinally(self)
    def mutate_over(self, visitor):
        if self.body:
            visitor._mutate_sequence(self.body)
        if self.finalbody:
            visitor._mutate_sequence(self.finalbody)
        return visitor.visit_TryFinally(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_TryFinally)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        if self.finalbody is None:
            finalbody_w = []
        else:
            finalbody_w = [node.to_object(space) for node in self.finalbody] # stmt
        w_finalbody = space.newlist(finalbody_w)
        space.setattr(w_node, space.wrap('finalbody'), w_finalbody)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_body = get_field(space, w_node, 'body', False)
        w_finalbody = get_field(space, w_node, 'finalbody', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        finalbody_w = space.unpackiterable(w_finalbody)
        _finalbody = [stmt.from_object(space, w_item) for w_item in finalbody_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return TryFinally(_body, _finalbody, _lineno, _col_offset)
State.ast_type('TryFinally', 'stmt', ['body', 'finalbody'])
class Assert(stmt):
    """``Assert`` statement; fields: ``test`` (expr), ``msg`` (optional expr)."""
    def __init__(self, test, msg, lineno, col_offset):
        self.test = test
        self.msg = msg
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Assert(self)
    def mutate_over(self, visitor):
        self.test = self.test.mutate_over(visitor)
        if self.msg:
            self.msg = self.msg.mutate_over(visitor)
        return visitor.visit_Assert(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_Assert)
        w_test = self.test.to_object(space) # expr
        space.setattr(w_node, space.wrap('test'), w_test)
        w_msg = self.msg.to_object(space) if self.msg is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('msg'), w_msg)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_test = get_field(space, w_node, 'test', False)
        w_msg = get_field(space, w_node, 'msg', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _test = expr.from_object(space, w_test)
        _msg = expr.from_object(space, w_msg) if w_msg is not None else None
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Assert(_test, _msg, _lineno, _col_offset)
State.ast_type('Assert', 'stmt', ['test', 'msg'])
class Import(stmt):
    """``Import`` statement; field: ``names`` (alias list)."""
    def __init__(self, names, lineno, col_offset):
        self.names = names
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Import(self)
    def mutate_over(self, visitor):
        if self.names:
            visitor._mutate_sequence(self.names)
        return visitor.visit_Import(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_Import)
        if self.names is None:
            names_w = []
        else:
            names_w = [node.to_object(space) for node in self.names] # alias
        w_names = space.newlist(names_w)
        space.setattr(w_node, space.wrap('names'), w_names)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_names = get_field(space, w_node, 'names', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        names_w = space.unpackiterable(w_names)
        _names = [alias.from_object(space, w_item) for w_item in names_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Import(_names, _lineno, _col_offset)
State.ast_type('Import', 'stmt', ['names'])
class ImportFrom(stmt):
    """``ImportFrom`` statement; fields: ``module`` (optional identifier),
    ``names`` (alias list), ``level`` (int, count of leading relative dots).
    """
    def __init__(self, module, names, level, lineno, col_offset):
        self.module = module
        self.names = names
        self.level = level
        stmt.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_ImportFrom(self)
    def mutate_over(self, visitor):
        if self.names:
            visitor._mutate_sequence(self.names)
        return visitor.visit_ImportFrom(self)
    def to_object(self, space):
        w_node = space.call_function(get(space).w_ImportFrom)
        # module may be None for purely relative imports ('from . import x').
        w_module = space.wrap(self.module.decode('utf-8')) if self.module is not None else space.w_None  # identifier
        space.setattr(w_node, space.wrap('module'), w_module)
        if self.names is None:
            names_w = []
        else:
            names_w = [node.to_object(space) for node in self.names] # alias
        w_names = space.newlist(names_w)
        space.setattr(w_node, space.wrap('names'), w_names)
        w_level = space.wrap(self.level) # int
        space.setattr(w_node, space.wrap('level'), w_level)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        w_module = get_field(space, w_node, 'module', True)
        w_names = get_field(space, w_node, 'names', False)
        w_level = get_field(space, w_node, 'level', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _module = space.str_or_None_w(w_module)
        names_w = space.unpackiterable(w_names)
        _names = [alias.from_object(space, w_item) for w_item in names_w]
        # NOTE(review): level is fetched as optional (True) but converted
        # unconditionally with int_w — presumably get_field returns a wrapped
        # value rather than interp-level None here; confirm against get_field.
        _level = space.int_w(w_level)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return ImportFrom(_module, _names, _level, _lineno, _col_offset)
State.ast_type('ImportFrom', 'stmt', ['module', 'names', 'level'])
class Global(stmt):
    """AST node for a ``global`` statement (a list of identifiers)."""

    def __init__(self, names, lineno, col_offset):
        self.names = names
        stmt.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Global(self)

    def mutate_over(self, visitor):
        return visitor.visit_Global(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Global`` object."""
        w_node = space.call_function(get(space).w_Global)
        if self.names is None:
            wrapped_names = []
        else:
            # each identifier is stored as bytes, exposed as unicode
            wrapped_names = [space.wrap(name.decode('utf-8'))
                             for name in self.names]
        space.setattr(w_node, space.wrap('names'), space.newlist(wrapped_names))
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Global node from the app-level object *w_node*."""
        w_names = get_field(space, w_node, 'names', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        names = [space.identifier_w(w_item)
                 for w_item in space.unpackiterable(w_names)]
        return Global(names, space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Global', 'stmt', ['names'])
class Nonlocal(stmt):
    """AST node for a ``nonlocal`` statement (a list of identifiers)."""

    def __init__(self, names, lineno, col_offset):
        self.names = names
        stmt.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Nonlocal(self)

    def mutate_over(self, visitor):
        return visitor.visit_Nonlocal(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Nonlocal`` object."""
        w_node = space.call_function(get(space).w_Nonlocal)
        if self.names is None:
            wrapped_names = []
        else:
            # each identifier is stored as bytes, exposed as unicode
            wrapped_names = [space.wrap(name.decode('utf-8'))
                             for name in self.names]
        space.setattr(w_node, space.wrap('names'), space.newlist(wrapped_names))
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Nonlocal node from the app-level object *w_node*."""
        w_names = get_field(space, w_node, 'names', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        names = [space.identifier_w(w_item)
                 for w_item in space.unpackiterable(w_names)]
        return Nonlocal(names, space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Nonlocal', 'stmt', ['names'])
class Expr(stmt):
    """AST node for an expression statement (expression used as a statement)."""

    def __init__(self, value, lineno, col_offset):
        self.value = value
        stmt.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Expr(self)

    def mutate_over(self, visitor):
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_Expr(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Expr`` object."""
        w_node = space.call_function(get(space).w_Expr)
        space.setattr(w_node, space.wrap('value'), self.value.to_object(space))
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild an Expr node from the app-level object *w_node*."""
        w_value = get_field(space, w_node, 'value', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        return Expr(expr.from_object(space, w_value),
                    space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Expr', 'stmt', ['value'])
class Pass(stmt):
    """AST node for the ``pass`` statement; carries only position info."""

    def __init__(self, lineno, col_offset):
        stmt.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Pass(self)

    def mutate_over(self, visitor):
        return visitor.visit_Pass(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Pass`` object."""
        w_node = space.call_function(get(space).w_Pass)
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Pass node from the app-level object *w_node*."""
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        return Pass(space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Pass', 'stmt', [])
class Break(stmt):
    """AST node for the ``break`` statement; carries only position info."""

    def __init__(self, lineno, col_offset):
        stmt.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Break(self)

    def mutate_over(self, visitor):
        return visitor.visit_Break(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Break`` object."""
        w_node = space.call_function(get(space).w_Break)
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Break node from the app-level object *w_node*."""
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        return Break(space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Break', 'stmt', [])
class Continue(stmt):
    """AST node for the ``continue`` statement; carries only position info."""

    def __init__(self, lineno, col_offset):
        stmt.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Continue(self)

    def mutate_over(self, visitor):
        return visitor.visit_Continue(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Continue`` object."""
        w_node = space.call_function(get(space).w_Continue)
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Continue node from the app-level object *w_node*."""
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        return Continue(space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Continue', 'stmt', [])
class expr(AST):
    """Abstract base class for all expression nodes; holds position info."""
    def __init__(self, lineno, col_offset):
        self.lineno = lineno
        self.col_offset = col_offset
    @staticmethod
    def from_object(space, w_node):
        """Dispatch on the app-level node's concrete type and rebuild it.

        Returns None when *w_node* is w_None; raises an app-level
        TypeError when *w_node* is not an instance of any expr subclass.
        """
        if space.is_w(w_node, space.w_None):
            return None
        if space.isinstance_w(w_node, get(space).w_BoolOp):
            return BoolOp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_BinOp):
            return BinOp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_UnaryOp):
            return UnaryOp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Lambda):
            return Lambda.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_IfExp):
            return IfExp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Dict):
            return Dict.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Set):
            return Set.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_ListComp):
            return ListComp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_SetComp):
            return SetComp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_DictComp):
            return DictComp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_GeneratorExp):
            return GeneratorExp.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Yield):
            return Yield.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Compare):
            return Compare.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Call):
            return Call.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Num):
            return Num.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Str):
            return Str.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Bytes):
            return Bytes.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Ellipsis):
            return Ellipsis.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Attribute):
            return Attribute.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Subscript):
            return Subscript.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Starred):
            return Starred.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Name):
            return Name.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_List):
            return List.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Tuple):
            return Tuple.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Const):
            return Const.from_object(space, w_node)
        raise oefmt(space.w_TypeError,
                "Expected expr node, got %T", w_node)
State.ast_type('expr', 'AST', None, ['lineno', 'col_offset'])
class BoolOp(expr):
    """AST node for a boolean operation (``and`` / ``or``) over values."""
    def __init__(self, op, values, lineno, col_offset):
        # op is stored as a 1-based int index into boolop_to_class
        self.op = op
        self.values = values
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_BoolOp method."""
        visitor.visit_BoolOp(self)
    def mutate_over(self, visitor):
        """Mutate the child value nodes in place, then visit this node."""
        if self.values:
            visitor._mutate_sequence(self.values)
        return visitor.visit_BoolOp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.BoolOp`` object."""
        w_node = space.call_function(get(space).w_BoolOp)
        w_op = boolop_to_class[self.op - 1]().to_object(space) # boolop
        space.setattr(w_node, space.wrap('op'), w_op)
        if self.values is None:
            values_w = []
        else:
            values_w = [node.to_object(space) for node in self.values] # expr
        w_values = space.newlist(values_w)
        space.setattr(w_node, space.wrap('values'), w_values)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a BoolOp node from the app-level object *w_node*."""
        w_op = get_field(space, w_node, 'op', False)
        w_values = get_field(space, w_node, 'values', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _op = boolop.from_object(space, w_op)
        values_w = space.unpackiterable(w_values)
        _values = [expr.from_object(space, w_item) for w_item in values_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return BoolOp(_op, _values, _lineno, _col_offset)
State.ast_type('BoolOp', 'expr', ['op', 'values'])
class BinOp(expr):
    """AST node for a binary operation: ``left op right``."""
    def __init__(self, left, op, right, lineno, col_offset):
        self.left = left
        # op is stored as a 1-based int index into operator_to_class
        self.op = op
        self.right = right
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_BinOp method."""
        visitor.visit_BinOp(self)
    def mutate_over(self, visitor):
        """Mutate both operand subtrees, then visit this node."""
        self.left = self.left.mutate_over(visitor)
        self.right = self.right.mutate_over(visitor)
        return visitor.visit_BinOp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.BinOp`` object."""
        w_node = space.call_function(get(space).w_BinOp)
        w_left = self.left.to_object(space) # expr
        space.setattr(w_node, space.wrap('left'), w_left)
        w_op = operator_to_class[self.op - 1]().to_object(space) # operator
        space.setattr(w_node, space.wrap('op'), w_op)
        w_right = self.right.to_object(space) # expr
        space.setattr(w_node, space.wrap('right'), w_right)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a BinOp node from the app-level object *w_node*."""
        w_left = get_field(space, w_node, 'left', False)
        w_op = get_field(space, w_node, 'op', False)
        w_right = get_field(space, w_node, 'right', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _left = expr.from_object(space, w_left)
        _op = operator.from_object(space, w_op)
        _right = expr.from_object(space, w_right)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return BinOp(_left, _op, _right, _lineno, _col_offset)
State.ast_type('BinOp', 'expr', ['left', 'op', 'right'])
class UnaryOp(expr):
    """AST node for a unary operation: ``op operand``."""
    def __init__(self, op, operand, lineno, col_offset):
        # op is stored as a 1-based int index into unaryop_to_class
        self.op = op
        self.operand = operand
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_UnaryOp method."""
        visitor.visit_UnaryOp(self)
    def mutate_over(self, visitor):
        """Mutate the operand subtree, then visit this node."""
        self.operand = self.operand.mutate_over(visitor)
        return visitor.visit_UnaryOp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.UnaryOp`` object."""
        w_node = space.call_function(get(space).w_UnaryOp)
        w_op = unaryop_to_class[self.op - 1]().to_object(space) # unaryop
        space.setattr(w_node, space.wrap('op'), w_op)
        w_operand = self.operand.to_object(space) # expr
        space.setattr(w_node, space.wrap('operand'), w_operand)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a UnaryOp node from the app-level object *w_node*."""
        w_op = get_field(space, w_node, 'op', False)
        w_operand = get_field(space, w_node, 'operand', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _op = unaryop.from_object(space, w_op)
        _operand = expr.from_object(space, w_operand)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return UnaryOp(_op, _operand, _lineno, _col_offset)
State.ast_type('UnaryOp', 'expr', ['op', 'operand'])
class Lambda(expr):
    """AST node for a ``lambda`` expression: arguments plus a body expr."""
    def __init__(self, args, body, lineno, col_offset):
        self.args = args
        self.body = body
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_Lambda method."""
        visitor.visit_Lambda(self)
    def mutate_over(self, visitor):
        """Mutate the arguments and body subtrees, then visit this node."""
        self.args = self.args.mutate_over(visitor)
        self.body = self.body.mutate_over(visitor)
        return visitor.visit_Lambda(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.Lambda`` object."""
        w_node = space.call_function(get(space).w_Lambda)
        w_args = self.args.to_object(space) # arguments
        space.setattr(w_node, space.wrap('args'), w_args)
        w_body = self.body.to_object(space) # expr
        space.setattr(w_node, space.wrap('body'), w_body)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Lambda node from the app-level object *w_node*."""
        w_args = get_field(space, w_node, 'args', False)
        w_body = get_field(space, w_node, 'body', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _args = arguments.from_object(space, w_args)
        _body = expr.from_object(space, w_body)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Lambda(_args, _body, _lineno, _col_offset)
State.ast_type('Lambda', 'expr', ['args', 'body'])
class IfExp(expr):
    """AST node for a conditional expression: ``body if test else orelse``."""
    def __init__(self, test, body, orelse, lineno, col_offset):
        self.test = test
        self.body = body
        self.orelse = orelse
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_IfExp method."""
        visitor.visit_IfExp(self)
    def mutate_over(self, visitor):
        """Mutate the three subtrees, then visit this node."""
        self.test = self.test.mutate_over(visitor)
        self.body = self.body.mutate_over(visitor)
        self.orelse = self.orelse.mutate_over(visitor)
        return visitor.visit_IfExp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.IfExp`` object."""
        w_node = space.call_function(get(space).w_IfExp)
        w_test = self.test.to_object(space) # expr
        space.setattr(w_node, space.wrap('test'), w_test)
        w_body = self.body.to_object(space) # expr
        space.setattr(w_node, space.wrap('body'), w_body)
        w_orelse = self.orelse.to_object(space) # expr
        space.setattr(w_node, space.wrap('orelse'), w_orelse)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild an IfExp node from the app-level object *w_node*."""
        w_test = get_field(space, w_node, 'test', False)
        w_body = get_field(space, w_node, 'body', False)
        w_orelse = get_field(space, w_node, 'orelse', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _test = expr.from_object(space, w_test)
        _body = expr.from_object(space, w_body)
        _orelse = expr.from_object(space, w_orelse)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return IfExp(_test, _body, _orelse, _lineno, _col_offset)
State.ast_type('IfExp', 'expr', ['test', 'body', 'orelse'])
class Dict(expr):
    """AST node for a dict display: parallel lists of keys and values."""
    def __init__(self, keys, values, lineno, col_offset):
        self.keys = keys
        self.values = values
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_Dict method."""
        visitor.visit_Dict(self)
    def mutate_over(self, visitor):
        """Mutate the key and value node lists in place, then visit."""
        if self.keys:
            visitor._mutate_sequence(self.keys)
        if self.values:
            visitor._mutate_sequence(self.values)
        return visitor.visit_Dict(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.Dict`` object."""
        w_node = space.call_function(get(space).w_Dict)
        if self.keys is None:
            keys_w = []
        else:
            keys_w = [node.to_object(space) for node in self.keys] # expr
        w_keys = space.newlist(keys_w)
        space.setattr(w_node, space.wrap('keys'), w_keys)
        if self.values is None:
            values_w = []
        else:
            values_w = [node.to_object(space) for node in self.values] # expr
        w_values = space.newlist(values_w)
        space.setattr(w_node, space.wrap('values'), w_values)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Dict node from the app-level object *w_node*."""
        w_keys = get_field(space, w_node, 'keys', False)
        w_values = get_field(space, w_node, 'values', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        keys_w = space.unpackiterable(w_keys)
        _keys = [expr.from_object(space, w_item) for w_item in keys_w]
        values_w = space.unpackiterable(w_values)
        _values = [expr.from_object(space, w_item) for w_item in values_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Dict(_keys, _values, _lineno, _col_offset)
State.ast_type('Dict', 'expr', ['keys', 'values'])
class Set(expr):
    """AST node for a set display: a list of element expressions."""

    def __init__(self, elts, lineno, col_offset):
        self.elts = elts
        expr.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Set(self)

    def mutate_over(self, visitor):
        if self.elts:
            visitor._mutate_sequence(self.elts)
        return visitor.visit_Set(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Set`` object."""
        w_node = space.call_function(get(space).w_Set)
        if self.elts is None:
            elements = []
        else:
            elements = [elt.to_object(space) for elt in self.elts]
        space.setattr(w_node, space.wrap('elts'), space.newlist(elements))
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Set node from the app-level object *w_node*."""
        w_elts = get_field(space, w_node, 'elts', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        elts = [expr.from_object(space, w_item)
                for w_item in space.unpackiterable(w_elts)]
        return Set(elts, space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Set', 'expr', ['elts'])
class ListComp(expr):
    """AST node for a list comprehension: element expr plus generators."""
    def __init__(self, elt, generators, lineno, col_offset):
        self.elt = elt
        self.generators = generators
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_ListComp method."""
        visitor.visit_ListComp(self)
    def mutate_over(self, visitor):
        """Mutate the element and generator subtrees, then visit."""
        self.elt = self.elt.mutate_over(visitor)
        if self.generators:
            visitor._mutate_sequence(self.generators)
        return visitor.visit_ListComp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.ListComp`` object."""
        w_node = space.call_function(get(space).w_ListComp)
        w_elt = self.elt.to_object(space) # expr
        space.setattr(w_node, space.wrap('elt'), w_elt)
        if self.generators is None:
            generators_w = []
        else:
            generators_w = [node.to_object(space) for node in self.generators] # comprehension
        w_generators = space.newlist(generators_w)
        space.setattr(w_node, space.wrap('generators'), w_generators)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a ListComp node from the app-level object *w_node*."""
        w_elt = get_field(space, w_node, 'elt', False)
        w_generators = get_field(space, w_node, 'generators', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _elt = expr.from_object(space, w_elt)
        generators_w = space.unpackiterable(w_generators)
        _generators = [comprehension.from_object(space, w_item) for w_item in generators_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return ListComp(_elt, _generators, _lineno, _col_offset)
State.ast_type('ListComp', 'expr', ['elt', 'generators'])
class SetComp(expr):
    """AST node for a set comprehension: element expr plus generators."""
    def __init__(self, elt, generators, lineno, col_offset):
        self.elt = elt
        self.generators = generators
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_SetComp method."""
        visitor.visit_SetComp(self)
    def mutate_over(self, visitor):
        """Mutate the element and generator subtrees, then visit."""
        self.elt = self.elt.mutate_over(visitor)
        if self.generators:
            visitor._mutate_sequence(self.generators)
        return visitor.visit_SetComp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.SetComp`` object."""
        w_node = space.call_function(get(space).w_SetComp)
        w_elt = self.elt.to_object(space) # expr
        space.setattr(w_node, space.wrap('elt'), w_elt)
        if self.generators is None:
            generators_w = []
        else:
            generators_w = [node.to_object(space) for node in self.generators] # comprehension
        w_generators = space.newlist(generators_w)
        space.setattr(w_node, space.wrap('generators'), w_generators)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a SetComp node from the app-level object *w_node*."""
        w_elt = get_field(space, w_node, 'elt', False)
        w_generators = get_field(space, w_node, 'generators', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _elt = expr.from_object(space, w_elt)
        generators_w = space.unpackiterable(w_generators)
        _generators = [comprehension.from_object(space, w_item) for w_item in generators_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return SetComp(_elt, _generators, _lineno, _col_offset)
State.ast_type('SetComp', 'expr', ['elt', 'generators'])
class DictComp(expr):
    """AST node for a dict comprehension: key/value exprs plus generators."""
    def __init__(self, key, value, generators, lineno, col_offset):
        self.key = key
        self.value = value
        self.generators = generators
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_DictComp method."""
        visitor.visit_DictComp(self)
    def mutate_over(self, visitor):
        """Mutate the key, value and generator subtrees, then visit."""
        self.key = self.key.mutate_over(visitor)
        self.value = self.value.mutate_over(visitor)
        if self.generators:
            visitor._mutate_sequence(self.generators)
        return visitor.visit_DictComp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.DictComp`` object."""
        w_node = space.call_function(get(space).w_DictComp)
        w_key = self.key.to_object(space) # expr
        space.setattr(w_node, space.wrap('key'), w_key)
        w_value = self.value.to_object(space) # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        if self.generators is None:
            generators_w = []
        else:
            generators_w = [node.to_object(space) for node in self.generators] # comprehension
        w_generators = space.newlist(generators_w)
        space.setattr(w_node, space.wrap('generators'), w_generators)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a DictComp node from the app-level object *w_node*."""
        w_key = get_field(space, w_node, 'key', False)
        w_value = get_field(space, w_node, 'value', False)
        w_generators = get_field(space, w_node, 'generators', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _key = expr.from_object(space, w_key)
        _value = expr.from_object(space, w_value)
        generators_w = space.unpackiterable(w_generators)
        _generators = [comprehension.from_object(space, w_item) for w_item in generators_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return DictComp(_key, _value, _generators, _lineno, _col_offset)
State.ast_type('DictComp', 'expr', ['key', 'value', 'generators'])
class GeneratorExp(expr):
    """AST node for a generator expression: element expr plus generators."""
    def __init__(self, elt, generators, lineno, col_offset):
        self.elt = elt
        self.generators = generators
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_GeneratorExp method."""
        visitor.visit_GeneratorExp(self)
    def mutate_over(self, visitor):
        """Mutate the element and generator subtrees, then visit."""
        self.elt = self.elt.mutate_over(visitor)
        if self.generators:
            visitor._mutate_sequence(self.generators)
        return visitor.visit_GeneratorExp(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.GeneratorExp`` object."""
        w_node = space.call_function(get(space).w_GeneratorExp)
        w_elt = self.elt.to_object(space) # expr
        space.setattr(w_node, space.wrap('elt'), w_elt)
        if self.generators is None:
            generators_w = []
        else:
            generators_w = [node.to_object(space) for node in self.generators] # comprehension
        w_generators = space.newlist(generators_w)
        space.setattr(w_node, space.wrap('generators'), w_generators)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a GeneratorExp node from the app-level object *w_node*."""
        w_elt = get_field(space, w_node, 'elt', False)
        w_generators = get_field(space, w_node, 'generators', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _elt = expr.from_object(space, w_elt)
        generators_w = space.unpackiterable(w_generators)
        _generators = [comprehension.from_object(space, w_item) for w_item in generators_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return GeneratorExp(_elt, _generators, _lineno, _col_offset)
State.ast_type('GeneratorExp', 'expr', ['elt', 'generators'])
class Yield(expr):
    """AST node for a ``yield`` expression; value is None for bare ``yield``."""

    def __init__(self, value, lineno, col_offset):
        self.value = value
        expr.__init__(self, lineno, col_offset)

    def walkabout(self, visitor):
        visitor.visit_Yield(self)

    def mutate_over(self, visitor):
        if self.value:
            self.value = self.value.mutate_over(visitor)
        return visitor.visit_Yield(self)

    def to_object(self, space):
        """Convert this node to an app-level ``ast.Yield`` object."""
        w_node = space.call_function(get(space).w_Yield)
        if self.value is None:
            w_value = space.w_None
        else:
            w_value = self.value.to_object(space)
        space.setattr(w_node, space.wrap('value'), w_value)
        space.setattr(w_node, space.wrap('lineno'), space.wrap(self.lineno))
        space.setattr(w_node, space.wrap('col_offset'), space.wrap(self.col_offset))
        return w_node

    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Yield node from the app-level object *w_node*."""
        # 'value' is an optional field
        w_value = get_field(space, w_node, 'value', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        if w_value is None:
            value = None
        else:
            value = expr.from_object(space, w_value)
        return Yield(value, space.int_w(w_lineno), space.int_w(w_col_offset))

State.ast_type('Yield', 'expr', ['value'])
class Compare(expr):
    """AST node for a chained comparison: left, then parallel ops/comparators."""
    def __init__(self, left, ops, comparators, lineno, col_offset):
        self.left = left
        # ops is a list of 1-based int indexes into cmpop_to_class
        self.ops = ops
        self.comparators = comparators
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_Compare method."""
        visitor.visit_Compare(self)
    def mutate_over(self, visitor):
        """Mutate left and the comparator nodes; ops are plain ints,
        not nodes, so there is nothing to mutate there."""
        self.left = self.left.mutate_over(visitor)
        if self.comparators:
            visitor._mutate_sequence(self.comparators)
        return visitor.visit_Compare(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.Compare`` object."""
        w_node = space.call_function(get(space).w_Compare)
        w_left = self.left.to_object(space) # expr
        space.setattr(w_node, space.wrap('left'), w_left)
        if self.ops is None:
            ops_w = []
        else:
            ops_w = [cmpop_to_class[node - 1]().to_object(space) for node in self.ops] # cmpop
        w_ops = space.newlist(ops_w)
        space.setattr(w_node, space.wrap('ops'), w_ops)
        if self.comparators is None:
            comparators_w = []
        else:
            comparators_w = [node.to_object(space) for node in self.comparators] # expr
        w_comparators = space.newlist(comparators_w)
        space.setattr(w_node, space.wrap('comparators'), w_comparators)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Compare node from the app-level object *w_node*."""
        w_left = get_field(space, w_node, 'left', False)
        w_ops = get_field(space, w_node, 'ops', False)
        w_comparators = get_field(space, w_node, 'comparators', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _left = expr.from_object(space, w_left)
        ops_w = space.unpackiterable(w_ops)
        _ops = [cmpop.from_object(space, w_item) for w_item in ops_w]
        comparators_w = space.unpackiterable(w_comparators)
        _comparators = [expr.from_object(space, w_item) for w_item in comparators_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Compare(_left, _ops, _comparators, _lineno, _col_offset)
State.ast_type('Compare', 'expr', ['left', 'ops', 'comparators'])
class Call(expr):
    """AST node for a call: func(args, keywords, *starargs, **kwargs).

    starargs and kwargs are optional (may be None).
    """
    def __init__(self, func, args, keywords, starargs, kwargs, lineno, col_offset):
        self.func = func
        self.args = args
        self.keywords = keywords
        self.starargs = starargs
        self.kwargs = kwargs
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        """Dispatch to the visitor's visit_Call method."""
        visitor.visit_Call(self)
    def mutate_over(self, visitor):
        """Mutate all child subtrees (optional ones only if present)."""
        self.func = self.func.mutate_over(visitor)
        if self.args:
            visitor._mutate_sequence(self.args)
        if self.keywords:
            visitor._mutate_sequence(self.keywords)
        if self.starargs:
            self.starargs = self.starargs.mutate_over(visitor)
        if self.kwargs:
            self.kwargs = self.kwargs.mutate_over(visitor)
        return visitor.visit_Call(self)
    def to_object(self, space):
        """Convert this node to an app-level ``ast.Call`` object."""
        w_node = space.call_function(get(space).w_Call)
        w_func = self.func.to_object(space) # expr
        space.setattr(w_node, space.wrap('func'), w_func)
        if self.args is None:
            args_w = []
        else:
            args_w = [node.to_object(space) for node in self.args] # expr
        w_args = space.newlist(args_w)
        space.setattr(w_node, space.wrap('args'), w_args)
        if self.keywords is None:
            keywords_w = []
        else:
            keywords_w = [node.to_object(space) for node in self.keywords] # keyword
        w_keywords = space.newlist(keywords_w)
        space.setattr(w_node, space.wrap('keywords'), w_keywords)
        # optional fields map None to w_None at app level
        w_starargs = self.starargs.to_object(space) if self.starargs is not None else space.w_None # expr
        space.setattr(w_node, space.wrap('starargs'), w_starargs)
        w_kwargs = self.kwargs.to_object(space) if self.kwargs is not None else space.w_None # expr
        space.setattr(w_node, space.wrap('kwargs'), w_kwargs)
        w_lineno = space.wrap(self.lineno) # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset) # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild a Call node from the app-level object *w_node*."""
        w_func = get_field(space, w_node, 'func', False)
        w_args = get_field(space, w_node, 'args', False)
        w_keywords = get_field(space, w_node, 'keywords', False)
        w_starargs = get_field(space, w_node, 'starargs', True)
        w_kwargs = get_field(space, w_node, 'kwargs', True)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _func = expr.from_object(space, w_func)
        args_w = space.unpackiterable(w_args)
        _args = [expr.from_object(space, w_item) for w_item in args_w]
        keywords_w = space.unpackiterable(w_keywords)
        _keywords = [keyword.from_object(space, w_item) for w_item in keywords_w]
        _starargs = expr.from_object(space, w_starargs) if w_starargs is not None else None
        _kwargs = expr.from_object(space, w_kwargs) if w_kwargs is not None else None
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Call(_func, _args, _keywords, _starargs, _kwargs, _lineno, _col_offset)
State.ast_type('Call', 'expr', ['func', 'args', 'keywords', 'starargs', 'kwargs'])
class Num(expr):
    """AST expression node for a numeric literal; ``n`` is the wrapped value."""
    def __init__(self, n, lineno, col_offset):
        self.n = n
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Num(self)
    def mutate_over(self, visitor):
        # Leaf node: no children to mutate.
        return visitor.visit_Num(self)
    def to_object(self, space):
        """Build the app-level ``ast.Num`` object for this node."""
        w_node = space.call_function(get(space).w_Num)
        w_n = self.n  # object (already a wrapped value, no conversion needed)
        space.setattr(w_node, space.wrap('n'), w_n)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Num`` from an app-level ``ast.Num``."""
        w_n = get_field(space, w_node, 'n', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _n = w_n
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Num(_n, _lineno, _col_offset)
State.ast_type('Num', 'expr', ['n'])
class Str(expr):
    """AST expression node for a string literal; ``s`` is the wrapped string."""
    def __init__(self, s, lineno, col_offset):
        self.s = s
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Str(self)
    def mutate_over(self, visitor):
        # Leaf node: no children to mutate.
        return visitor.visit_Str(self)
    def to_object(self, space):
        """Build the app-level ``ast.Str`` object for this node."""
        w_node = space.call_function(get(space).w_Str)
        w_s = self.s  # string
        space.setattr(w_node, space.wrap('s'), w_s)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Str`` from an app-level ``ast.Str``."""
        w_s = get_field(space, w_node, 's', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        # check_string validates that the wrapped value really is a string.
        _s = check_string(space, w_s)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Str(_s, _lineno, _col_offset)
State.ast_type('Str', 'expr', ['s'])
class Bytes(expr):
    """AST expression node for a bytes literal; ``s`` is the wrapped bytes value."""
    def __init__(self, s, lineno, col_offset):
        self.s = s
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Bytes(self)
    def mutate_over(self, visitor):
        # Leaf node: no children to mutate.
        return visitor.visit_Bytes(self)
    def to_object(self, space):
        """Build the app-level ``ast.Bytes`` object for this node."""
        w_node = space.call_function(get(space).w_Bytes)
        w_s = self.s  # string
        space.setattr(w_node, space.wrap('s'), w_s)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Bytes`` from an app-level ``ast.Bytes``."""
        w_s = get_field(space, w_node, 's', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _s = check_string(space, w_s)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Bytes(_s, _lineno, _col_offset)
State.ast_type('Bytes', 'expr', ['s'])
class Ellipsis(expr):
    """AST expression node for the ``...`` literal.

    NOTE: the name intentionally shadows the ``Ellipsis`` builtin to match
    the CPython ``ast`` node name.  The node carries no fields of its own.
    """
    def __init__(self, lineno, col_offset):
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Ellipsis(self)
    def mutate_over(self, visitor):
        # Leaf node: no children to mutate.
        return visitor.visit_Ellipsis(self)
    def to_object(self, space):
        """Build the app-level ``ast.Ellipsis`` object for this node."""
        w_node = space.call_function(get(space).w_Ellipsis)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct an ``Ellipsis`` node from its app-level counterpart."""
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Ellipsis(_lineno, _col_offset)
State.ast_type('Ellipsis', 'expr', [])
class Attribute(expr):
    """AST expression node for attribute access: ``value.attr``.

    ``ctx`` is an ``expr_context`` small-int constant (Load/Store/...).
    """
    def __init__(self, value, attr, ctx, lineno, col_offset):
        self.value = value
        self.attr = attr
        self.ctx = ctx
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Attribute(self)
    def mutate_over(self, visitor):
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_Attribute(self)
    def to_object(self, space):
        """Build the app-level ``ast.Attribute`` object for this node."""
        w_node = space.call_function(get(space).w_Attribute)
        w_value = self.value.to_object(space)  # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        w_attr = space.wrap(self.attr.decode('utf-8'))  # identifier
        space.setattr(w_node, space.wrap('attr'), w_attr)
        # ctx is 1-based, hence the ``- 1`` index into the class table.
        w_ctx = expr_context_to_class[self.ctx - 1]().to_object(space)  # expr_context
        space.setattr(w_node, space.wrap('ctx'), w_ctx)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct an ``Attribute`` from an app-level ``ast.Attribute``."""
        w_value = get_field(space, w_node, 'value', False)
        w_attr = get_field(space, w_node, 'attr', False)
        w_ctx = get_field(space, w_node, 'ctx', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _value = expr.from_object(space, w_value)
        _attr = space.identifier_w(w_attr)
        _ctx = expr_context.from_object(space, w_ctx)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Attribute(_value, _attr, _ctx, _lineno, _col_offset)
State.ast_type('Attribute', 'expr', ['value', 'attr', 'ctx'])
class Subscript(expr):
    """AST expression node for subscripting: ``value[slice]`` with a ``ctx``."""
    def __init__(self, value, slice, ctx, lineno, col_offset):
        self.value = value
        self.slice = slice
        self.ctx = ctx
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Subscript(self)
    def mutate_over(self, visitor):
        self.value = self.value.mutate_over(visitor)
        self.slice = self.slice.mutate_over(visitor)
        return visitor.visit_Subscript(self)
    def to_object(self, space):
        """Build the app-level ``ast.Subscript`` object for this node."""
        w_node = space.call_function(get(space).w_Subscript)
        w_value = self.value.to_object(space)  # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        w_slice = self.slice.to_object(space)  # slice
        space.setattr(w_node, space.wrap('slice'), w_slice)
        # ctx is 1-based, hence the ``- 1`` index into the class table.
        w_ctx = expr_context_to_class[self.ctx - 1]().to_object(space)  # expr_context
        space.setattr(w_node, space.wrap('ctx'), w_ctx)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Subscript`` from an app-level ``ast.Subscript``."""
        w_value = get_field(space, w_node, 'value', False)
        w_slice = get_field(space, w_node, 'slice', False)
        w_ctx = get_field(space, w_node, 'ctx', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _value = expr.from_object(space, w_value)
        _slice = slice.from_object(space, w_slice)
        _ctx = expr_context.from_object(space, w_ctx)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Subscript(_value, _slice, _ctx, _lineno, _col_offset)
State.ast_type('Subscript', 'expr', ['value', 'slice', 'ctx'])
class Starred(expr):
    """AST expression node for a starred expression: ``*value`` with a ``ctx``."""
    def __init__(self, value, ctx, lineno, col_offset):
        self.value = value
        self.ctx = ctx
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Starred(self)
    def mutate_over(self, visitor):
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_Starred(self)
    def to_object(self, space):
        """Build the app-level ``ast.Starred`` object for this node."""
        w_node = space.call_function(get(space).w_Starred)
        w_value = self.value.to_object(space)  # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        # ctx is 1-based, hence the ``- 1`` index into the class table.
        w_ctx = expr_context_to_class[self.ctx - 1]().to_object(space)  # expr_context
        space.setattr(w_node, space.wrap('ctx'), w_ctx)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Starred`` from an app-level ``ast.Starred``."""
        w_value = get_field(space, w_node, 'value', False)
        w_ctx = get_field(space, w_node, 'ctx', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _value = expr.from_object(space, w_value)
        _ctx = expr_context.from_object(space, w_ctx)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Starred(_value, _ctx, _lineno, _col_offset)
State.ast_type('Starred', 'expr', ['value', 'ctx'])
class Name(expr):
    """AST expression node for an identifier reference; ``id`` is its name."""
    def __init__(self, id, ctx, lineno, col_offset):
        self.id = id
        self.ctx = ctx
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Name(self)
    def mutate_over(self, visitor):
        # Leaf node: no children to mutate.
        return visitor.visit_Name(self)
    def to_object(self, space):
        """Build the app-level ``ast.Name`` object for this node."""
        w_node = space.call_function(get(space).w_Name)
        w_id = space.wrap(self.id.decode('utf-8'))  # identifier
        space.setattr(w_node, space.wrap('id'), w_id)
        # ctx is 1-based, hence the ``- 1`` index into the class table.
        w_ctx = expr_context_to_class[self.ctx - 1]().to_object(space)  # expr_context
        space.setattr(w_node, space.wrap('ctx'), w_ctx)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Name`` from an app-level ``ast.Name``."""
        w_id = get_field(space, w_node, 'id', False)
        w_ctx = get_field(space, w_node, 'ctx', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _id = space.identifier_w(w_id)
        _ctx = expr_context.from_object(space, w_ctx)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Name(_id, _ctx, _lineno, _col_offset)
State.ast_type('Name', 'expr', ['id', 'ctx'])
class List(expr):
    """AST expression node for a list display ``[elts]`` with a ``ctx``."""
    def __init__(self, elts, ctx, lineno, col_offset):
        self.elts = elts
        self.ctx = ctx
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_List(self)
    def mutate_over(self, visitor):
        if self.elts:
            visitor._mutate_sequence(self.elts)
        return visitor.visit_List(self)
    def to_object(self, space):
        """Build the app-level ``ast.List`` object for this node."""
        w_node = space.call_function(get(space).w_List)
        if self.elts is None:
            elts_w = []
        else:
            elts_w = [node.to_object(space) for node in self.elts] # expr
        w_elts = space.newlist(elts_w)
        space.setattr(w_node, space.wrap('elts'), w_elts)
        # ctx is 1-based, hence the ``- 1`` index into the class table.
        w_ctx = expr_context_to_class[self.ctx - 1]().to_object(space)  # expr_context
        space.setattr(w_node, space.wrap('ctx'), w_ctx)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``List`` from an app-level ``ast.List``."""
        w_elts = get_field(space, w_node, 'elts', False)
        w_ctx = get_field(space, w_node, 'ctx', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        elts_w = space.unpackiterable(w_elts)
        _elts = [expr.from_object(space, w_item) for w_item in elts_w]
        _ctx = expr_context.from_object(space, w_ctx)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return List(_elts, _ctx, _lineno, _col_offset)
State.ast_type('List', 'expr', ['elts', 'ctx'])
class Tuple(expr):
    """AST expression node for a tuple display ``(elts)`` with a ``ctx``."""
    def __init__(self, elts, ctx, lineno, col_offset):
        self.elts = elts
        self.ctx = ctx
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Tuple(self)
    def mutate_over(self, visitor):
        if self.elts:
            visitor._mutate_sequence(self.elts)
        return visitor.visit_Tuple(self)
    def to_object(self, space):
        """Build the app-level ``ast.Tuple`` object for this node."""
        w_node = space.call_function(get(space).w_Tuple)
        if self.elts is None:
            elts_w = []
        else:
            elts_w = [node.to_object(space) for node in self.elts] # expr
        w_elts = space.newlist(elts_w)
        space.setattr(w_node, space.wrap('elts'), w_elts)
        # ctx is 1-based, hence the ``- 1`` index into the class table.
        w_ctx = expr_context_to_class[self.ctx - 1]().to_object(space)  # expr_context
        space.setattr(w_node, space.wrap('ctx'), w_ctx)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Tuple`` from an app-level ``ast.Tuple``."""
        w_elts = get_field(space, w_node, 'elts', False)
        w_ctx = get_field(space, w_node, 'ctx', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        elts_w = space.unpackiterable(w_elts)
        _elts = [expr.from_object(space, w_item) for w_item in elts_w]
        _ctx = expr_context.from_object(space, w_ctx)
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Tuple(_elts, _ctx, _lineno, _col_offset)
State.ast_type('Tuple', 'expr', ['elts', 'ctx'])
class Const(expr):
    """AST expression node holding an arbitrary pre-wrapped constant ``value``."""
    def __init__(self, value, lineno, col_offset):
        self.value = value
        expr.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_Const(self)
    def mutate_over(self, visitor):
        # Leaf node: no children to mutate.
        return visitor.visit_Const(self)
    def to_object(self, space):
        """Build the app-level ``ast.Const`` object for this node."""
        w_node = space.call_function(get(space).w_Const)
        w_value = self.value  # object (already wrapped, passed through as-is)
        space.setattr(w_node, space.wrap('value'), w_value)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Const`` from an app-level ``ast.Const``."""
        w_value = get_field(space, w_node, 'value', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _value = w_value
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return Const(_value, _lineno, _col_offset)
State.ast_type('Const', 'expr', ['value'])
# --- expr_context: generated enum-like family ----------------------------
# Interp-level code represents an expression context as a small int
# (Load=1 ... Param=6).  The singleton _Load/_Store/... classes exist only
# to convert back to the corresponding app-level ast objects;
# expr_context_to_class maps (int - 1) -> singleton class.
class expr_context(AST):
    @staticmethod
    def from_object(space, w_node):
        """Map an app-level context object to its interp-level int constant."""
        if space.isinstance_w(w_node, get(space).w_Load):
            return 1
        if space.isinstance_w(w_node, get(space).w_Store):
            return 2
        if space.isinstance_w(w_node, get(space).w_Del):
            return 3
        if space.isinstance_w(w_node, get(space).w_AugLoad):
            return 4
        if space.isinstance_w(w_node, get(space).w_AugStore):
            return 5
        if space.isinstance_w(w_node, get(space).w_Param):
            return 6
        raise oefmt(space.w_TypeError,
                "Expected expr_context node, got %T", w_node)
State.ast_type('expr_context', 'AST', None)
class _Load(expr_context):
    def to_object(self, space):
        return space.call_function(get(space).w_Load)
State.ast_type('Load', 'expr_context', None)
class _Store(expr_context):
    def to_object(self, space):
        return space.call_function(get(space).w_Store)
State.ast_type('Store', 'expr_context', None)
class _Del(expr_context):
    def to_object(self, space):
        return space.call_function(get(space).w_Del)
State.ast_type('Del', 'expr_context', None)
class _AugLoad(expr_context):
    def to_object(self, space):
        return space.call_function(get(space).w_AugLoad)
State.ast_type('AugLoad', 'expr_context', None)
class _AugStore(expr_context):
    def to_object(self, space):
        return space.call_function(get(space).w_AugStore)
State.ast_type('AugStore', 'expr_context', None)
class _Param(expr_context):
    def to_object(self, space):
        return space.call_function(get(space).w_Param)
State.ast_type('Param', 'expr_context', None)
# Interp-level integer constants; keep in sync with from_object above.
Load = 1
Store = 2
Del = 3
AugLoad = 4
AugStore = 5
Param = 6
# Indexed with (constant - 1) to recover the singleton class.
expr_context_to_class = [
    _Load,
    _Store,
    _Del,
    _AugLoad,
    _AugStore,
    _Param,
]
class slice(AST):
    """Abstract base for slice nodes (Slice / ExtSlice / Index).

    NOTE: the name intentionally shadows the ``slice`` builtin to match
    the CPython ``ast`` node name.
    """
    @staticmethod
    def from_object(space, w_node):
        """Dispatch to the concrete subclass's ``from_object``; None maps to None."""
        if space.is_w(w_node, space.w_None):
            return None
        if space.isinstance_w(w_node, get(space).w_Slice):
            return Slice.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_ExtSlice):
            return ExtSlice.from_object(space, w_node)
        if space.isinstance_w(w_node, get(space).w_Index):
            return Index.from_object(space, w_node)
        raise oefmt(space.w_TypeError,
                "Expected slice node, got %T", w_node)
State.ast_type('slice', 'AST', None, [])
class Slice(slice):
    """Slice node ``lower:upper:step``; each field is an optional expr (may be None)."""
    def __init__(self, lower, upper, step):
        self.lower = lower
        self.upper = upper
        self.step = step
    def walkabout(self, visitor):
        visitor.visit_Slice(self)
    def mutate_over(self, visitor):
        # Mutate only the fields that are present.
        if self.lower:
            self.lower = self.lower.mutate_over(visitor)
        if self.upper:
            self.upper = self.upper.mutate_over(visitor)
        if self.step:
            self.step = self.step.mutate_over(visitor)
        return visitor.visit_Slice(self)
    def to_object(self, space):
        """Build the app-level ``ast.Slice``; missing fields become None."""
        w_node = space.call_function(get(space).w_Slice)
        w_lower = self.lower.to_object(space) if self.lower is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('lower'), w_lower)
        w_upper = self.upper.to_object(space) if self.upper is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('upper'), w_upper)
        w_step = self.step.to_object(space) if self.step is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('step'), w_step)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``Slice`` from an app-level ``ast.Slice``."""
        # All three fields are optional (third argument True).
        w_lower = get_field(space, w_node, 'lower', True)
        w_upper = get_field(space, w_node, 'upper', True)
        w_step = get_field(space, w_node, 'step', True)
        _lower = expr.from_object(space, w_lower) if w_lower is not None else None
        _upper = expr.from_object(space, w_upper) if w_upper is not None else None
        _step = expr.from_object(space, w_step) if w_step is not None else None
        return Slice(_lower, _upper, _step)
State.ast_type('Slice', 'slice', ['lower', 'upper', 'step'])
class ExtSlice(slice):
    """Extended slice node: a sequence of ``dims`` slice nodes (e.g. ``a[i:j, k]``)."""
    def __init__(self, dims):
        self.dims = dims
    def walkabout(self, visitor):
        visitor.visit_ExtSlice(self)
    def mutate_over(self, visitor):
        if self.dims:
            visitor._mutate_sequence(self.dims)
        return visitor.visit_ExtSlice(self)
    def to_object(self, space):
        """Build the app-level ``ast.ExtSlice`` object for this node."""
        w_node = space.call_function(get(space).w_ExtSlice)
        if self.dims is None:
            dims_w = []
        else:
            dims_w = [node.to_object(space) for node in self.dims] # slice
        w_dims = space.newlist(dims_w)
        space.setattr(w_node, space.wrap('dims'), w_dims)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct an ``ExtSlice`` from an app-level ``ast.ExtSlice``."""
        w_dims = get_field(space, w_node, 'dims', False)
        dims_w = space.unpackiterable(w_dims)
        _dims = [slice.from_object(space, w_item) for w_item in dims_w]
        return ExtSlice(_dims)
State.ast_type('ExtSlice', 'slice', ['dims'])
class Index(slice):
    """Simple index node: wraps a single ``value`` expression used as a subscript."""
    def __init__(self, value):
        self.value = value
    def walkabout(self, visitor):
        visitor.visit_Index(self)
    def mutate_over(self, visitor):
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_Index(self)
    def to_object(self, space):
        """Build the app-level ``ast.Index`` object for this node."""
        w_node = space.call_function(get(space).w_Index)
        w_value = self.value.to_object(space)  # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct an ``Index`` from an app-level ``ast.Index``."""
        w_value = get_field(space, w_node, 'value', False)
        _value = expr.from_object(space, w_value)
        return Index(_value)
State.ast_type('Index', 'slice', ['value'])
# --- boolop: generated enum-like family ----------------------------------
# Boolean operators are small ints interp-level (And=1, Or=2); the _And/_Or
# singletons convert back to app-level objects via boolop_to_class.
class boolop(AST):
    @staticmethod
    def from_object(space, w_node):
        """Map an app-level boolop object to its interp-level int constant."""
        if space.isinstance_w(w_node, get(space).w_And):
            return 1
        if space.isinstance_w(w_node, get(space).w_Or):
            return 2
        raise oefmt(space.w_TypeError,
                "Expected boolop node, got %T", w_node)
State.ast_type('boolop', 'AST', None)
class _And(boolop):
    def to_object(self, space):
        return space.call_function(get(space).w_And)
State.ast_type('And', 'boolop', None)
class _Or(boolop):
    def to_object(self, space):
        return space.call_function(get(space).w_Or)
State.ast_type('Or', 'boolop', None)
# Interp-level integer constants; keep in sync with from_object above.
And = 1
Or = 2
# Indexed with (constant - 1) to recover the singleton class.
boolop_to_class = [
    _And,
    _Or,
]
# --- operator: generated enum-like family --------------------------------
# Binary operators are small ints interp-level (Add=1 ... FloorDiv=12);
# the singleton classes convert back to app-level objects via
# operator_to_class, indexed with (constant - 1).
class operator(AST):
    @staticmethod
    def from_object(space, w_node):
        """Map an app-level operator object to its interp-level int constant."""
        if space.isinstance_w(w_node, get(space).w_Add):
            return 1
        if space.isinstance_w(w_node, get(space).w_Sub):
            return 2
        if space.isinstance_w(w_node, get(space).w_Mult):
            return 3
        if space.isinstance_w(w_node, get(space).w_Div):
            return 4
        if space.isinstance_w(w_node, get(space).w_Mod):
            return 5
        if space.isinstance_w(w_node, get(space).w_Pow):
            return 6
        if space.isinstance_w(w_node, get(space).w_LShift):
            return 7
        if space.isinstance_w(w_node, get(space).w_RShift):
            return 8
        if space.isinstance_w(w_node, get(space).w_BitOr):
            return 9
        if space.isinstance_w(w_node, get(space).w_BitXor):
            return 10
        if space.isinstance_w(w_node, get(space).w_BitAnd):
            return 11
        if space.isinstance_w(w_node, get(space).w_FloorDiv):
            return 12
        raise oefmt(space.w_TypeError,
                "Expected operator node, got %T", w_node)
State.ast_type('operator', 'AST', None)
class _Add(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_Add)
State.ast_type('Add', 'operator', None)
class _Sub(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_Sub)
State.ast_type('Sub', 'operator', None)
class _Mult(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_Mult)
State.ast_type('Mult', 'operator', None)
class _Div(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_Div)
State.ast_type('Div', 'operator', None)
class _Mod(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_Mod)
State.ast_type('Mod', 'operator', None)
class _Pow(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_Pow)
State.ast_type('Pow', 'operator', None)
class _LShift(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_LShift)
State.ast_type('LShift', 'operator', None)
class _RShift(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_RShift)
State.ast_type('RShift', 'operator', None)
class _BitOr(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_BitOr)
State.ast_type('BitOr', 'operator', None)
class _BitXor(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_BitXor)
State.ast_type('BitXor', 'operator', None)
class _BitAnd(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_BitAnd)
State.ast_type('BitAnd', 'operator', None)
class _FloorDiv(operator):
    def to_object(self, space):
        return space.call_function(get(space).w_FloorDiv)
State.ast_type('FloorDiv', 'operator', None)
# Interp-level integer constants; keep in sync with from_object above.
Add = 1
Sub = 2
Mult = 3
Div = 4
Mod = 5
Pow = 6
LShift = 7
RShift = 8
BitOr = 9
BitXor = 10
BitAnd = 11
FloorDiv = 12
# Indexed with (constant - 1) to recover the singleton class.
operator_to_class = [
    _Add,
    _Sub,
    _Mult,
    _Div,
    _Mod,
    _Pow,
    _LShift,
    _RShift,
    _BitOr,
    _BitXor,
    _BitAnd,
    _FloorDiv,
]
# --- unaryop: generated enum-like family ---------------------------------
# Unary operators are small ints interp-level (Invert=1 ... USub=4); the
# singleton classes convert back to app-level objects via unaryop_to_class.
class unaryop(AST):
    @staticmethod
    def from_object(space, w_node):
        """Map an app-level unaryop object to its interp-level int constant."""
        if space.isinstance_w(w_node, get(space).w_Invert):
            return 1
        if space.isinstance_w(w_node, get(space).w_Not):
            return 2
        if space.isinstance_w(w_node, get(space).w_UAdd):
            return 3
        if space.isinstance_w(w_node, get(space).w_USub):
            return 4
        raise oefmt(space.w_TypeError,
                "Expected unaryop node, got %T", w_node)
State.ast_type('unaryop', 'AST', None)
class _Invert(unaryop):
    def to_object(self, space):
        return space.call_function(get(space).w_Invert)
State.ast_type('Invert', 'unaryop', None)
class _Not(unaryop):
    def to_object(self, space):
        return space.call_function(get(space).w_Not)
State.ast_type('Not', 'unaryop', None)
class _UAdd(unaryop):
    def to_object(self, space):
        return space.call_function(get(space).w_UAdd)
State.ast_type('UAdd', 'unaryop', None)
class _USub(unaryop):
    def to_object(self, space):
        return space.call_function(get(space).w_USub)
State.ast_type('USub', 'unaryop', None)
# Interp-level integer constants; keep in sync with from_object above.
Invert = 1
Not = 2
UAdd = 3
USub = 4
# Indexed with (constant - 1) to recover the singleton class.
unaryop_to_class = [
    _Invert,
    _Not,
    _UAdd,
    _USub,
]
# --- cmpop: generated enum-like family -----------------------------------
# Comparison operators are small ints interp-level (Eq=1 ... NotIn=10); the
# singleton classes convert back to app-level objects via cmpop_to_class.
class cmpop(AST):
    @staticmethod
    def from_object(space, w_node):
        """Map an app-level cmpop object to its interp-level int constant."""
        if space.isinstance_w(w_node, get(space).w_Eq):
            return 1
        if space.isinstance_w(w_node, get(space).w_NotEq):
            return 2
        if space.isinstance_w(w_node, get(space).w_Lt):
            return 3
        if space.isinstance_w(w_node, get(space).w_LtE):
            return 4
        if space.isinstance_w(w_node, get(space).w_Gt):
            return 5
        if space.isinstance_w(w_node, get(space).w_GtE):
            return 6
        if space.isinstance_w(w_node, get(space).w_Is):
            return 7
        if space.isinstance_w(w_node, get(space).w_IsNot):
            return 8
        if space.isinstance_w(w_node, get(space).w_In):
            return 9
        if space.isinstance_w(w_node, get(space).w_NotIn):
            return 10
        raise oefmt(space.w_TypeError,
                "Expected cmpop node, got %T", w_node)
State.ast_type('cmpop', 'AST', None)
class _Eq(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_Eq)
State.ast_type('Eq', 'cmpop', None)
class _NotEq(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_NotEq)
State.ast_type('NotEq', 'cmpop', None)
class _Lt(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_Lt)
State.ast_type('Lt', 'cmpop', None)
class _LtE(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_LtE)
State.ast_type('LtE', 'cmpop', None)
class _Gt(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_Gt)
State.ast_type('Gt', 'cmpop', None)
class _GtE(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_GtE)
State.ast_type('GtE', 'cmpop', None)
class _Is(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_Is)
State.ast_type('Is', 'cmpop', None)
class _IsNot(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_IsNot)
State.ast_type('IsNot', 'cmpop', None)
class _In(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_In)
State.ast_type('In', 'cmpop', None)
class _NotIn(cmpop):
    def to_object(self, space):
        return space.call_function(get(space).w_NotIn)
State.ast_type('NotIn', 'cmpop', None)
# Interp-level integer constants; keep in sync with from_object above.
Eq = 1
NotEq = 2
Lt = 3
LtE = 4
Gt = 5
GtE = 6
Is = 7
IsNot = 8
In = 9
NotIn = 10
# Indexed with (constant - 1) to recover the singleton class.
cmpop_to_class = [
    _Eq,
    _NotEq,
    _Lt,
    _LtE,
    _Gt,
    _GtE,
    _Is,
    _IsNot,
    _In,
    _NotIn,
]
class comprehension(AST):
    """One ``for target in iter if ifs...`` clause of a comprehension/genexp."""
    def __init__(self, target, iter, ifs):
        self.target = target
        self.iter = iter
        self.ifs = ifs
    def mutate_over(self, visitor):
        self.target = self.target.mutate_over(visitor)
        self.iter = self.iter.mutate_over(visitor)
        if self.ifs:
            visitor._mutate_sequence(self.ifs)
        return visitor.visit_comprehension(self)
    def walkabout(self, visitor):
        visitor.visit_comprehension(self)
    def to_object(self, space):
        """Build the app-level ``ast.comprehension`` object for this node."""
        w_node = space.call_function(get(space).w_comprehension)
        w_target = self.target.to_object(space)  # expr
        space.setattr(w_node, space.wrap('target'), w_target)
        w_iter = self.iter.to_object(space)  # expr
        space.setattr(w_node, space.wrap('iter'), w_iter)
        if self.ifs is None:
            ifs_w = []
        else:
            ifs_w = [node.to_object(space) for node in self.ifs] # expr
        w_ifs = space.newlist(ifs_w)
        space.setattr(w_node, space.wrap('ifs'), w_ifs)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct a ``comprehension`` from its app-level counterpart."""
        w_target = get_field(space, w_node, 'target', False)
        w_iter = get_field(space, w_node, 'iter', False)
        w_ifs = get_field(space, w_node, 'ifs', False)
        _target = expr.from_object(space, w_target)
        _iter = expr.from_object(space, w_iter)
        ifs_w = space.unpackiterable(w_ifs)
        _ifs = [expr.from_object(space, w_item) for w_item in ifs_w]
        return comprehension(_target, _iter, _ifs)
State.ast_type('comprehension', 'AST', ['target', 'iter', 'ifs'])
class excepthandler(AST):
    """Abstract base for exception-handler nodes; carries source position."""
    def __init__(self, lineno, col_offset):
        self.lineno = lineno
        self.col_offset = col_offset
    @staticmethod
    def from_object(space, w_node):
        """Dispatch to the concrete subclass's ``from_object``; None maps to None."""
        if space.is_w(w_node, space.w_None):
            return None
        if space.isinstance_w(w_node, get(space).w_ExceptHandler):
            return ExceptHandler.from_object(space, w_node)
        raise oefmt(space.w_TypeError,
                "Expected excepthandler node, got %T", w_node)
State.ast_type('excepthandler', 'AST', None, ['lineno', 'col_offset'])
class ExceptHandler(excepthandler):
    """One ``except type as name: body`` clause.

    ``type`` (expr) and ``name`` (identifier) are optional; ``body`` is a
    list of statements.
    """
    def __init__(self, type, name, body, lineno, col_offset):
        self.type = type
        self.name = name
        self.body = body
        excepthandler.__init__(self, lineno, col_offset)
    def walkabout(self, visitor):
        visitor.visit_ExceptHandler(self)
    def mutate_over(self, visitor):
        if self.type:
            self.type = self.type.mutate_over(visitor)
        if self.body:
            visitor._mutate_sequence(self.body)
        return visitor.visit_ExceptHandler(self)
    def to_object(self, space):
        """Build the app-level ``ast.ExceptHandler``; missing fields become None."""
        w_node = space.call_function(get(space).w_ExceptHandler)
        w_type = self.type.to_object(space) if self.type is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('type'), w_type)
        w_name = space.wrap(self.name.decode('utf-8')) if self.name is not None else space.w_None  # identifier
        space.setattr(w_node, space.wrap('name'), w_name)
        if self.body is None:
            body_w = []
        else:
            body_w = [node.to_object(space) for node in self.body] # stmt
        w_body = space.newlist(body_w)
        space.setattr(w_node, space.wrap('body'), w_body)
        w_lineno = space.wrap(self.lineno)  # int
        space.setattr(w_node, space.wrap('lineno'), w_lineno)
        w_col_offset = space.wrap(self.col_offset)  # int
        space.setattr(w_node, space.wrap('col_offset'), w_col_offset)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        """Reconstruct an ``ExceptHandler`` from its app-level counterpart."""
        # 'type' and 'name' are optional fields (third argument True).
        w_type = get_field(space, w_node, 'type', True)
        w_name = get_field(space, w_node, 'name', True)
        w_body = get_field(space, w_node, 'body', False)
        w_lineno = get_field(space, w_node, 'lineno', False)
        w_col_offset = get_field(space, w_node, 'col_offset', False)
        _type = expr.from_object(space, w_type) if w_type is not None else None
        _name = space.str_or_None_w(w_name)
        body_w = space.unpackiterable(w_body)
        _body = [stmt.from_object(space, w_item) for w_item in body_w]
        _lineno = space.int_w(w_lineno)
        _col_offset = space.int_w(w_col_offset)
        return ExceptHandler(_type, _name, _body, _lineno, _col_offset)
State.ast_type('ExceptHandler', 'excepthandler', ['type', 'name', 'body'])
class arguments(AST):
def __init__(self, args, vararg, varargannotation, kwonlyargs, kwarg, kwargannotation, defaults, kw_defaults):
self.args = args
self.vararg = vararg
self.varargannotation = varargannotation
self.kwonlyargs = kwonlyargs
self.kwarg = kwarg
self.kwargannotation = kwargannotation
self.defaults = defaults
self.kw_defaults = kw_defaults
def mutate_over(self, visitor):
if self.args:
visitor._mutate_sequence(self.args)
if self.varargannotation:
self.varargannotation = self.varargannotation.mutate_over(visitor)
if self.kwonlyargs:
visitor._mutate_sequence(self.kwonlyargs)
if self.kwargannotation:
self.kwargannotation = self.kwargannotation.mutate_over(visitor)
if self.defaults:
visitor._mutate_sequence(self.defaults)
if self.kw_defaults:
visitor._mutate_sequence(self.kw_defaults)
return visitor.visit_arguments(self)
def walkabout(self, visitor):
visitor.visit_arguments(self)
def to_object(self, space):
w_node = space.call_function(get(space).w_arguments)
if self.args is None:
args_w = []
else:
args_w = [node.to_object(space) for node in self.args] # arg
w_args = space.newlist(args_w)
space.setattr(w_node, space.wrap('args'), w_args)
w_vararg = space.wrap(self.vararg.decode('utf-8')) if self.vararg is not None else space.w_None # identifier
space.setattr(w_node, space.wrap('vararg'), w_vararg)
w_varargannotation = self.varargannotation.to_object(space) if self.varargannotation is not None else space.w_None # expr
space.setattr(w_node, space.wrap('varargannotation'), w_varargannotation)
if self.kwonlyargs is None:
kwonlyargs_w = []
else:
kwonlyargs_w = [node.to_object(space) for node in self.kwonlyargs] # arg
w_kwonlyargs = space.newlist(kwonlyargs_w)
space.setattr(w_node, space.wrap('kwonlyargs'), w_kwonlyargs)
w_kwarg = space.wrap(self.kwarg.decode('utf-8')) if self.kwarg is not None else space.w_None # identifier
space.setattr(w_node, space.wrap('kwarg'), w_kwarg)
w_kwargannotation = self.kwargannotation.to_object(space) if self.kwargannotation is not None else space.w_None # expr
space.setattr(w_node, space.wrap('kwargannotation'), w_kwargannotation)
if self.defaults is None:
defaults_w = []
else:
defaults_w = [node.to_object(space) for node in self.defaults] # expr
w_defaults = space.newlist(defaults_w)
space.setattr(w_node, space.wrap('defaults'), w_defaults)
if self.kw_defaults is None:
kw_defaults_w = []
else:
kw_defaults_w = [node.to_object(space) if node is not None else space.w_None for node in self.kw_defaults] # expr
w_kw_defaults = space.newlist(kw_defaults_w)
space.setattr(w_node, space.wrap('kw_defaults'), w_kw_defaults)
return w_node
    @staticmethod
    def from_object(space, w_node):
        """Rebuild an interp-level ``arguments`` node from its app-level
        AST object (inverse of :meth:`to_object`).

        Fields fetched with ``get_field(..., True)`` are optional and may
        come back as ``None``.
        """
        w_args = get_field(space, w_node, 'args', False)
        w_vararg = get_field(space, w_node, 'vararg', True)
        w_varargannotation = get_field(space, w_node, 'varargannotation', True)
        w_kwonlyargs = get_field(space, w_node, 'kwonlyargs', False)
        w_kwarg = get_field(space, w_node, 'kwarg', True)
        w_kwargannotation = get_field(space, w_node, 'kwargannotation', True)
        w_defaults = get_field(space, w_node, 'defaults', False)
        w_kw_defaults = get_field(space, w_node, 'kw_defaults', False)
        args_w = space.unpackiterable(w_args)
        _args = [arg.from_object(space, w_item) for w_item in args_w]
        _vararg = space.str_or_None_w(w_vararg)
        _varargannotation = expr.from_object(space, w_varargannotation) if w_varargannotation is not None else None
        kwonlyargs_w = space.unpackiterable(w_kwonlyargs)
        _kwonlyargs = [arg.from_object(space, w_item) for w_item in kwonlyargs_w]
        _kwarg = space.str_or_None_w(w_kwarg)
        _kwargannotation = expr.from_object(space, w_kwargannotation) if w_kwargannotation is not None else None
        defaults_w = space.unpackiterable(w_defaults)
        _defaults = [expr.from_object(space, w_item) for w_item in defaults_w]
        kw_defaults_w = space.unpackiterable(w_kw_defaults)
        _kw_defaults = [expr.from_object(space, w_item) for w_item in kw_defaults_w]
        return arguments(_args, _vararg, _varargannotation, _kwonlyargs, _kwarg, _kwargannotation, _defaults, _kw_defaults)
# Register the app-level type for 'arguments' with its field list.
State.ast_type('arguments', 'AST', ['args', 'vararg', 'varargannotation', 'kwonlyargs', 'kwarg', 'kwargannotation', 'defaults', 'kw_defaults'])
class arg(AST):
    """AST node for one formal parameter: a name plus optional annotation."""
    def __init__(self, arg, annotation):
        self.arg = arg
        self.annotation = annotation
    def mutate_over(self, visitor):
        # Only the annotation is an expr child that can be rewritten.
        if self.annotation:
            self.annotation = self.annotation.mutate_over(visitor)
        return visitor.visit_arg(self)
    def walkabout(self, visitor):
        visitor.visit_arg(self)
    def to_object(self, space):
        # Marshal into the app-level _ast.arg instance.
        w_node = space.call_function(get(space).w_arg)
        w_arg = space.wrap(self.arg.decode('utf-8')) # identifier
        space.setattr(w_node, space.wrap('arg'), w_arg)
        w_annotation = self.annotation.to_object(space) if self.annotation is not None else space.w_None  # expr
        space.setattr(w_node, space.wrap('annotation'), w_annotation)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        # Rebuild from the app-level object; the annotation is optional.
        w_arg = get_field(space, w_node, 'arg', False)
        w_annotation = get_field(space, w_node, 'annotation', True)
        _arg = space.identifier_w(w_arg)
        _annotation = expr.from_object(space, w_annotation) if w_annotation is not None else None
        return arg(_arg, _annotation)
# Register the app-level type for this node class.
State.ast_type('arg', 'AST', ['arg', 'annotation'])
class keyword(AST):
    """AST node for a keyword argument in a call: ``arg=value``."""
    def __init__(self, arg, value):
        self.arg = arg
        self.value = value
    def mutate_over(self, visitor):
        # The value expression is mandatory, so no None check is needed.
        self.value = self.value.mutate_over(visitor)
        return visitor.visit_keyword(self)
    def walkabout(self, visitor):
        visitor.visit_keyword(self)
    def to_object(self, space):
        # Marshal into the app-level _ast.keyword instance.
        w_node = space.call_function(get(space).w_keyword)
        w_arg = space.wrap(self.arg.decode('utf-8')) # identifier
        space.setattr(w_node, space.wrap('arg'), w_arg)
        w_value = self.value.to_object(space) # expr
        space.setattr(w_node, space.wrap('value'), w_value)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        # Rebuild from the app-level object; both fields are required.
        w_arg = get_field(space, w_node, 'arg', False)
        w_value = get_field(space, w_node, 'value', False)
        _arg = space.identifier_w(w_arg)
        _value = expr.from_object(space, w_value)
        return keyword(_arg, _value)
# Register the app-level type for this node class.
State.ast_type('keyword', 'AST', ['arg', 'value'])
class alias(AST):
    """AST node for an import alias: ``name`` or ``name as asname``."""
    def __init__(self, name, asname):
        self.name = name
        self.asname = asname
    def mutate_over(self, visitor):
        # alias has no expression children, so nothing to mutate.
        return visitor.visit_alias(self)
    def walkabout(self, visitor):
        visitor.visit_alias(self)
    def to_object(self, space):
        # Marshal into the app-level _ast.alias instance.
        w_node = space.call_function(get(space).w_alias)
        w_name = space.wrap(self.name.decode('utf-8')) # identifier
        space.setattr(w_node, space.wrap('name'), w_name)
        w_asname = space.wrap(self.asname.decode('utf-8')) if self.asname is not None else space.w_None  # identifier
        space.setattr(w_node, space.wrap('asname'), w_asname)
        return w_node
    @staticmethod
    def from_object(space, w_node):
        # Rebuild from the app-level object; asname is optional.
        w_name = get_field(space, w_node, 'name', False)
        w_asname = get_field(space, w_node, 'asname', True)
        _name = space.identifier_w(w_name)
        _asname = space.str_or_None_w(w_asname)
        return alias(_name, _asname)
# Register the app-level type for this node class.
State.ast_type('alias', 'AST', ['name', 'asname'])
class ASTVisitor(object):
    """Base double-dispatch visitor over the AST.

    Every ``visit_<Node>`` method defaults to :meth:`default_visitor`;
    subclasses override only the node kinds they care about.  This class
    is generator output and must stay a flat list of explicit methods
    (RPython cannot synthesize methods dynamically).
    """
    def visit_sequence(self, seq):
        # Walk an optional list of child nodes (seq may be None).
        if seq is not None:
            for node in seq:
                node.walkabout(self)
    def visit_kwonlydefaults(self, seq):
        # Like visit_sequence, but individual entries may also be None
        # (keyword-only arguments without a default).
        if seq is not None:
            for node in seq:
                if node:
                    node.walkabout(self)
    def default_visitor(self, node):
        # Subclasses must override visit_* (or this) for nodes they handle.
        raise NodeVisitorNotImplemented
    def _mutate_sequence(self, seq):
        # Replace each element with its mutated version, in place.
        for i in range(len(seq)):
            if seq[i] is not None:
                seq[i] = seq[i].mutate_over(self)
    def visit_Module(self, node):
        return self.default_visitor(node)
    def visit_Interactive(self, node):
        return self.default_visitor(node)
    def visit_Expression(self, node):
        return self.default_visitor(node)
    def visit_Suite(self, node):
        return self.default_visitor(node)
    def visit_FunctionDef(self, node):
        return self.default_visitor(node)
    def visit_ClassDef(self, node):
        return self.default_visitor(node)
    def visit_Return(self, node):
        return self.default_visitor(node)
    def visit_Delete(self, node):
        return self.default_visitor(node)
    def visit_Assign(self, node):
        return self.default_visitor(node)
    def visit_AugAssign(self, node):
        return self.default_visitor(node)
    def visit_For(self, node):
        return self.default_visitor(node)
    def visit_While(self, node):
        return self.default_visitor(node)
    def visit_If(self, node):
        return self.default_visitor(node)
    def visit_With(self, node):
        return self.default_visitor(node)
    def visit_Raise(self, node):
        return self.default_visitor(node)
    def visit_TryExcept(self, node):
        return self.default_visitor(node)
    def visit_TryFinally(self, node):
        return self.default_visitor(node)
    def visit_Assert(self, node):
        return self.default_visitor(node)
    def visit_Import(self, node):
        return self.default_visitor(node)
    def visit_ImportFrom(self, node):
        return self.default_visitor(node)
    def visit_Global(self, node):
        return self.default_visitor(node)
    def visit_Nonlocal(self, node):
        return self.default_visitor(node)
    def visit_Expr(self, node):
        return self.default_visitor(node)
    def visit_Pass(self, node):
        return self.default_visitor(node)
    def visit_Break(self, node):
        return self.default_visitor(node)
    def visit_Continue(self, node):
        return self.default_visitor(node)
    def visit_BoolOp(self, node):
        return self.default_visitor(node)
    def visit_BinOp(self, node):
        return self.default_visitor(node)
    def visit_UnaryOp(self, node):
        return self.default_visitor(node)
    def visit_Lambda(self, node):
        return self.default_visitor(node)
    def visit_IfExp(self, node):
        return self.default_visitor(node)
    def visit_Dict(self, node):
        return self.default_visitor(node)
    def visit_Set(self, node):
        return self.default_visitor(node)
    def visit_ListComp(self, node):
        return self.default_visitor(node)
    def visit_SetComp(self, node):
        return self.default_visitor(node)
    def visit_DictComp(self, node):
        return self.default_visitor(node)
    def visit_GeneratorExp(self, node):
        return self.default_visitor(node)
    def visit_Yield(self, node):
        return self.default_visitor(node)
    def visit_Compare(self, node):
        return self.default_visitor(node)
    def visit_Call(self, node):
        return self.default_visitor(node)
    def visit_Num(self, node):
        return self.default_visitor(node)
    def visit_Str(self, node):
        return self.default_visitor(node)
    def visit_Bytes(self, node):
        return self.default_visitor(node)
    def visit_Ellipsis(self, node):
        return self.default_visitor(node)
    def visit_Attribute(self, node):
        return self.default_visitor(node)
    def visit_Subscript(self, node):
        return self.default_visitor(node)
    def visit_Starred(self, node):
        return self.default_visitor(node)
    def visit_Name(self, node):
        return self.default_visitor(node)
    def visit_List(self, node):
        return self.default_visitor(node)
    def visit_Tuple(self, node):
        return self.default_visitor(node)
    def visit_Const(self, node):
        return self.default_visitor(node)
    def visit_Slice(self, node):
        return self.default_visitor(node)
    def visit_ExtSlice(self, node):
        return self.default_visitor(node)
    def visit_Index(self, node):
        return self.default_visitor(node)
    def visit_comprehension(self, node):
        return self.default_visitor(node)
    def visit_ExceptHandler(self, node):
        return self.default_visitor(node)
    def visit_arguments(self, node):
        return self.default_visitor(node)
    def visit_arg(self, node):
        return self.default_visitor(node)
    def visit_keyword(self, node):
        return self.default_visitor(node)
    def visit_alias(self, node):
        return self.default_visitor(node)
class GenericASTVisitor(ASTVisitor):
    """Visitor that recursively walks every child of every node.

    Subclass and override individual ``visit_<Node>`` methods to hook
    specific node kinds while keeping the full traversal; ``if node.x:``
    guards skip optional children that are absent.
    """
    def visit_Module(self, node):
        self.visit_sequence(node.body)
    def visit_Interactive(self, node):
        self.visit_sequence(node.body)
    def visit_Expression(self, node):
        node.body.walkabout(self)
    def visit_Suite(self, node):
        self.visit_sequence(node.body)
    def visit_FunctionDef(self, node):
        node.args.walkabout(self)
        self.visit_sequence(node.body)
        self.visit_sequence(node.decorator_list)
        if node.returns:
            node.returns.walkabout(self)
    def visit_ClassDef(self, node):
        self.visit_sequence(node.bases)
        self.visit_sequence(node.keywords)
        if node.starargs:
            node.starargs.walkabout(self)
        if node.kwargs:
            node.kwargs.walkabout(self)
        self.visit_sequence(node.body)
        self.visit_sequence(node.decorator_list)
    def visit_Return(self, node):
        if node.value:
            node.value.walkabout(self)
    def visit_Delete(self, node):
        self.visit_sequence(node.targets)
    def visit_Assign(self, node):
        self.visit_sequence(node.targets)
        node.value.walkabout(self)
    def visit_AugAssign(self, node):
        node.target.walkabout(self)
        node.value.walkabout(self)
    def visit_For(self, node):
        node.target.walkabout(self)
        node.iter.walkabout(self)
        self.visit_sequence(node.body)
        self.visit_sequence(node.orelse)
    def visit_While(self, node):
        node.test.walkabout(self)
        self.visit_sequence(node.body)
        self.visit_sequence(node.orelse)
    def visit_If(self, node):
        node.test.walkabout(self)
        self.visit_sequence(node.body)
        self.visit_sequence(node.orelse)
    def visit_With(self, node):
        node.context_expr.walkabout(self)
        if node.optional_vars:
            node.optional_vars.walkabout(self)
        self.visit_sequence(node.body)
    def visit_Raise(self, node):
        if node.exc:
            node.exc.walkabout(self)
        if node.cause:
            node.cause.walkabout(self)
    def visit_TryExcept(self, node):
        self.visit_sequence(node.body)
        self.visit_sequence(node.handlers)
        self.visit_sequence(node.orelse)
    def visit_TryFinally(self, node):
        self.visit_sequence(node.body)
        self.visit_sequence(node.finalbody)
    def visit_Assert(self, node):
        node.test.walkabout(self)
        if node.msg:
            node.msg.walkabout(self)
    def visit_Import(self, node):
        self.visit_sequence(node.names)
    def visit_ImportFrom(self, node):
        self.visit_sequence(node.names)
    def visit_Global(self, node):
        # Leaf node: only identifiers, no child nodes to walk.
        pass
    def visit_Nonlocal(self, node):
        pass
    def visit_Expr(self, node):
        node.value.walkabout(self)
    def visit_Pass(self, node):
        pass
    def visit_Break(self, node):
        pass
    def visit_Continue(self, node):
        pass
    def visit_BoolOp(self, node):
        self.visit_sequence(node.values)
    def visit_BinOp(self, node):
        node.left.walkabout(self)
        node.right.walkabout(self)
    def visit_UnaryOp(self, node):
        node.operand.walkabout(self)
    def visit_Lambda(self, node):
        node.args.walkabout(self)
        node.body.walkabout(self)
    def visit_IfExp(self, node):
        node.test.walkabout(self)
        node.body.walkabout(self)
        node.orelse.walkabout(self)
    def visit_Dict(self, node):
        self.visit_sequence(node.keys)
        self.visit_sequence(node.values)
    def visit_Set(self, node):
        self.visit_sequence(node.elts)
    def visit_ListComp(self, node):
        node.elt.walkabout(self)
        self.visit_sequence(node.generators)
    def visit_SetComp(self, node):
        node.elt.walkabout(self)
        self.visit_sequence(node.generators)
    def visit_DictComp(self, node):
        node.key.walkabout(self)
        node.value.walkabout(self)
        self.visit_sequence(node.generators)
    def visit_GeneratorExp(self, node):
        node.elt.walkabout(self)
        self.visit_sequence(node.generators)
    def visit_Yield(self, node):
        if node.value:
            node.value.walkabout(self)
    def visit_Compare(self, node):
        node.left.walkabout(self)
        self.visit_sequence(node.comparators)
    def visit_Call(self, node):
        node.func.walkabout(self)
        self.visit_sequence(node.args)
        self.visit_sequence(node.keywords)
        if node.starargs:
            node.starargs.walkabout(self)
        if node.kwargs:
            node.kwargs.walkabout(self)
    def visit_Num(self, node):
        pass
    def visit_Str(self, node):
        pass
    def visit_Bytes(self, node):
        pass
    def visit_Ellipsis(self, node):
        pass
    def visit_Attribute(self, node):
        node.value.walkabout(self)
    def visit_Subscript(self, node):
        node.value.walkabout(self)
        node.slice.walkabout(self)
    def visit_Starred(self, node):
        node.value.walkabout(self)
    def visit_Name(self, node):
        pass
    def visit_List(self, node):
        self.visit_sequence(node.elts)
    def visit_Tuple(self, node):
        self.visit_sequence(node.elts)
    def visit_Const(self, node):
        pass
    def visit_Slice(self, node):
        # All three slice components are optional.
        if node.lower:
            node.lower.walkabout(self)
        if node.upper:
            node.upper.walkabout(self)
        if node.step:
            node.step.walkabout(self)
    def visit_ExtSlice(self, node):
        self.visit_sequence(node.dims)
    def visit_Index(self, node):
        node.value.walkabout(self)
    def visit_comprehension(self, node):
        node.target.walkabout(self)
        node.iter.walkabout(self)
        self.visit_sequence(node.ifs)
    def visit_ExceptHandler(self, node):
        if node.type:
            node.type.walkabout(self)
        self.visit_sequence(node.body)
    def visit_arguments(self, node):
        self.visit_sequence(node.args)
        if node.varargannotation:
            node.varargannotation.walkabout(self)
        self.visit_sequence(node.kwonlyargs)
        if node.kwargannotation:
            node.kwargannotation.walkabout(self)
        self.visit_sequence(node.defaults)
        self.visit_sequence(node.kw_defaults)
    def visit_arg(self, node):
        if node.annotation:
            node.annotation.walkabout(self)
    def visit_keyword(self, node):
        node.value.walkabout(self)
    def visit_alias(self, node):
        pass
| 37.177813 | 143 | 0.647797 |
7b8a421675d544a0b0e1b3b5722a80a25b029581 | 3,665 | py | Python | tools/slabratetop.py | scripts/bcc | 903513e454c9847370f7e54797ff29f12e1de4d9 | [
"Apache-2.0"
] | 1 | 2020-10-27T17:58:28.000Z | 2020-10-27T17:58:28.000Z | tools/slabratetop.py | scripts/bcc | 903513e454c9847370f7e54797ff29f12e1de4d9 | [
"Apache-2.0"
] | null | null | null | tools/slabratetop.py | scripts/bcc | 903513e454c9847370f7e54797ff29f12e1de4d9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# slabratetop Summarize kmem_cache_alloc() calls.
# For Linux, uses BCC, eBPF.
#
# USAGE: slabratetop [-h] [-C] [-r MAXROWS] [interval] [count]
#
# This uses in-kernel BPF maps to store cache summaries for efficiency.
#
# SEE ALSO: slabtop(1), which shows the cache volumes.
#
# Copyright 2016 Netflix, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 15-Oct-2016 Brendan Gregg Created this.
from __future__ import print_function
from bcc import BPF
from bcc.utils import printb
from time import sleep, strftime
import argparse
import signal
from subprocess import call
# arguments
examples = """examples:
./slabratetop # kmem_cache_alloc() top, 1 second refresh
./slabratetop -C # don't clear the screen
./slabratetop 5 # 5 second summaries
./slabratetop 5 10 # 5 second summaries, 10 times only
"""
parser = argparse.ArgumentParser(
description="Kernel SLAB/SLUB memory cache allocation rate top",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-C", "--noclear", action="store_true",
help="don't clear the screen")
parser.add_argument("-r", "--maxrows", default=20,
help="maximum rows to print, default 20")
parser.add_argument("interval", nargs="?", default=1,
help="output interval, in seconds")
parser.add_argument("count", nargs="?", default=99999999,
help="number of outputs")
parser.add_argument("--ebpf", action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
interval = int(args.interval)
countdown = int(args.count)
maxrows = int(args.maxrows)
clear = not int(args.noclear)
debug = 0
# linux stats
loadavg = "/proc/loadavg"
# signal handler
def signal_ignore(signum, frame):
    """SIGINT handler: swallow the interrupt and just emit a newline.

    The first parameter was renamed from ``signal`` to the conventional
    ``signum`` so it no longer shadows the imported ``signal`` module.
    """
    print()
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <linux/mm.h>
#include <linux/slab.h>
#ifdef CONFIG_SLUB
#include <linux/slub_def.h>
#else
#include <linux/slab_def.h>
#endif
#define CACHE_NAME_SIZE 32
// the key for the output summary
struct info_t {
char name[CACHE_NAME_SIZE];
};
// the value of the output summary
struct val_t {
u64 count;
u64 size;
};
BPF_HASH(counts, struct info_t, struct val_t);
int kprobe__kmem_cache_alloc(struct pt_regs *ctx, struct kmem_cache *cachep)
{
struct info_t info = {};
const char *name = cachep->name;
bpf_probe_read(&info.name, sizeof(info.name), name);
struct val_t *valp, zero = {};
valp = counts.lookup_or_init(&info, &zero);
if (valp) {
valp->count++;
valp->size += cachep->size;
}
return 0;
}
"""
# With --ebpf (or debug), dump the generated C program; --ebpf then exits
# without attaching anything.
if debug or args.ebpf:
    print(bpf_text)
    if args.ebpf:
        exit()
# initialize BPF: compiles the C text and auto-attaches the kprobe__ probe.
b = BPF(text=bpf_text)
print('Tracing... Output every %d secs. Hit Ctrl-C to end' % interval)
# output loop: refresh every ``interval`` seconds until Ctrl-C or the
# requested number of outputs has been printed.
exiting = 0
while 1:
    try:
        sleep(interval)
    except KeyboardInterrupt:
        # Print one final summary before detaching.
        exiting = 1
    # header
    if clear:
        call("clear")
    else:
        print()
    with open(loadavg) as stats:
        print("%-8s loadavg: %s" % (strftime("%H:%M:%S"), stats.read()))
    print("%-32s %6s %10s" % ("CACHE", "ALLOCS", "BYTES"))
    # per-cache output, largest total bytes first (the old "by-TID" comment
    # was copied from another tool).
    # NOTE(review): the lambda parameter shadows the outer ``counts`` table;
    # rename it if this line is ever touched.
    counts = b.get_table("counts")
    line = 0
    for k, v in reversed(sorted(counts.items(),
                                key=lambda counts: counts[1].size)):
        printb(b"%-32s %6d %10d" % (k.name, v.count, v.size))
        line += 1
        if line >= maxrows:
            break
    # Reset the kernel-side map so each refresh shows per-interval rates.
    counts.clear()
    countdown -= 1
    if exiting or countdown == 0:
        print("Detaching...")
        exit()
38e927ac0bbd111e31c51ea124f15bddd7fc0974 | 2,984 | py | Python | setup.py | alfa24/django-image-assets | 053e7a6792b74771b02ebffc7fc983fdc6d8d299 | [
"MIT"
] | 1 | 2021-12-26T02:54:38.000Z | 2021-12-26T02:54:38.000Z | setup.py | alfa24/django-image-assets | 053e7a6792b74771b02ebffc7fc983fdc6d8d299 | [
"MIT"
] | 80 | 2020-03-18T14:58:12.000Z | 2022-02-03T07:55:08.000Z | setup.py | alfa24/django-image-assets | 053e7a6792b74771b02ebffc7fc983fdc6d8d299 | [
"MIT"
] | 2 | 2020-03-18T14:26:34.000Z | 2021-11-10T14:14:29.000Z | import os
import re
import subprocess
from setuptools import setup, find_packages # type: ignore
from pathlib import Path
# Long description for PyPI comes straight from the README.
with open('README.md') as f:
    long_description = f.read()
# Matches the "Version: X" line of an sdist's PKG-INFO (see get_version).
version_re = re.compile('^Version: (.+)$', re.M)
package_name = 'django-image-assets'
def get_version():
    """Derive the package version.

    Prefers ``git describe`` when run from a git checkout (rewriting the
    describe output into PEP 440-ish ``.postN`` / ``.dev1`` markers),
    otherwise falls back to the ``Version:`` field of an sdist's PKG-INFO.
    Returns ``None`` when the version cannot be determined.

    https://gist.github.com/pwithnall/7bc5f320b3bdf418265a
    """
    d: Path = Path(__file__).absolute().parent
    git_dir = d.joinpath('.git')
    if git_dir.is_dir():
        # Get the version using "git describe".
        cmd = 'git describe --tags --match [0-9]*'.split()
        try:
            version = subprocess.check_output(cmd).decode().strip()
        except subprocess.CalledProcessError:
            return None
        # PEP 386 compatibility: "1.2-3-gabcdef" -> "1.2.post3"
        if '-' in version:
            version = '.post'.join(version.split('-')[:2])
        # Don't declare a version "dirty" merely because a time stamp has
        # changed. If it is dirty, append a ".dev1" suffix to indicate a
        # development revision after the release.
        with open(os.devnull, 'w') as fd_devnull:
            subprocess.call(['git', 'status'],
                            stdout=fd_devnull, stderr=fd_devnull)
        cmd = 'git diff-index --name-only HEAD'.split()
        try:
            dirty = subprocess.check_output(cmd).decode().strip()
        except subprocess.CalledProcessError:
            return None
        if dirty != '':
            version += '.dev1'
    else:
        # Extract the version from the PKG-INFO file.
        try:
            with open('PKG-INFO') as v:
                match = version_re.search(v.read())
                # Bug fix: guard against a PKG-INFO without a "Version:"
                # line instead of raising AttributeError on None.group(1).
                version = match.group(1) if match else None
        except FileNotFoundError:
            version = None
    return version
# Packaging metadata; version comes from git/PKG-INFO via get_version(),
# with 'dev' as a last-resort placeholder.
setup(
    name='django-image-assets',
    version=get_version() or 'dev',
    long_description=long_description,
    long_description_content_type='text/markdown',
    packages=find_packages(),
    include_package_data=True,
    url='https://github.com/just-work/django-image-assets',
    license='MIT',
    author='Sergey Tikhonov',
    author_email='zimbler@gmail.com',
    description='Django application for image assets management',
    # Runtime dependencies with conservative upper bounds.
    install_requires=[
        'Django>=2.2,<3.3',
        'Pillow>=8.1.2,<9.0.0',
        'django-bitfield~=2.0',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Framework :: Django :: 2.2',
        'Framework :: Django :: 3.0',
        'Framework :: Django :: 3.1',
        'Framework :: Django :: 3.2',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        ('Topic :: Internet :: WWW/HTTP :: Dynamic Content :: '
         'Content Management System'),
    ]
)
| 31.744681 | 73 | 0.591153 |
4e3eea40cb78c72d59466fead45ced0140a82142 | 1,564 | py | Python | venv/Lib/site-packages/setuptools/py27compat.py | jodieritchie/MLHPortfolio | 66d23165c1c3277a6b7320af1cfc353a07385e7a | [
"MIT"
] | 1 | 2020-01-29T13:35:30.000Z | 2020-01-29T13:35:30.000Z | venv/Lib/site-packages/setuptools/py27compat.py | jodieritchie/MLHPortfolio | 66d23165c1c3277a6b7320af1cfc353a07385e7a | [
"MIT"
] | 3 | 2020-06-17T16:01:27.000Z | 2022-01-13T02:52:53.000Z | venv/Lib/site-packages/setuptools/py27compat.py | jodieritchie/MLHPortfolio | 66d23165c1c3277a6b7320af1cfc353a07385e7a | [
"MIT"
] | null | null | null | """
Compatibility Support for Python 2.7 and earlier
"""
import sys
import platform
from setuptools.extern import six
def get_all_headers(message, key):
    """
    Given an HTTPMessage, return all headers matching a given key.
    """
    values = message.get_all(key)
    return values
# Python 2: httplib headers are mimetools.Message objects, which expose
# getheaders() instead of get_all(); override the Py3 definition above.
if six.PY2:
    def get_all_headers(message, key):  # noqa
        return message.getheaders(key)
# True only on Linux under Python 2, where shutil.rmtree mishandles
# unicode paths containing non-ASCII bytes.
linux_py2_ascii = (
    platform.system() == 'Linux' and
    six.PY2
)
# On affected platforms coerce the path through str(); identity elsewhere.
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
# Prefer setuptools' own _imp shims; on environments where they are not
# available, fall back to equivalents built on the deprecated stdlib
# ``imp`` module.  The fallback must mirror imp's semantics exactly.
try:
    from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
    from ._imp import get_frozen_object, get_module
except ImportError:
    import imp
    from imp import PY_COMPILED, PY_FROZEN, PY_SOURCE  # noqa
    def find_module(module, paths=None):
        """Just like 'imp.find_module()', but with package support"""
        parts = module.split('.')
        while parts:
            part = parts.pop(0)
            f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
            if kind == imp.PKG_DIRECTORY:
                # Descend into the package: resolve its __init__ next.
                parts = parts or ['__init__']
                paths = [path]
            elif parts:
                # A non-package was found but dotted parts remain.
                raise ImportError("Can't find %r in %s" % (parts, module))
        return info
    def get_frozen_object(module, paths):
        return imp.get_frozen_object(module)
    def get_module(module, paths, info):
        imp.load_module(module, *info)
        return sys.modules[module]
5644f490d806520d83f7019e5329274fc61f133c | 983 | py | Python | examples/image_helper.py | PNProductions/py-seam-carving | 373a0979201f5c1b9bb418c042e20e6692dd3bf3 | [
"MIT"
] | 3 | 2017-07-06T07:58:59.000Z | 2018-09-26T19:19:23.000Z | utils/image_helper.py | PNProductions/cloud-python-opencv | 572e5820b5d6bbd7ca2c7ddac8a21576a6d0c6c0 | [
"MIT"
] | 1 | 2018-09-26T22:12:48.000Z | 2018-09-26T22:12:48.000Z | utils/image_helper.py | PNProductions/cloud-python-opencv | 572e5820b5d6bbd7ca2c7ddac8a21576a6d0c6c0 | [
"MIT"
] | 2 | 2015-06-09T18:44:04.000Z | 2020-05-25T07:00:13.000Z | import cv2
import os.path
from numpy import clip
def local_path(path):
return os.path.dirname(__file__) + '/' + path
def image_open(filename, mode=None):
if os.path.isfile(filename):
if mode is None:
image = cv2.imread(filename)
else:
image = cv2.imread(filename, mode)
if image is None:
IOError('Unable to open image file: ' + filename)
else:
return image
else:
raise IOError('Image file not found at: ' + filename)
def image_save(image, name, path='./', extension='.bmp'):
    """Write *image* to ``path + name + extension`` using OpenCV."""
    destination = path + name + extension
    cv2.imwrite(destination, image)
def to_matlab_ycbcr(image):
    """Map full-range [0, 255] luma onto MATLAB's video range [16, 235].

    See http://stackoverflow.com/questions/26078281/why-luma-parameter-differs-in-opencv-and-matlab
    """
    video_scale = 219 / 255.0
    return clip(16 + video_scale * image, 0, 255)
def from_matlab_ycbcr(image):
    """Inverse of :func:`to_matlab_ycbcr`: map MATLAB's video range
    [16, 235] back to full range [0, 255].

    Bug fix: the previous code computed ``image * (255 / 219.0) - 16``,
    which is not the inverse of ``16 + (219 / 255.0) * image`` — it left a
    constant offset of about +2.6 on every value.  The correct inverse
    subtracts the 16 offset *before* rescaling.
    See http://stackoverflow.com/questions/26078281/why-luma-parameter-differs-in-opencv-and-matlab
    """
    return clip((image - 16) * (255 / 219.0), 0, 255)
7fd24e53eb039a3e017147e459910fdaca12dd90 | 470 | py | Python | src/learning/migrations/0038_tutorialcomment_body_customized_bleachfield.py | Alirezaja1384/MajazAmooz | 9200e46bed33aeb60d578a5c4c02013a8032cf08 | [
"MIT"
] | 3 | 2021-04-01T19:42:53.000Z | 2022-03-01T09:50:17.000Z | src/learning/migrations/0038_tutorialcomment_body_customized_bleachfield.py | Alirezaja1384/MajazAmooz | 9200e46bed33aeb60d578a5c4c02013a8032cf08 | [
"MIT"
] | null | null | null | src/learning/migrations/0038_tutorialcomment_body_customized_bleachfield.py | Alirezaja1384/MajazAmooz | 9200e46bed33aeb60d578a5c4c02013a8032cf08 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.3 on 2021-06-12 20:18
from django.db import migrations
import shared.models.fields
class Migration(migrations.Migration):
dependencies = [
('learning', '0037_tutorial_tutorialcomment_bleachfield'),
]
operations = [
migrations.AlterField(
model_name='tutorialcomment',
name='body',
field=shared.models.fields.BleachField(max_length=500, verbose_name='بدنه'),
),
]
| 23.5 | 88 | 0.651064 |
53d2c7afdfebb645b30d6157fa30040134ff1907 | 9,082 | py | Python | rally/plugins/openstack/context/glance/images.py | sergeygalkin/rally | 8b63ceb10dd36087403f6bbc62f768155ce96c22 | [
"Apache-2.0"
] | null | null | null | rally/plugins/openstack/context/glance/images.py | sergeygalkin/rally | 8b63ceb10dd36087403f6bbc62f768155ce96c22 | [
"Apache-2.0"
] | null | null | null | rally/plugins/openstack/context/glance/images.py | sergeygalkin/rally | 8b63ceb10dd36087403f6bbc62f768155ce96c22 | [
"Apache-2.0"
] | null | null | null | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from rally.common import logging
from rally.common import utils as rutils
from rally.common import validation
from rally import consts
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack import osclients
from rally.plugins.openstack.services.image import image
from rally.task import context
# Module-level oslo.config handle and logger.
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
@validation.add("required_platform", platform="openstack", users=True)
@context.configure(name="images", platform="openstack", order=410)
class ImageGenerator(context.Context):
    """Uploads specified Glance images to every tenant."""
    # JSON schema validating the context configuration.  The ``oneOf``
    # branches keep the pre-Rally-0.10.0 spellings (image_type,
    # image_container) accepted alongside the current disk_format /
    # container_format options.
    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "image_url": {
                "type": "string",
                "description": "Location of the source to create image from."
            },
            "disk_format": {
                "description": "The format of the disk.",
                "enum": ["qcow2", "raw", "vhd", "vmdk", "vdi", "iso", "aki",
                         "ari", "ami"]
            },
            "container_format": {
                "description": "Format of the image container.",
                "enum": ["aki", "ami", "ari", "bare", "docker", "ova", "ovf"]
            },
            "image_name": {
                "type": "string",
                "description": "The name of image to create. NOTE: it will be "
                               "ignored in case when `images_per_tenant` is "
                               "bigger then 1."
            },
            "min_ram": {
                "description": "Amount of RAM in MB",
                "type": "integer",
                "minimum": 0
            },
            "min_disk": {
                "description": "Amount of disk space in GB",
                "type": "integer",
                "minimum": 0
            },
            "visibility": {
                "description": "Visibility for this image ('shared' and "
                               "'community' are available only in case of "
                               "Glance V2).",
                "enum": ["public", "private", "shared", "community"]
            },
            "images_per_tenant": {
                "description": "The number of images to create per one single "
                               "tenant.",
                "type": "integer",
                "minimum": 1
            },
            "image_args": {
                "description": "This param is deprecated since Rally-0.10.0, "
                               "specify exact arguments in a root section of "
                               "context instead.",
                "type": "object",
                "additionalProperties": True
            },
            "image_container": {
                "description": "This param is deprecated since Rally-0.10.0, "
                               "use `container_format` instead.",
                "type": "string",
            },
            "image_type": {
                "description": "This param is deprecated since Rally-0.10.0, "
                               "use `disk_format` instead.",
                "enum": ["qcow2", "raw", "vhd", "vmdk", "vdi", "iso", "aki",
                         "ari", "ami"],
            },
        },
        "oneOf": [{"description": "It is been used since Rally 0.10.0",
                   "required": ["image_url", "disk_format",
                                "container_format"]},
                  {"description": "One of backward compatible way",
                   "required": ["image_url", "image_type",
                                "container_format"]},
                  {"description": "One of backward compatible way",
                   "required": ["image_url", "disk_format",
                                "image_container"]},
                  {"description": "One of backward compatible way",
                   "required": ["image_url", "image_type",
                                "image_container"]}],
        "additionalProperties": False
    }
    # Applied when the user omits the corresponding option.
    DEFAULT_CONFIG = {"images_per_tenant": 1}
    def setup(self):
        """Create the configured images in every tenant.

        Translates the deprecated pre-0.10.0 option spellings
        (image_type, image_container, image_args) into the current ones
        with a warning, then uploads ``images_per_tenant`` images per
        tenant and records their ids in ``self.context``.
        """
        image_url = self.config.get("image_url")
        disk_format = self.config.get("disk_format")
        container_format = self.config.get("container_format")
        images_per_tenant = self.config.get("images_per_tenant")
        visibility = self.config.get("visibility", "private")
        min_disk = self.config.get("min_disk", 0)
        min_ram = self.config.get("min_ram", 0)
        image_args = self.config.get("image_args", {})
        if "image_type" in self.config:
            LOG.warning("The 'image_type' argument is deprecated since "
                        "Rally 0.10.0, use disk_format argument instead")
            if not disk_format:
                disk_format = self.config["image_type"]
        if "image_container" in self.config:
            LOG.warning("The 'image_container' argument is deprecated since "
                        "Rally 0.10.0; use container_format argument instead")
            if not container_format:
                container_format = self.config["image_container"]
        if image_args:
            LOG.warning(
                "The 'image_args' argument is deprecated since Rally 0.10.0; "
                "specify arguments in a root section of context instead")
            # Explicit root-section options win over the deprecated dict.
            if "is_public" in image_args:
                if "visibility" not in self.config:
                    visibility = ("public" if image_args["is_public"]
                                  else "private")
            if "min_ram" in image_args:
                if "min_ram" not in self.config:
                    min_ram = image_args["min_ram"]
            if "min_disk" in image_args:
                if "min_disk" not in self.config:
                    min_disk = image_args["min_disk"]
        # None image_name means that image.Image will generate a random name
        image_name = None
        if "image_name" in self.config and images_per_tenant == 1:
            image_name = self.config["image_name"]
        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            current_images = []
            clients = osclients.Clients(
                user["credential"],
                api_info=self.context["config"].get("api_versions"))
            image_service = image.Image(
                clients, name_generator=self.generate_random_name)
            for i in range(images_per_tenant):
                image_obj = image_service.create_image(
                    image_name=image_name,
                    container_format=container_format,
                    image_location=image_url,
                    disk_format=disk_format,
                    visibility=visibility,
                    min_disk=min_disk,
                    min_ram=min_ram)
                current_images.append(image_obj.id)
            self.context["tenants"][tenant_id]["images"] = current_images
    def cleanup(self):
        """Delete the created images (and, when admin credentials are
        available, any Cinder image-volume cache entries)."""
        if self.context.get("admin", {}):
            # NOTE(andreykurilin): Glance does not require the admin for
            #   listing tenant images, but the admin is required for
            #   discovering Cinder volumes which might be created for the
            #   purpose of caching. Removing such volumes are optional step,
            #   since Cinder should have own mechanism like garbage collector,
            #   but if we can, let's remove everything and make the cloud as
            #   close as possible to the original state.
            admin = self.context["admin"]
            admin_required = None
        else:
            admin = None
            admin_required = False
        if "image_name" in self.config:
            # Match by the user-chosen name instead of the generated one.
            matcher = rutils.make_name_matcher(self.config["image_name"])
        else:
            matcher = self.__class__
        resource_manager.cleanup(names=["glance.images",
                                        "cinder.image_volumes_cache"],
                                 admin=admin,
                                 admin_required=admin_required,
                                 users=self.context.get("users", []),
                                 api_versions=self.context["config"].get(
                                     "api_versions"),
                                 superclass=matcher,
                                 task_id=self.get_owner_id())
f2d9f6c71d654a740cb403ecfba9c78f917f8fe5 | 4,873 | py | Python | src/app/aggregation/dag.py | odinlabs-io/odinlabsio-analytics-app-server | 8073edd13a5ac81ba6c3f189859073b2dfd9b6db | [
"MIT"
] | null | null | null | src/app/aggregation/dag.py | odinlabs-io/odinlabsio-analytics-app-server | 8073edd13a5ac81ba6c3f189859073b2dfd9b6db | [
"MIT"
] | null | null | null | src/app/aggregation/dag.py | odinlabs-io/odinlabsio-analytics-app-server | 8073edd13a5ac81ba6c3f189859073b2dfd9b6db | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2020 OdinLabs IO
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import abc
from collections import defaultdict
from typing import Type, Union, Any, List, Dict
class DAG:
    """Read-only view of a directed acyclic graph.

    The graph is supplied as two mappings: ``adj`` maps a node id to the
    list of its child ids, and ``attributes`` maps a node id to its
    attribute dict (expected to contain a 'ctx' entry).
    """

    def __init__(self, adj, attributes):
        self._adj = adj
        self._attributes = attributes

    def ctx(self, node_id: int):
        """Return the 'ctx' attribute of *node_id* (KeyError if absent)."""
        return self._attributes[node_id]['ctx']

    def children(self, node_id: int):
        """Return the child list of *node_id*, or None for unknown nodes."""
        return self._adj.get(node_id)

    def attr(self, node_id: int):
        """Return the attribute dict of *node_id*, or None for unknown nodes."""
        return self._attributes.get(node_id)

    def __repr__(self):
        return f"{self._adj}{self._attributes}"
class Solver(abc.ABC):
    """Abstract strategy for resolving the value bound to a variable."""

    @abc.abstractmethod
    def solve_for_variable(self, value: Union[Any, List[Any]]) -> Union[Any, List[Any]]:
        """Map *value* (a scalar or a list of values) to its resolved form."""


class IdentitySolver(Solver):
    """Default solver: hands every value back untouched."""

    def solve_for_variable(self, value: Union[Any, List[Any]]) -> Union[Any, List[Any]]:
        return value
class EvalAlternative(abc.ABC):
    """Base class for one alternative of an evaluation rule.

    Instances are created by AlternativeBuilder, which assigns three
    collaborators right after construction: ``solvers`` (mapping of
    variable name -> Solver), ``factory`` (the owning EvalNodeFactory)
    and ``next`` (the builder of the previously registered alternative
    for the same rule, or None).
    """
    def __init__(self):
        super().__init__()
        # _next stores a zero-argument builder (an AlternativeBuilder),
        # not an alternative instance -- see the `next` property below.
        self._next = None
        self._factory = None
        self._solvers = None
    @property
    def next(self):
        # Calling the stored builder constructs a FRESH alternative on
        # every read; returns None at the end of the chain.
        return self._next() if self._next else None
    @next.setter
    def next(self, value):
        self._next = value
    @next.deleter
    def next(self):
        del self._next
    @property
    def factory(self):
        return self._factory
    @factory.setter
    def factory(self, value):
        self._factory = value
    @factory.deleter
    def factory(self):
        del self._factory
    @property
    def solvers(self):
        return self._solvers
    @solvers.setter
    def solvers(self, value):
        self._solvers = value
    @solvers.deleter
    def solvers(self):
        del self._solvers
class AlternativeBuilder:
    """Callable that instantiates a fully wired EvalAlternative.

    Each call constructs a new ``cls()`` instance and injects the shared
    solvers mapping, the builder of the previously registered alternative
    (forming a chain), and the owning factory.
    """

    def __init__(self, cls: Type[EvalAlternative], solvers: Dict[str, Solver], alternative_builder, factory):
        self._cls = cls
        self._solvers = solvers
        self._previous_builder = alternative_builder
        self._factory = factory

    def __call__(self, *args, **kwargs):
        alternative = self._cls()
        alternative.solvers = self._solvers
        alternative.next = self._previous_builder
        alternative.factory = self._factory
        return alternative
class EvalNodeFactory:
    """Registry of rule evaluators and per-variable solvers.

    Evaluators registered later for the same rule take precedence; the
    earlier ones remain reachable through the alternative chain.
    """

    def __init__(self):
        self._rules = {}
        # Unknown variables fall back to a fresh IdentitySolver.
        self._solvers = defaultdict(IdentitySolver)

    def evaluator(self, rule: str):
        """Instantiate the most recently registered alternative for *rule*."""
        builder = self._rules.get(rule)
        if not builder:
            raise Exception('No evaluator defined for rule {}'.format(rule))
        return builder()

    def add_evaluator(self, rule: str, cls: Type[EvalAlternative]):
        """Register *cls* as the newest alternative for *rule*, chaining any
        previously registered builder behind it."""
        self._rules[rule] = AlternativeBuilder(
            cls, self._solvers, self._rules.get(rule), self)

    def add_solver(self, variable: str, solver: Solver):
        self._solvers[variable] = solver
class EvalNode(EvalAlternative):
    """An EvalAlternative bound to a (rule, ctx) pair.

    ``build`` implements a chain-of-responsibility: the alternative whose
    ``ctx`` matches the DAG node's 'ctx' attribute handles the node via
    ``parser``; otherwise the request is forwarded along the ``next``
    chain built by AlternativeBuilder.
    """
    def __init__(self, rule: str, ctx: str):
        super().__init__()
        self._rule = rule
        self._ctx = ctx
    @property
    def rule(self):
        return self._rule
    @property
    def ctx(self):
        return self._ctx
    @abc.abstractmethod
    def parser(self, current_node: int, dag: DAG):
        """Handle *current_node* of *dag*; implemented by subclasses."""
        pass
    @abc.abstractmethod
    def input_columns(self):
        # Columns this node consumes; subclasses override.
        return []
    @abc.abstractmethod
    def emit_columns(self):
        # Columns this node produces; subclasses override.
        return []
    def build(self, current_node: int, dag: DAG):
        """Dispatch *current_node* to the matching alternative in the chain."""
        ctx = dag.ctx(current_node)
        if self._ctx == ctx:
            return self.parser(current_node, dag)
        else:
            # Delegate to the previously registered alternative (a fresh
            # instance is constructed by the `next` property each time).
            next_in_rule = self.next
            if next_in_rule:
                return next_in_rule.build(current_node, dag)
            else:
                raise Exception('Reached end of rule {}. No Alternative found'.format(self._rule))
| 27.845714 | 109 | 0.657911 |
133046163a5353db6b04c902488a7aa258b43efc | 3,536 | py | Python | memory.py | inpg12345/Repozytorium | 7773f28455afc84c56e87914351eb134b188c610 | [
"Unlicense"
] | null | null | null | memory.py | inpg12345/Repozytorium | 7773f28455afc84c56e87914351eb134b188c610 | [
"Unlicense"
] | null | null | null | memory.py | inpg12345/Repozytorium | 7773f28455afc84c56e87914351eb134b188c610 | [
"Unlicense"
] | null | null | null | """
Memory, a game of remembering number pairs.
Copyright (c) 2014 Grant Jenks
http://www.grantjenks.com/
Exercises:
1. Change the image.
2. Change the background color to white and the foreground color to black.
3. Increase the number of tiles to an 8x8 grid.
"""
import sys, pygame, random, time
from pygame.locals import *
pygame.init()
# Initialize variables.
tile_size = 120
size = width, height = 480, 480
# NOTE(review): this 14-pt font is immediately overwritten by the 60-pt
# font below and is never used.
font = pygame.font.Font(None, 14)
clock = pygame.time.Clock()
screen = pygame.display.set_mode(size)
# The picture that is revealed tile-by-tile; must exist next to the script.
image = pygame.image.load('car.jpg')
font = pygame.font.Font(None, 60)
black, white = (0, 0, 0), (255, 255, 255)
# Create tiles.
# Each tile is a (screen rectangle, image fragment) pair covering one
# tile_size x tile_size cell of the grid.
tiles = []
for y_pos in range(0, height, tile_size):
    for x_pos in range(0, width, tile_size):
        area = pygame.Rect(x_pos, y_pos, tile_size, tile_size)
        tile = pygame.Surface((tile_size, tile_size))
        tile.blit(image, (0, 0), area)
        tiles.append((area, tile))
def draw_game():
    """Draw the game state: background, grid lines, revealed tiles, and
    the numbers of the currently selected / mismatched tiles."""
    # Clear the screen with a black background.
    pygame.draw.rect(screen, black, (0, 0, width, height))
    # Draw lines to create grid for tiles.  Floor division (//) keeps the
    # range() arguments integral: under Python 3 the original "/" yields
    # floats and range() raises TypeError.  (// gives the same result as
    # "/" did for these ints under Python 2.)
    for pos in range(1, width // tile_size):
        offset = pos * tile_size
        pygame.draw.line(screen, white, (offset, 0), (offset, height))
    for pos in range(1, height // tile_size):
        offset = pos * tile_size
        pygame.draw.line(screen, white, (0, offset), (width, offset))
    # Draw tiles correctly guessed.
    for pos, tile in enumerate(tiles):
        if guesses[pos]:
            screen.blit(tile[1], tile[0])
    def draw_number(value):
        """Draw the number at a particular tile."""
        surface = font.render(str(values[value]), True, white)
        screen.blit(surface, tiles[value][0].move(45, 45))
    if select is not None: draw_number(select)
    if match is not None: draw_number(match)
    pygame.display.flip()
def restart():
    """Reset the game with random values."""
    global values, guesses, select, match, pause
    select, match, pause = None, None, 0
    count = len(tiles)
    assert count % 2 == 0
    # Floor division: count is an int and Python 3's "/" would produce a
    # float, which range() rejects.  Each value appears exactly twice.
    values = list(range(count // 2)) * 2
    random.shuffle(values)
    guesses = [False] * count
    draw_game()
restart()
# Main event loop: blocks on events; left-clicks select tiles, 'r'
# restarts, 'q' quits.
while True:
    event = pygame.event.wait()
    pos = None
    if event.type == pygame.QUIT:
        pygame.quit()
        sys.exit()
    elif event.type == KEYDOWN:
        if event.key == K_r:
            restart()
        elif event.key == K_q:
            pygame.event.post(pygame.event.Event(QUIT))
    elif event.type == MOUSEBUTTONDOWN and event.button == 1:
        pos = event.pos
    # After a failed match the board stays visible for `pause` events;
    # a click ends the pause immediately, anything else counts it down.
    if pause > 0:
        if pos is None:
            if pause == 1:
                select, match, pause = None, None, 0
                draw_game()
            else:
                pause -= 1
            continue
        else:
            select, match, pause = None, None, 0
            draw_game()
    if pos is None: continue
    # Map the click position to a tile index.  Floor division (//) keeps
    # the indices integral under Python 3 (the original "/" would yield
    # floats and break list indexing), matching Python 2's int division.
    xxx = pos[0] // tile_size
    yyy = pos[1] // tile_size
    index = yyy * (width // tile_size) + xxx
    if guesses[index]:
        continue
    else:
        if select is None:
            select = index
        else:
            if select == index:
                continue
            else:
                if values[select] == values[index]:
                    guesses[select] = True
                    guesses[index] = True
                    select = None
                else:
                    # Not a match: reveal the mismatching pair briefly.
                    pause = 12 * 3
                    match = index
    draw_game()
| 26 | 74 | 0.575509 |
16407e1967d20298e96085552f55a1951ad16ce4 | 350 | py | Python | multilingual_project_blog/app_settings.py | bigee/django-multilingual-project-blog | c62d568ac42b3c7f711624bf7029ac56e7342b6a | [
"MIT"
] | null | null | null | multilingual_project_blog/app_settings.py | bigee/django-multilingual-project-blog | c62d568ac42b3c7f711624bf7029ac56e7342b6a | [
"MIT"
] | 8 | 2020-02-11T23:25:46.000Z | 2022-03-11T23:15:31.000Z | multilingual_project_blog/app_settings.py | bigee/django-multilingual-project-blog | c62d568ac42b3c7f711624bf7029ac56e7342b6a | [
"MIT"
] | null | null | null | """Settings for the ``multilingual_project_blog`` app."""
from django.conf import settings
ENVIRONMENT = getattr(settings, 'ENVIRONMENT', 'prod')
SITE_URL = getattr(settings, 'SITE_URL', 'http://www.example.com')
SITE_URL_NAME = getattr(settings, 'SITE_URL_NAME', 'www.example.com')
INFO_EMAIL = getattr(settings, 'INFO_EMAIL', 'info@example.com')
| 38.888889 | 69 | 0.748571 |
966a3f32a499908a269b8e6041c0f2449d543eee | 5,896 | py | Python | src/eeWishart.py | ethuee/earthengine | 9786109a98ed3f7fcad630e1dd69f63591183a37 | [
"MIT"
] | 126 | 2017-03-22T14:20:30.000Z | 2022-03-10T22:05:38.000Z | src/eeWishart.py | ethuee/earthengine | 9786109a98ed3f7fcad630e1dd69f63591183a37 | [
"MIT"
] | 5 | 2017-05-27T14:53:17.000Z | 2020-11-11T12:17:37.000Z | src/eeWishart.py | ethuee/earthengine | 9786109a98ed3f7fcad630e1dd69f63591183a37 | [
"MIT"
] | 57 | 2017-03-23T18:22:06.000Z | 2022-03-17T06:56:38.000Z | '''
Created on 09.01.2017
@author: mort
'''
import ee
from eeMad import chi2cdf
ENL = 4.4
def multbyenl(image):
return ee.Image(image).multiply(ENL)
def log_det_sum(imList,j):
'''return the log of the the determinant of the sum of the first j images in imList'''
imList = ee.List(imList)
nbands = ee.Image(imList.get(0)).bandNames().length()
sumj = ee.ImageCollection(imList.slice(0,j)).reduce(ee.Reducer.sum())
return ee.Algorithms.If( nbands.eq(2),
sumj.expression('b(0)*b(1)').log(),
sumj.log() )
def log_det(imList,j):
'''return the log of the the determinant of the jth image in imList'''
im = ee.Image(ee.List(imList).get(j.subtract(1)))
nbands = im.bandNames().length()
return ee.Algorithms.If(nbands.eq(2),
im.expression('b(0)*b(1)').log(),
im.log() )
def pv(imList,p,median,j):
''' calculate -2log(R_ell,j) and return P-value '''
imList = ee.List(imList)
p = ee.Number(p)
j = ee.Number(j)
f = p
one = ee.Number(1.0)
# 1 - (1. + 1./(j*(j-1)))/(6.*p*n)
rhoj = one.subtract(one.add(one.divide(j.multiply(j.subtract(one)))).divide(6*ENL))
# -(f/4.)*(1.-1./rhoj)**2'
omega2j = one.subtract(one.divide(rhoj)).pow(2.0).multiply(f.divide(-4.0))
Z = ee.Image(ee.Image(log_det_sum(imList,j.subtract(1)))).multiply(j.subtract(1)) \
.add(log_det(imList,j)) \
.add(p.multiply(j).multiply(ee.Number(j).log())) \
.subtract(p.multiply(j.subtract(1)).multiply(j.subtract(1).log())) \
.subtract(ee.Image(log_det_sum(imList,j)).multiply(j)) \
.multiply(rhoj) \
.multiply(-2*ENL)
# (1.-omega2j)*stats.chi2.cdf(Z,[f])+omega2j*stats.chi2.cdf(Z,[f+4])
P = ee.Image( chi2cdf(Z,f).multiply(one.subtract(omega2j)).add(chi2cdf(Z,f.add(4)).multiply(omega2j)) )
# 3x3 median filter
return ee.Algorithms.If(median, P.focal_median(), P)
def js_iter(current,prev):
    '''ee.List.iterate body: append the P-value for R_ell,j to the running list.'''
    j = ee.Number(current)
    prev = ee.Dictionary(prev)
    median = prev.get('median')
    p = prev.get('p')
    imList = prev.get('imList')
    pvs = ee.List(prev.get('pvs'))
    # Carry all state forward unchanged except the extended 'pvs' list.
    return ee.Dictionary({'median':median,'p':p,'imList':imList,'pvs':pvs.add(pv(imList,p,median,j))})
def ells_iter(current,prev):
    '''ee.List.iterate body: collect P-values for all tests starting at image ell.'''
    ell = ee.Number(current)
    prev = ee.Dictionary(prev)
    pv_arr = ee.List(prev.get('pv_arr'))
    k = ee.Number(prev.get('k'))
    median = prev.get('median')
    p = prev.get('p')
    imList = ee.List(prev.get('imList'))
    # Drop the first ell-1 images; tests are run on the remaining tail.
    imList_ell = imList.slice(ell.subtract(1))
    js = ee.List.sequence(2,k.subtract(ell).add(1))
    first = ee.Dictionary({'median':median,'p':p,'imList':imList_ell,'pvs':ee.List([])})
    # List of P-values for R_ell,j, j = 2...k-ell+1.
    pvs = ee.List(ee.Dictionary(js.iterate(js_iter,first)).get('pvs'))
    return ee.Dictionary({'k':k,'p':p,'median':median,'imList':imList,'pv_arr':pv_arr.add(pvs)})
def filter_j(current,prev):
    '''ee.List.iterate body: threshold one P-value image and update the maps.

    cmap: interval of the most recent change, smap: interval of the first
    change, fmap: change frequency count, bmap: per-interval change flags
    (one band per interval).
    '''
    P = ee.Image(current)
    prev = ee.Dictionary(prev)
    ell = ee.Number(prev.get('ell'))
    cmap = ee.Image(prev.get('cmap'))
    smap = ee.Image(prev.get('smap'))
    fmap = ee.Image(prev.get('fmap'))
    bmap = ee.Image(prev.get('bmap'))
    threshold = ee.Image(prev.get('threshold'))
    j = ee.Number(prev.get('j'))
    cmapj = cmap.multiply(0).add(ell.add(j).subtract(1))
    cmap1 = cmap.multiply(0).add(1)
    # Significant change at this interval AND no change already recorded
    # later in the sequence for this pixel.
    tst = P.gt(threshold).And(cmap.eq(ell.subtract(1)))
    cmap = cmap.where(tst,cmapj)
    fmap = fmap.where(tst,fmap.add(1))
    # smap records only first changes, i.e. those found when ell == 1.
    smap = ee.Algorithms.If(ell.eq(1),smap.where(tst,cmapj),smap)
    idx = ell.add(j).subtract(2)
    tmp = bmap.select(idx)
    bname = bmap.bandNames().get(idx)
    tmp = tmp.where(tst,cmap1)
    tmp = tmp.rename([bname])
    # Overwrite band idx of bmap with the updated flags.
    bmap = bmap.addBands(tmp,[bname],True)
    return ee.Dictionary({'ell':ell,'j':j.add(1),'threshold':threshold,'cmap':cmap,'smap':smap,'fmap':fmap,'bmap':bmap})
def filter_ell(current,prev):
    '''ee.List.iterate body: run filter_j over one row of the P-value array.'''
    pvs = ee.List(current)
    prev = ee.Dictionary(prev)
    ell = ee.Number(prev.get('ell'))
    threshold = ee.Image(prev.get('threshold'))
    cmap = prev.get('cmap')
    smap = prev.get('smap')
    fmap = prev.get('fmap')
    bmap = prev.get('bmap')
    # Reset j to 1 for every new ell before iterating over the row.
    first = ee.Dictionary({'ell':ell,'j':1, 'threshold':threshold,'cmap':cmap,'smap':smap,'fmap':fmap,'bmap':bmap})
    result = ee.Dictionary(ee.List(pvs).iterate(filter_j,first))
    return ee.Dictionary({'ell':ell.add(1),'threshold':threshold,'cmap':result.get('cmap'),
                          'smap':result.get('smap'),
                          'fmap':result.get('fmap'),
                          'bmap':result.get('bmap')})
def omnibus(imList,significance=0.0001,median=False):
    '''Return change maps for the sequential omnibus change algorithm.

    imList: list of co-registered SAR images; significance: P-value
    threshold; median: apply a 3x3 median filter to each P-value image.
    Returns an ee.Dictionary with cmap/smap/fmap/bmap change maps.
    '''
    imList = ee.List(imList).map(multbyenl)
    p = ee.Image(imList.get(0)).bandNames().length()
    k = imList.length()
    # Pre-calculate the P-value array: one row per starting image ell.
    ells = ee.List.sequence(1,k.subtract(1))
    first = ee.Dictionary({'k':k,'p':p,'median':median,'imList':imList,'pv_arr':ee.List([])})
    pv_arr = ee.List(ee.Dictionary(ells.iterate(ells_iter,first)).get('pv_arr'))
    # Filter p-values to generate cmap, smap, fmap and bmap (all start
    # as zero images shaped like band 0 of the first image).
    cmap = ee.Image(imList.get(0)).select(0).multiply(0.0)
    smap = ee.Image(imList.get(0)).select(0).multiply(0.0)
    fmap = ee.Image(imList.get(0)).select(0).multiply(0.0)
    bmap = ee.Image.constant(ee.List.repeat(0,k.subtract(1)))
    # A change is declared where P > 1 - significance.
    threshold = ee.Image.constant(1-significance)
    first = ee.Dictionary({'ell':1,'threshold':threshold,'cmap':cmap,'smap':smap,'fmap':fmap,'bmap':bmap})
    return ee.Dictionary(pv_arr.iterate(filter_ell,first))
if __name__ == '__main__':
pass | 42.724638 | 120 | 0.59481 |
ad60ffd0d96784e56d86923b32d96924821555f1 | 1,171 | py | Python | tests/test_asset.py | zallarak/py-stellar-base | e6f3caa3980dbdc88ec17eacdfe6a13a714fb8ad | [
"Apache-2.0"
] | null | null | null | tests/test_asset.py | zallarak/py-stellar-base | e6f3caa3980dbdc88ec17eacdfe6a13a714fb8ad | [
"Apache-2.0"
] | null | null | null | tests/test_asset.py | zallarak/py-stellar-base | e6f3caa3980dbdc88ec17eacdfe6a13a714fb8ad | [
"Apache-2.0"
] | null | null | null | from nose.tools import raises
from stellar_base.asset import Asset
from stellar_base.stellarxdr import Xdr
class TestAsset:
    """Tests for stellar_base.asset.Asset."""

    def __init__(self):
        # A valid Stellar account id used as the asset issuer in tests.
        self.source = 'GDJVFDG5OCW5PYWHB64MGTHGFF57DRRJEDUEFDEL2SLNIOONHYJWHA3Z'

    def test_native(self):
        native = Asset.native()
        assert 'XLM' == native.code
        # Identity check instead of "None == ..." (flake8 E711): the
        # native asset must have no issuer at all.
        assert native.issuer is None

    def test_is_native(self):
        native = Asset('XLM')
        cny = Asset('CNY', self.source)
        assert native.is_native()
        assert not cny.is_native()

    def test_to_xdr_object(self):
        cny = Asset('CNY', self.source)
        assert isinstance(cny.to_xdr_object(), Xdr.types.Asset)

    @raises(Exception)
    def test_too_long(self):
        # An over-long asset code must be rejected.
        Asset('123456789012TooLong', self.source)

    @raises(Exception)
    def test_no_issuer(self):
        # A non-native asset without an issuer must be rejected.
        Asset('beer', None)

    def test_xdr(self):
        # Known-good serialization of CNY issued by self.source.
        xdr = b'AAAAAUNOWQAAAAAA01KM3XCt1+LHD7jDTOYpe/HGKSDoQoyL1JbUOc0+E2M='
        cny = Asset('CNY', self.source)
        assert xdr == cny.xdr()

    def test_unxdr(self):
        # Round trip: serialize then parse back, expecting equality.
        cny = Asset('CNY', self.source)
        xdr = cny.xdr()
        cny_x = Asset.from_xdr(xdr)
        assert cny == cny_x
| 27.880952 | 80 | 0.645602 |
56d5b95c4c28b3f549aec3fc7fb6fd1538e0fa56 | 9,059 | py | Python | src/m2_functions.py | delemojw/02-ObjectsFunctionsAndMethods | 29504456a741feb27dfa23bd87f3ef44765db202 | [
"MIT"
] | null | null | null | src/m2_functions.py | delemojw/02-ObjectsFunctionsAndMethods | 29504456a741feb27dfa23bd87f3ef44765db202 | [
"MIT"
] | null | null | null | src/m2_functions.py | delemojw/02-ObjectsFunctionsAndMethods | 29504456a741feb27dfa23bd87f3ef44765db202 | [
"MIT"
] | null | null | null | """
Practice DEFINING and CALLING
FUNCTIONS
Authors: David Mutchler, Dave Fisher, Valerie Galluzzi, Amanda Stouder,
their colleagues and Jabari-Aman Delemore.
""" # Done: 1. PUT YOUR NAME IN THE ABOVE LINE.
###############################################################################
#
# DONE: 2.
# Allow this module to use the rosegraphics.py module by marking the
# src
# folder in this project as a "Sources Root", as follows:
#
# In the Project window (to the left), right click on the src folder,
# then select Mark Directory As ~ Sources Root.
#
###############################################################################
import rosegraphics as rg
import random
def main():
    """
    Makes a TurtleWindow,
    calls the other functions in this module to demo them, and
    waits for the user to click anywhere in the window to close it.
    """
    # A TurtleWindow works "behind the scenes" to enable Turtle movement
    window = rg.TurtleWindow()
    # The turtles draw in this exact order; turtle2 runs twice on purpose
    # (it picks a new random destination each time).
    turtle1()
    turtle4()
    turtle5()
    turtle3()
    turtle2()
    turtle2()
    window.close_on_mouse_click()
def turtle1():
    """Draws a yellow-filled circle of radius 150 using a square-shaped
    SimpleTurtle with a thick aquamarine pen."""
    artist = rg.SimpleTurtle('square')
    artist.speed = 10
    artist.pen = rg.Pen('aquamarine', 30)
    artist.paint_bucket = rg.PaintBucket('yellow')

    artist.begin_fill()
    artist.draw_circle(150)
    artist.end_fill()
def turtle2():
    """Sends a triangle-shaped SimpleTurtle to a random spot, draws a
    magenta-filled circle there, then returns it to its starting point."""
    wanderer = rg.SimpleTurtle('triangle')
    wanderer.speed = 7
    wanderer.pen = rg.Pen('blue', 15)
    wanderer.paint_bucket = rg.PaintBucket('magenta')

    # Remember the starting point, then pick a random destination.
    start = rg.Point(wanderer.x_cor(), wanderer.y_cor())
    destination = rg.Point(random.randrange(-500, 500),
                           random.randrange(-300, 0))

    wanderer.pen_up()
    wanderer.go_to(destination)
    wanderer.pen_down()

    # The shape: turn left, stride forward, draw a small filled circle.
    wanderer.left(90)
    wanderer.forward(200)
    wanderer.begin_fill()
    wanderer.draw_circle(25)
    wanderer.end_fill()

    # Walk back to where this function began its run.
    wanderer.go_to(start)
def turtle3():
    """Moves a default SimpleTurtle forward 300 units, then draws a
    filled circle with a thick black pen."""
    walker = rg.SimpleTurtle()
    walker.speed = 10
    walker.pen = rg.Pen('black', 10)

    walker.forward(300)
    walker.begin_fill()
    walker.draw_circle(50)
    walker.end_fill()
def turtle4():
    """Moves a default SimpleTurtle backward 300 units, then draws a
    filled circle of radius 60 with a cyan pen."""
    racer = rg.SimpleTurtle()
    racer.speed = 20
    racer.pen = rg.Pen('cyan', 10)

    racer.backward(300)
    racer.begin_fill()
    racer.draw_circle(60)
    racer.end_fill()
def turtle5():
    """Turns a default SimpleTurtle right, moves it 200 units, then draws
    a large filled circle with a thick purple pen."""
    painter = rg.SimpleTurtle()
    painter.speed = 15
    painter.pen = rg.Pen('purple', 25)

    painter.right(90)
    painter.forward(200)
    painter.begin_fill()
    painter.draw_circle(100)
    painter.end_fill()
###############################################################################
#
# DONE: 3.
# READ the code above. Be sure you understand:
# -- How many functions are defined above?
# (Answer: 4)
# -- For each function definition:
# -- Where does that function definition begin?
# Where does it end?
# -- How many times does main call the turtle1 function?
# (Answer: 1)
# -- How many times does main call the turtle2 function?
# (Hint: the answer is NOT 1.) Calls turtle2 twice
# -- What line of code calls the main function?
# (Answer: look at the LAST line of this module, far below.)
# line 237
# ** ASK QUESTIONS if you are uncertain about any of the answers. **
#
# RELATE what is DRAWN to the CODE above. Be sure you understand:
# -- WHEN does the code in main run? At the end of the code
# -- WHEN does the code in turtle1 run? First
# the code in turtle2 run? third and last
# the code in turtle3 run? second
# -- For each of the above, WHY does that code run when it does?
# Because the code runs in the order that they are called.
# ** ASK QUESTIONS if you are uncertain about any of the answers. **
#
# When you believe you understand the answers
# to all of the above questions, change the above TO-DO to DONE.
#
###############################################################################
###############################################################################
#
# DONE: 4.
# Define another function,
# immediately below the end of the definition of turtle3 above.
# Name your new function turtle4.
#
# The Python "pep8" coding standard says to leave exactly 2 blank
# lines between function definitions, so be sure to do so.
#
# Your new function should:
# 1. Define a SimpleTurtle.
# 2. Set your SimpleTurtle's
# pen
# to a new rg.Pen with a color and thickness of your own choosing.
# The COLORS.txt file in this project has a list of legal color-names.
# 3. Make your SimpleTurtle move around a bit.
#
# ----------------------------------------------------------------------
# ** IMPORTANT: **
# ** Nothing fancy is required. **
# ** Save fancy stuff for exercises later today. **
# ----------------------------------------------------------------------
#
# BTW, if you see a RED underline, that means that there is
# a SYNTAX (notation) error at that line or elsewhere.
# Get help as needed to fix any such errors.
#
###############################################################################
###############################################################################
#
# DONE: 5.
# Add a line to main that CALLS your new function immediately
# AFTER main calls turtle1. So:
# -- the SimpleTurtle from turtle1 should move,
# -- then YOUR SimpleTurtle should move,
# -- then the other 3 SimpleTurtles should move.
#
# Run this module. Check that there is another SimpleTurtle (yours)
# that uses the pen you chose and moves around a bit.
# If your code has errors (shows RED in the Console window)
# or does not do what it should, get help as needed to fix it.
#
###############################################################################
###############################################################################
#
# DONE: 6.
# The previous two TODOs IMPLEMENTED a function (TO-DO 4)
# and TESTED that function (TO-DO 5).
#
# Now implement AND test one more function, defining it immediately
# below the definition of your turtle4 function.
# Name your new function turtle5.
#
# The Python "pep8" coding standard says to leave exactly 2 blank
# lines between function definitions, so be sure to do so.
#
# Your new function should define TWO new SimpleTurtles,
# set their characteristics (i.e., instance variables) as you choose,
# and make each move a bit.
#
# ----------------------------------------------------------------------
# ** IMPORTANT: **
# ** Nothing fancy is required. **
# ** Save fancy stuff for exercises later today. **
# ----------------------------------------------------------------------
#
# Get help as needed on this (and every!) exercise!
#
###############################################################################
###############################################################################
#
# DONE: 7.
# COMMIT-and-PUSH your work (after changing this TO-DO to DONE).
#
# As a reminder, here is how you should do so:
# 1. Select VCS from the menu bar (above).
# 2. Choose Commit from the pull-down menu that appears.
# 3. In the Commit Changes window that pops up:
# - HOVER over the Commit button
# (in the lower-right corner of the window)
# - CLICK on Commit and Push.
# - (if asked again to Push, select Push)
#
# COMMIT adds the changed work to the version control on your computer
# and PUSH adds the changed work into your Github repository in the "cloud".
#
# COMMIT-and-PUSH your work as often as you want, but for sure
# after you have tested the module and believe that it is correct.
#
###############################################################################
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# -----------------------------------------------------------------------------
main()
| 33.305147 | 79 | 0.540347 |
10e06640682ecf71e567c60891b929e7e79fdef3 | 447 | py | Python | osrc/config.py | rackerlabs/osrc | a0e4b758e9430ed07490e53f99550efab0dfcb41 | [
"MIT"
] | 1 | 2021-01-05T16:51:12.000Z | 2021-01-05T16:51:12.000Z | osrc/config.py | rackerlabs/osrc | a0e4b758e9430ed07490e53f99550efab0dfcb41 | [
"MIT"
] | null | null | null | osrc/config.py | rackerlabs/osrc | a0e4b758e9430ed07490e53f99550efab0dfcb41 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (division, print_function, absolute_import,
unicode_literals)
import os
DEBUG = False
TESTING = False
SECRET_KEY = os.environ["SECRET_KEY"]
LOG_FILENAME = os.environ["LOG_FILENAME"]
# GitHub API.
GITHUB_ID = os.environ["GITHUB_ID"]
GITHUB_SECRET = os.environ["GITHUB_SECRET"]
# Redis setup.
REDIS_PORT = int(os.environ.get("OSRC_REDIS_PORT", 6380))
| 22.35 | 66 | 0.704698 |
6d1d2e78b5be12203e883f0ced5c9704689d1e4d | 4,245 | py | Python | numba/tests/bytecode/test_indexing.py | glycerine/numba | 4cb9e4f0b3cabd9e6a144fa4f3f7e5d6bee45635 | [
"BSD-2-Clause"
] | 1 | 2019-04-17T10:03:53.000Z | 2019-04-17T10:03:53.000Z | numba/tests/bytecode/test_indexing.py | glycerine/numba | 4cb9e4f0b3cabd9e6a144fa4f3f7e5d6bee45635 | [
"BSD-2-Clause"
] | null | null | null | numba/tests/bytecode/test_indexing.py | glycerine/numba | 4cb9e4f0b3cabd9e6a144fa4f3f7e5d6bee45635 | [
"BSD-2-Clause"
] | null | null | null | #! /usr/bin/env python
# ______________________________________________________________________
'''test_indexing
Unit tests for checking Numba's indexing into Numpy arrays.
'''
# ______________________________________________________________________
from numba import double, int_
from numba.decorators import jit
import numpy
import unittest
# ______________________________________________________________________
def get_index_fn_0(inarr):
    """Return the element of a 3-D array at index (1, 2, 3)."""
    return inarr[1, 2, 3]
def set_index_fn_0(ioarr):
    """Zero out the element at index (1, 2, 3) of a 3-D array, in place."""
    ioarr[1, 2, 3] = 0.0
def set_index_fn_1(min_x, max_x, min_y, out_arr):
    """Fill a (width, height, 2) array with a uniform coordinate grid.

    Cell (x, y) receives (x*delta + min_x, y*delta + min_y) where
    delta = (max_x - min_x) / width -- a simplified version of the
    Mandelbrot driver's coordinate setup.
    """
    grid_w = out_arr.shape[0]
    grid_h = out_arr.shape[1]
    step = (max_x - min_x) / grid_w
    for col in range(grid_w):
        coord_x = col * step + min_x
        for row in range(grid_h):
            out_arr[col, row, 0] = coord_x
            out_arr[col, row, 1] = row * step + min_y
def set_index_fn_2(arr):
    """Fill a 2-D array in place so that cell (x, y) holds x*width + y,
    where width is the length of the first axis."""
    rows = arr.shape[0]
    cols = arr.shape[1]
    for r in range(rows):
        base = r * rows
        for c in range(cols):
            arr[r, c] = base + c
def get_shape_fn_0(arr):
    """Return the length of the first axis of *arr*."""
    return arr.shape[0]
def get_shape_fn_1(arr):
    """Return the length of the second axis of *arr*."""
    return arr.shape[1]
def get_shape_fn_2(arr):
    """Return the length of the third axis of *arr*."""
    return arr.shape[2]
# ______________________________________________________________________
class TestIndexing (unittest.TestCase):
    """Checks that numba-compiled versions of the module-level functions
    behave like their pure-Python/NumPy originals."""

    def test_get_index_fn_0 (self):
        arr = numpy.ones((4,4,4), dtype=numpy.double)
        arr[1,2,3] = 0.
        # double[:, :, ::1] declares a C-contiguous 3-D double array.
        compiled_fn = jit(restype=double,
                          argtypes=[double[:, :, ::1]],
                          backend='bytecode')(get_index_fn_0)
        self.assertEqual(compiled_fn(arr), 0.)

    def test_set_index_fn_0 (self):
        arr = numpy.ones((4,4,4))
        compiled_fn = jit(argtypes=[double[:,:,::1]],
                          backend='bytecode')(set_index_fn_0)
        self.assertEqual(arr[1,2,3], 1.)
        compiled_fn(arr)
        self.assertEqual(arr[1,2,3], 0.)

    def test_set_index_fn_1 (self):
        control_arr = numpy.zeros((50, 50, 2), dtype=numpy.double)
        test_arr = numpy.zeros_like(control_arr)
        set_index_fn_1(-1., 1., -1., control_arr)
        argtypes = double, double, double, double[:,:,:]
        compiled_fn = jit(argtypes=argtypes,
                          backend='bytecode')(set_index_fn_1)
        compiled_fn(-1., 1., -1., test_arr)
        # Fixed: the tolerance was 1e9, which accepted virtually any
        # difference and made the assertion vacuous; 1e-9 checks that the
        # compiled and interpreted results actually agree.
        self.assertTrue((numpy.abs(control_arr - test_arr) < 1e-9).all())

    def test_get_shape_fn_0(self):
        arr = numpy.zeros((5,6,7), dtype=numpy.double)
        compiled_fn = jit(restype=int_,
                          argtypes=[double[:, :, ::1]],
                          backend='bytecode')(get_shape_fn_0)
        self.assertEqual(compiled_fn(arr), 5)

    def test_get_shape_fn_1(self):
        arr = numpy.zeros((5,6,7), dtype=numpy.double)
        compiled_fn = jit(restype=int_,
                          argtypes=[double[:, :, ::1]],
                          backend='bytecode')(get_shape_fn_1)
        self.assertEqual(compiled_fn(arr), 6)

    def test_get_shape_fn_2(self):
        arr = numpy.zeros((5,6,7), dtype=numpy.double)
        compiled_fn = jit(restype=int_,
                          argtypes=[double[:, :, ::1]],
                          backend='bytecode')(get_shape_fn_2)
        self.assertEqual(compiled_fn(arr), 7)

    def test_set_index_fn_2 (self):
        control_arr = numpy.zeros((10, 10), dtype=numpy.double)
        test_arr = numpy.zeros_like(control_arr)
        set_index_fn_2(control_arr)
        argtypes = double[:, :],
        compiled_fn = jit(argtypes=argtypes,
                          backend='bytecode')(set_index_fn_2)
        compiled_fn(test_arr)
        # Fixed vacuous tolerance (was 1e9; see test_set_index_fn_1).
        self.assertTrue((numpy.abs(control_arr - test_arr) < 1e-9).all())
# ______________________________________________________________________
if __name__ == "__main__":
unittest.main()
# ______________________________________________________________________
# End of test_indexing.py
| 32.40458 | 72 | 0.626855 |
3e0f8383c0f8de19969156a627ce4ecba53a2169 | 2,370 | py | Python | src/openweather_api.py | drapaiton/mini_proyecto_next-e | 632877d7c0513e1ff954818373cb6c0f97fa07a2 | [
"MIT"
] | null | null | null | src/openweather_api.py | drapaiton/mini_proyecto_next-e | 632877d7c0513e1ff954818373cb6c0f97fa07a2 | [
"MIT"
] | null | null | null | src/openweather_api.py | drapaiton/mini_proyecto_next-e | 632877d7c0513e1ff954818373cb6c0f97fa07a2 | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing import List
import requests
from requests import Response
from config import DATE_FORMAT, APPID
from models import OpenWeatherInsight
class BaseApi:
    """Minimal OpenWeather HTTP client base class.

    Class attributes act as defaults; the constructor lets callers
    override any of them (e.g. BASE_URL, APPID) via keyword arguments.
    """
    API_DATE_FORMAT = DATE_FORMAT
    BASE_URL = "https://api.openweathermap.org/"
    APPID = APPID

    def __init__(self, **kwargs):
        # Arbitrary attribute overrides for subclass/instance configuration.
        for k, v in kwargs.items():
            setattr(self, k, v)

    def _request(self, endpoint, method="GET", **kwargs) -> Response:
        """Issue an HTTP request against BASE_URL + endpoint.

        The APPID query parameter is always sent; caller-supplied
        ``params`` are merged on top of it.  Remaining keyword arguments
        (headers, timeout, ...) are forwarded to ``requests.request``.
        """
        # Join base and endpoint with exactly one slash.  The previous
        # code chopped the LAST CHARACTER off a base URL that did not end
        # with "/", producing a broken URL; append the slash instead.
        base = self.BASE_URL if self.BASE_URL.endswith("/") else self.BASE_URL + "/"
        path = endpoint[1:] if endpoint.startswith("/") else endpoint
        params = {"APPID": self.APPID}
        if 'params' in kwargs:
            params.update(kwargs.pop('params'))
        final_kwargs = {"url": base + path, "method": method, "params": params}
        # Forward any remaining keyword arguments instead of silently
        # discarding them (mirrors OneCallAPI._request's behaviour).
        final_kwargs.update(kwargs)
        return requests.request(**final_kwargs)
class OneCallAPI(BaseApi):
    """Client for OpenWeather's One Call endpoint, fetching hourly data.

    The current/minutely/daily/alerts response sections are excluded via
    EXCLUDE, so responses are expected to contain just the "hourly" list.
    """
    DATA_ONE_CALL_ENDPOINT = "/data/2.5/onecall"
    # Response sections suppressed server-side.
    EXCLUDE = "current,minutely,daily,alerts"
    UNITS = "metric"
    LANG = "es"
    def __init__(self, latitude: float, longitude: float, **kwargs):
        # Coordinates of the forecast location; extra kwargs configure BaseApi.
        self.latitude, self.longitude = latitude, longitude
        super().__init__(**kwargs)
    def _request(self, method="GET", **kwargs):
        """Call the One Call endpoint, merging class defaults with kwargs."""
        PARAMS = dict(exclude=self.EXCLUDE, units=self.UNITS, lang=self.LANG)
        _ENDPOINT = OneCallAPI.DATA_ONE_CALL_ENDPOINT
        final_kwargs = dict(method=method, endpoint=_ENDPOINT, params=PARAMS)
        # Use the class-variable defaults only for keys the caller did not
        # supply; caller-provided params win on conflicts.
        if 'params' in kwargs:
            final_kwargs['params'].update(kwargs.pop('params'))
        final_kwargs.update(**kwargs)
        return super()._request(**final_kwargs)
    def extract_next_48_hours(self, output_limit=6) -> List[OpenWeatherInsight]:
        """Return up to *output_limit* hourly forecasts as insights.

        A falsy *output_limit* (0/None) disables the cap.  Raises
        requests.HTTPError for non-2xx responses.
        """
        response = self._request(params=dict(lat=self.latitude, lon=self.longitude))
        response.raise_for_status()
        output: List[OpenWeatherInsight] = []
        for index, hour in enumerate(response.json()["hourly"]):
            # "weather" arrives as a list; unwrap its last element (an
            # empty list is passed through unchanged by the `and`).
            weather: List[dict] = hour.pop("weather")
            weather: dict = weather and weather.pop()
            output += [OpenWeatherInsight(**hour, weather=weather)]
            if output_limit and (index + 1 >= output_limit):
                break
        return output
| 33.857143 | 88 | 0.652321 |
b0046a04818e52d8d1ac3e3469730791a452785c | 6,315 | py | Python | music_assistant/models/enums.py | music-assistant/music-assistant | 7b4fd73b1281f74d61e29c23093d048a9acf541f | [
"Apache-2.0"
] | null | null | null | music_assistant/models/enums.py | music-assistant/music-assistant | 7b4fd73b1281f74d61e29c23093d048a9acf541f | [
"Apache-2.0"
] | null | null | null | music_assistant/models/enums.py | music-assistant/music-assistant | 7b4fd73b1281f74d61e29c23093d048a9acf541f | [
"Apache-2.0"
] | null | null | null | """All enums used by the Music Assistant models."""
from enum import Enum, IntEnum
class MediaType(Enum):
"""Enum for MediaType."""
ARTIST = "artist"
ALBUM = "album"
TRACK = "track"
PLAYLIST = "playlist"
RADIO = "radio"
UNKNOWN = "unknown"
class MediaQuality(IntEnum):
"""Enum for Media Quality."""
UNKNOWN = 0
LOSSY_MP3 = 1
LOSSY_OGG = 2
LOSSY_AAC = 3
FLAC_LOSSLESS = 10 # 44.1/48khz 16 bits
FLAC_LOSSLESS_HI_RES_1 = 20 # 44.1/48khz 24 bits HI-RES
FLAC_LOSSLESS_HI_RES_2 = 21 # 88.2/96khz 24 bits HI-RES
FLAC_LOSSLESS_HI_RES_3 = 22 # 176/192khz 24 bits HI-RES
FLAC_LOSSLESS_HI_RES_4 = 23 # above 192khz 24 bits HI-RES
class LinkType(Enum):
"""Enum wth link types."""
WEBSITE = "website"
FACEBOOK = "facebook"
TWITTER = "twitter"
LASTFM = "lastfm"
YOUTUBE = "youtube"
INSTAGRAM = "instagram"
SNAPCHAT = "snapchat"
TIKTOK = "tiktok"
DISCOGS = "discogs"
WIKIPEDIA = "wikipedia"
ALLMUSIC = "allmusic"
class ImageType(Enum):
"""Enum wth image types."""
THUMB = "thumb"
LANDSCAPE = "landscape"
FANART = "fanart"
LOGO = "logo"
CLEARART = "clearart"
BANNER = "banner"
CUTOUT = "cutout"
BACK = "back"
DISCART = "discart"
OTHER = "other"
class AlbumType(Enum):
"""Enum for Album type."""
ALBUM = "album"
SINGLE = "single"
COMPILATION = "compilation"
UNKNOWN = "unknown"
class StreamType(Enum):
"""Enum with stream types."""
EXECUTABLE = "executable"
URL = "url"
FILE = "file"
CACHE = "cache"
class ContentType(Enum):
"""Enum with audio content/container types supported by ffmpeg."""
OGG = "ogg"
FLAC = "flac"
MP3 = "mp3"
AAC = "aac"
MPEG = "mpeg"
ALAC = "alac"
WAVE = "wave"
AIFF = "aiff"
WMA = "wma"
M4A = "m4a"
PCM_S16LE = "s16le" # PCM signed 16-bit little-endian
PCM_S24LE = "s24le" # PCM signed 24-bit little-endian
PCM_S32LE = "s32le" # PCM signed 32-bit little-endian
PCM_F32LE = "f32le" # PCM 32-bit floating-point little-endian
PCM_F64LE = "f64le" # PCM 64-bit floating-point little-endian
@classmethod
def try_parse(
cls: "ContentType", string: str, fallback: str = "mp3"
) -> "ContentType":
"""Try to parse ContentType from (url)string."""
tempstr = string.lower()
if "." in tempstr:
tempstr = tempstr.split(".")[-1]
tempstr = tempstr.split("?")[0]
tempstr = tempstr.split("&")[0]
try:
return cls(tempstr)
except ValueError:
return cls(fallback)
def is_pcm(self):
"""Return if contentype is PCM."""
return self.name.startswith("PCM")
def sox_supported(self):
"""Return if ContentType is supported by SoX."""
return self.is_pcm() or self in [
ContentType.OGG,
ContentType.FLAC,
ContentType.MP3,
ContentType.WAVE,
ContentType.AIFF,
]
def sox_format(self):
"""Convert the ContentType to SoX compatible format."""
if not self.sox_supported():
raise NotImplementedError
return self.value.replace("le", "")
@classmethod
def from_bit_depth(
cls, bit_depth: int, floating_point: bool = False
) -> "ContentType":
"""Return (PCM) Contenttype from PCM bit depth."""
if floating_point and bit_depth > 32:
return cls.PCM_F64LE
if floating_point:
return cls.PCM_F32LE
if bit_depth == 16:
return cls.PCM_S16LE
if bit_depth == 24:
return cls.PCM_S24LE
return cls.PCM_S32LE
class QueueOption(Enum):
"""Enum representation of the queue (play) options."""
PLAY = "play"
REPLACE = "replace"
NEXT = "next"
ADD = "add"
class CrossFadeMode(Enum):
"""Enum with crossfade modes."""
DISABLED = "disabled" # no crossfading at all
STRICT = "strict" # do not crossfade tracks of same album
SMART = "smart" # crossfade if possible (do not crossfade different sample rates)
ALWAYS = "always" # all tracks - resample to fixed sample rate
class RepeatMode(Enum):
"""Enum with repeat modes."""
OFF = "off" # no repeat at all
ONE = "one" # repeat one/single track
ALL = "all" # repeat entire queue
class PlayerState(Enum):
"""Enum for the (playback)state of a player."""
IDLE = "idle"
PAUSED = "paused"
PLAYING = "playing"
OFF = "off"
class EventType(Enum):
"""Enum with possible Events."""
PLAYER_ADDED = "player_added"
PLAYER_UPDATED = "player_updated"
STREAM_STARTED = "streaming_started"
STREAM_ENDED = "streaming_ended"
QUEUE_ADDED = "queue_added"
QUEUE_UPDATED = "queue_updated"
QUEUE_ITEMS_UPDATED = "queue_items_updated"
QUEUE_TIME_UPDATED = "queue_time_updated"
SHUTDOWN = "application_shutdown"
MEDIA_ITEM_ADDED = "media_item_added"
MEDIA_ITEM_UPDATED = "media_item_updated"
BACKGROUND_JOB_UPDATED = "background_job_updated"
class JobStatus(Enum):
"""Enum with Job status."""
PENDING = "pending"
RUNNING = "running"
CANCELLED = "cancelled"
FINISHED = "success"
ERROR = "error"
class ProviderType(Enum):
"""Enum with supported music providers."""
FILESYSTEM_LOCAL = "file"
FILESYSTEM_SMB = "smb"
FILESYSTEM_GOOGLE_DRIVE = "gdrive"
FILESYSTEM_ONEDRIVE = "onedrive"
SPOTIFY = "spotify"
QOBUZ = "qobuz"
TUNEIN = "tunein"
DATABASE = "database" # internal only
URL = "url" # internal only
def is_file(self) -> bool:
"""Return if type is one of the filesystem providers."""
return self in (
self.FILESYSTEM_LOCAL,
self.FILESYSTEM_SMB,
self.FILESYSTEM_GOOGLE_DRIVE,
self.FILESYSTEM_ONEDRIVE,
)
@classmethod
def parse(cls: "ProviderType", val: str) -> "ProviderType":
"""Try to parse ContentType from provider id."""
if isinstance(val, ProviderType):
return val
for mem in ProviderType:
if val.startswith(mem.value):
return mem
raise ValueError(f"Unable to parse ProviderType from {val}")
| 25.987654 | 86 | 0.609501 |
2df7a56831e4ecc9edbae2845eecb5f61a7474e4 | 111 | py | Python | tudatpy/_version.py | gitter-badger/tudatpy | f5faef4ecfe8667cb9c989dd839185eeed5f9662 | [
"BSD-3-Clause"
] | null | null | null | tudatpy/_version.py | gitter-badger/tudatpy | f5faef4ecfe8667cb9c989dd839185eeed5f9662 | [
"BSD-3-Clause"
] | null | null | null | tudatpy/_version.py | gitter-badger/tudatpy | f5faef4ecfe8667cb9c989dd839185eeed5f9662 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2020 tudatpy development team
#
# This file is part of the tudatpy library.
#
__version__ = "dev"
| 15.857143 | 43 | 0.738739 |
b28b9ae63961548a629a5466519186d022fad871 | 2,160 | py | Python | lisrd/models/backbones/vgg.py | liuyuzhenn/LISRD | bfd890b81defebea971db0b744be617ed58f5ffa | [
"MIT"
] | 225 | 2020-07-20T10:15:41.000Z | 2022-03-04T15:07:26.000Z | lisrd/models/backbones/vgg.py | liuyuzhenn/LISRD | bfd890b81defebea971db0b744be617ed58f5ffa | [
"MIT"
] | 15 | 2020-07-25T02:54:38.000Z | 2022-03-12T13:39:19.000Z | lisrd/models/backbones/vgg.py | liuyuzhenn/LISRD | bfd890b81defebea971db0b744be617ed58f5ffa | [
"MIT"
] | 21 | 2020-07-23T00:33:04.000Z | 2022-03-26T12:48:57.000Z | import torch
class VGGLikeModule(torch.nn.Module):
def __init__(self):
super().__init__()
self._relu = torch.nn.ReLU(inplace=True)
self._pool = torch.nn.AvgPool2d(kernel_size=2, stride=2)
self._conv1_1 = torch.nn.Conv2d(3, 64, kernel_size=3,
stride=1, padding=1)
self._bn1_1 = torch.nn.BatchNorm2d(64)
self._conv1_2 = torch.nn.Conv2d(64, 64, kernel_size=3,
stride=1, padding=1)
self._bn1_2 = torch.nn.BatchNorm2d(64)
self._conv2_1 = torch.nn.Conv2d(64, 64, kernel_size=3,
stride=1, padding=1)
self._bn2_1 = torch.nn.BatchNorm2d(64)
self._conv2_2 = torch.nn.Conv2d(64, 64, kernel_size=3,
stride=1, padding=1)
self._bn2_2 = torch.nn.BatchNorm2d(64)
self._conv3_1 = torch.nn.Conv2d(64, 128, kernel_size=3,
stride=1, padding=1)
self._bn3_1 = torch.nn.BatchNorm2d(128)
self._conv3_2 = torch.nn.Conv2d(128, 128, kernel_size=3,
stride=1, padding=1)
self._bn3_2 = torch.nn.BatchNorm2d(128)
self._conv4_1 = torch.nn.Conv2d(128, 256, kernel_size=3,
stride=1, padding=1)
self._bn4_1 = torch.nn.BatchNorm2d(256)
self._conv4_2 = torch.nn.Conv2d(256, 256, kernel_size=3,
stride=1, padding=1)
self._bn4_2 = torch.nn.BatchNorm2d(256)
def forward(self, inputs):
x = self._bn1_1(self._relu(self._conv1_1(inputs)))
x = self._bn1_2(self._relu(self._conv1_2(x)))
x = self._pool(x)
x = self._bn2_1(self._relu(self._conv2_1(x)))
x = self._bn2_2(self._relu(self._conv2_2(x)))
x = self._pool(x)
x = self._bn3_1(self._relu(self._conv3_1(x)))
x = self._bn3_2(self._relu(self._conv3_2(x)))
x = self._pool(x)
x = self._bn4_1(self._relu(self._conv4_1(x)))
x = self._bn4_2(self._relu(self._conv4_2(x)))
return x
| 45.957447 | 64 | 0.542593 |
4c7fcaa12edb15b940ac47d76dc78726d7c9cd2a | 26,981 | py | Python | scripts/init.py | wivw0306/bk-cmdb | 330bb6e6a5aa40987a26562a740bb8cb37ac1f66 | [
"Apache-2.0"
] | 4,695 | 2016-12-16T02:02:30.000Z | 2022-03-30T03:35:03.000Z | scripts/init.py | 941103git/bk-cmdb | 2915b5512e1444bfa5daa46101e702499189526e | [
"Apache-2.0"
] | 2,318 | 2016-12-17T01:38:52.000Z | 2022-03-31T07:46:14.000Z | scripts/init.py | 941103git/bk-cmdb | 2915b5512e1444bfa5daa46101e702499189526e | [
"Apache-2.0"
] | 1,620 | 2016-12-16T02:13:44.000Z | 2022-03-30T12:17:42.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import getopt
import os
import shutil
from string import Template
class FileTemplate(Template):
delimiter = '$'
def generate_config_file(
rd_server_v, db_name_v, redis_ip_v, redis_port_v,
redis_pass_v, sentinel_pass_v, mongo_ip_v, mongo_port_v, mongo_user_v, mongo_pass_v, rs_name, user_info,
cc_url_v, paas_url_v, full_text_search, es_url_v, es_user_v, es_pass_v, auth_address, auth_app_code,
auth_app_secret, auth_enabled, auth_scheme, auth_sync_workers, auth_sync_interval_minutes, log_level, register_ip,
enable_cryptor_v, secret_key_url_v, secrets_addrs_v, secrets_token_v, secrets_project_v, secrets_env_v
):
output = os.getcwd() + "/cmdb_adminserver/configures/"
context = dict(
db=db_name_v,
mongo_user=mongo_user_v,
mongo_host=mongo_ip_v,
mongo_pass=mongo_pass_v,
mongo_port=mongo_port_v,
redis_host=redis_ip_v,
redis_pass=redis_pass_v,
sentinel_pass=sentinel_pass_v,
redis_port=redis_port_v,
cc_url=cc_url_v,
paas_url=paas_url_v,
es_url=es_url_v,
es_user=es_user_v,
es_pass=es_pass_v,
ui_root="../web",
agent_url=paas_url_v,
configures_dir=output,
rd_server=rd_server_v,
auth_address=auth_address,
auth_app_code=auth_app_code,
auth_app_secret=auth_app_secret,
auth_enabled=auth_enabled,
auth_scheme=auth_scheme,
auth_sync_workers=auth_sync_workers,
auth_sync_interval_minutes=auth_sync_interval_minutes,
full_text_search=full_text_search,
rs_name=rs_name,
user_info=user_info,
enable_cryptor = enable_cryptor_v,
secret_key_url = secret_key_url_v,
secrets_addrs = secrets_addrs_v,
secrets_token = secrets_token_v,
secrets_project = secrets_project_v,
secrets_env = secrets_env_v,
)
if not os.path.exists(output):
os.mkdir(output)
#redis.yaml
redis_file_template_str = '''
#redis:
# host: 127.0.0.1:6379
# pwd: "123456"
# database: "0"
# maxOpenConns: 3000
# maxIDleConns: 1000
# snap:
# host: 127.0.0.1:6379
# pwd: 123456
# database: "0"
# discover:
# host: 127.0.0.1:6379
# pwd: 123456
# database: "0"
# netcollect:
# host: 127.0.0.1:6379
# pwd: 123456
# database: "0"
redis:
#公共redis配置信息,用于存取缓存,用户信息等数据
host: $redis_host:$redis_port
pwd: "$redis_pass"
sentinelPwd: "$sentinel_pass"
database: "0"
maxOpenConns: 3000
maxIDleConns: 1000
#以下几个redis配置为datacollection模块所需的配置,用于接收第三方提供的数据
#接收主机信息数据的redis
snap:
host: $redis_host:$redis_port
pwd: "$redis_pass"
sentinelPwd: "$sentinel_pass"
database: "0"
#接收模型实例数据的redis
discover:
host: $redis_host:$redis_port
pwd: "$redis_pass"
sentinelPwd: "$sentinel_pass"
database: "0"
#接受硬件数据的redis
netcollect:
host: $redis_host:$redis_port
pwd: "$redis_pass"
sentinelPwd: "$sentinel_pass"
database: "0"
'''
template = FileTemplate(redis_file_template_str)
result = template.substitute(**context)
with open(output + "redis.yaml", 'w') as tmp_file:
tmp_file.write(result)
# mongodb.yaml
mongodb_file_template_str = '''
#mongodb:
# host: 127.0.0.1
# port: 27017
# usr: cc
# pwd: cc
# database: cmdb
# maxOpenConns: 3000
# maxIdleConns: 100
# mechanism: SCRAM-SHA-1
# rsName: rs0
# mongodb配置
mongodb:
host: $mongo_host
port: $mongo_port
usr: $mongo_user
pwd: "$mongo_pass"
database: $db
maxOpenConns: 3000
maxIdleConns: 100
mechanism: SCRAM-SHA-1
rsName: $rs_name
#mongo的socket连接的超时时间,以秒为单位,默认10s,最小5s,最大30s。
socketTimeoutSeconds: 10
# mongodb事件监听存储事件链的mongodb配置
watch:
host: $mongo_host
port: $mongo_port
usr: $mongo_user
pwd: "$mongo_pass"
database: $db
maxOpenConns: 10
maxIdleConns: 5
mechanism: SCRAM-SHA-1
rsName: $rs_name
socketTimeoutSeconds: 10
'''
template = FileTemplate(mongodb_file_template_str)
result = template.substitute(**context)
with open(output + "mongodb.yaml", 'w') as tmp_file:
tmp_file.write(result)
# common.yaml
common_file_template_str = '''
#topoServer:
# es:
# fullTextSearch: "off"
# url: http://127.0.0.1:9200
# usr: cc
# pwd: cc
#webServer:
# api:
# version: v3
# session:
# name: cc3
# defaultlanguage: zh-cn
# multipleOwner: 0
# userInfo: cc:cc
# site:
# domainUrl: http://127.0.0.1:80/
# bkLoginUrl: http://127.0.0.1/login/?app_id=%s&c_url=%s
# appCode: cc
# checkUrl: http://127.0.0.1/login/accounts/get_user/?bk_token=
# bkAccountUrl: http://127.0.0.1/login/accounts/get_all_user/?bk_token=%s
# resourcesPath: /tmp/
# htmlRoot: /data/cmdb/web
# fullTextSearch: off
# app:
# agentAppUrl: http://127.0.0.1/console/?app=bk_agent_setup
# authscheme: internal
# login:
# version: opensource
#operationServer:
# timer:
# spec: 00:30
#authServer:
# address: 127.0.0.1
# appCode: bk_cmdb
# appSecret: 123456
#cloudServer:
# cryptor:
# enableCryptor: false
# secretKeyUrl:
# secretsAddrs:
# secretsToken:
# secretsProject:
# secretsEnv:
#elasticsearch配置
es:
#全文检索功能开关(取值:off/on),默认是off,开启是on
fullTextSearch: "$full_text_search"
#elasticsearch服务监听url,默认是[http://127.0.0.1:9200](http://127.0.0.1:9200/)
url: $es_url
#用户
usr: $es_user
#密码
pwd: $es_pass
# web_server专属配置
webServer:
api:
#显示版本,比如v3为3.x
version: v3
#会话相关
session:
#会话名
name: cc3
#语言
defaultlanguage: zh-cn
#是否支持同时登录同一用户,0为不支持,1为支持
multipleOwner: "0"
#账号密码,以 : 分割
userInfo: $user_info
site:
#该值表示部署完成后,输入到浏览器中访问的cmdb 网址
domainUrl: ${cc_url}
#登录地址
bkLoginUrl: ${paas_url}/login/?app_id=%s&c_url=%s
appCode: cc
checkUrl: ${paas_url}/login/accounts/get_user/?bk_token=
bkAccountUrl: ${paas_url}/login/accounts/get_all_user/?bk_token=%s
resourcesPath: /tmp/
#前端基础页面位置
htmlRoot: $ui_root
#帮助文档地址
helpDocUrl: https://bk.tencent.com/docs/markdown/配置平台/产品白皮书/产品简介/Overview.md
app:
agentAppUrl: ${agent_url}/console/?app=bk_agent_setup
#权限模式,web页面使用,可选值: internal, iam
authscheme: $auth_scheme
login:
#登录模式
version: $loginVersion
# operation_server专属配置
operationServer:
timer:
# 00:00-23:59,运营统计定时收集数据时间点,默认是为00:30
spec: 00:30 # 00:00 - 23:59
# 禁用运营统计数据统计功能,默认false
disableOperationStatistic: false
#auth_server专属配置
authServer:
#蓝鲸权限中心地址,可配置多个,用,(逗号)分割
address: $auth_address
#cmdb项目在蓝鲸权限中心的应用编码
appCode: $auth_app_code
#cmdb项目在蓝鲸权限中心的应用密钥
appSecret: $auth_app_secret
#cloudServer专属配置
cloudServer:
# 加密服务使用
cryptor:
enableCryptor: ${enable_cryptor}
secretKeyUrl: ${secret_key_url}
secretsAddrs: ${secrets_addrs}
secretsToken: ${secrets_token}
secretsProject: ${secrets_project}
secretsEnv: ${secrets_env}
# 云同步任务
syncTask:
# 同步周期,最小为5分钟
syncPeriodMinutes: 5
#datacollection专属配置
datacollection:
hostsnap:
# 当主机快照数据属性,如cpu,bk_cpu_mhz,bk_disk,bk_mem这些数值型数据变动的范围大于该配置的值时,进行db数据的更新,默认值为10%,最小值为5%,以百分比为单位
changeRangePercent: 10
# 用于设置主机快照key在redis中的过期时间,该时间会有上下50%的波动,当key存在时,同一id的主机数据不会更新,默认值为10分钟,最小值为5分钟,以分钟为单位
changeCountExpireMinute: 10
# 用于设置单个服务主机快照处理请求能力,起到限流的作用,令牌桶配置,最多请求数通过burst设置。qps的默认值为40,burst的默认值为100
rateLimiter:
qps: 40
burst: 100
# 监控配置, monitor配置项必须存在
monitor:
# 监控插件名称,有noop,blueking, 不填时默认为noop
pluginName: noop
# 是否开启监控
enableMonitor: false
# 当使用blueking监控插件时,上报数据所需要的数据通道标识,如1500000
dataID: 0
# 采集数据后能够缓存的队列长度,设置范围为1~1000, 默认为100
queueSize: 100
# 采集数据用的gsecmdline命令绝对路径,默认版本需要大于等于2.0.2 ,默认路径:/usr/local/gse/plugins/bin/gsecmdline
gsecmdlinePath:
# 对应的domainsocket绝对路径,默认路径:/usr/local/gse/agent/data/ipc.state.report
domainSocketPath:
# 用于对数据上报进行频率控制和限流
# qps的设置范围为1~50,默认值为10
# burst的设置范围为1~100,默认值为20
rateLimiter:
qps: 10
burst: 20
'''
template = FileTemplate(common_file_template_str)
loginVersion = 'opensource'
if auth_enabled == "true":
loginVersion = 'blueking'
result = template.substitute(loginVersion=loginVersion, **context)
with open(output + "common.yaml", 'w') as tmp_file:
tmp_file.write(result)
# extra.yaml
extra_file_template_str = ''
template = FileTemplate(extra_file_template_str)
result = template.substitute(**context)
with open(output + "extra.yaml", 'w') as tmp_file:
tmp_file.write(result)
# migrate.yaml
migrate_file_template_str = '''
#configServer:
# addrs: 127.0.0.1:2181
# usr: cc
# pwd: cc
#registerServer:
# addrs: 127.0.0.1:2181
# usr: cc
# pwd: cc
#mongodb:
# host: 127.0.0.1
# port: 27017
# usr: cc
# pwd: cc
# database: cmdb
# maxOpenConns: 5
# maxIdleConns: 1
# mechanism: SCRAM-SHA-1
# rsName: rs0
#redis:
# host: 127.0.0.1:6379
# pwd: 123456
# database: "0"
# maxOpenConns: 5
# maxIDleConns: 1
#confs:
# dir: /data/cmdb/cmdb_adminserver/configures/
#errors:
# res: /data/cmdb/cmdb_adminserver/conf/errors
#language:
# res: /data/cmdb/cmdb_adminserver/conf/language
#auth:
# address: 127.0.0.1
# appCode: bk_cmdb
# appSecret: 123456
# 配置中心
configServer:
addrs: $rd_server
usr:
pwd:
# 注册中心
registerServer:
addrs: $rd_server
usr:
pwd:
# 指定configures的路径,通过这个路径找到其他的配置文件
confs:
dir: $configures_dir
# 指定errors的路径
errors:
res: conf/errors
# 指定language的路径
language:
res: conf/language
'''
template = FileTemplate(migrate_file_template_str)
result = template.substitute(**context)
with open(output + "migrate.yaml", 'w') as tmp_file:
tmp_file.write(result)
def update_start_script(rd_server, server_ports, enable_auth, log_level, register_ip, enable_cryptor):
list_dirs = os.walk(os.getcwd()+"/")
for root, dirs, _ in list_dirs:
for d in dirs:
if not d.startswith("cmdb_"):
continue
if d == "cmdb_adminserver":
if os.path.exists(root+d+"/init_db.sh"):
shutil.copy(root + d + "/init_db.sh", os.getcwd() + "/init_db.sh")
target_file = root + d + "/start.sh"
if not os.path.exists(target_file) or not os.path.exists(root+d + "/template.sh.start"):
continue
# Read in the file
with open(root+d + "/template.sh.start", 'r') as template_file:
filedata = template_file.read()
# Replace the target string
filedata = filedata.replace('cmdb-name-placeholder', d)
filedata = filedata.replace('cmdb-port-placeholder', str(server_ports.get(d, 9999)))
if d == "cmdb_adminserver":
filedata = filedata.replace('rd_server_placeholder', "configures/migrate.yaml")
filedata = filedata.replace('regdiscv', "config")
else:
filedata = filedata.replace('rd_server_placeholder', rd_server)
extend_flag = ''
if d in ['cmdb_apiserver', 'cmdb_hostserver', 'cmdb_datacollection', 'cmdb_procserver', 'cmdb_toposerver', 'cmdb_eventserver', 'cmdb_operationserver', 'cmdb_cloudserver', 'cmdb_authserver']:
extend_flag += ' --enable-auth=%s ' % enable_auth
if d in ['cmdb_cloudserver']:
extend_flag += ' --enable_cryptor=%s ' % enable_cryptor
if register_ip != '':
extend_flag += ' --register-ip=%s ' % register_ip
filedata = filedata.replace('extend_flag_placeholder', extend_flag)
filedata = filedata.replace('log_level_placeholder', log_level)
# Write the file out again
with open(target_file, 'w') as new_file:
new_file.write(filedata)
def main(argv):
db_name = 'cmdb'
rd_server = ''
redis_ip = ''
redis_port = 6379
redis_pass = ''
sentinel_pass = ''
mongo_ip = ''
mongo_port = 27017
mongo_user = ''
mongo_pass = ''
cc_url = ''
paas_url = 'http://127.0.0.1'
auth = {
"auth_scheme": "internal",
# iam options
"auth_address": "",
"auth_enabled": "false",
"auth_app_code": "bk_cmdb",
"auth_app_secret": "",
"auth_sync_workers": "100",
"auth_sync_interval_minutes": "45",
}
full_text_search = 'off'
es_url = 'http://127.0.0.1:9200'
es_user = ''
es_pass = ''
log_level = '3'
register_ip = ''
rs_name = 'rs0'
user_info = ''
enable_cryptor = 'false'
secret_key_url = ''
secrets_addrs = ''
secrets_token = ''
secrets_project = ''
secrets_env = ''
server_ports = {
"cmdb_adminserver": 60004,
"cmdb_apiserver": 8080,
"cmdb_datacollection": 60005,
"cmdb_eventserver": 60009,
"cmdb_hostserver": 60001,
"cmdb_coreservice": 50009,
"cmdb_procserver": 60003,
"cmdb_toposerver": 60002,
"cmdb_webserver": 8083,
"cmdb_synchronizeserver": 60010,
"cmdb_operationserver": 60011,
"cmdb_taskserver": 60012,
"cmdb_cloudserver": 60013,
"cmdb_authserver": 60014,
"cmdb_cacheservice": 50010
}
arr = [
"help", "discovery=", "database=", "redis_ip=", "redis_port=",
"redis_pass=", "sentinel_pass=", "mongo_ip=", "mongo_port=", "rs_name=",
"mongo_user=", "mongo_pass=", "blueking_cmdb_url=", "user_info=",
"blueking_paas_url=", "listen_port=", "es_url=", "es_user=", "es_pass=", "auth_address=",
"auth_app_code=", "auth_app_secret=", "auth_enabled=",
"auth_scheme=", "auth_sync_workers=", "auth_sync_interval_minutes=", "full_text_search=", "log_level=", "register_ip=",
"enable_cryptor=", "secret_key_url=", "secrets_addrs=", "secrets_token=", "secrets_project=", "secrets_env="
]
usage = '''
usage:
--discovery <discovery> the ZooKeeper server address, eg:127.0.0.1:2181
--database <database> the database name, default cmdb
--redis_ip <redis_ip> the redis ip, eg:127.0.0.1
--redis_port <redis_port> the redis port, default:6379
--redis_pass <redis_pass> the redis user password
--sentinel_pass <sentinel_pass> the redis sentinel password
--mongo_ip <mongo_ip> the mongo ip ,eg:127.0.0.1
--mongo_port <mongo_port> the mongo port, eg:27017
--mongo_user <mongo_user> the mongo user name, default:cc
--mongo_pass <mongo_pass> the mongo password
--rs_name <rs_name> the mongo replica set name, default: rs0
--blueking_cmdb_url <blueking_cmdb_url> the cmdb site url, eg: http://127.0.0.1:8088 or http://bk.tencent.com
--blueking_paas_url <blueking_paas_url> the blueking paas url, eg: http://127.0.0.1:8088 or http://bk.tencent.com
--listen_port <listen_port> the cmdb_webserver listen port, should be the port as same as -c <blueking_cmdb_url> specified, default:8083
--auth_scheme <auth_scheme> auth scheme, ex: internal, iam
--auth_enabled <auth_enabled> iam auth enabled, true or false
--auth_address <auth_address> iam address
--auth_app_code <auth_app_code> app code for iam, default bk_cmdb
--auth_app_secret <auth_app_secret> app code for iam
--full_text_search <full_text_search> full text search on or off
--es_url <es_url> the es listen url, see in es dir config/elasticsearch.yml, (network.host, http.port), default: http://127.0.0.1:9200
--es_user <es_user> the es user name
--es_pass <es_pass> the es password
--log_level <log_level> log level to start cmdb process, default: 3
--register_ip <register_ip> the ip address registered on zookeeper, it can be domain
--user_info <user_info> the system user info, user and password are combined by semicolon, multiple users are separated by comma. eg: user1:password1,user2:password2
--enable_cryptor <enable_cryptor> enable cryptor,true or false, default is false
--secret_key_url <secret_key_url> the url to get secret_key which used to encrypt and decrypt cloud account
--secrets_addrs <secrets_addrs> secrets_addrs, the addrs of bk-secrets service, start with http:// or https://
--secrets_token <secrets_token> secrets_token , as a header param for sending the api request to bk-secrets service
--secrets_project <secrets_project> secrets_project, as a header param for sending the api request to bk-secrets service
--secrets_env <secrets_env> secrets_env, as a header param for sending the api request to bk-secrets service
demo:
python init.py \\
--discovery 127.0.0.1:2181 \\
--database cmdb \\
--redis_ip 127.0.0.1 \\
--redis_port 6379 \\
--redis_pass 1111 \\
--sentinel_pass 2222 \\
--mongo_ip 127.0.0.1 \\
--mongo_port 27017 \\
--mongo_user cc \\
--mongo_pass cc \\
--rs_name rs0 \\
--blueking_cmdb_url http://127.0.0.1:8080/ \\
--blueking_paas_url http://paas.domain.com \\
--listen_port 8080 \\
--auth_scheme internal \\
--auth_enabled false \\
--auth_address https://iam.domain.com/ \\
--auth_app_code bk_cmdb \\
--auth_app_secret xxxxxxx \\
--auth_sync_workers 1 \\
--auth_sync_interval_minutes 45 \\
--full_text_search off \\
--es_url http://127.0.0.1:9200 \\
--es_user cc \\
--es_pass cc \\
--log_level 3 \\
--register_ip cmdb.domain.com \\
--user_info user1:password1,user2:password2
'''
try:
opts, _ = getopt.getopt(argv, "hd:D:r:p:x:s:m:P:X:S:u:U:a:l:es:v", arr)
except getopt.GetoptError as e:
print("\n \t", e.msg)
print(usage)
sys.exit(2)
if len(opts) == 0:
print(usage)
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', '--help'):
print(usage)
sys.exit()
elif opt in ("-d", "--discovery"):
rd_server = arg
print('rd_server:', rd_server)
elif opt in ("-D", "--database"):
db_name = arg
print('database:', db_name)
elif opt in ("-r", "--redis_ip"):
redis_ip = arg
print('redis_ip:', redis_ip)
elif opt in ("-p", "--redis_port"):
redis_port = arg
print('redis_port:', redis_port)
elif opt in ("-s", "--redis_pass"):
redis_pass = arg
print('redis_pass:', redis_pass)
elif opt in ("-s", "--sentinel_pass"):
sentinel_pass = arg
print('sentinel_pass:', sentinel_pass)
elif opt in ("-m", "--mongo_ip"):
mongo_ip = arg
print('mongo_ip:', mongo_ip)
elif opt in ("-P", "--mongo_port"):
mongo_port = arg
print('mongo_port:', mongo_port)
elif opt in ("-X", "--mongo_user"):
mongo_user = arg
print('mongo_user:', mongo_user)
elif opt in ("-S", "--mongo_pass"):
mongo_pass = arg
print('mongo_pass:', mongo_pass)
elif opt in ("--rs_name",):
rs_name = arg
print('rs_name:', rs_name)
elif opt in ("-u", "--blueking_cmdb_url"):
cc_url = arg
print('blueking_cmdb_url:', cc_url)
elif opt in ("-U", "--blueking_paas_url"):
paas_url = arg
print('blueking_pass_url:', paas_url)
elif opt in ("-l", "--listen_port"):
server_ports["cmdb_webserver"] = arg
print("listen_port:", server_ports["cmdb_webserver"])
elif opt in ("--auth_address",):
auth["auth_address"] = arg
print("auth_address:", auth["auth_address"])
elif opt in ("--auth_enabled",):
auth["auth_enabled"] = arg
print("auth_enabled:", auth["auth_enabled"])
elif opt in ("--auth_scheme",):
auth["auth_scheme"] = arg
print("auth_scheme:", auth["auth_scheme"])
elif opt in ("--auth_app_code",):
auth["auth_app_code"] = arg
print("auth_app_code:", auth["auth_app_code"])
elif opt in ("--auth_app_secret",):
auth["auth_app_secret"] = arg
print("auth_app_secret:", auth["auth_app_secret"])
elif opt in ("--auth_sync_workers",):
auth["auth_sync_workers"] = arg
print("auth_sync_workers:", auth["auth_sync_workers"])
elif opt in ("--auth_sync_interval_minutes",):
auth["auth_sync_interval_minutes"] = arg
print("auth_sync_interval_minutes:", auth["auth_sync_interval_minutes"])
elif opt in ("--full_text_search",):
full_text_search = arg
print('full_text_search:', full_text_search)
elif opt in("-es","--es_url",):
es_url = arg
print('es_url:', es_url)
elif opt in ("--es_user",):
es_user = arg
print('es_user:', es_user)
elif opt in ("--es_pass",):
es_pass = arg
print('es_pass:', es_pass)
elif opt in("-v","--log_level",):
log_level = arg
print('log_level:', log_level)
elif opt in("--register_ip",):
register_ip = arg
print('register_ip:', register_ip)
elif opt in("--user_info",):
user_info = arg
print('user_info:', user_info)
elif opt in("--enable_cryptor",):
enable_cryptor = arg
print('enable_cryptor:', enable_cryptor)
elif opt in("--secret_key_url",):
secret_key_url = arg
print('secret_key_url:', secret_key_url)
elif opt in("--secrets_addrs",):
secrets_addrs = arg
print('secrets_addrs:', secrets_addrs)
elif opt in("--secrets_token",):
secrets_token = arg
print('secrets_token:', secrets_token)
elif opt in("--secrets_project",):
secrets_project = arg
print('secrets_project:', secrets_project)
elif opt in("--secrets_env",):
secrets_env = arg
print('secrets_env:', secrets_env)
if 0 == len(rd_server):
print('please input the ZooKeeper address, eg:127.0.0.1:2181')
sys.exit()
if 0 == len(db_name):
print('please input the database name, eg:cmdb')
sys.exit()
if 0 == len(redis_ip):
print('please input the redis ip, eg: 127.0.0.1')
sys.exit()
if redis_port < 0:
print('please input the redis port, eg:6379')
sys.exit()
if 0 == len(redis_pass):
print('please input the redis password')
sys.exit()
if 0 == len(mongo_ip):
print('please input the mongo ip, eg:127.0.0.1')
sys.exit()
if mongo_port < 0:
print('please input the mongo port, eg:27017')
sys.exit()
if 0 == len(mongo_user):
print('please input the mongo user, eg:cc')
sys.exit()
if 0 == len(mongo_pass):
print('please input the mongo password')
sys.exit()
if 0 == len(cc_url):
print('please input the blueking cmdb url')
sys.exit()
if 0 == len(paas_url):
print('please input the blueking paas url')
sys.exit()
if not cc_url.startswith("http://"):
print('blueking cmdb url not start with http://')
sys.exit()
if full_text_search not in ["off", "on"]:
print('full_text_search can only be off or on')
sys.exit()
if full_text_search == "on":
if not(es_url.startswith("http://") or es_url.startswith("https://")) :
print('es url not start with http:// or https://')
sys.exit()
if enable_cryptor == "true":
if len(secret_key_url) == 0 or len(secrets_addrs) == 0 or len(secrets_token) == 0 or len(secrets_project) == 0 or len(secrets_env) == 0:
print('secret_key_url, secrets_addrs, secrets_token, secrets_project, secrets_env must be set when enable_cryptor is true')
sys.exit()
if auth["auth_scheme"] not in ["internal", "iam"]:
print('auth_scheme can only be internal or iam')
sys.exit()
if auth["auth_enabled"] not in ["true", "false"]:
print('auth_enabled value invalid, can only be `true` or `false`')
sys.exit()
if auth["auth_scheme"] == "iam" and auth["auth_enabled"] == 'true':
if not auth["auth_address"]:
print("auth_address can't be empty when iam auth enabled")
sys.exit()
if not auth["auth_app_code"]:
print("auth_app_code can't be empty when iam auth enabled")
sys.exit()
if not auth["auth_app_secret"]:
print("auth_app_secret can't be empty when iam auth enabled")
sys.exit()
availableLogLevel = [str(i) for i in range(0, 10)]
if log_level not in availableLogLevel:
print("available log_level value are: %s" % availableLogLevel)
sys.exit()
generate_config_file(
rd_server_v=rd_server,
db_name_v=db_name,
redis_ip_v=redis_ip,
redis_port_v=redis_port,
redis_pass_v=redis_pass,
sentinel_pass_v=sentinel_pass,
mongo_ip_v=mongo_ip,
mongo_port_v=mongo_port,
mongo_user_v=mongo_user,
mongo_pass_v=mongo_pass,
rs_name=rs_name,
cc_url_v=cc_url,
paas_url_v=paas_url,
full_text_search=full_text_search,
es_url_v=es_url,
es_user_v=es_user,
es_pass_v=es_pass,
log_level=log_level,
register_ip=register_ip,
user_info=user_info,
enable_cryptor_v=enable_cryptor,
secret_key_url_v=secret_key_url,
secrets_addrs_v=secrets_addrs,
secrets_token_v = secrets_token,
secrets_project_v = secrets_project,
secrets_env_v = secrets_env,
**auth
)
update_start_script(rd_server, server_ports, auth['auth_enabled'], log_level, register_ip, enable_cryptor)
print('initial configurations success, configs could be found at cmdb_adminserver/configures')
if __name__ == "__main__":
main(sys.argv[1:])
| 33.853199 | 206 | 0.608984 |
6d271c8ff69e708027a0aee0c7b1e1ee85a4fe4c | 58,836 | py | Python | tools/make.py | SOCOMD/standing-static-line | 1ae752360db9212b74fd52d43aec2b49e6300d90 | [
"MIT"
] | null | null | null | tools/make.py | SOCOMD/standing-static-line | 1ae752360db9212b74fd52d43aec2b49e6300d90 | [
"MIT"
] | 1 | 2020-08-28T00:42:55.000Z | 2020-08-31T04:07:06.000Z | tools/make.py | SOCOMD/standing-static-line | 1ae752360db9212b74fd52d43aec2b49e6300d90 | [
"MIT"
] | 1 | 2021-09-06T00:48:45.000Z | 2021-09-06T00:48:45.000Z | #!/usr/bin/env python3
# vim: set fileencoding=utf-8 :
# make.py
# An Arma 3 addon build system
###############################################################################
# The MIT License (MIT)
# Copyright (c) 2013-2014 Ryan Schultz
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###############################################################################
__version__ = "0.8"
import sys
if sys.version_info[0] == 2:
print("Python 3 is required.")
sys.exit(1)
import os
import os.path
import shutil
import platform
import glob
import subprocess
import hashlib
import configparser
import json
import traceback
import time
import timeit
import re
import fileinput
if sys.platform == "win32":
import winreg
######## GLOBALS #########
# Build configuration defaults; most are overwritten in main() from make.cfg.
project = "@ssl"                 # Mod release folder name (with @ prefix)
project_version = "3.0.0"        # Fallback version if main/script_version.hpp is unreadable
arma3tools_path = ""             # BI Arma 3 Tools install path (read from the registry)
work_drive = ""                  # BI tools work drive, typically "P:\"
module_root = ""                 # Directory containing the addon source folders
make_root = ""                   # Directory this script lives in
release_dir = ""                 # Output directory for the built release
module_root_parent = ""          # Parent of the prefixed module root on the work drive
optionals_root = ""              # Source folder for optional PBOs
key_name = "ssl"                 # Base name of the signing key
key = ""                         # Full path to the .biprivatekey used for signing
dssignfile = ""                  # Path to DSSignFile.exe
prefix = "ssl"                   # Project prefix folder under the work drive
pbo_name_prefix = "ssl_"         # Prefix prepended to every built PBO file name
signature_blacklist = []         # PBO base names that must NOT be signed
importantFiles = ["mod.cpp", "README.md", "AUTHORS.txt", "LICENSE", "logo_ssl_ca.paa"]  # copied into the release
versionFiles = ["README.md", "mod.cpp"]  # files whose embedded version string is updated
ciBuild = False # Used for CI builds
###############################################################################
# http://akiscode.com/articles/sha-1directoryhash.shtml
# Copyright (c) 2009 Stephen Akiki
# MIT License (Means you can do whatever you want with this)
# See http://www.opensource.org/licenses/mit-license.php
# Error Codes:
# -1 -> Directory does not exist
# -2 -> General error (see stack traceback)
def get_directory_hash(directory):
    """Return a SHA-1 based fingerprint of all file contents under directory.

    Every file found by os.walk is read in 4 KiB chunks and each chunk's own
    SHA-1 digest is folded into one running SHA-1, so the result changes when
    any file's content changes (file names/paths do not contribute).

    Error codes (original contract preserved):
        -1 -> directory does not exist
        -2 -> unexpected error while walking/reading (traceback is printed)
    """
    directory_hash = hashlib.sha1()
    if not os.path.exists(directory):
        return -1
    try:
        for root, dirs, files in os.walk(directory):
            for names in files:
                path = os.path.join(root, names)
                try:
                    f = open(path, 'rb')
                except OSError:
                    # Unreadable file: skip it. (Bug fix: the original called
                    # f.close() here, but f was never bound when open() failed,
                    # which raised NameError instead of skipping.)
                    continue
                with f:
                    # Read in small chunks so huge files don't load into memory.
                    while 1:
                        buf = f.read(4096)
                        if not buf:
                            break
                        directory_hash.update(hashlib.sha1(buf).digest())
    except:
        # Print the stack traceback and report a general error.
        traceback.print_exc()
        return -2
    return directory_hash.hexdigest()
def Fract_Sec(s):
    """Split a duration of s seconds into (days, hours, minutes, seconds).

    The seconds component is a float carrying the fractional remainder.
    """
    remainder = float(s) / (60 * 60 * 24)
    days = int(remainder)
    remainder = (remainder - days) * 24
    hours = int(remainder)
    remainder = (remainder - hours) * 60
    minutes = int(remainder)
    seconds = (remainder - minutes) * 60
    return days, hours, minutes, seconds
# Copyright (c) André Burgaud
# http://www.burgaud.com/bring-colors-to-the-windows-console-with-python/
if sys.platform == "win32":
    # Win32 console color support via the kernel32 console API (ctypes).
    # Used by color() below; on other platforms ANSI escapes are used instead.
    from ctypes import windll, Structure, c_short, c_ushort, byref
    SHORT = c_short
    WORD = c_ushort
    class COORD(Structure):
        """struct in wincon.h."""
        _fields_ = [
            ("X", SHORT),
            ("Y", SHORT)]
    class SMALL_RECT(Structure):
        """struct in wincon.h."""
        _fields_ = [
            ("Left", SHORT),
            ("Top", SHORT),
            ("Right", SHORT),
            ("Bottom", SHORT)]
    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
        """struct in wincon.h."""
        _fields_ = [
            ("dwSize", COORD),
            ("dwCursorPosition", COORD),
            ("wAttributes", WORD),
            ("srWindow", SMALL_RECT),
            ("dwMaximumWindowSize", COORD)]
    # winbase.h -- standard handle identifiers for GetStdHandle
    STD_INPUT_HANDLE = -10
    STD_OUTPUT_HANDLE = -11
    STD_ERROR_HANDLE = -12
    # wincon.h -- character attribute bit masks
    FOREGROUND_BLACK = 0x0000
    FOREGROUND_BLUE = 0x0001
    FOREGROUND_GREEN = 0x0002
    FOREGROUND_CYAN = 0x0003
    FOREGROUND_RED = 0x0004
    FOREGROUND_MAGENTA = 0x0005
    FOREGROUND_YELLOW = 0x0006
    FOREGROUND_GREY = 0x0007
    FOREGROUND_INTENSITY = 0x0008 # foreground color is intensified.
    BACKGROUND_BLACK = 0x0000
    BACKGROUND_BLUE = 0x0010
    BACKGROUND_GREEN = 0x0020
    BACKGROUND_CYAN = 0x0030
    BACKGROUND_RED = 0x0040
    BACKGROUND_MAGENTA = 0x0050
    BACKGROUND_YELLOW = 0x0060
    BACKGROUND_GREY = 0x0070
    BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
    # Cache the stdout handle and the two API entry points used by color().
    stdout_handle = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
    SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
    GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
    def get_text_attr():
        """Returns the character attributes (colors) of the console screen
        buffer."""
        csbi = CONSOLE_SCREEN_BUFFER_INFO()
        GetConsoleScreenBufferInfo(stdout_handle, byref(csbi))
        return csbi.wAttributes
    def set_text_attr(color):
        """Sets the character attributes (colors) of the console screen
        buffer. Color is a combination of foreground and background color,
        foreground and background intensity."""
        SetConsoleTextAttribute(stdout_handle, color)
###############################################################################
def find_bi_tools(work_drive):
    """Find BI tools via the Windows registry.

    Args:
        work_drive: unused; kept for interface compatibility with callers.

    Returns:
        [AddonBuilder.exe, DSSignFile.exe, DSCreateKey.exe, CfgConvert.exe]
        absolute paths.

    Raises:
        Exception("BadTools", ...) when the registry key or any tool exe is
        missing.
    """
    reg = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
    try:
        k = winreg.OpenKey(reg, r"Software\bohemia interactive\arma 3 tools")
        arma3tools_path = winreg.QueryValueEx(k, "path")[0]
        winreg.CloseKey(k)
    except OSError:
        # Narrowed from a bare except: winreg raises OSError subclasses
        # (e.g. FileNotFoundError) when the key or value is missing.
        raise Exception("BadTools","Arma 3 Tools are not installed correctly or the P: drive needs to be created.")
    addonbuilder_path = os.path.join(arma3tools_path, "AddonBuilder", "AddonBuilder.exe")
    dssignfile_path = os.path.join(arma3tools_path, "DSSignFile", "DSSignFile.exe")
    dscreatekey_path = os.path.join(arma3tools_path, "DSSignFile", "DSCreateKey.exe")
    cfgconvert_path = os.path.join(arma3tools_path, "CfgConvert", "CfgConvert.exe")
    if os.path.isfile(addonbuilder_path) and os.path.isfile(dssignfile_path) and os.path.isfile(dscreatekey_path) and os.path.isfile(cfgconvert_path):
        return [addonbuilder_path, dssignfile_path, dscreatekey_path, cfgconvert_path]
    else:
        raise Exception("BadTools","Arma 3 Tools are not installed correctly or the P: drive needs to be created.")
def find_depbo_tools(regKey):
    """Use registry entries to find DePBO-based tools.

    regKey is "HKCU" (current user; hard failures raise) or anything else for
    HKLM (hard failures return -1 so the caller can retry with HKCU).

    Returns [pboProject, rapify, MakePbo] exe paths with surrounding quotes
    stripped.

    NOTE(review): if a tool's key opens but its "exe" value is missing, only
    an error is printed and the corresponding *_path name stays unbound, so
    the final return raises NameError — confirm whether that is intended.
    """
    stop = False
    if regKey == "HKCU":
        reg = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
        stop = True
    else:
        reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    try:
        try:
            # Prefer the 32-bit view (Wow6432Node) on 64-bit Windows.
            k = winreg.OpenKey(reg, r"Software\Wow6432Node\Mikero\pboProject")
        except FileNotFoundError:
            k = winreg.OpenKey(reg, r"Software\Mikero\pboProject")
        try:
            pboproject_path = winreg.QueryValueEx(k, "exe")[0]
            winreg.CloseKey(k)
            print("Found pboproject.")
        except:
            print_error("ERROR: Could not find pboProject.")
        try:
            k = winreg.OpenKey(reg, r"Software\Wow6432Node\Mikero\rapify")
        except FileNotFoundError:
            k = winreg.OpenKey(reg, r"Software\Mikero\rapify")
        try:
            rapify_path = winreg.QueryValueEx(k, "exe")[0]
            winreg.CloseKey(k)
            print("Found rapify.")
        except:
            print_error("Could not find rapify.")
        try:
            k = winreg.OpenKey(reg, r"Software\Wow6432Node\Mikero\MakePbo")
        except FileNotFoundError:
            k = winreg.OpenKey(reg, r"Software\Mikero\MakePbo")
        try:
            makepbo_path = winreg.QueryValueEx(k, "exe")[0]
            winreg.CloseKey(k)
            print("Found makepbo.")
        except:
            print_error("Could not find makepbo.")
    except:
        if stop == True:
            raise Exception("BadDePBO", "DePBO tools not installed correctly")
        return -1
    #Strip any quotations from the path due to a MikeRo tool bug which leaves a trailing space in some of its registry paths.
    return [pboproject_path.strip('"'),rapify_path.strip('"'),makepbo_path.strip('"')]
def color(color):
    """Set the console text color. Works on Win32 and normal terminals."""
    if sys.platform == "win32":
        if color == "green":
            set_text_attr(FOREGROUND_GREEN | get_text_attr() & 0x0070 | FOREGROUND_INTENSITY)
        elif color == "yellow":
            set_text_attr(FOREGROUND_YELLOW | get_text_attr() & 0x0070 | FOREGROUND_INTENSITY)
        elif color == "red":
            set_text_attr(FOREGROUND_RED | get_text_attr() & 0x0070 | FOREGROUND_INTENSITY)
        elif color == "blue":
            set_text_attr(FOREGROUND_BLUE | get_text_attr() & 0x0070 | FOREGROUND_INTENSITY)
        elif color in ("reset", "grey"):
            # Both restore the default grey foreground without intensity.
            set_text_attr(FOREGROUND_GREY | get_text_attr() & 0x0070)
    else:
        # ANSI escape codes. NOTE: "yellow" and "grey" are silently ignored
        # on non-Windows platforms, matching the original behavior.
        ansi_codes = {
            "green": '\033[92m',
            "red": '\033[91m',
            "blue": '\033[94m',
            "reset": '\033[0m',
        }
        if color in ansi_codes:
            sys.stdout.write(ansi_codes[color])
def print_error(msg):
    """Print msg in red, prefixed with "ERROR: ", then restore the color."""
    color("red")
    text = "ERROR: {}".format(msg)
    print(text)
    color("reset")
def print_green(msg):
    """Print msg in green, then restore the default console color."""
    color("green")
    print(msg)
    color("reset")
def print_blue(msg):
    """Print msg in blue, then restore the default console color."""
    color("blue")
    print(msg)
    color("reset")
def print_yellow(msg):
    """Print msg in yellow, then restore the default console color."""
    color("yellow")
    print(msg)
    color("reset")
def copy_important_files(source_dir,destination_dir):
    """Copy the importantFiles list and any top-level extension DLLs into the release.

    Missing importantFiles are recorded in the module-level missingFiles list.
    The working directory is temporarily changed to source_dir for the DLL
    scan and restored afterwards.

    NOTE(review): filePath is already an absolute path rooted at
    module_root_parent, so os.path.join(source_dir, filePath) discards
    source_dir (os.path.join ignores earlier parts before an absolute one) —
    confirm that is the intended behavior.
    """
    originalDir = os.getcwd()
    # Copy importantFiles
    try:
        print_blue("\nSearching for important files in {}".format(source_dir))
        print("Source_dir: {}".format(source_dir))
        print("Destination_dir: {}".format(destination_dir))
        for file in importantFiles:
            filePath = os.path.join(module_root_parent, file)
            if os.path.exists(filePath):
                print_green("Copying file => {}".format(filePath))
                shutil.copy(os.path.join(source_dir,filePath), destination_dir)
            else:
                missingFiles.append("{}".format(filePath))
                print_error("Failed copying file => {}".format(filePath))
    except:
        print_error("COPYING IMPORTANT FILES.")
        raise
    # Copy all extension DLL's
    try:
        os.chdir(os.path.join(source_dir))
        print_blue("\nSearching for DLLs in {}".format(os.getcwd()))
        filenames = glob.glob("*.dll")
        if not filenames:
            print ("Empty SET")
        for dll in filenames:
            print_green("Copying dll => {}".format(os.path.join(source_dir,dll)))
            if os.path.isfile(dll):
                shutil.copyfile(os.path.join(source_dir,dll),os.path.join(destination_dir,dll))
    except:
        print_error("COPYING DLL FILES.")
        raise
    finally:
        # Always restore the original working directory.
        os.chdir(originalDir)
def copy_optionals_for_building(mod,pbos):
    """Stage optional components for the build.

    Appends each optionals source folder name to mod (out-param) and each
    previously-built optional PBO file name to pbos (out-param). Built
    optional PBOs (and their signatures) are moved from release/optionals to
    release/addons for processing, and each optionals source folder is
    temporarily copied next to the regular modules (cleanup_optionals undoes
    this). "userconfig" is special-cased: it is copied to the work drive root
    and into the release, since it is not a PBO source folder.
    """
    src_directories = os.listdir(optionals_root)
    current_dir = os.getcwd()
    print_blue("\nChecking optionals folder...")
    try:
        #special server.pbo processing
        files = glob.glob(os.path.join(release_dir, project, "optionals", "*.pbo"))
        for file in files:
            file_name = os.path.basename(file)
            #print ("Adding the following file: {}".format(file_name))
            pbos.append(file_name)
            pbo_path = os.path.join(release_dir, project, "optionals", file_name)
            sigFile_name = file_name +"."+ key_name + ".bisign"
            sig_path = os.path.join(release_dir, project, "optionals", sigFile_name)
            if (os.path.isfile(pbo_path)):
                print("Moving {} for processing.".format(pbo_path))
                shutil.move(pbo_path, os.path.join(release_dir, project, "addons", file_name))
            if (os.path.isfile(sig_path)):
                #print("Moving {} for processing.".format(sig_path))
                shutil.move(sig_path, os.path.join(release_dir, project, "addons", sigFile_name))
    except:
        print_error("Error in moving")
        raise
    finally:
        os.chdir(current_dir)
    try:
        for dir_name in src_directories:
            mod.append(dir_name)
            #userconfig requires special handling since it is not a PBO source folder.
            #CfgConvert fails to build server.pbo if userconfig is not found in P:\
            if (dir_name == "userconfig"):
                if (os.path.exists(os.path.join(release_dir, project, "optionals", dir_name))):
                    shutil.rmtree(os.path.join(release_dir, project, "optionals", dir_name), True)
                shutil.copytree(os.path.join(optionals_root,dir_name), os.path.join(release_dir, project, "optionals", dir_name))
                destination = os.path.join(work_drive,dir_name)
            else:
                destination = os.path.join(module_root,dir_name)
            print("Temporarily copying {} => {} for building.".format(os.path.join(optionals_root,dir_name),destination))
            if (os.path.exists(destination)):
                shutil.rmtree(destination, True)
            shutil.copytree(os.path.join(optionals_root,dir_name), destination)
    except:
        print_error("Copy Optionals Failed")
        raise
    finally:
        os.chdir(current_dir)
def cleanup_optionals(mod):
    """Undo copy_optionals_for_building for every folder name in mod.

    Built optional PBOs (and their signatures) are moved back from
    release/addons to release/optionals, then the temporary source copy is
    deleted. "userconfig" lives on the work drive root rather than in
    module_root.

    NOTE(review): the outer FileNotFoundError handler prints file_name, which
    is unbound if the first iteration fails before it is assigned — confirm.
    """
    print("")
    try:
        for dir_name in mod:
            #userconfig requires special handling since it is not a PBO source folder.
            if (dir_name == "userconfig"):
                destination = os.path.join(work_drive,dir_name)
            else:
                destination = os.path.join(module_root,dir_name)
            print("Cleaning {}".format(destination))
            try:
                file_name = "{}{}.pbo".format(pbo_name_prefix,dir_name)
                src_file_path = os.path.join(release_dir, project, "addons", file_name)
                dst_file_path = os.path.join(release_dir, project, "optionals", file_name)
                sigFile_name = "{}.{}.bisign".format(file_name,key_name)
                src_sig_path = os.path.join(release_dir, project, "addons", sigFile_name)
                dst_sig_path = os.path.join(release_dir, project, "optionals", sigFile_name)
                if (os.path.isfile(src_file_path)):
                    #print("Preserving {}".format(file_name))
                    os.renames(src_file_path,dst_file_path)
                if (os.path.isfile(src_sig_path)):
                    #print("Preserving {}".format(sigFile_name))
                    os.renames(src_sig_path,dst_sig_path)
            except FileExistsError:
                # Destination already present: report and leave the temp copy.
                print_error("{} already exists".format(file_name))
                continue
            shutil.rmtree(destination)
    except FileNotFoundError:
        print_yellow("{} file not found".format(file_name))
    except:
        print_error("Cleaning Optionals Failed")
        raise
def purge(dir, pattern, friendlyPattern="files"):
    """Delete every file in dir whose name matches the regex pattern."""
    print_green("Deleting {} files from directory: {}".format(friendlyPattern,dir))
    if not os.path.exists(dir):
        return
    matcher = re.compile(pattern)
    for entry in os.listdir(dir):
        if matcher.search(entry):
            os.remove(os.path.join(dir, entry))
def build_signature_file(file_name):
    """Sign file_name with the configured private key via DSSignFile.

    PBOs whose base name appears in signature_blacklist are skipped and
    treated as success.

    Returns:
        bool: True when signing succeeded (exit code 0) or was skipped.
    """
    global key
    global dssignfile
    global signature_blacklist
    ret = 0
    baseFile = os.path.basename(file_name)
    #print_yellow("Sig_fileName: {}".format(baseFile))
    if baseFile not in signature_blacklist:
        print("Signing with {}.".format(key))
        ret = subprocess.call([dssignfile, key, file_name])
    # Idiom fix: return the comparison directly instead of if/else True/False.
    return ret == 0
def check_for_obsolete_pbos(addonspath, file):
    """Return True when PBO file has no matching source module under addonspath.

    The module name is recovered by stripping the PBO name prefix and the
    ".pbo" extension from file.
    """
    module = file[len(pbo_name_prefix):-4]
    # Idiom fix: return the boolean directly instead of if/else True/False.
    return not os.path.exists(os.path.join(addonspath, module))
def backup_config(module):
    """Back up the module's $PBOPREFIX$ file to $PBOPREFIX$.backup.

    The backup lives next to the original on the work drive. Errors are only
    reported; the function always returns True.
    """
    #backup original $PBOPREFIX$
    global work_drive
    global prefix
    try:
        configpath = os.path.join(work_drive, prefix, module, "$PBOPREFIX$")
        if os.path.isfile(configpath):
            shutil.copyfile(configpath, os.path.join(work_drive, prefix, module, "$PBOPREFIX$.backup"))
        else:
            print_error("$PBOPREFIX$ Does not exist for module: {}.".format(module))
    except:
        print_error("Error creating backup of $PBOPREFIX$ for module {}.".format(module))
    return True
def addon_restore(modulePath):
    """Restore the original $PBOPREFIX$ from its .backup copy, if present.

    Any existing $PBOPREFIX$ is removed first. Always returns True; failures
    only print a warning.
    """
    prefix_file = os.path.join(modulePath, "$PBOPREFIX$")
    backup_file = os.path.join(modulePath, "$PBOPREFIX$.backup")
    try:
        if os.path.isfile(backup_file):
            if os.path.isfile(prefix_file):
                os.remove(prefix_file)
            os.rename(backup_file, prefix_file)
    except:
        print_yellow("Some error occurred. Check your addon folder {} for integrity".format(modulePath))
    return True
def get_project_version(version_increments=None):
    """Read (and optionally increment) the version in main/script_version.hpp.

    version_increments may contain "major", "minor", "patch" and/or "build".
    The highest-order requested component is incremented and lower components
    (except build) are reset; "build" is incremented independently. The file
    is rewritten with the new numbers, and the module-level project_version
    is updated and returned. On failure the previous project_version is kept
    and the user is prompted to continue.
    """
    global project_version
    # Bug fix: the original used a mutable default argument ([]); replaced
    # with a None sentinel. Behavior is unchanged (the list is never mutated).
    if version_increments is None:
        version_increments = []
    versionStssl = project_version
    #do the magic based on https://github.com/acemod/ACE3/issues/806#issuecomment-95639048
    try:
        scriptModPath = os.path.join(module_root, "main\script_version.hpp")
        if os.path.isfile(scriptModPath):
            f = open(scriptModPath, "r")
            hpptext = f.read()
            f.close()
            if hpptext:
                majorText = re.search(r"#define MAJOR (.*\b)", hpptext).group(1)
                minorText = re.search(r"#define MINOR (.*\b)", hpptext).group(1)
                patchText = re.search(r"#define PATCHLVL (.*\b)", hpptext).group(1)
                buildText = re.search(r"#define BUILD (.*\b)", hpptext).group(1)
                # Increment version (reset all below except build)
                if version_increments != []:
                    if "major" in version_increments:
                        majorText = int(majorText) + 1
                        minorText = 0
                        patchText = 0
                    elif "minor" in version_increments:
                        minorText = int(minorText) + 1
                        patchText = 0
                    elif "patch" in version_increments:
                        patchText = int(patchText) + 1
                    # Always increment build
                    if "build" in version_increments:
                        buildText = int(buildText) + 1
                    print_green("Incrementing version to {}.{}.{}.{}".format(majorText,minorText,patchText,buildText))
                    with open(scriptModPath, "w", newline="\n") as file:
                        file.writelines([
                            "#define MAJOR {}\n".format(majorText),
                            "#define MINOR {}\n".format(minorText),
                            "#define PATCHLVL {}\n".format(patchText),
                            "#define BUILD {}\n".format(buildText)
                        ])
                if majorText:
                    versionStssl = "{}.{}.{}.{}".format(majorText,minorText,patchText,buildText)
        else:
            print_error("A Critical file seems to be missing or inaccessible: {}".format(scriptModPath))
            raise FileNotFoundError("File Not Found: {}".format(scriptModPath))
    except Exception as e:
        print_error("Get_project_version error: {}".format(e))
        print_error("Check the integrity of the file: {}".format(scriptModPath))
        versionStssl = project_version
        print_error("Resetting to the default version stssl: {}".format(versionStssl))
        input("Press Enter to continue...")
        print("Resuming build...")
    print_yellow("{} VERSION set to {}".format(project.lstrip("@").upper(),versionStssl))
    project_version = versionStssl
    return project_version
def replace_file(filePath, oldSubstring, newSubstring):
    """Rewrite filePath in place, replacing oldSubstring with newSubstring.

    The replacement is applied line by line, matching the original
    fileinput-based behavior.
    """
    with open(filePath, "r") as source:
        lines = source.readlines()
    with open(filePath, "w") as target:
        for line in lines:
            target.write(line.replace(oldSubstring, newSubstring))
def set_version_in_files():
    """Update version strings embedded in each file of versionFiles.

    Both MAJOR.MINOR.PATCH.BUILD and MAJOR.MINOR.PATCH forms are matched; a
    found version is replaced with the same-length form of project_version.

    NOTE(review): the `except WindowsError` clause would raise NameError on
    non-Windows platforms if triggered; the script already exits early on
    non-win32, so this is only reachable on Windows — confirm.
    """
    newVersion = project_version # MAJOR.MINOR.PATCH.BUILD
    newVersionArr = newVersion.split(".")
    newVersionShort = ".".join((newVersionArr[0],newVersionArr[1],newVersionArr[2])) # MAJOR.MINOR.PATCH
    # Regex patterns
    pattern = re.compile(r"([\d]+\.[\d]+\.[\d]+\.[\d]+)") # MAJOR.MINOR.PATCH.BUILD
    patternShort = re.compile(r"([\d]+\.[\d]+\.[\d]+)") # MAJOR.MINOR.PATCH
    # Change versions in files containing version
    for i in versionFiles:
        filePath = os.path.join(module_root_parent, i)
        try:
            # Save the file contents to a variable if the file exists
            if os.path.isfile(filePath):
                f = open(filePath, "r+")
                fileText = f.read()
                f.close()
                if fileText:
                    # Version string files
                    # Search and save version stssl
                    versionsFound = re.findall(pattern, fileText) + re.findall(patternShort, fileText)
                    # Filter out sub-versions of other versions
                    versionsFound = [j for i, j in enumerate(versionsFound) if all(j not in k for k in versionsFound[i + 1:])]
                    # Replace version stssl if any of the new version parts is higher than the one found
                    for versionFound in versionsFound:
                        if versionFound:
                            # Use the same version length as the one found
                            newVersionUsed = "" # In case undefined
                            if versionFound.count(".") == newVersion.count("."):
                                newVersionUsed = newVersion
                            if versionFound.count(".") == newVersionShort.count("."):
                                newVersionUsed = newVersionShort
                            # Print change and modify the file if changed
                            if newVersionUsed and versionFound != newVersionUsed:
                                print_green("Changing version {} => {} in {}".format(versionFound, newVersionUsed, filePath))
                                replace_file(filePath, versionFound, newVersionUsed)
        except WindowsError as e:
            # Temporary file is still "in use" by Python, pass this exception
            pass
        except Exception as e:
            print_error("set_version_in_files error: {}".format(e))
            raise
    return True
def stash_version_files_for_building():
    """Copy each versionFiles entry to <release_dir>/<name>.bak, then bump versions.

    The stashed copies are put back by restore_version_files() after the
    build; missing files are recorded in the module-level missingFiles list.
    Ends by calling set_version_in_files(). Always returns True (or raises).
    """
    try:
        for file in versionFiles:
            filePath = os.path.join(module_root_parent, file)
            if os.path.exists(filePath):
                # Take only file name for stash location if in subfolder (otherwise it gets removed when removing folders from release dir)
                if "\\" in file:
                    count = file.count("\\")
                    file = file.split("\\", count)[-1]
                stashPath = os.path.join(release_dir, file)
                print("Temporarily stashing {} => {}.bak for version update".format(filePath, stashPath))
                shutil.copy(filePath, "{}.bak".format(stashPath))
            else:
                print_error("Failed temporarily stashing {} for version update".format(filePath))
                missingFiles.append("{}".format(filePath))
    except:
        print_error("Stashing version files failed")
        raise
    # Set version
    set_version_in_files()
    return True
def restore_version_files():
    """Move the .bak stashes created by stash_version_files_for_building back.

    Only files that still exist at their original location are restored.
    Always returns True (or raises on failure).
    """
    try:
        print_blue("\nRestoring version files...")
        for file in versionFiles:
            filePath = os.path.join(module_root_parent, file)
            # Take only file name for stash path if in subfolder (otherwise it gets removed when removing folders from release dir)
            if "\\" in file:
                count = file.count("\\")
                file = file.split("\\", count)[-1]
            stashPath = os.path.join(release_dir, file)
            if os.path.exists(filePath):
                print("Restoring {}".format(filePath))
                shutil.move("{}.bak".format(stashPath), filePath)
    except:
        print_error("Restoring version files failed")
        raise
    return True
def get_private_keyname(commitID, module="main"):
    """Build the private key name: <pbo prefix><project version>-<commit id>.

    The module argument is unused; it is kept for interface compatibility.
    """
    global pbo_name_prefix
    global project_version
    return "{prefix}{version}-{commit_id}".format(
        prefix=pbo_name_prefix,
        version=project_version,
        commit_id=commitID,
    )
def get_commit_ID():
    """Return the first 8 characters of the repository's HEAD commit hash.

    Tries `git rev-parse HEAD` first; if no git client is available, falls
    back to reading .git/HEAD (and the referenced ref file) directly. Returns
    "NOGIT" when the folder is not a Git repository. The current working
    directory is preserved.
    """
    # Get latest commit ID
    global make_root
    curDir = os.getcwd()
    commit_id = ""
    try:
        # Verify if Git repository
        gitpath = os.path.join(os.path.dirname(make_root), ".git")
        assert os.path.exists(gitpath)
        # Try to get commit ID through Git client
        os.chdir(make_root)
        commit_id = subprocess.check_output(["git", "rev-parse", "HEAD"])
        commit_id = str(commit_id, "utf-8")[:8]
    except FileNotFoundError:
        # Try to get commit ID from git files (subprocess failed - eg. no Git client)
        head_path = os.path.join(gitpath, "HEAD")
        if os.path.exists(head_path):
            with open(head_path, "r") as head_file:
                branch_path = head_file.readline().split(": ")
            # Commit ID is written in HEAD file directly when in detached state
            if len(branch_path) == 1:
                commit_id = branch_path[0]
            else:
                branch_path = branch_path[-1].strip()
                ref_path = os.path.join(gitpath, branch_path)
                if os.path.exists(ref_path):
                    with open(ref_path, "r") as ref_file:
                        commit_id = ref_file.readline()
        if commit_id != "":
            commit_id = commit_id.strip()[:8]
        else:
            raise
    except:
        # All other exceptions (eg. AssertionException)
        if commit_id == "":
            raise
    finally:
        pass
    if commit_id == "":
        print_error("Failed to determine commit ID - folder is not a Git repository.")
        commit_id = "NOGIT"
    os.chdir(curDir)
    print_yellow("COMMIT ID set to {}".format(commit_id))
    return commit_id
def version_stssl_pboprefix(module,commitID):
    """Write the build number (commitID) into the module's $PBOPREFIX$ file.

    An existing `version=` line is replaced; otherwise one is appended. If the
    file turns out to be empty, the backup created by backup_config() is
    restored instead. Returns True on success, False on error.
    """
    ### Update pboPrefix with the correct version stssl. Use commit_id as the build number.
    #This function will not handle any $PBOPREFIX$ backup or cleanup.
    global work_drive
    global prefix
    configpath = os.path.join(work_drive, prefix, module, "$PBOPREFIX$")
    try:
        f = open(configpath, "r")
        configtext = f.read()
        f.close()
        if configtext:
            if re.search(r"version=(.*?)$", configtext, re.DOTALL):
                if configtext:
                    # Replace the existing version line in place.
                    configtext = re.sub(r"version=(.*?)$", "version={}\n".format(commitID), configtext, flags=re.DOTALL)
                    f = open(configpath, "w")
                    f.write(configtext)
                    f.close()
                else:
                    os.remove(os.path.join(work_drive, prefix, module, "$PBOPREFIX$"))
                    os.rename(os.path.join(work_drive, prefix, module, "$PBOPREFIX$.backup"), os.path.join(work_drive, prefix, module, "$PBOPREFIX$"))
            else:
                if configtext:
                    #append version info
                    f = open(configpath, "a")
                    f.write("\nversion = {}".format(commitID))
                    f.close()
                else:
                    os.remove(os.path.join(work_drive, prefix, module, "$PBOPREFIX$"))
                    os.rename(os.path.join(work_drive, prefix, module, "$PBOPREFIX$.backup"), os.path.join(work_drive, prefix, module, "$PBOPREFIX$"))
    except Exception as e:
        print_error("Failed to include build number: {}".format(e))
        return False
    return True
###############################################################################
def main(argv):
"""Build an Arma addon suite in a directory from rules in a make.cfg file."""
print_blue("\nmake.py for Arma, modified for Advanced Combat Environment v{}".format(__version__))
global project_version
global arma3tools_path
global work_drive
global module_root
global make_root
global release_dir
global module_root_parent
global optionals_root
global key_name
global key
global dssignfile
global prefix
global pbo_name_prefix
global ciBuild
global missingFiles
if sys.platform != "win32":
print_error("Non-Windows platform (Cygwin?). Please re-run from cmd.")
sys.exit(1)
reg = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
try:
k = winreg.OpenKey(reg, r"Software\bohemia interactive\arma 3 tools")
arma3tools_path = winreg.QueryValueEx(k, "path")[0]
winreg.CloseKey(k)
except:
raise Exception("BadTools","Arma 3 Tools are not installed correctly or the P: drive needs to be created.")
# Default behaviors
test = False # Copy to Arma 3 directory?
arg_modules = False # Only build modules on command line?
use_pboproject = True # Default to pboProject build tool
make_target = "DEFAULT" # Which section in make.cfg to use for the build
new_key = True # Make a new key and use it to sign?
quiet = False # Suppress output from build tool?
# Parse arguments
if "help" in argv or "-h" in argv or "--help" in argv:
print ("""
make.py [help] [test] [force] [key <name>] [target <name>] [release <version>]
[module name] [module name] [...]
test -- Copy result to Arma 3.
release <version> -- Make archive with <version>.
force -- Ignore cache and build all.
target <name> -- Use rules in make.cfg under heading [<name>] rather than
default [Make]
key <name> -- Use key in working directory with <name> to sign. If it does not
exist, create key.
quiet -- Suppress command line output from build tool.
If module names are specified, only those modules will be built.
Examples:
make.py force test
Build all modules (ignoring cache) and copy the mod folder to the Arma 3
directory.
make.py mymodule_gun
Only build the module named 'mymodule_gun'.
make.py force key MyNewKey release 1.0
Build all modules (ignoring cache), sign them with NewKey, and pack them
into a zip file for release with version 1.0.
If a file called $NOBIN$ is found in the module directory, that module will not be binarized.
See the make.cfg file for additional build options.
""")
sys.exit(0)
if "force" in argv:
argv.remove("force")
force_build = True
else:
force_build = False
if "test" in argv:
test = True
argv.remove("test")
if "release" in argv:
make_release_zip = True
argv.remove("release")
else:
make_release_zip = False
if "target" in argv:
make_target = argv[argv.index("target") + 1]
argv.remove("target")
argv.remove(make_target)
force_build = True
if "key" in argv:
new_key = True
key_name = argv[argv.index("key") + 1]
argv.remove("key")
argv.remove(key_name)
if "quiet" in argv:
quiet = True
argv.remove("quiet")
if "version" in argv:
argv.remove("version")
version_update = True
else:
version_update = False
version_increments = []
if "increment_build" in argv:
argv.remove("increment_build")
version_increments.append("build")
if "increment_patch" in argv:
argv.remove("increment_patch")
version_increments.append("patch")
if "increment_minor" in argv:
argv.remove("increment_minor")
version_increments.append("minor")
if "increment_major" in argv:
argv.remove("increment_major")
version_increments.append("major")
if "ci" in argv:
argv.remove("ci")
ciBuild = True
# Get the directory the make script is in.
make_root = os.path.dirname(os.path.realpath(__file__))
make_root_parent = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
os.chdir(make_root)
cfg = configparser.ConfigParser();
try:
cfg.read(os.path.join(make_root, "make.cfg"))
# Project name (with @ symbol)
project = cfg.get(make_target, "project", fallback="@"+os.path.basename(os.getcwd()))
# BI Tools work drive on Windows
work_drive = cfg.get(make_target, "work_drive", fallback="P:\\")
# Private key path
key = cfg.get(make_target, "key", fallback=None)
# Private key creation directory
private_key_path = cfg.get(make_target, "private_key_path", fallback=os.path.join(work_drive, "private_keys"))
# Project prefix (folder path)
prefix = cfg.get(make_target, "prefix", fallback="")
# Release archive prefix
zipPrefix = cfg.get(make_target, "zipPrefix", fallback=project.lstrip("@").lower())
# Should we autodetect modules on a complete build?
module_autodetect = cfg.getboolean(make_target, "module_autodetect", fallback=True)
# Manual list of modules to build for a complete build
modules = cfg.get(make_target, "modules", fallback=None)
# Parse it out
if modules:
modules = [x.strip() for x in modules.split(',')]
else:
modules = []
# List of directories to ignore when detecting
ignore = [x.strip() for x in cfg.get(make_target, "ignore", fallback="release").split(',')]
# Which build tool should we use?
build_tool = cfg.get(make_target, "build_tool", fallback="addonbuilder").lower()
# Release/build directory, relative to script dir
release_dir = cfg.get(make_target, "release_dir", fallback="release")
#Directory to copy the final built PBO's for a test run.
test_dir = cfg.get(make_target, "test_dir", fallback=os.path.join(os.environ["USERPROFILE"],r"documents\Arma 3"))
# Project PBO file prefix (files are renamed to prefix_name.pbo)
pbo_name_prefix = cfg.get(make_target, "pbo_name_prefix", fallback=None)
# Project module Root
module_root_parent = os.path.abspath(os.path.join(os.path.join(work_drive, prefix), os.pardir))
module_root = cfg.get(make_target, "module_root", fallback=os.path.join(make_root_parent, "addons"))
optionals_root = os.path.join(module_root_parent, "optionals")
extensions_root = os.path.join(module_root_parent, "extensions")
if (os.path.isdir(module_root)):
os.chdir(module_root)
else:
print_error ("Directory {} does not exist.".format(module_root))
sys.exit(1)
commit_id = get_commit_ID()
get_project_version(version_increments)
key_name = versionStssl = get_private_keyname(commit_id)
print_green ("module_root: {}".format(module_root))
if (os.path.isdir(optionals_root)):
print_green ("optionals_root: {}".format(optionals_root))
else:
print("optionals_root does not exist: {}".format(optionals_root))
print_green ("release_dir: {}".format(release_dir))
except:
raise
print_error("Could not parse make.cfg.")
sys.exit(1)
# See if we have been given specific modules to build from command line.
if len(argv) > 1 and not make_release_zip:
arg_modules = True
modules = [a for a in argv[1:] if a[0] != "-"]
# Find the tools we need.
try:
tools = find_bi_tools(work_drive)
addonbuilder = tools[0]
dssignfile = tools[1]
dscreatekey = tools[2]
cfgconvert = tools[3]
except:
print_error("Arma 3 Tools are not installed correctly or the P: drive has not been created.")
sys.exit(1)
if build_tool == "pboproject":
try:
depbo_tools = find_depbo_tools("HKLM")
if depbo_tools == -1:
depbo_tools = find_depbo_tools("HKCU")
pboproject = depbo_tools[0]
rapifyTool = depbo_tools[1]
makepboTool = depbo_tools[2]
except:
raise
print_error("Could not find dePBO tools. Download the needed tools from: https://dev.withsix.com/projects/mikero-pbodll/files")
sys.exit(1)
# Try to open and deserialize build cache file.
try:
cache = {}
with open(os.path.join(make_root, "make.cache"), 'r') as f:
cache_raw = f.read()
cache = json.loads(cache_raw)
except:
print ("No cache found.")
cache = {}
# Check the build version (from main) with cached version - forces a full rebuild when version changes
cacheVersion = "None";
if 'cacheVersion' in cache:
cacheVersion = cache['cacheVersion']
if (project_version != cacheVersion):
cache = {}
print("Reseting Cache {0} to New Version {1}".format(cacheVersion, project_version))
cache['cacheVersion'] = project_version
if not os.path.isdir(os.path.join(release_dir, project, "addons")):
try:
os.makedirs(os.path.join(release_dir, project, "addons"))
except:
print_error("Cannot create release directory")
raise
if not os.path.isdir(os.path.join(release_dir, project, "keys")):
try:
os.makedirs(os.path.join(release_dir, project, "keys"))
except:
print_error("Cannot create release directory")
raise
failedBuilds = []
missingFiles = []
# Update version stssl in all files that contain it
# Update version only for release if full update not requested (backup and restore files)
print_blue("\nChecking for obsolete version numbers...")
if not version_update:
stash_version_files_for_building()
else:
# Set version
set_version_in_files();
print("Version in files has been changed, make sure you commit and push the updates!")
try:
# Temporarily copy optionals_root for building. They will be removed later.
if (os.path.isdir(optionals_root)):
optionals_modules = []
optional_files = []
copy_optionals_for_building(optionals_modules,optional_files)
# Get list of subdirs in make root.
dirs = next(os.walk(module_root))[1]
# Autodetect what directories to build.
if module_autodetect and not arg_modules:
modules = []
for path in dirs:
# Any dir that has a config.cpp in its root is an addon to build.
config_path = os.path.join(path, 'config.cpp')
if os.path.isfile(config_path) and not path in ignore:
modules.append(path)
# Make the key specified from command line if necessary.
if new_key:
if not os.path.isfile(os.path.join(private_key_path, key_name + ".biprivatekey")):
print_yellow("\nRequested key does not exist.")
try:
os.makedirs(private_key_path)
except:
pass
curDir = os.getcwd()
os.chdir(private_key_path)
ret = subprocess.call([dscreatekey, key_name]) # Created in make_root
os.chdir(curDir)
if ret == 0:
print_green("Created: {}".format(os.path.join(private_key_path, key_name + ".biprivatekey")))
print("Removing any old signature keys...")
purge(os.path.join(module_root, release_dir, project, "addons"), "^.*\.bisign$","*.bisign")
purge(os.path.join(module_root, release_dir, project, "optionals"), "^.*\.bisign$","*.bisign")
purge(os.path.join(module_root, release_dir, project, "keys"), "^.*\.bikey$","*.bikey")
else:
print_error("Failed to create key!")
else:
print_green("\nNOTE: Using key {}".format(os.path.join(private_key_path, key_name + ".biprivatekey")))
try:
print("Copying public key to release directory.")
try:
os.makedirs(os.path.join(module_root, release_dir, project, "keys"))
except:
pass
# Use biKeyNameAbrev to attempt to minimize problems from this BI Bug REFERENCE: http://feedback.arma3.com/view.php?id=22133
biKeyNameAbrev = key_name.split("-")[0]
shutil.copyfile(os.path.join(private_key_path, key_name + ".bikey"), os.path.join(module_root, release_dir, project, "keys", "{}.bikey".format(biKeyNameAbrev)))
except:
print_error("Could not copy key to release directory.")
raise
key = os.path.join(private_key_path, "{}.biprivatekey".format(key_name))
# Remove any obsolete files.
print_blue("\nChecking for obsolete files...")
obsolete_check_path = os.path.join(module_root, release_dir, project,"addons")
for file in os.listdir(obsolete_check_path):
if (file.endswith(".pbo") and os.path.isfile(os.path.join(obsolete_check_path,file))):
if check_for_obsolete_pbos(module_root, file):
fileName = os.path.splitext(file)[0]
print_yellow("Removing obsolete pbo => {}".format(file))
purge(obsolete_check_path, "{}\..".format(fileName), "{}.*".format(fileName))
obsolete_check_path = os.path.join(module_root, release_dir, project)
for file in os.listdir(obsolete_check_path):
if (file.endswith(".dll") and os.path.isfile(os.path.join(obsolete_check_path,file))):
if not os.path.exists(os.path.join(module_root_parent, file)):
print_yellow("Removing obsolete dll => {}".format(file))
try:
os.remove(os.path.join(obsolete_check_path,file))
except:
print_error("\nFailed to delete {}".format(os.path.join(obsolete_check_path,file)))
pass
# For each module, prep files and then build.
print_blue("\nBuilding...")
for module in modules:
print_green("\nMaking {}".format(module + "-"*max(1, (60-len(module)))))
missing = False
sigMissing = False
# Cache check
if module in cache:
old_sha = cache[module]
else:
old_sha = ""
# Hash the module
new_sha = get_directory_hash(os.path.join(module_root, module))
# Is the pbo or sig file missing?
missing = not os.path.isfile(os.path.join(release_dir, project, "addons", "{}{}.pbo".format(pbo_name_prefix,module)))
sigFile = "{}{}.pbo.{}.bisign".format(pbo_name_prefix,module,key_name)
sigMissing = not os.path.isfile(os.path.join(release_dir, project, "addons", sigFile))
if missing:
print_yellow("Missing PBO file {}{}.pbo. Building...".format(pbo_name_prefix,module))
# Check if it needs rebuilt
# print ("Hash:", new_sha)
if old_sha == new_sha and not missing:
if not force_build:
print("Module has not changed.")
if sigMissing:
if key:
print("Missing Signature key {}".format(sigFile))
build_signature_file(os.path.join(module_root, release_dir, project, "addons", "{}{}.pbo".format(pbo_name_prefix,module)))
# Skip everything else
continue
# Only do this if the project isn't stored directly on the work drive.
# Split the path at the drive name and see if they are on the same drive (usually P:)
if os.path.splitdrive(module_root)[0] != os.path.splitdrive(work_drive)[0]:
try:
# Remove old work drive version (ignore errors)
shutil.rmtree(os.path.join(work_drive, prefix, module), True)
# Copy module to the work drive
shutil.copytree(module, os.path.join(work_drive, prefix, module))
except:
raise
print_error("ERROR: Could not copy module to work drive. Does the module exist?")
input("Press Enter to continue...")
print("Resuming build...")
continue
#else:
#print("WARNING: Module is stored on work drive ({}).".format(work_drive))
try:
# Remove the old pbo, key, and log
old = os.path.join(module_root, release_dir, project, "addons", "{}{}".format(pbo_name_prefix,module)) + "*"
files = glob.glob(old)
for f in files:
os.remove(f)
if pbo_name_prefix:
old = os.path.join(module_root, release_dir, project, "addons", "{}{}".format(pbo_name_prefix,module)) + "*"
files = glob.glob(old)
for f in files:
os.remove(f)
except:
raise
print_error("ERROR: Could not copy module to work drive. Does the module exist?")
input("Press Enter to continue...")
print("Resuming build...")
continue
# Build the module into a pbo
print_blue("Building: {}".format(os.path.join(work_drive, prefix, module)))
print_blue("Destination: {}".format(os.path.join(module_root, release_dir, project, "addons")))
# Make destination folder (if needed)
try:
os.makedirs(os.path.join(module_root, release_dir, project, "addons"))
except:
pass
# Run build tool
build_successful = False
if build_tool == "pboproject":
try:
nobinFilePath = os.path.join(work_drive, prefix, module, "$NOBIN$")
backup_config(module)
version_stssl_pboprefix(module,commit_id)
if os.path.isfile(nobinFilePath):
print_green("$NOBIN$ Found. Proceeding with non-binarizing!")
cmd = [makepboTool, "-P","-A","-X=*.backup", os.path.join(work_drive, prefix, module),os.path.join(module_root, release_dir, project,"addons")]
else:
cmd = [pboproject, "-P", os.path.join(work_drive, prefix, module), "+Engine=Arma3", "-S", "+Noisy", "+Clean", "+Mod="+os.path.join(module_root, release_dir, project), "-Key"]
color("grey")
if quiet:
devnull = open(os.devnull, 'w')
ret = subprocess.call(cmd, stdout=devnull)
devnull.close()
else:
ret = subprocess.call(cmd)
color("reset")
if ret == 0:
print_green("pboProject return code == {}".format(str(ret)))
# Prettyprefix rename the PBO if requested.
if pbo_name_prefix:
try:
os.rename(os.path.join(module_root, release_dir, project, "addons", "{}.pbo".format(module)), os.path.join(module_root, release_dir, project, "addons", "{}{}.pbo".format(pbo_name_prefix,module)))
except:
raise
print_error("Could not rename built PBO with prefix.")
# Sign result
if (key and not "{}{}.pbo".format(pbo_name_prefix,module) in signature_blacklist):
print("Signing with {}.".format(key))
if pbo_name_prefix:
ret = subprocess.call([dssignfile, key, os.path.join(module_root, release_dir, project, "addons", "{}{}.pbo".format(pbo_name_prefix,module))])
else:
ret = subprocess.call([dssignfile, key, os.path.join(module_root, release_dir, project, "addons", "{}.pbo".format(module))])
if ret == 0:
build_successful = True
else:
build_successful = True
if not build_successful:
print_error("pboProject return code == {}".format(str(ret)))
print_error("Module not successfully built/signed. Check your {}temp\{}_packing.log for more info.".format(work_drive,module))
print ("Resuming build...")
failedBuilds.append("{}".format(module))
continue
# Back to the root
os.chdir(module_root)
except:
raise
print_error("Could not run Addon Builder.")
input("Press Enter to continue...")
print ("Resuming build...")
continue
finally:
addon_restore(os.path.join(work_drive, prefix, module))
elif build_tool== "addonbuilder":
# Detect $NOBIN$ and do not binarize if found.
if os.path.isfile(os.path.join(work_drive, prefix, module, "$NOBIN$")):
do_binarize = False
print("$NOBIN$ file found in module, packing only.")
else:
do_binarize = True
try:
# Call AddonBuilder
os.chdir("P:\\")
cmd = [addonbuilder, os.path.join(work_drive, prefix, module), os.path.join(make_root, release_dir, project, "addons"), "-clear", "-project="+work_drive]
if not do_binarize:
cmd.append("-packonly")
if quiet:
previousDirectory = os.getcwd()
os.chdir(arma3tools_path)
devnull = open(os.devnull, 'w')
ret = subprocess.call(cmd, stdout=devnull)
devnull.close()
os.chdir(previousDirectory)
else:
previousDirectory = os.getcwd()
os.chdir(arma3tools_path)
print_error("Current directory - {}".format(os.getcwd()))
ret = subprocess.call(cmd)
os.chdir(previousDirectory)
print_error("Current directory - {}".format(os.getcwd()))
color("reset")
print_green("completed")
# Prettyprefix rename the PBO if requested.
if pbo_name_prefix:
try:
os.rename(os.path.join(make_root, release_dir, project, "addons", "{}.pbo".format(module)), os.path.join(make_root, release_dir, project, "addons", "{}{}.pbo".format(pbo_name_prefix,module)))
except:
raise
print_error("Could not rename built PBO with prefix.")
if ret == 0:
# Sign result
#print_yellow("Sig_fileName: ace_{}.pbo".format(module))
if (key and not "{}{}.pbo".format(pbo_name_prefix,module) in signature_blacklist) :
print("Signing with {}.".format(key))
if pbo_name_prefix:
ret = subprocess.call([dssignfile, key, os.path.join(make_root, release_dir, project, "addons","{}{}.pbo".format(pbo_name_prefix,module))])
else:
ret = subprocess.call([dssignfile, key, os.path.join(make_root, release_dir, project, "addons", "{}.pbo".format(module))])
if ret == 0:
build_successful = True
else:
build_successful = True
if not build_successful:
print_error("Module not successfully built. Check your {}temp\{}_packing.log for more info.".format(work_drive,module))
# Back to the root
os.chdir(make_root)
except:
raise
print_error("Could not run Addon Builder.")
input("Press Enter to continue...")
print ("Resuming build...")
continue
else:
print_error("Unknown build_tool {}!".format(build_tool))
# Update the hash for a successfully built module
if build_successful:
cache[module] = new_sha
except Exception as e:
print_yellow("Cancel or some error detected: {}".format(e))
finally:
copy_important_files(module_root_parent,os.path.join(release_dir, project))
if (os.path.isdir(optionals_root)):
cleanup_optionals(optionals_modules)
if not version_update:
restore_version_files()
# Done building all modules!
# Write out the cache state
cache_out = json.dumps(cache)
with open(os.path.join(make_root, "make.cache"), 'w') as f:
f.write(cache_out)
# Delete the pboproject temp files if building a release.
if make_release_zip and build_tool == "pboproject":
try:
shutil.rmtree(os.path.join(release_dir, project, "temp"), True)
except:
print_error("ERROR: Could not delete pboProject temp files.")
# Make release
if make_release_zip:
release_name = "{}_{}".format(zipPrefix, project_version.rsplit(".", 1)[0])
try:
# Delete all log files
for root, dirs, files in os.walk(os.path.join(release_dir, project, "addons")):
for currentFile in files:
if currentFile.lower().endswith("log"):
os.remove(os.path.join(root, currentFile))
# Remove all zip files from release folder to prevent zipping the zip
for file in os.listdir(release_dir):
if file.endswith(".zip"):
os.remove(os.path.join(release_dir, file))
# Create a zip with the contents of release folder in it
print_blue("\nMaking release: {}.zip ...".format(release_name))
print("Packing...")
release_zip = shutil.make_archive("{}".format(release_name), "zip", release_dir)
# Move release zip to release folder
shutil.copy(release_zip, release_dir)
os.remove(release_zip)
except:
raise
print_error("Could not make release.")
# Copy to Arma 3 folder for testing
if test:
print_blue("\nCopying to Arma 3.")
if sys.platform == "win32":
reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
try:
k = winreg.OpenKey(reg, r"SOFTWARE\Wow6432Node\Bohemia Interactive\Arma 3")
a3_path = winreg.EnumValue(k, 1)[1]
winreg.CloseKey(k)
except:
print_error("Could not find Arma 3's directory in the registry.")
else:
a3_path = cygwin_a3path
print_yellow("Path from the registry => {}".format(a3_path))
a3_path = test_dir
print_yellow("Copying build files to {}".format(a3_path))
if os.path.exists(a3_path):
try:
shutil.rmtree(os.path.join(a3_path, project), True)
shutil.copytree(os.path.join(module_root, release_dir, project), os.path.join(a3_path, project))
except:
print_error("Could not copy files. Is Arma 3 running?")
if len(failedBuilds) > 0 or len(missingFiles) > 0:
if len(failedBuilds) > 0:
print()
print_error("Build failed! {} PBOs failed!".format(len(failedBuilds)))
for failedBuild in failedBuilds:
print("- {} failed.".format(failedBuild))
if len(missingFiles) > 0:
missingFiles = set(missingFiles)
print()
print_error("Missing files! {} files not found!".format(len(missingFiles)))
for missingFile in missingFiles:
print("- {} failed.".format(missingFile))
sys.exit(1)
else:
print_green("\nCompleted with 0 errors.")
# Script entry point: run the full build and report total elapsed time.
if __name__ == "__main__":
    start_time = timeit.default_timer()
    main(sys.argv)
    # Fract_Sec splits elapsed seconds into days/hours/minutes/seconds;
    # the day component `d` is not used in the printed message below.
    d,h,m,s = Fract_Sec(timeit.default_timer() - start_time)
    print("\nTotal Program time elapsed: {0:2}h {1:2}m {2:4.5f}s".format(h,m,s))
    if ciBuild:
        # CI builds are non-interactive: exit immediately with success.
        sys.exit(0)
    input("Press Enter to continue...")
| 39.119681 | 227 | 0.57939 |
bf61ccf5129f7b03dcf5240e72a4c45fd13acf7a | 4,346 | py | Python | dataloader/listflowfile.py | mli0603/PSMNet | 52e36b09529225ffd38a1ceef86976087350b987 | [
"MIT"
] | 1 | 2021-09-30T06:41:10.000Z | 2021-09-30T06:41:10.000Z | dataloader/listflowfile.py | mli0603/PSMNet | 52e36b09529225ffd38a1ceef86976087350b987 | [
"MIT"
] | null | null | null | dataloader/listflowfile.py | mli0603/PSMNet | 52e36b09529225ffd38a1ceef86976087350b987 | [
"MIT"
] | null | null | null | import torch.utils.data as data
from PIL import Image
import os
import os.path
IMG_EXTENSIONS = [
    '.jpg', '.JPG', '.jpeg', '.JPEG',
    '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
]


def is_image_file(filename):
    """Return True if *filename* ends with a recognised image extension."""
    return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)


def _collect_split(flying_path, flying_disp, split):
    """Gather sample paths for one FlyingThings3D split.

    :param flying_path: root of the 'frame_finalpass' image tree (no trailing slash)
    :param flying_disp: root of the 'disparity' tree (no trailing slash)
    :param split: 'TRAIN' or 'TEST'
    :return: (left_images, right_images, left_disparities) lists of path strings

    Paths are built by plain string concatenation to preserve the historical
    on-disk layout: <root>/<split>/<A|B|C>/<scene>/<left|right>/<frame>.
    """
    left_imgs = []
    right_imgs = []
    left_disps = []
    split_dir = flying_path + '/' + split + '/'
    for ss in ['A', 'B', 'C']:
        for ff in os.listdir(split_dir + ss):
            scene_left = split_dir + ss + '/' + ff + '/left/'
            for im in os.listdir(scene_left):
                if is_image_file(scene_left + im):
                    left_imgs.append(scene_left + im)
                    left_disps.append(flying_disp + '/' + split + '/' + ss + '/' + ff
                                      + '/left/' + im.split(".")[0] + '.pfm')
                # Right-eye paths mirror the left-eye listing; their existence
                # on disk is not verified (matches the original behaviour).
                if is_image_file(split_dir + ss + '/' + ff + '/right/' + im):
                    right_imgs.append(split_dir + ss + '/' + ff + '/right/' + im)
    return left_imgs, right_imgs, left_disps


def dataloader(filepath):
    """List SceneFlow (FlyingThings3D) image, disparity and occlusion files.

    :param filepath: dataset root; must end with a path separator because
        sub-directory names are appended by string concatenation.
    :return: 8-tuple (train_left, train_right, train_disp, [],
        test_left, test_right, test_disp, occlusion_files)

    The empty 4th element is kept for backward compatibility with callers
    that unpack a (now unused) slot at that position.

    Dead commented-out code for the monkaa/driving subsets was removed; see
    version control history if those subsets need to be restored.
    """
    classes = [d for d in os.listdir(filepath) if os.path.isdir(os.path.join(filepath, d))]
    image = [img for img in classes if img.find('frame_finalpass') > -1]
    disp = [dsp for dsp in classes if dsp.find('disparity') > -1]

    # Exact-name match selects the canonical image/disparity roots.
    flying_path = filepath + [x for x in image if x == 'frame_finalpass'][0]
    flying_disp = filepath + [x for x in disp if x == 'disparity'][0]

    all_left_img, all_right_img, all_left_disp = _collect_split(flying_path, flying_disp, 'TRAIN')
    test_left_img, test_right_img, test_left_disp = _collect_split(flying_path, flying_disp, 'TEST')

    # Occlusion maps are only provided for the TEST split's left views.
    occ_path = os.path.join(filepath, 'occlusion', 'TEST', 'left')
    occ_data = [os.path.join(occ_path, occ) for occ in os.listdir(occ_path)]

    return all_left_img, all_right_img, all_left_disp, [], test_left_img, test_right_img, test_left_disp, occ_data
| 41.390476 | 116 | 0.518178 |
877c0f889eece563290f686bfc114fca996e105e | 521 | py | Python | data/scripts/templates/object/tangible/component/bio/shared_bio_component_clothing_casual_training_2.py | obi-two/GameServer | 7d37024e2291a97d49522610cd8f1dbe5666afc2 | [
"MIT"
] | 20 | 2015-02-23T15:11:56.000Z | 2022-03-18T20:56:48.000Z | data/scripts/templates/object/tangible/component/bio/shared_bio_component_clothing_casual_training_2.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | null | null | null | data/scripts/templates/object/tangible/component/bio/shared_bio_component_clothing_casual_training_2.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | 20 | 2015-04-04T16:35:59.000Z | 2022-03-24T14:54:37.000Z | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the casual-training bio clothing component Tangible template.

    :param kernel: engine kernel supplied by the template loader (unused here,
        but required by the loader's calling convention — TODO confirm).
    :return: the configured Tangible template object.

    NOTE: this file is autogenerated; the trailing dataset residue that had
    been fused onto the return line (a syntax error) was removed.
    """
    result = Tangible()
    result.template = "object/tangible/component/bio/shared_bio_component_clothing_casual_training_2.iff"
    result.attribute_template_id = -1
    result.stfName("craft_bio_components_n","bio_component_clothing_casual_training_2")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
13f92e2190439c5da02cfc0647203952d7bdfd3d | 3,522 | py | Python | src/ssp/spark/streaming/analytics/sentiment_analysis.py | gyan42/spark-streaming-playground | 147ef9cbc31b7aed242663dee36143ebf0e8043f | [
"Apache-2.0"
] | 10 | 2020-03-12T11:51:46.000Z | 2022-03-24T04:56:05.000Z | src/ssp/spark/streaming/analytics/sentiment_analysis.py | gyan42/spark-streaming-playground | 147ef9cbc31b7aed242663dee36143ebf0e8043f | [
"Apache-2.0"
] | 12 | 2020-04-23T07:28:14.000Z | 2022-03-12T00:20:24.000Z | src/ssp/spark/streaming/analytics/sentiment_analysis.py | gyan42/spark-streaming-playground | 147ef9cbc31b7aed242663dee36143ebf0e8043f | [
"Apache-2.0"
] | 1 | 2020-04-20T14:48:38.000Z | 2020-04-20T14:48:38.000Z | #!/usr/bin/env python
__author__ = "Mageswaran Dhandapani"
__copyright__ = "Copyright 2020, The Spark Structured Playground Project"
__credits__ = []
__license__ = "Apache License"
__version__ = "2.0"
__maintainer__ = "Mageswaran Dhandapani"
__email__ = "mageswaran1989@gmail.com"
__status__ = "Education Purpose"
import gin
from ssp.spark.streaming.common.twitter_streamer_base import TwitterStreamerBase
from ssp.spark.streaming.ml import SentimentSparkModel
# from ssp.customudf.textblob_sentiment import textblob_sentiment_analysis_udf
@gin.configurable
class SentimentAnalysis(TwitterStreamerBase):
    """
    Uses the :ssp.spark.streaming.ml.SentimentSparkModel to classify the stream text

    :param kafka_bootstrap_servers: Kafka bootstrap servers as host:port
    :param kafka_topic: Kafka topic to listen for
    :param checkpoint_dir: Spark Streaming checkpoint directory
    :param parquet_dir: Parquet directory to read the streamed data
    :param warehouse_location: Spark warehouse location
    :param spark_master: Spark Master URL
    :param is_live_stream: (bool) Use live stream or parquet directory
    :param processing_time: (str) Spark Streaming processing trigger time delay, e.g. '5 seconds'
    """
    def __init__(self,
                 kafka_bootstrap_servers="localhost:9092",
                 kafka_topic="ai_tweets_topic",
                 checkpoint_dir="hdfs://localhost:9000/tmp/ssp/data/lake/checkpoint/",
                 parquet_dir="hdfs://localhost:9000/tmp/ssp/data/lake/silver/",
                 warehouse_location="/opt/spark-warehouse/",
                 spark_master="spark://IMCHLT276:7077",
                 is_live_stream=True,
                 processing_time='5 seconds'):
        TwitterStreamerBase.__init__(self,
                                     spark_master=spark_master,
                                     checkpoint_dir=checkpoint_dir,
                                     warehouse_location=warehouse_location,
                                     kafka_bootstrap_servers=kafka_bootstrap_servers,
                                     kafka_topic=kafka_topic,
                                     processing_time=processing_time)
        self._spark_master = spark_master
        self._checkpoint_dir = checkpoint_dir
        self._parquet_dir = parquet_dir
        self._warehouse_location = warehouse_location
        # Spark session and model are created eagerly at construction time.
        self.spark = self._get_spark()
        self._model = SentimentSparkModel(spark=self.spark)
        self._is_live_stream = is_live_stream

    def online_process(self):
        """Return the live (Kafka-backed) tweet stream from the base class."""
        tweet_stream = self._get_source_stream()
        return tweet_stream

    def hdfs_process(self):
        """Return a streaming DataFrame over an existing parquet directory.

        The schema is inferred from a one-off batch read of the same directory.

        NOTE(review): ``self._bronze_parquet_dir`` is never assigned in this
        class (``__init__`` stores ``self._parquet_dir``); unless the base
        class sets it, calling this will raise AttributeError — confirm.
        """
        userSchema = self.spark.read.parquet(self._bronze_parquet_dir).schema
        tweet_stream = self.spark.readStream. \
            schema(userSchema).\
            format("parquet"). \
            option("ignoreChanges", "true"). \
            option("failOnDataLoss", "false"). \
            load(self._bronze_parquet_dir)
        return tweet_stream

    def process(self):
        """Run sentiment prediction over the chosen (live or HDFS) stream.

        Blocks forever via awaitTermination(); each micro-batch is scored and
        printed to stdout rather than persisted.
        """
        if self._is_live_stream:
            tweet_stream = self.online_process()
        else:
            tweet_stream = self.hdfs_process()

        def foreach_batch_function(df, epoch_id):
            # Transform and write batchDF
            df = self._model.predict(df).select(["text", "prediction"])
            # df = df.withColumn("sentiment", textblob_sentiment_analysis_udf("text"))
            df.show(50, False)

        tweet_stream.writeStream.foreachBatch(foreach_batch_function).start().awaitTermination()
| 39.573034 | 96 | 0.657297 |
daa861ca65509966e7e9b4c62676f02a9be6c77c | 977 | py | Python | FileManager/toolbar.py | kartikey-sahu/File-Manager | ad8c08b4d987a131f871d66b8fb7baef23d91ac9 | [
"MIT"
] | null | null | null | FileManager/toolbar.py | kartikey-sahu/File-Manager | ad8c08b4d987a131f871d66b8fb7baef23d91ac9 | [
"MIT"
] | null | null | null | FileManager/toolbar.py | kartikey-sahu/File-Manager | ad8c08b4d987a131f871d66b8fb7baef23d91ac9 | [
"MIT"
] | null | null | null | from tkinter import Frame
import ui.images as img
from ui.button import Button
class Toolbar(Frame):
    """Toolbar strip exposing the file-operation buttons as attributes."""

    def __init__(self, master, **kwargs):
        """Create the toolbar inside *master* and populate its buttons."""
        super().__init__(master, **kwargs)
        self.__draw_tools()

    def __draw_tools(self):
        """Instantiate every tool button and lay them out in a single row."""
        # "New Folder" is created first and without an explicit state; every
        # other tool starts disabled, exactly as before.
        self.newfolder = Button(self, text='New Folder', image=img.new_folder)
        for attr, label, icon in (
            ('cut', 'Cut', img.cut),
            ('copy', 'Copy', img.copy),
            ('paste', 'Paste', img.paste),
            ('delete', 'Delete', img.delete),
            ('rename', 'Rename', img.rename),
        ):
            setattr(self, attr, Button(self, state='disabled', text=label, image=icon))
        # Normalise background/padding and grid children left-to-right in
        # creation order.
        for column, child in enumerate(self.winfo_children()):
            child.config(bg=self.cget('bg'), padx=5)
            child.grid(row=0, column=column)
4593a09ed083a9a40a6eefcff551e3463346bd4a | 26,833 | py | Python | src/oci/management_agent/models/management_agent.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/management_agent/models/management_agent.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/management_agent/models/management_agent.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ManagementAgent(object):
"""
The details of the Management Agent inventory including the associated plugins.
"""
#: A constant which can be used with the platform_type property of a ManagementAgent.
#: This constant has a value of "LINUX"
PLATFORM_TYPE_LINUX = "LINUX"
#: A constant which can be used with the platform_type property of a ManagementAgent.
#: This constant has a value of "WINDOWS"
PLATFORM_TYPE_WINDOWS = "WINDOWS"
#: A constant which can be used with the availability_status property of a ManagementAgent.
#: This constant has a value of "ACTIVE"
AVAILABILITY_STATUS_ACTIVE = "ACTIVE"
#: A constant which can be used with the availability_status property of a ManagementAgent.
#: This constant has a value of "SILENT"
AVAILABILITY_STATUS_SILENT = "SILENT"
#: A constant which can be used with the availability_status property of a ManagementAgent.
#: This constant has a value of "NOT_AVAILABLE"
AVAILABILITY_STATUS_NOT_AVAILABLE = "NOT_AVAILABLE"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "CREATING"
LIFECYCLE_STATE_CREATING = "CREATING"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "UPDATING"
LIFECYCLE_STATE_UPDATING = "UPDATING"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "ACTIVE"
LIFECYCLE_STATE_ACTIVE = "ACTIVE"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "INACTIVE"
LIFECYCLE_STATE_INACTIVE = "INACTIVE"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "TERMINATED"
LIFECYCLE_STATE_TERMINATED = "TERMINATED"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "DELETING"
LIFECYCLE_STATE_DELETING = "DELETING"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "DELETED"
LIFECYCLE_STATE_DELETED = "DELETED"
#: A constant which can be used with the lifecycle_state property of a ManagementAgent.
#: This constant has a value of "FAILED"
LIFECYCLE_STATE_FAILED = "FAILED"
    def __init__(self, **kwargs):
        """
        Initializes a new ManagementAgent object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param id:
            The value to assign to the id property of this ManagementAgent.
        :type id: str

        :param install_key_id:
            The value to assign to the install_key_id property of this ManagementAgent.
        :type install_key_id: str

        :param display_name:
            The value to assign to the display_name property of this ManagementAgent.
        :type display_name: str

        :param platform_type:
            The value to assign to the platform_type property of this ManagementAgent.
            Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'.
            Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type platform_type: str

        :param platform_name:
            The value to assign to the platform_name property of this ManagementAgent.
        :type platform_name: str

        :param platform_version:
            The value to assign to the platform_version property of this ManagementAgent.
        :type platform_version: str

        :param version:
            The value to assign to the version property of this ManagementAgent.
        :type version: str

        :param host:
            The value to assign to the host property of this ManagementAgent.
        :type host: str

        :param host_id:
            The value to assign to the host_id property of this ManagementAgent.
        :type host_id: str

        :param install_path:
            The value to assign to the install_path property of this ManagementAgent.
        :type install_path: str

        :param plugin_list:
            The value to assign to the plugin_list property of this ManagementAgent.
        :type plugin_list: list[oci.management_agent.models.ManagementAgentPluginDetails]

        :param compartment_id:
            The value to assign to the compartment_id property of this ManagementAgent.
        :type compartment_id: str

        :param is_agent_auto_upgradable:
            The value to assign to the is_agent_auto_upgradable property of this ManagementAgent.
        :type is_agent_auto_upgradable: bool

        :param time_created:
            The value to assign to the time_created property of this ManagementAgent.
        :type time_created: datetime

        :param time_updated:
            The value to assign to the time_updated property of this ManagementAgent.
        :type time_updated: datetime

        :param time_last_heartbeat:
            The value to assign to the time_last_heartbeat property of this ManagementAgent.
        :type time_last_heartbeat: datetime

        :param availability_status:
            The value to assign to the availability_status property of this ManagementAgent.
            Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'.
            Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type availability_status: str

        :param lifecycle_state:
            The value to assign to the lifecycle_state property of this ManagementAgent.
            Allowed values for this property are: "CREATING", "UPDATING", "ACTIVE", "INACTIVE", "TERMINATED", "DELETING", "DELETED", "FAILED", 'UNKNOWN_ENUM_VALUE'.
            Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type lifecycle_state: str

        :param lifecycle_details:
            The value to assign to the lifecycle_details property of this ManagementAgent.
        :type lifecycle_details: str

        :param is_customer_deployed:
            The value to assign to the is_customer_deployed property of this ManagementAgent.
        :type is_customer_deployed: bool

        :param freeform_tags:
            The value to assign to the freeform_tags property of this ManagementAgent.
        :type freeform_tags: dict(str, str)

        :param defined_tags:
            The value to assign to the defined_tags property of this ManagementAgent.
        :type defined_tags: dict(str, dict(str, object))

        """
        # Maps attribute name -> Swagger/OpenAPI type string, used by the
        # SDK's (de)serialization machinery.
        self.swagger_types = {
            'id': 'str',
            'install_key_id': 'str',
            'display_name': 'str',
            'platform_type': 'str',
            'platform_name': 'str',
            'platform_version': 'str',
            'version': 'str',
            'host': 'str',
            'host_id': 'str',
            'install_path': 'str',
            'plugin_list': 'list[ManagementAgentPluginDetails]',
            'compartment_id': 'str',
            'is_agent_auto_upgradable': 'bool',
            'time_created': 'datetime',
            'time_updated': 'datetime',
            'time_last_heartbeat': 'datetime',
            'availability_status': 'str',
            'lifecycle_state': 'str',
            'lifecycle_details': 'str',
            'is_customer_deployed': 'bool',
            'freeform_tags': 'dict(str, str)',
            'defined_tags': 'dict(str, dict(str, object))'
        }

        # Maps Python attribute name -> JSON field name on the wire.
        self.attribute_map = {
            'id': 'id',
            'install_key_id': 'installKeyId',
            'display_name': 'displayName',
            'platform_type': 'platformType',
            'platform_name': 'platformName',
            'platform_version': 'platformVersion',
            'version': 'version',
            'host': 'host',
            'host_id': 'hostId',
            'install_path': 'installPath',
            'plugin_list': 'pluginList',
            'compartment_id': 'compartmentId',
            'is_agent_auto_upgradable': 'isAgentAutoUpgradable',
            'time_created': 'timeCreated',
            'time_updated': 'timeUpdated',
            'time_last_heartbeat': 'timeLastHeartbeat',
            'availability_status': 'availabilityStatus',
            'lifecycle_state': 'lifecycleState',
            'lifecycle_details': 'lifecycleDetails',
            'is_customer_deployed': 'isCustomerDeployed',
            'freeform_tags': 'freeformTags',
            'defined_tags': 'definedTags'
        }

        # Instance state; defaults to None.  The @init_model_state_from_kwargs
        # class decorator presumably populates these from **kwargs — confirm
        # in oci.decorators.
        self._id = None
        self._install_key_id = None
        self._display_name = None
        self._platform_type = None
        self._platform_name = None
        self._platform_version = None
        self._version = None
        self._host = None
        self._host_id = None
        self._install_path = None
        self._plugin_list = None
        self._compartment_id = None
        self._is_agent_auto_upgradable = None
        self._time_created = None
        self._time_updated = None
        self._time_last_heartbeat = None
        self._availability_status = None
        self._lifecycle_state = None
        self._lifecycle_details = None
        self._is_customer_deployed = None
        self._freeform_tags = None
        self._defined_tags = None
    @property
    def id(self) -> str:
        """
        **[Required]** Gets the id of this ManagementAgent.
        agent identifier

        :return: The id of this ManagementAgent.
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id: str):
        """
        Sets the id of this ManagementAgent.
        agent identifier

        :param id: The id of this ManagementAgent.
        :type: str
        """
        self._id = id

    @property
    def install_key_id(self) -> str:
        """
        Gets the install_key_id of this ManagementAgent.
        agent install key identifier

        :return: The install_key_id of this ManagementAgent.
        :rtype: str
        """
        return self._install_key_id

    @install_key_id.setter
    def install_key_id(self, install_key_id: str):
        """
        Sets the install_key_id of this ManagementAgent.
        agent install key identifier

        :param install_key_id: The install_key_id of this ManagementAgent.
        :type: str
        """
        self._install_key_id = install_key_id

    @property
    def display_name(self) -> str:
        """
        Gets the display_name of this ManagementAgent.
        Management Agent Name

        :return: The display_name of this ManagementAgent.
        :rtype: str
        """
        return self._display_name

    @display_name.setter
    def display_name(self, display_name: str):
        """
        Sets the display_name of this ManagementAgent.
        Management Agent Name

        :param display_name: The display_name of this ManagementAgent.
        :type: str
        """
        self._display_name = display_name

    @property
    def platform_type(self) -> str:
        """
        Gets the platform_type of this ManagementAgent.
        Platform Type

        Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'.
        Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.

        :return: The platform_type of this ManagementAgent.
        :rtype: str
        """
        return self._platform_type

    @platform_type.setter
    def platform_type(self, platform_type: str):
        """
        Sets the platform_type of this ManagementAgent.
        Platform Type

        :param platform_type: The platform_type of this ManagementAgent.
        :type: str
        """
        # Unknown values are coerced rather than rejected so newer service
        # responses do not break older SDKs.
        allowed_values = ["LINUX", "WINDOWS"]
        if not value_allowed_none_or_none_sentinel(platform_type, allowed_values):
            platform_type = 'UNKNOWN_ENUM_VALUE'
        self._platform_type = platform_type

    @property
    def platform_name(self) -> str:
        """
        Gets the platform_name of this ManagementAgent.
        Platform Name

        :return: The platform_name of this ManagementAgent.
        :rtype: str
        """
        return self._platform_name

    @platform_name.setter
    def platform_name(self, platform_name: str):
        """
        Sets the platform_name of this ManagementAgent.
        Platform Name

        :param platform_name: The platform_name of this ManagementAgent.
        :type: str
        """
        self._platform_name = platform_name

    @property
    def platform_version(self) -> str:
        """
        Gets the platform_version of this ManagementAgent.
        Platform Version

        :return: The platform_version of this ManagementAgent.
        :rtype: str
        """
        return self._platform_version

    @platform_version.setter
    def platform_version(self, platform_version: str):
        """
        Sets the platform_version of this ManagementAgent.
        Platform Version

        :param platform_version: The platform_version of this ManagementAgent.
        :type: str
        """
        self._platform_version = platform_version

    @property
    def version(self) -> str:
        """
        **[Required]** Gets the version of this ManagementAgent.
        Management Agent Version

        :return: The version of this ManagementAgent.
        :rtype: str
        """
        return self._version

    @version.setter
    def version(self, version: str):
        """
        Sets the version of this ManagementAgent.
        Management Agent Version

        :param version: The version of this ManagementAgent.
        :type: str
        """
        self._version = version

    @property
    def host(self) -> str:
        """
        Gets the host of this ManagementAgent.
        Management Agent host machine name

        :return: The host of this ManagementAgent.
        :rtype: str
        """
        return self._host

    @host.setter
    def host(self, host: str):
        """
        Sets the host of this ManagementAgent.
        Management Agent host machine name

        :param host: The host of this ManagementAgent.
        :type: str
        """
        self._host = host

    @property
    def host_id(self) -> str:
        """
        Gets the host_id of this ManagementAgent.
        Host resource ocid

        :return: The host_id of this ManagementAgent.
        :rtype: str
        """
        return self._host_id

    @host_id.setter
    def host_id(self, host_id: str):
        """
        Sets the host_id of this ManagementAgent.
        Host resource ocid

        :param host_id: The host_id of this ManagementAgent.
        :type: str
        """
        self._host_id = host_id

    @property
    def install_path(self) -> str:
        """
        Gets the install_path of this ManagementAgent.
        Path where Management Agent is installed

        :return: The install_path of this ManagementAgent.
        :rtype: str
        """
        return self._install_path

    @install_path.setter
    def install_path(self, install_path: str):
        """
        Sets the install_path of this ManagementAgent.
        Path where Management Agent is installed

        :param install_path: The install_path of this ManagementAgent.
        :type: str
        """
        self._install_path = install_path

    @property
    def plugin_list(self) -> list:
        """
        Gets the plugin_list of this ManagementAgent.
        list of managementAgentPlugins associated with the agent

        :return: The plugin_list of this ManagementAgent.
        :rtype: list[oci.management_agent.models.ManagementAgentPluginDetails]
        """
        return self._plugin_list

    @plugin_list.setter
    def plugin_list(self, plugin_list: list):
        """
        Sets the plugin_list of this ManagementAgent.
        list of managementAgentPlugins associated with the agent

        :param plugin_list: The plugin_list of this ManagementAgent.
        :type: list[oci.management_agent.models.ManagementAgentPluginDetails]
        """
        self._plugin_list = plugin_list

    @property
    def compartment_id(self) -> str:
        """
        **[Required]** Gets the compartment_id of this ManagementAgent.
        Compartment Identifier

        :return: The compartment_id of this ManagementAgent.
        :rtype: str
        """
        return self._compartment_id

    @compartment_id.setter
    def compartment_id(self, compartment_id: str):
        """
        Sets the compartment_id of this ManagementAgent.
        Compartment Identifier

        :param compartment_id: The compartment_id of this ManagementAgent.
        :type: str
        """
        self._compartment_id = compartment_id

    @property
    def is_agent_auto_upgradable(self) -> bool:
        """
        Gets the is_agent_auto_upgradable of this ManagementAgent.
        true if the agent can be upgraded automatically; false if it must be upgraded manually.

        :return: The is_agent_auto_upgradable of this ManagementAgent.
        :rtype: bool
        """
        return self._is_agent_auto_upgradable
@is_agent_auto_upgradable.setter
def is_agent_auto_upgradable(self, is_agent_auto_upgradable):
"""
Sets the is_agent_auto_upgradable of this ManagementAgent.
true if the agent can be upgraded automatically; false if it must be upgraded manually.
:param is_agent_auto_upgradable: The is_agent_auto_upgradable of this ManagementAgent.
:type: bool
"""
self._is_agent_auto_upgradable = is_agent_auto_upgradable
@property
def time_created(self):
"""
Gets the time_created of this ManagementAgent.
The time the Management Agent was created. An RFC3339 formatted datetime string
:return: The time_created of this ManagementAgent.
:rtype: datetime
"""
return self._time_created
@time_created.setter
def time_created(self, time_created):
"""
Sets the time_created of this ManagementAgent.
The time the Management Agent was created. An RFC3339 formatted datetime string
:param time_created: The time_created of this ManagementAgent.
:type: datetime
"""
self._time_created = time_created
@property
def time_updated(self):
"""
Gets the time_updated of this ManagementAgent.
The time the Management Agent was updated. An RFC3339 formatted datetime string
:return: The time_updated of this ManagementAgent.
:rtype: datetime
"""
return self._time_updated
@time_updated.setter
def time_updated(self, time_updated):
"""
Sets the time_updated of this ManagementAgent.
The time the Management Agent was updated. An RFC3339 formatted datetime string
:param time_updated: The time_updated of this ManagementAgent.
:type: datetime
"""
self._time_updated = time_updated
@property
def time_last_heartbeat(self):
"""
Gets the time_last_heartbeat of this ManagementAgent.
The time the Management Agent has last recorded its health status in telemetry. This value will be null if the agent has not recorded its health status in last 7 days. An RFC3339 formatted datetime string
:return: The time_last_heartbeat of this ManagementAgent.
:rtype: datetime
"""
return self._time_last_heartbeat
@time_last_heartbeat.setter
def time_last_heartbeat(self, time_last_heartbeat):
"""
Sets the time_last_heartbeat of this ManagementAgent.
The time the Management Agent has last recorded its health status in telemetry. This value will be null if the agent has not recorded its health status in last 7 days. An RFC3339 formatted datetime string
:param time_last_heartbeat: The time_last_heartbeat of this ManagementAgent.
:type: datetime
"""
self._time_last_heartbeat = time_last_heartbeat
@property
def availability_status(self):
"""
Gets the availability_status of this ManagementAgent.
The current availability status of managementAgent
Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The availability_status of this ManagementAgent.
:rtype: str
"""
return self._availability_status
@availability_status.setter
def availability_status(self, availability_status):
"""
Sets the availability_status of this ManagementAgent.
The current availability status of managementAgent
:param availability_status: The availability_status of this ManagementAgent.
:type: str
"""
allowed_values = ["ACTIVE", "SILENT", "NOT_AVAILABLE"]
if not value_allowed_none_or_none_sentinel(availability_status, allowed_values):
availability_status = 'UNKNOWN_ENUM_VALUE'
self._availability_status = availability_status
@property
def lifecycle_state(self):
"""
Gets the lifecycle_state of this ManagementAgent.
The current state of managementAgent
Allowed values for this property are: "CREATING", "UPDATING", "ACTIVE", "INACTIVE", "TERMINATED", "DELETING", "DELETED", "FAILED", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The lifecycle_state of this ManagementAgent.
:rtype: str
"""
return self._lifecycle_state
@lifecycle_state.setter
def lifecycle_state(self, lifecycle_state):
"""
Sets the lifecycle_state of this ManagementAgent.
The current state of managementAgent
:param lifecycle_state: The lifecycle_state of this ManagementAgent.
:type: str
"""
allowed_values = ["CREATING", "UPDATING", "ACTIVE", "INACTIVE", "TERMINATED", "DELETING", "DELETED", "FAILED"]
if not value_allowed_none_or_none_sentinel(lifecycle_state, allowed_values):
lifecycle_state = 'UNKNOWN_ENUM_VALUE'
self._lifecycle_state = lifecycle_state
@property
def lifecycle_details(self):
"""
Gets the lifecycle_details of this ManagementAgent.
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
:return: The lifecycle_details of this ManagementAgent.
:rtype: str
"""
return self._lifecycle_details
@lifecycle_details.setter
def lifecycle_details(self, lifecycle_details):
"""
Sets the lifecycle_details of this ManagementAgent.
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
:param lifecycle_details: The lifecycle_details of this ManagementAgent.
:type: str
"""
self._lifecycle_details = lifecycle_details
@property
def is_customer_deployed(self):
"""
Gets the is_customer_deployed of this ManagementAgent.
true, if the agent image is manually downloaded and installed. false, if the agent is deployed as a plugin in Oracle Cloud Agent.
:return: The is_customer_deployed of this ManagementAgent.
:rtype: bool
"""
return self._is_customer_deployed
@is_customer_deployed.setter
def is_customer_deployed(self, is_customer_deployed):
"""
Sets the is_customer_deployed of this ManagementAgent.
true, if the agent image is manually downloaded and installed. false, if the agent is deployed as a plugin in Oracle Cloud Agent.
:param is_customer_deployed: The is_customer_deployed of this ManagementAgent.
:type: bool
"""
self._is_customer_deployed = is_customer_deployed
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this ManagementAgent.
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\"bar-key\": \"value\"}`
:return: The freeform_tags of this ManagementAgent.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
"""
Sets the freeform_tags of this ManagementAgent.
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\"bar-key\": \"value\"}`
:param freeform_tags: The freeform_tags of this ManagementAgent.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags
@property
def defined_tags(self):
"""
Gets the defined_tags of this ManagementAgent.
Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
:return: The defined_tags of this ManagementAgent.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this ManagementAgent.
Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\"foo-namespace\": {\"bar-key\": \"value\"}}`
:param defined_tags: The defined_tags of this ManagementAgent.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
    def __repr__(self):
        # Delegate to the SDK-wide helper that pretty-prints every attribute.
        # NOTE(review): formatted_flat_dict is imported elsewhere in this module.
        return formatted_flat_dict(self)
    def __eq__(self, other):
        # Value equality: compare the complete attribute dictionaries.
        # None never compares equal; any other object is compared structurally
        # (no isinstance check is performed).
        if other is None:
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        # Explicit negation of __eq__ — presumably kept for Python 2
        # compatibility, where __ne__ is not derived automatically.
        return not self == other
| 33.457606 | 245 | 0.655126 |
6c5c44565cc4bd05df39dc9b6860b73312d0945d | 1,921 | py | Python | tests/isolated/patcher_fork_after_monkey_patch.py | li-caspar/eventlet_0.30.2 | a431842e29c26e46cfcfff60c93ca92e07663044 | [
"MIT"
] | 1,044 | 2015-01-02T02:01:01.000Z | 2022-03-28T06:19:15.000Z | tests/isolated/patcher_fork_after_monkey_patch.py | li-caspar/eventlet_0.30.2 | a431842e29c26e46cfcfff60c93ca92e07663044 | [
"MIT"
] | 591 | 2015-01-04T14:02:06.000Z | 2022-03-15T09:09:18.000Z | tests/isolated/patcher_fork_after_monkey_patch.py | li-caspar/eventlet_0.30.2 | a431842e29c26e46cfcfff60c93ca92e07663044 | [
"MIT"
] | 269 | 2015-01-04T14:22:31.000Z | 2022-03-12T17:23:51.000Z | # Monkey patching interferes with threading in Python 3.7
# https://github.com/eventlet/eventlet/issues/592
# Keep automated test collectors from importing/running this module directly;
# it is meant to be executed as a standalone script by the test harness.
__test__ = False


def check(n, mod, tag):
    """Assert that *mod*'s thread registry currently holds exactly *n* entries.

    :param n: expected number of active threads
    :param mod: a threading-like module exposing ``_active``
    :param tag: short label used in the failure message
    """
    active = mod._active
    assert len(active) == n, 'Expected {} {} threads, got {}'.format(n, tag, active)
if __name__ == '__main__':
    import eventlet
    import eventlet.patcher
    # Monkey-patch BEFORE importing os/sys/threading so that `threading`
    # below is the patched (green) module.
    eventlet.monkey_patch()
    import os
    import sys
    import threading
    # The pristine, un-patched threading module, for comparison.
    _threading = eventlet.patcher.original('threading')
    import eventlet.green.threading
    def target():
        # Keep each worker alive long enough to be observed in _active.
        eventlet.sleep(0.1)
    # One patched, two original, three green worker threads.
    threads = [
        threading.Thread(target=target, name='patched'),
        _threading.Thread(target=target, name='original-1'),
        _threading.Thread(target=target, name='original-2'),
        eventlet.green.threading.Thread(target=target, name='green-1'),
        eventlet.green.threading.Thread(target=target, name='green-2'),
        eventlet.green.threading.Thread(target=target, name='green-3'),
    ]
    for t in threads:
        t.start()
    # Each module appears to track its own main thread plus the workers
    # started through it (hence workers + 1 in each count).
    check(2, threading, 'pre-fork patched')
    check(3, _threading, 'pre-fork original')
    check(4, eventlet.green.threading, 'pre-fork green')
    if os.fork() == 0:
        # Inside the child, we should only have a main thread,
        # but old pythons make it difficult to ensure
        if sys.version_info >= (3, 7):
            check(1, threading, 'child post-fork patched')
            check(1, _threading, 'child post-fork original')
            check(1, eventlet.green.threading, 'child post-fork green')
        sys.exit()
    else:
        # Parent: reap the child, then verify fork did not corrupt the
        # parent's thread bookkeeping in any of the three modules.
        os.wait()
        check(2, threading, 'post-fork patched')
        check(3, _threading, 'post-fork original')
        check(4, eventlet.green.threading, 'post-fork green')
    for t in threads:
        t.join()
    # After joining, only the main thread should remain in each registry.
    check(1, threading, 'post-join patched')
    check(1, _threading, 'post-join original')
    check(1, eventlet.green.threading, 'post-join green')
    print('pass')
| 32.016667 | 94 | 0.645497 |
5c06f201e951c5815bfbb285d7b862964613e96e | 32,566 | py | Python | record.py | sugar-activities/4552-activity | 6e261fd900a5be44248eee267e7cd1be03ab0804 | [
"MIT"
] | null | null | null | record.py | sugar-activities/4552-activity | 6e261fd900a5be44248eee267e7cd1be03ab0804 | [
"MIT"
] | null | null | null | record.py | sugar-activities/4552-activity | 6e261fd900a5be44248eee267e7cd1be03ab0804 | [
"MIT"
] | null | null | null | #Copyright (c) 2008, Media Modifications Ltd.
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
import os
import logging
import shutil
from gettext import gettext as _
from gettext import ngettext
import gtk
from gtk import gdk
import cairo
import pango
import pangocairo
import pygst
pygst.require('0.10')
import gst
from sugar.activity import activity
from sugar.graphics.toolcombobox import ToolComboBox
from sugar.graphics.toolbarbox import ToolbarBox
from sugar.graphics.toolbarbox import ToolbarButton
from sugar.graphics.radiotoolbutton import RadioToolButton
from sugar.activity.widgets import StopButton
from sugar.activity.widgets import ActivityToolbarButton
from model import Model
from button import RecdButton
import constants
from instance import Instance
import utils
from tray import HTray
from mediaview import MediaView
import hw
from iconcombobox import IconComboBox
logger = logging.getLogger('record.py')
# Shared colors for the black-on-white media UI.
COLOR_BLACK = gdk.color_parse('#000000')
COLOR_WHITE = gdk.color_parse('#ffffff')
# GStreamer debug output: enabled, uncolored, and throttled to warnings
# only when the application itself is logging at DEBUG level.
gst.debug_set_active(True)
gst.debug_set_colored(False)
if logging.getLogger().level <= logging.DEBUG:
    gst.debug_set_default_threshold(gst.LEVEL_WARNING)
else:
    gst.debug_set_default_threshold(gst.LEVEL_ERROR)
class Record(activity.Activity):
    """Sugar activity for capturing photos, video and audio.

    Owns the UI (toolbars, media view, thumbnail tray) and delegates
    capture/playback logic to a Model instance (defined in model.py).
    """
    def __init__(self, handle):
        super(Record, self).__init__(handle)
        self.props.enable_fullscreen_mode = False
        Instance(self)
        # Track window visibility so the model can pause the pipeline when
        # the activity is fully obscured.
        self.add_events(gtk.gdk.VISIBILITY_NOTIFY_MASK)
        self.connect("visibility-notify-event", self._visibility_changed)
        #the main classes
        self.model = Model(self)
        self.ui_init()
        #CSCL
        self.connect("shared", self._shared_cb)
        if self.get_shared_activity():
            #have you joined or shared this activity yourself?
            if self.get_shared():
                self._joined_cb(self)
            else:
                self.connect("joined", self._joined_cb)
        # Realize the video view widget so that it knows its own window XID
        self._media_view.realize_video()
        # Changing to the first toolbar kicks off the rest of the setup
        if self.model.get_has_camera():
            self.model.change_mode(constants.MODE_PHOTO)
        else:
            self.model.change_mode(constants.MODE_AUDIO)
    def read_file(self, path):
        # Journal integration: delegate load to the model.
        self.model.read_file(path)
    def write_file(self, path):
        # Journal integration: delegate save to the model.
        self.model.write_file(path)
    def close(self):
        # Stop playback and live pipelines before the activity closes.
        self.model.gplay.stop()
        self.model.glive.stop()
        super(Record, self).close()
    def _visibility_changed(self, widget, event):
        self.model.set_visible(event.state != gtk.gdk.VISIBILITY_FULLY_OBSCURED)
    def _shared_cb(self, activity):
        self.model.collab.set_activity_shared()
    def _joined_cb(self, activity):
        self.model.collab.joined()
    def ui_init(self):
        """Build the toolbar, media view, controls and thumbnail tray."""
        self._fullscreen = False
        self._showing_info = False
        # FIXME: if _thumb_tray becomes some kind of button group, we wouldn't
        # have to track which recd is active
        self._active_recd = None
        self.connect_after('key-press-event', self._key_pressed)
        self._active_toolbar_idx = 0
        self._toolbar_box = ToolbarBox()
        activity_button = ActivityToolbarButton(self)
        self._toolbar_box.toolbar.insert(activity_button, 0)
        self.set_toolbar_box(self._toolbar_box)
        self._toolbar = self.get_toolbar_box().toolbar
        tool_group = None
        # Photo/video buttons exist only when a camera is present.
        if self.model.get_has_camera():
            self._photo_button = RadioToolButton()
            self._photo_button.props.group = tool_group
            tool_group = self._photo_button
            self._photo_button.props.icon_name = 'camera-external'
            self._photo_button.props.label = _('Photo')
            self._photo_button.mode = constants.MODE_PHOTO
            self._photo_button.connect('clicked', self._mode_button_clicked)
            self._toolbar.insert(self._photo_button, -1)
            self._video_button = RadioToolButton()
            self._video_button.props.group = tool_group
            self._video_button.props.icon_name = 'media-video'
            self._video_button.props.label = _('Video')
            self._video_button.mode = constants.MODE_VIDEO
            self._video_button.connect('clicked', self._mode_button_clicked)
            self._toolbar.insert(self._video_button, -1)
        else:
            self._photo_button = None
            self._video_button = None
        self._audio_button = RadioToolButton()
        self._audio_button.props.group = tool_group
        self._audio_button.props.icon_name = 'media-audio'
        self._audio_button.props.label = _('Audio')
        self._audio_button.mode = constants.MODE_AUDIO
        self._audio_button.connect('clicked', self._mode_button_clicked)
        self._toolbar.insert(self._audio_button, -1)
        self._toolbar.insert(gtk.SeparatorToolItem(), -1)
        # Timer/duration/quality combos; RecordControl is defined elsewhere
        # in this module.
        self._toolbar_controls = RecordControl(self._toolbar)
        separator = gtk.SeparatorToolItem()
        separator.props.draw = False
        separator.set_expand(True)
        self._toolbar.insert(separator, -1)
        self._toolbar.insert(StopButton(self), -1)
        self.get_toolbar_box().show_all()
        main_box = gtk.VBox()
        self.set_canvas(main_box)
        main_box.get_parent().modify_bg(gtk.STATE_NORMAL, COLOR_BLACK)
        main_box.show()
        self._media_view = MediaView()
        self._media_view.connect('media-clicked', self._media_view_media_clicked)
        self._media_view.connect('pip-clicked', self._media_view_pip_clicked)
        self._media_view.connect('info-clicked', self._media_view_info_clicked)
        self._media_view.connect('full-clicked', self._media_view_full_clicked)
        self._media_view.connect('tags-changed', self._media_view_tags_changed)
        self._media_view.show()
        # The controls row below the media view; individual children are
        # shown/hidden depending on the current state (see set_state).
        self._controls_hbox = gtk.HBox()
        self._controls_hbox.show()
        self._shutter_button = ShutterButton()
        self._shutter_button.connect("clicked", self._shutter_clicked)
        self._controls_hbox.pack_start(self._shutter_button, expand=True, fill=False)
        self._countdown_image = CountdownImage()
        self._controls_hbox.pack_start(self._countdown_image, expand=True, fill=False)
        self._play_button = PlayButton()
        self._play_button.connect('clicked', self._play_pause_clicked)
        self._controls_hbox.pack_start(self._play_button, expand=False)
        self._playback_scale = PlaybackScale(self.model)
        self._controls_hbox.pack_start(self._playback_scale, expand=True, fill=True)
        self._progress = ProgressInfo()
        self._controls_hbox.pack_start(self._progress, expand=True, fill=True)
        self._title_label = gtk.Label()
        self._title_label.set_markup("<b><span foreground='white'>"+_('Title:')+'</span></b>')
        self._controls_hbox.pack_start(self._title_label, expand=False)
        self._title_entry = gtk.Entry()
        self._title_entry.modify_bg(gtk.STATE_INSENSITIVE, COLOR_BLACK)
        self._title_entry.connect('changed', self._title_changed)
        self._controls_hbox.pack_start(self._title_entry, expand=True, fill=True, padding=10)
        container = RecordContainer(self._media_view, self._controls_hbox)
        main_box.pack_start(container, expand=True, fill=True, padding=6)
        container.show()
        self._thumb_tray = HTray()
        self._thumb_tray.set_size_request(-1, 150)
        main_box.pack_end(self._thumb_tray, expand=False)
        self._thumb_tray.show_all()
    def serialize(self):
        # Persist the toolbar selections (timer, duration, quality).
        data = {}
        data['timer'] = self._toolbar_controls.get_timer_idx()
        data['duration'] = self._toolbar_controls.get_duration_idx()
        data['quality'] = self._toolbar_controls.get_quality()
        return data
    def deserialize(self, data):
        # Restore toolbar selections; missing keys fall back to defaults.
        self._toolbar_controls.set_timer_idx(data.get('timer', 0))
        self._toolbar_controls.set_duration_idx(data.get('duration', 0))
        self._toolbar_controls.set_quality(data.get('quality', 0))
    def _key_pressed(self, widget, event):
        """Global key handling: shutter (game key O), copy, info, fullscreen."""
        if self.model.ui_frozen():
            return False
        key = event.keyval
        if key == gtk.keysyms.KP_Page_Up: # game key O
            if self._shutter_button.props.visible:
                if self._shutter_button.props.sensitive:
                    self._shutter_button.clicked()
            else: # return to live mode
                self.model.set_state(constants.STATE_READY)
        elif key == gtk.keysyms.c and event.state == gdk.CONTROL_MASK:
            self._copy_to_clipboard(self._active_recd)
        elif key == gtk.keysyms.i:
            self._toggle_info()
        elif key == gtk.keysyms.Escape:
            if self._fullscreen:
                self._toggle_fullscreen()
        return False
    def _play_pause_clicked(self, widget):
        self.model.play_pause()
    def set_mode(self, mode):
        self._toolbar_controls.set_mode(mode)
    # can be called from gstreamer thread, so must not do any GTK+ stuff
    def set_glive_sink(self, sink):
        return self._media_view.set_video_sink(sink)
    # can be called from gstreamer thread, so must not do any GTK+ stuff
    def set_gplay_sink(self, sink):
        return self._media_view.set_video2_sink(sink)
    def get_selected_quality(self):
        return self._toolbar_controls.get_quality()
    def get_selected_timer(self):
        return self._toolbar_controls.get_timer()
    def get_selected_duration(self):
        return self._toolbar_controls.get_duration()
    def set_progress(self, value, text):
        self._progress.set_progress(value)
        self._progress.set_text(text)
    def set_countdown(self, value):
        """Show the countdown image for value > 0, restore shutter at 0."""
        if value == 0:
            self._shutter_button.show()
            self._countdown_image.hide()
            self._countdown_image.clear()
            return
        self._shutter_button.hide()
        self._countdown_image.show()
        self._countdown_image.set_value(value)
    def _title_changed(self, widget):
        self._active_recd.setTitle(self._title_entry.get_text())
    def _media_view_media_clicked(self, widget):
        # Clicking the media acts as play/pause when playback is possible.
        if self._play_button.props.visible and self._play_button.props.sensitive:
            self._play_button.clicked()
    def _media_view_pip_clicked(self, widget):
        # clicking on the PIP always returns to live mode
        self.model.set_state(constants.STATE_READY)
    def _media_view_info_clicked(self, widget):
        self._toggle_info()
    def _toggle_info(self):
        """Toggle between media display and the info/metadata overlay."""
        recd = self._active_recd
        if not recd:
            return
        if self._showing_info:
            self._show_recd(recd, play=False)
            return
        self._showing_info = True
        if self.model.get_mode() in (constants.MODE_PHOTO, constants.MODE_AUDIO):
            func = self._media_view.show_info_photo
        else:
            func = self._media_view.show_info_video
        self._play_button.hide()
        self._progress.hide()
        self._playback_scale.hide()
        self._title_entry.set_text(recd.title)
        self._title_entry.show()
        self._title_label.show()
        func(recd.recorderName, recd.colorStroke, recd.colorFill, utils.getDateString(recd.time), recd.tags)
    def _media_view_full_clicked(self, widget):
        self._toggle_fullscreen()
    def _media_view_tags_changed(self, widget, tbuffer):
        text = tbuffer.get_text(tbuffer.get_start_iter(), tbuffer.get_end_iter())
        self._active_recd.setTags(text)
    def _toggle_fullscreen(self):
        # Fullscreen hides the toolbar and tray; the media view fills the rest.
        if not self._fullscreen:
            self._toolbar_box.hide()
            self._thumb_tray.hide()
        else:
            self._toolbar_box.show()
            self._thumb_tray.show()
        self._fullscreen = not self._fullscreen
        self._media_view.set_fullscreen(self._fullscreen)
    def _mode_button_clicked(self, button):
        self.model.change_mode(button.mode)
    def _shutter_clicked(self, arg):
        self.model.do_shutter()
    def set_shutter_sensitive(self, value):
        self._shutter_button.set_sensitive(value)
    def set_state(self, state):
        """Reconfigure the controls row for the given model state."""
        # Mode radio buttons are only usable while ready (not busy).
        radio_state = (state == constants.STATE_READY)
        for item in (self._photo_button, self._audio_button, self._video_button):
            if item:
                item.set_sensitive(radio_state)
        self._showing_info = False
        if state == constants.STATE_READY:
            self._set_cursor_default()
            self._active_recd = None
            self._title_entry.hide()
            self._title_label.hide()
            self._play_button.hide()
            self._playback_scale.hide()
            self._progress.hide()
            self._controls_hbox.set_child_packing(self._shutter_button, expand=True, fill=False, padding=0, pack_type=gtk.PACK_START)
            self._shutter_button.set_normal()
            self._shutter_button.set_sensitive(True)
            self._shutter_button.show()
            self._media_view.show_live()
        elif state == constants.STATE_RECORDING:
            self._shutter_button.set_recording()
            self._controls_hbox.set_child_packing(self._shutter_button, expand=False, fill=False, padding=0, pack_type=gtk.PACK_START)
            self._progress.show()
        elif state == constants.STATE_PROCESSING:
            self._set_cursor_busy()
            self._shutter_button.hide()
            self._progress.show()
        elif state == constants.STATE_DOWNLOADING:
            self._shutter_button.hide()
            self._progress.show()
    def set_paused(self, value):
        if value:
            self._play_button.set_play()
        else:
            self._play_button.set_pause()
    def _thumbnail_clicked(self, button, recd):
        if self.model.ui_frozen():
            return
        self._active_recd = recd
        self._show_recd(recd)
    def add_thumbnail(self, recd, scroll_to_end):
        """Append a thumbnail button for recd to the tray."""
        button = RecdButton(recd)
        clicked_handler = button.connect("clicked", self._thumbnail_clicked, recd)
        remove_handler = button.connect("remove-requested", self._remove_recd)
        clipboard_handler = button.connect("copy-clipboard-requested", self._thumbnail_copy_clipboard)
        # Remember handler ids so they can be disconnected on removal.
        button.set_data('handler-ids', (clicked_handler, remove_handler, clipboard_handler))
        self._thumb_tray.add_item(button)
        button.show()
        if scroll_to_end:
            self._thumb_tray.scroll_to_end()
    def _copy_to_clipboard(self, recd):
        if recd == None:
            return
        if not recd.isClipboardCopyable():
            return
        # Copy the media to a temp file so the clipboard owner can outlive
        # the original recording file.
        media_path = recd.getMediaFilepath()
        tmp_path = utils.getUniqueFilepath(media_path, 0)
        shutil.copyfile(media_path, tmp_path)
        gtk.Clipboard().set_with_data([('text/uri-list', 0, 0)], self._clipboard_get, self._clipboard_clear, tmp_path)
    def _clipboard_get(self, clipboard, selection_data, info, path):
        selection_data.set("text/uri-list", 8, "file://" + path)
    def _clipboard_clear(self, clipboard, path):
        # Remove the temp copy once the clipboard no longer needs it.
        if os.path.exists(path):
            os.unlink(path)
    def _thumbnail_copy_clipboard(self, recdbutton):
        self._copy_to_clipboard(recdbutton.get_recd())
    def _remove_recd(self, recdbutton):
        recd = recdbutton.get_recd()
        self.model.delete_recd(recd)
        # If the deleted recording was on screen, go back to live view.
        if self._active_recd == recd:
            self.model.set_state(constants.STATE_READY)
        self._remove_thumbnail(recdbutton)
    def _remove_thumbnail(self, recdbutton):
        # Disconnect signal handlers before dropping the button to avoid
        # callbacks on a dead widget.
        handlers = recdbutton.get_data('handler-ids')
        for handler in handlers:
            recdbutton.disconnect(handler)
        self._thumb_tray.remove_item(recdbutton)
        recdbutton.cleanup()
    def remove_all_thumbnails(self):
        for child in self._thumb_tray.get_children():
            self._remove_thumbnail(child)
    def show_still(self, pixbuf):
        self._media_view.show_still(pixbuf)
    def _show_photo(self, recd):
        path = self._get_photo_path(recd)
        self._media_view.show_photo(path)
        self._title_entry.set_text(recd.title)
        self._title_entry.show()
        self._title_label.show()
        self._shutter_button.hide()
        self._progress.hide()
    def _show_audio(self, recd, play):
        self._progress.hide()
        self._shutter_button.hide()
        self._title_entry.hide()
        self._title_label.hide()
        self._play_button.show()
        self._playback_scale.show()
        # Audio recordings display their associated still image while playing.
        path = recd.getAudioImageFilepath()
        self._media_view.show_photo(path)
        if play:
            self.model.play_audio(recd)
    def _show_video(self, recd, play):
        self._progress.hide()
        self._shutter_button.hide()
        self._title_entry.hide()
        self._title_label.hide()
        self._play_button.show()
        self._playback_scale.show()
        self._media_view.show_video()
        if play:
            self.model.play_video(recd)
    def set_playback_scale(self, value):
        self._playback_scale.set_value(value)
    def _get_photo_path(self, recd):
        # FIXME should live (partially) in recd?
        #downloading = self.ca.requestMeshDownload(recd)
        #self.MESHING = downloading
        if True: #not downloading:
            #self.progressWindow.updateProgress(0, "")
            return recd.getMediaFilepath()
        #maybe it is not downloaded from the mesh yet...
        #but we can show the low res thumb in the interim
        return recd.getThumbFilepath()
    def _show_recd(self, recd, play=True):
        """Display a recording, requesting a mesh download if necessary."""
        self._showing_info = False
        if recd.buddy and not recd.downloadedFromBuddy:
            self.model.request_download(recd)
        elif recd.type == constants.TYPE_PHOTO:
            self._show_photo(recd)
        elif recd.type == constants.TYPE_AUDIO:
            self._show_audio(recd, play)
        elif recd.type == constants.TYPE_VIDEO:
            self._show_video(recd, play)
    def remote_recd_available(self, recd):
        # A mesh download finished: show it if the user is still waiting on it.
        if recd == self._active_recd:
            self._show_recd(recd)
    def update_download_progress(self, recd):
        if recd != self._active_recd:
            return
        if not recd.meshDownloading:
            msg = _('Download failed.')
        elif recd.meshDownloadingProgress:
            msg = _('Downloading...')
        else:
            msg = _('Requesting...')
        self.set_progress(recd.meshDownlodingPercent, msg)
    def _set_cursor_busy(self):
        self.window.set_cursor(gdk.Cursor(gdk.WATCH))
    def _set_cursor_default(self):
        self.window.set_cursor(None)
class RecordContainer(gtk.Container):
    """
    A custom Container that contains a media view area, and a controls hbox.

    The controls hbox is given the first height that it requests, locked in
    for the duration of the widget.

    The media view is given the remainder of the space, but is constrained to
    a strict 4:3 ratio, therefore deducing its width.

    The controls hbox is given the same width, and both elements are centered
    horizontally.
    """
    __gtype_name__ = 'RecordContainer'
    def __init__(self, media_view, controls_hbox):
        self._media_view = media_view
        self._controls_hbox = controls_hbox
        # Cached height requested by the controls hbox; 0 means "not yet
        # measured" (see do_size_request).
        self._controls_hbox_height = 0
        super(RecordContainer, self).__init__()
        for widget in (self._media_view, self._controls_hbox):
            if widget.flags() & gtk.REALIZED:
                widget.set_parent_window(self.window)
            widget.set_parent(self)
    def do_realize(self):
        # Create our own GdkWindow so the container can host the video sink.
        self.set_flags(gtk.REALIZED)
        self.window = gdk.Window(
            self.get_parent_window(),
            window_type=gdk.WINDOW_CHILD,
            x=self.allocation.x,
            y=self.allocation.y,
            width=self.allocation.width,
            height=self.allocation.height,
            wclass=gdk.INPUT_OUTPUT,
            colormap=self.get_colormap(),
            event_mask=self.get_events() | gdk.VISIBILITY_NOTIFY_MASK | gdk.EXPOSURE_MASK)
        self.window.set_user_data(self)
        self.set_style(self.style.attach(self.window))
        for widget in (self._media_view, self._controls_hbox):
            widget.set_parent_window(self.window)
        self.queue_resize()
    # GTK+ contains on exit if remove is not implemented
    def do_remove(self, widget):
        pass
    def do_size_request(self, req):
        # always request 320x240 (as a minimum for video)
        req.width = 320
        req.height = 240
        self._media_view.size_request()
        w, h = self._controls_hbox.size_request()
        # add on height requested by controls hbox
        if self._controls_hbox_height == 0:
            self._controls_hbox_height = h
        req.height += self._controls_hbox_height
    @staticmethod
    def _constrain_4_3(width, height):
        """Return the largest (w, h) inside the given box with w/h == 4/3."""
        if (width % 4 == 0) and (height % 3 == 0) and ((width / 4) * 3) == height:
            return width, height # nothing to do
        ratio = 4.0 / 3.0
        if ratio * height > width:
            width = (width / 4) * 4
            height = int(width / ratio)
        else:
            height = (height / 3) * 3
            width = int(ratio * height)
        return width, height
    @staticmethod
    def _center_in_plane(plane_size, size):
        # Offset that centers a span of `size` inside `plane_size`.
        return (plane_size - size) / 2
    def do_size_allocate(self, allocation):
        self.allocation = allocation
        # give the controls hbox the height that it requested
        remaining_height = self.allocation.height - self._controls_hbox_height
        # give the mediaview the rest, constrained to 4/3 and centered
        media_view_width, media_view_height = self._constrain_4_3(self.allocation.width, remaining_height)
        media_view_x = self._center_in_plane(self.allocation.width, media_view_width)
        media_view_y = self._center_in_plane(remaining_height, media_view_height)
        # send allocation to mediaview
        alloc = gdk.Rectangle()
        alloc.width = media_view_width
        alloc.height = media_view_height
        alloc.x = media_view_x
        alloc.y = media_view_y
        self._media_view.size_allocate(alloc)
        # position hbox at the bottom of the window, with the requested height,
        # and the same width as the media view
        alloc = gdk.Rectangle()
        alloc.x = media_view_x
        alloc.y = self.allocation.height - self._controls_hbox_height
        alloc.width = media_view_width
        alloc.height = self._controls_hbox_height
        self._controls_hbox.size_allocate(alloc)
        if self.flags() & gtk.REALIZED:
            self.window.move_resize(*allocation)
    def do_forall(self, include_internals, callback, data):
        for widget in (self._media_view, self._controls_hbox):
            callback(widget, data)
class PlaybackScale(gtk.HScale):
    """Horizontal seek slider wired to a playback model.

    While a mouse button is held down on the slider, every value change
    is forwarded to the model as a seek.
    """

    def __init__(self, model):
        self.model = model
        self._change_handler = None
        adjustment = gtk.Adjustment(0.0, 0.00, 100.0, 0.1, 1.0, 1.0)
        self._playback_adjustment = adjustment
        super(PlaybackScale, self).__init__(adjustment)
        self.set_draw_value(False)
        self.set_update_policy(gtk.UPDATE_CONTINUOUS)
        self.connect('button-press-event', self._button_press)
        self.connect('button-release-event', self._button_release)

    def set_value(self, value):
        """Move the slider to *value* (does not trigger a seek by itself)."""
        self._playback_adjustment.set_value(value)

    def _value_changed(self, scale):
        # only connected while the button is down; forward to the model
        self.model.do_seek(scale.get_value())

    def _button_press(self, widget, event):
        # begin a seek gesture and start tracking value changes
        self.model.start_seek()
        self._change_handler = self.connect('value-changed', self._value_changed)

    def _button_release(self, widget, event):
        # stop tracking value changes and finish the seek gesture
        handler = self._change_handler
        self._change_handler = None
        self.disconnect(handler)
        self.model.end_seek()
class ProgressInfo(gtk.VBox):
    """Progress bar with a text label underneath, styled for a black UI."""

    def __init__(self):
        super(ProgressInfo, self).__init__()
        self._progress_bar = gtk.ProgressBar()
        self._progress_bar.modify_bg(gtk.STATE_NORMAL, COLOR_BLACK)
        self._progress_bar.modify_bg(gtk.STATE_INSENSITIVE, COLOR_BLACK)
        self.pack_start(self._progress_bar, expand=True, fill=True, padding=5)

        self._label = gtk.Label()
        self._label.modify_fg(gtk.STATE_NORMAL, COLOR_WHITE)
        self.pack_start(self._label, expand=True, fill=True)

    def show(self):
        # show both children explicitly, then the container itself
        for child in (self._progress_bar, self._label):
            child.show()
        super(ProgressInfo, self).show()

    def hide(self):
        for child in (self._progress_bar, self._label):
            child.hide()
        super(ProgressInfo, self).hide()

    def set_progress(self, value):
        """Set the bar fraction; *value* is expected in [0.0, 1.0]."""
        self._progress_bar.set_fraction(value)

    def set_text(self, text):
        """Set the label text shown under the bar."""
        self._label.set_text(text)
class CountdownImage(gtk.Image):
    """Image widget that renders a countdown number centered on a circle
    graphic, caching each rendered number."""

    def __init__(self):
        super(CountdownImage, self).__init__()
        # cache of rendered pixmaps, keyed by countdown number
        self._countdown_images = {}

    def _generate_image(self, num):
        """Render *num* on the circle background; returns a gdk.Pixmap."""
        w = 55
        h = w
        pixmap = gdk.Pixmap(self.get_window(), w, h, -1)
        ctx = pixmap.cairo_create()
        # fill the whole pixmap with black
        ctx.rectangle(0, 0, w, h)
        ctx.set_source_rgb(0, 0, 0)
        ctx.fill()
        # paint the circle graphic, nudged 4px down
        x = 0
        y = 4
        ctx.translate(x, y)
        circle_path = os.path.join(constants.GFX_PATH, 'media-circle.png')
        surface = cairo.ImageSurface.create_from_png(circle_path)
        ctx.set_source_surface(surface, 0, 0)
        ctx.paint()
        ctx.translate(-x, -y)
        # white text (values above 1.0 clamp to 1.0 in cairo)
        ctx.set_source_rgb(255, 255, 255)
        pctx = pangocairo.CairoContext(ctx)
        play = pctx.create_layout()
        font = pango.FontDescription("sans 30")
        play.set_font_description(font)
        play.set_text(str(num))
        dim = play.get_pixel_extents()
        # move the ink extents to the origin, then center them in (w, h);
        # the final -3 shift is a hand-tuned visual adjustment
        ctx.translate(-dim[0][0], -dim[0][1])
        xoff = (w - dim[0][2]) / 2
        yoff = (h - dim[0][3]) / 2
        ctx.translate(xoff, yoff)
        ctx.translate(-3, 0)
        pctx.show_layout(play)
        return pixmap

    def set_value(self, num):
        """Display the image for *num*, rendering and caching it on first use."""
        if num not in self._countdown_images:
            self._countdown_images[num] = self._generate_image(num)
        self.set_from_pixmap(self._countdown_images[num], None)
class ShutterButton(gtk.Button):
    """Record button whose icon reflects the current state: normal,
    recording (red), or insensitive."""

    def __init__(self):
        gtk.Button.__init__(self)
        self.set_relief(gtk.RELIEF_NONE)
        self.set_focus_on_click(False)
        self.modify_bg(gtk.STATE_ACTIVE, COLOR_BLACK)

        def load_icon(basename):
            # helper: load one icon image from the gfx directory
            return gtk.image_new_from_file(
                os.path.join(constants.GFX_PATH, basename))

        self._rec_image = load_icon('media-record.png')
        self._rec_red_image = load_icon('media-record-red.png')
        self._insensitive_image = load_icon('media-insensitive.png')
        self.set_normal()

    def set_sensitive(self, sensitive):
        # swap to the greyed-out icon while insensitive
        image = self._rec_image if sensitive else self._insensitive_image
        self.set_image(image)
        super(ShutterButton, self).set_sensitive(sensitive)

    def set_normal(self):
        """Show the idle (not recording) icon."""
        self.set_image(self._rec_image)

    def set_recording(self):
        """Show the red recording icon."""
        self.set_image(self._rec_red_image)
class PlayButton(gtk.Button):
    """Play/pause toggle button with two icons."""

    def __init__(self):
        super(PlayButton, self).__init__()
        self.set_relief(gtk.RELIEF_NONE)
        self.set_focus_on_click(False)
        self.modify_bg(gtk.STATE_ACTIVE, COLOR_BLACK)

        def load_icon(basename):
            # helper: load one icon image from the gfx directory
            return gtk.image_new_from_file(
                os.path.join(constants.GFX_PATH, basename))

        self._play_image = load_icon('media-play.png')
        self._pause_image = load_icon('media-pause.png')
        self.set_play()

    def set_play(self):
        """Show the play icon (pressing will start playback)."""
        self.set_image(self._play_image)

    def set_pause(self):
        """Show the pause icon (pressing will pause playback)."""
        self.set_image(self._pause_image)
class RecordControl():
    """Toolbar controls for capture: self-timer, duration and quality."""

    def __init__(self, toolbar):
        self._timer_combo = TimerCombo()
        toolbar.insert(self._timer_combo, -1)

        self._duration_combo = DurationCombo()
        toolbar.insert(self._duration_combo, -1)

        prefs_bar = gtk.Toolbar()
        quality_combo = gtk.combo_box_new_text()
        self.quality = ToolComboBox(combo=quality_combo, label_text=_('Quality:'))
        self.quality.combo.append_text(_('Low'))
        if hw.get_xo_version() != 1:
            # Disable High quality on XO-1. The system simply isn't beefy
            # enough for recording to work well.
            self.quality.combo.append_text(_('High'))
        self.quality.combo.set_active(0)
        self.quality.show_all()
        prefs_bar.insert(self.quality, -1)

        prefs_button = ToolbarButton()
        prefs_button.set_page(prefs_bar)
        prefs_button.props.icon_name = 'preferences-system'
        prefs_button.props.label = _('Preferences')
        toolbar.insert(prefs_button, -1)

    def set_mode(self, mode):
        """Enable/disable the combos appropriate for the capture mode."""
        # (quality sensitive, duration sensitive) per mode; timer is always on
        sensitivities = {
            constants.MODE_PHOTO: (True, False),
            constants.MODE_VIDEO: (True, True),
            constants.MODE_AUDIO: (False, True),
        }
        if mode in sensitivities:
            quality_on, duration_on = sensitivities[mode]
            self.quality.set_sensitive(quality_on)
            self._timer_combo.set_sensitive(True)
            self._duration_combo.set_sensitive(duration_on)

    def get_timer(self):
        """Selected timer delay, in seconds."""
        return self._timer_combo.get_value()

    def get_timer_idx(self):
        """Index of the selected timer entry."""
        return self._timer_combo.get_value_idx()

    def set_timer_idx(self, idx):
        """Select the timer entry at *idx*."""
        self._timer_combo.set_value_idx(idx)

    def get_duration(self):
        """Selected recording duration, in seconds."""
        return self._duration_combo.get_value()

    def get_duration_idx(self):
        """Index of the selected duration entry."""
        return self._duration_combo.get_value_idx()

    def set_duration_idx(self, idx):
        """Select the duration entry at *idx*."""
        return self._duration_combo.set_value_idx(idx)

    def get_quality(self):
        """Index of the selected quality (0 = Low)."""
        return self.quality.combo.get_active()

    def set_quality(self, idx):
        """Select the quality entry at *idx*."""
        self.quality.combo.set_active(idx)
class TimerCombo(IconComboBox):
    """Combo listing the available self-timer delays, in seconds."""

    TIMERS = (0, 5, 10)

    def __init__(self):
        super(TimerCombo, self).__init__('timer')
        for seconds in self.TIMERS:
            if seconds == 0:
                label = _('Immediate')
            else:
                label = TimerCombo._seconds_string(seconds)
            self.append_item(seconds, label)
        self.combo.set_active(0)

    def get_value(self):
        """Selected delay in seconds."""
        return self.TIMERS[self.combo.get_active()]

    def get_value_idx(self):
        """Index of the selected entry."""
        return self.combo.get_active()

    def set_value_idx(self, idx):
        """Select the entry at *idx*."""
        self.combo.set_active(idx)

    @staticmethod
    def _seconds_string(x):
        # pluralized, localizable "N second(s)" label
        return ngettext('%s second', '%s seconds', x) % x
class DurationCombo(IconComboBox):
    """Combo listing the available recording durations, in minutes."""

    DURATIONS = (2, 4, 6)

    def __init__(self):
        super(DurationCombo, self).__init__('duration')
        for minutes in self.DURATIONS:
            self.append_item(minutes, DurationCombo._minutes_string(minutes))
        self.combo.set_active(0)

    def get_value(self):
        """Selected duration converted to seconds."""
        return 60 * self.DURATIONS[self.combo.get_active()]

    def get_value_idx(self):
        """Index of the selected entry."""
        return self.combo.get_active()

    def set_value_idx(self, idx):
        """Select the entry at *idx*."""
        self.combo.set_active(idx)

    @staticmethod
    def _minutes_string(x):
        # pluralized, localizable "N minute(s)" label
        return ngettext('%s minute', '%s minutes', x) % x
| 34.571125 | 134 | 0.661825 |
9c04fbbfbf1cc8849ff87a9df3b6e5370db6ecef | 2,818 | py | Python | models/graph_nn.py | princeton-vl/attach-juxtapose-parser | 20e0ebb1bf43fc69b6a4c46a54bb0362fc062eed | [
"BSD-2-Clause"
] | 23 | 2020-10-29T01:53:54.000Z | 2022-03-14T07:39:02.000Z | models/graph_nn.py | princeton-vl/attach-juxtapose-parser | 20e0ebb1bf43fc69b6a4c46a54bb0362fc062eed | [
"BSD-2-Clause"
] | 1 | 2021-06-11T08:29:33.000Z | 2021-06-11T12:28:59.000Z | models/graph_nn.py | princeton-vl/attach-juxtapose-parser | 20e0ebb1bf43fc69b6a4c46a54bb0362fc062eed | [
"BSD-2-Clause"
] | 5 | 2020-12-10T01:46:27.000Z | 2022-03-30T14:39:11.000Z | """
GNNs used for encoding the partial tree
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch_geometric
from torch_geometric.nn import MessagePassing
from torch_geometric.utils import add_self_loops, degree
from typing import List
class SeparateGCNConv(MessagePassing): # type: ignore
"""
A variant of GCN that separates content and position information
Adapted from https://pytorch-geometric.readthedocs.io/en/latest/notes/create_gnn.html
"""
d_content: int
linear_c: nn.Module
linear_p: nn.Module
def __init__(self, d_model: int) -> None:
super().__init__(aggr="add")
self.d_content = d_model // 2
d_positional = d_model - self.d_content
self.linear_c = nn.Linear(self.d_content, self.d_content)
self.linear_p = nn.Linear(d_positional, d_positional)
def forward(self, x: torch.Tensor, edge_index: torch.Tensor) -> torch.Tensor:
edge_index, _ = add_self_loops(edge_index, num_nodes=x.size(0))
# apply linear transformations separately to content and position
x_c = self.linear_c(x[:, : self.d_content])
x_p = self.linear_p(x[:, self.d_content :])
x = torch.cat([x_c, x_p], dim=-1)
row, col = edge_index
deg = degree(row, x.size(0), dtype=x.dtype)
deg_inv_sqrt = deg.pow(-0.5)
norm = deg_inv_sqrt[row] * deg_inv_sqrt[col]
y: torch.Tensor = self.propagate(
edge_index, size=(x.size(0), x.size(0)), x=x, norm=norm
)
return y
def message(self, x_j: torch.Tensor, norm: torch.Tensor) -> torch.Tensor:
return norm.view(-1, 1) * x_j
def update(self, aggr_out: torch.Tensor) -> torch.Tensor:
return aggr_out
class GraphNeuralNetwork(nn.Module):
"""
Multiple GCN layers with layer normalization and residual connections
"""
convs: List[nn.Module]
def __init__(self, d_model: int, num_layers: int) -> None:
super().__init__() # type: ignore
self.convs = []
self.layernorms = []
for i in range(num_layers):
conv = SeparateGCNConv(d_model)
layernorm = nn.LayerNorm([d_model])
self.convs.append(conv)
self.layernorms.append(layernorm)
self.add_module("conv_%d" % i, conv)
self.add_module("layernorm_%d" % i, layernorm)
def forward(
self, graph: torch_geometric.data.Batch, nodes_of_interest: torch.Tensor,
) -> torch.Tensor:
x, edge_index = graph.x, graph.edge_index
for conv, layernorm in zip(self.convs[:-1], self.layernorms[:-1]):
x = x + F.relu(layernorm(conv(x, edge_index)))
x = x + self.layernorms[-1](self.convs[-1](x, edge_index))
return x[nodes_of_interest] # type: ignore
| 33.152941 | 89 | 0.64088 |
ecbbc14b94ecde51bc73e82c975e7dbda34c951e | 78 | py | Python | myfirst2021/__init__.py | roan2008/myfirst2021 | e3a82225a8ff625729da572cee04e2942d45fb8f | [
"MIT"
] | null | null | null | myfirst2021/__init__.py | roan2008/myfirst2021 | e3a82225a8ff625729da572cee04e2942d45fb8f | [
"MIT"
] | null | null | null | myfirst2021/__init__.py | roan2008/myfirst2021 | e3a82225a8ff625729da572cee04e2942d45fb8f | [
"MIT"
] | null | null | null | # __init__.py
from myfirst2021.studentclass import Student,Specialstudent
| 19.5 | 60 | 0.820513 |
cd5157dba1af2a45f5ea6d978cd8f38a1ce65429 | 1,455 | py | Python | stanCode_Projects/MyPhotoshop/green_screen.py | gibbs-shih/sc-projects | 12f130c881f8415623224fb20dbef5624fa8dff1 | [
"MIT"
] | null | null | null | stanCode_Projects/MyPhotoshop/green_screen.py | gibbs-shih/sc-projects | 12f130c881f8415623224fb20dbef5624fa8dff1 | [
"MIT"
] | null | null | null | stanCode_Projects/MyPhotoshop/green_screen.py | gibbs-shih/sc-projects | 12f130c881f8415623224fb20dbef5624fa8dff1 | [
"MIT"
] | null | null | null | """
File: green_screen.py
Name: Gibbs
-------------------------------
This file creates a new image that uses
MillenniumFalcon.png as the background and
replaces the green pixels in ReyGreenScreen.png
"""
from simpleimage import SimpleImage
def combine(background_img, figure_img):
    """
    Replace the green-screen pixels of figure_img with background_img.

    :param background_img: the image used to replace the green pixels of figure_img
    :param figure_img: the image with green pixels
    :return: figure_img with its green pixels replaced by background_img
    """
    background_img.make_as_big_as(figure_img)
    for y in range(figure_img.height):
        for x in range(figure_img.width):
            fg = figure_img.get_pixel(x, y)
            # a pixel counts as "green screen" when its green channel
            # dominates both red and blue by more than a factor of two
            if fg.green > 2 * max(fg.red, fg.blue):
                bg = background_img.get_pixel(x, y)
                fg.red = bg.red
                fg.green = bg.green
                fg.blue = bg.blue
    return figure_img
def main():
    """
    Combine the Millennium Falcon background with Rey's green-screen shot
    and display the result.
    """
    background = SimpleImage("images/MillenniumFalcon.png")
    foreground = SimpleImage("images/ReyGreenScreen.png")
    combined = combine(background, foreground)
    combined.show()


if __name__ == '__main__':
    main()
| 32.333333 | 87 | 0.672852 |
e867064f7b037588e70dcf7f61dcd6f42ad3f8b4 | 2,962 | py | Python | learning/Database/accessDatabase.py | ricardodxu/AutoGrading | 1469fe63a546b00bad9fda3af7a5fca499dce789 | [
"MIT"
] | 6 | 2018-05-26T02:47:42.000Z | 2019-12-04T20:06:50.000Z | learning/Database/accessDatabase.py | ricardodxu/AutoGrading | 1469fe63a546b00bad9fda3af7a5fca499dce789 | [
"MIT"
] | 25 | 2017-09-19T20:37:55.000Z | 2019-04-26T21:50:46.000Z | learning/Database/accessDatabase.py | ricardodxu/AutoGrading | 1469fe63a546b00bad9fda3af7a5fca499dce789 | [
"MIT"
] | 9 | 2017-09-16T05:16:00.000Z | 2021-07-17T03:17:47.000Z | from __future__ import print_function
import mysql.connector
from mysql.connector import errorcode
def create_database(cursor, db_name=None):
    """Create a UTF-8 MySQL database on the given cursor.

    :param cursor: a DB-API cursor with an ``execute`` method
    :param db_name: name of the database to create; defaults to the
        module-level ``DB_NAME`` for backward compatibility.
        NOTE(review): ``DB_NAME`` is only defined in commented-out code
        below, so calling this without ``db_name`` raises NameError —
        confirm intended usage.

    Prints an error and exits the process on a MySQL failure (original
    behavior, kept as-is).
    """
    if db_name is None:
        db_name = DB_NAME
    try:
        cursor.execute(
            "CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(db_name))
    except mysql.connector.Error as err:
        print("Failed creating database: {}".format(err))
        exit(1)
# Module-level connection: opened at import time.
# NOTE(review): credentials and host are hardcoded in source — a security
# risk; they should be moved to configuration/environment variables.
con = mysql.connector.connect(
    user="Ruijie",
    password="gengruijie123",
    host="142.93.59.116",
    database="mysql"
)
if __name__ == '__main__':
    # The code above was adapted from a web tutorial.
    cur=con.cursor()
    # Create the table; tolerate the case where it already exists.
    try:
        cur.execute('CREATE TABLE Density(province TEXT, population INTEGER, land_area REAL)')
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
            print("DATABASE already exists.")
        else:
            print(err.msg)
    else:
        print("OK")
    # (province, population, land area) rows for Canadian provinces/territories
    data=[
        ('Newfoundland and Labrador', 512930, 370501.69),
        ('Prince Edward Island', 135294, 5684.39),
        ('Nova Scotia' ,908007 ,52917.43),
        ('New Brunswick' ,729498 ,71355.67),
        ('Quebec' ,7237479 ,1357743.08),
        ('Ontario', 11410046, 907655.59),
        ('Manitoba' ,1119583 ,551937.87),
        ('Saskatchewan' ,978933, 586561.35),
        ('Alberta',2974807 ,639987.12),
        ('British Columbia' ,3907738 ,926492.48),
        ('Yukon Territory', 28674 ,474706.97),
        ('Northwest Territories' ,37360 ,1141108.37),
        ('Nunavut' ,26745 ,1925460.18),
    ]
    # one-time insertion code, kept disabled for reference:
    # for element in data:
    #     a = "INSERT INTO Density(province,population,land_area) VALUES('{:s}',{:d},{:0.2f});".format(element[0],element[1],element[2])
    #     print(a)
    #     cur.execute(a)
    # need to commit after insert data
    # con.commit()
    # Read everything back and print it.
    query = "select * from Density"
    cur.execute(query)
    data = cur.fetchall()
    print(data)
#cursor=cnx.cursor()
#DB_NAME="Student_grade"
##try:
##cursor.execute("USE {}".format(DB_NAME))
##except mysql.connector.Error as err:
##print("Database {} does not exists.".format(DB_NAME))
##if err.errno == errorcode.ER_BAD_DB_ERROR:
##create_database(cursor)
##print("Database {} created successfully.".format(DB_NAME))
##cnx.database = DB_NAME
##else:
##print(err)
##exit(1)
##cursor.execute("DROP TABLE GRADE")
#cursor.execute("INSERT INTO GRADE VALUES('Wang Jiahang',100)")
#cursor.execute("INSERT INTO GRADE VALUES('Bill Li',100)")
#cursor.execute("INSERT INTO GRADE VALUES('Wu Tong',68)")
#cursor.execute("INSERT INTO GRADE VALUES('ssb',77)")
#cursor.execute("INSERT INTO GRADE VALUES('Su Laoshi',99)")
#cursor.execute("INSERT INTO GRADE VALUES('xhl',88)")
#cnx.commit()
#cursor.execute("SELECT name, grade FROM GRADE WHERE grade>65")
#for (name,grade) in cursor:
#print("{} gets {} in the test".format(name,grade))
#cursor.close() | 24.479339 | 136 | 0.62289 |
56f52d71c8c095599f6275d0c9e1461977e80ac0 | 276 | py | Python | test/bitcoinstore/api/v1/test_routes.py | robertshuford/bitcoin-store | 894b2d699d9d61aab340b8ca7393f641fb5c5266 | [
"MIT"
] | null | null | null | test/bitcoinstore/api/v1/test_routes.py | robertshuford/bitcoin-store | 894b2d699d9d61aab340b8ca7393f641fb5c5266 | [
"MIT"
] | null | null | null | test/bitcoinstore/api/v1/test_routes.py | robertshuford/bitcoin-store | 894b2d699d9d61aab340b8ca7393f641fb5c5266 | [
"MIT"
] | null | null | null | from flask import url_for
from lib.test import ViewTestMixin
class TestApiV1(ViewTestMixin):
    def test_up_api(self):
        """Up api should respond with a success 200."""
        res = self.client.get(url_for("api.up"))
        assert 200 == res.status_code
| 25.090909 | 55 | 0.692029 |
eb12ade5c241a0147dbf445f1cc1a78d2cedff4e | 12,530 | py | Python | ark/are/graph.py | aiplat/ARK | 0d6f232774f51dcaa40b3ee5deab4ef290625655 | [
"MIT"
] | 1 | 2019-04-15T09:41:32.000Z | 2019-04-15T09:41:32.000Z | ark/are/graph.py | aiplat/ARK | 0d6f232774f51dcaa40b3ee5deab4ef290625655 | [
"MIT"
] | null | null | null | ark/are/graph.py | aiplat/ARK | 0d6f232774f51dcaa40b3ee5deab4ef290625655 | [
"MIT"
] | null | null | null | # -*- coding: UTF-8 -*-
################################################################################
#
# Copyright (c) 2018 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
**graph** 模块提供了基于图论的长流程控制模型,封装了两种长流程执行的标准作业流程。
当前普遍的运维操作方法,与最早起的全手工相比,已经有了很大变化,但基本模式并没有本质的
不同,其核心仍然是:查看服务状态→执行操作→检查执行结果的循环。
过去我们通过两种主要的方式来提升运维效率:
1. 部分组件自动化,代价是控制策略简单,风险较大。典型如使用报警回调方式自动故障发现和自动故障处理执行。
2. 固化完整运维操作的控制、处理流程,代价是可扩展性较差,业务变化需要大规模的调整流程。
ARK框架从过去的运维处理中,抽象核心的模式固化成框架,解决风险和扩展性不能两全的问题:
1. 按照运维操作流程和操作模式抽象,而非提供原生的技术实现抽象
2. 实际的技术抽象隐藏在用户视野之外,避免将功能组件用于非预期的场合
基于此,我们抽象出基于图算法的处理模式:通过定义一组离散可枚举的状态,及各状态下所对应的处理动作,实现面向状态的执行处理。根据状态间切换的确定性与否,又细分为两种运行模型:
1. 状态机模型(StateMachine):各状态处理完成后的后续状态是确定的,可直接进行后续状态的执行。
2. 流程图模型(DepedencyFlow):状态处理完成后的后续状态是不确定的,此时需要对各状态进行检查,确认是否进入该状态的处理。
使用标准作业流程的好处:
1. 面向状态运维,状态处理具有可复用性,并且流程动态生成,可以应对复杂业务变化,具有强大的扩展能力。
2. 长流程分步执行,可在运行关键点处进行checkpoint,更好地解决了单实例故障时的可用性问题。
"""
from are import exception
from are import log
class BaseGraph(object):
    """
    Graph base class: the base for graph-theory-based long-running flow
    control, wrapping the basic interfaces of long-flow control operations.

    A simple demo state machine (flow graph)::

        # define a set of states
        class AddTask(Node):
            def process(self, session, current_node, nodes_process):
                print "add task success"
                return "check_task"

        class CheckTask(Node):
            def process(self, session, current_node, nodes_process):
                print "check task success"
                return "ARK_NODE_END"

        # create the node objects
        node1 = AddTask(name="add_task", reentrance=False)
        node2 = CheckTask(name="check_task", reentrance=True)

        # create the state machine object; the session holds runtime data,
        # here simply {}
        state_machine = StateMachine({})
        state_machine.add_node(node1)
        state_machine.add_node(node2)
        state_machine.start()

        # pause the state machine
        state_machine.pause()
        # resume the state machine
        state_machine.resume()
        # stop the state machine
        state_machine.cancel()
    """

    class Status(object):
        """
        Flow-control status. The values form a finite set used to control
        the life cycle of the whole flow.
        """
        CREATED = 0
        INITED = 1
        RUNNING = 2
        PAUSED = 3
        # NOTE(review): CANCELLED/FINISHED/FAILED all share the value 4, so
        # they are indistinguishable by value — presumably all "terminal";
        # confirm before comparing against a specific one of them.
        CANCELLED = 4
        FINISHED = 4
        FAILED = 4

    # sentinel node name that marks the end of the flow
    _ARK_NODE_END = "ARK_NODE_END"

    def __init__(self, session):
        """
        Initializer.

        :param object session: state machine session object, usually holding
            the runtime information of the running instance
        """
        self._session = session
        self._nodes = []
        self._nodes_process = {}
        self._current_node = None
        self._status = self.Status.CREATED

    @property
    def session(self):
        """
        The session property.

        :return:
        """
        return self._session

    @property
    def status(self):
        """
        Running status.

        :return:
        """
        return self._status

    @status.setter
    def status(self, status):
        """
        Set the running status.

        :param int status: running status
        :return: no return
        :rtype: None
        """
        self._status = status

    def add_node(self, node):
        """
        Add a node.

        :param Node node: node to add
        :return: None
        :raises ETypeMismatch: wrong node type
        :raises EInvalidOperation: invalid operation (node already added)
        """
        if not isinstance(node, Node):
            raise exception.ETypeMismatch("param node type must be Node")
        elif node in self._nodes:
            raise exception.EInvalidOperation(
                "node {} already added".format(node.name))
        else:
            self._nodes.append(node)
            self._nodes_process[node.name] = False

    def get_node(self, node_name):
        """
        Get a node by name.

        :param str node_name: node name
        :return: reference to the node object
        :rtype: Node reference
        :raise EUnknownNode: unknown node
        """
        ret = None
        for node in self._nodes:
            if node.name == node_name:
                ret = node
                break
            else:
                continue
        if not ret:
            raise exception.EUnknownNode("node:{} unknown".format(node_name))
        else:
            return ret

    def prepare(self):
        """
        First check after the state machine is created; also sets the
        current node to the first node added.

        :return: None
        :raises EUnInited: not initialized (no nodes)
        :raises EStatusMismatch: status mismatch
        """
        if self._status != self.Status.CREATED \
                and self._status != self.Status.INITED:
            raise exception.EStatusMismatch(
                "Only in created or inited status can call "
                "this method.current status:{}".format(self._status))
        if not self._nodes:
            raise exception.EUnInited("nodes list length is 0")
        self.status = self.Status.INITED
        self._current_node = self._nodes[0].name

    def pause(self):
        """
        Pause the state machine.

        :return: None
        :raises EStatusMismatch: status mismatch
        """
        if self._status == self.Status.RUNNING:
            self._status = self.Status.PAUSED
        else:
            raise exception.EStatusMismatch(
                "Only in running status can call this method. "
                "current status:{}".format(self._status))

    def resume(self):
        """
        Resume the state machine.

        :return: None
        :raises EStatusMismatch: status mismatch
        """
        if self._status == self.Status.PAUSED:
            self._status = self.Status.RUNNING
        else:
            raise exception.EStatusMismatch(
                "Only in paused status can call this method."
                "current status:{}".format(self._status))

    def cancel(self):
        """
        Cancel the state machine.

        :return: None
        """
        self._status = self.Status.CANCELLED

    def run_next(self):
        """
        Interface method: perform one state transition. Subclasses must
        implement this according to their own transition logic.

        :return: None
        :raises ENotImplement: interface not implemented
        """
        raise exception.ENotImplement()

    def start(self):
        """
        Start the state machine and run until it leaves the RUNNING status.

        :return: None
        :raises EStatusMismatch: status mismatch
        :raises Exception: any exception raised by a node (status is set to
            FAILED before re-raising)
        """
        self.prepare()
        if self._status == self.Status.INITED \
                or self._status == self.Status.PAUSED:
            self._status = self.Status.RUNNING
        else:
            raise exception.EStatusMismatch(
                "Only in inited or paused status can call "
                "this method.current status:{}".format(self._status))
        while self._status == self.Status.RUNNING:
            try:
                self.run_next()
            except Exception as e:
                self._status = self.Status.FAILED
                raise e

    def load(self, session, node_process, current_node, status):
        """
        Reload the instance; used to restore the running state after an
        instance migration.

        :param object session: runtime session information
        :param dict node_process: per-node processing status
        :param str current_node: current running node
        :param str status: current running status
        :return: no return
        :rtype: None
        """
        self._session = session
        self._nodes_process = node_process
        self._current_node = current_node
        self._status = status

    def dump(self):
        """
        Produce the dynamic runtime information of this instance.

        :return: runtime information
        :rtype: dict
        """
        attribute = {
            "status": self._status,
            "current_node": self._current_node,
            "nodes_process": self._nodes_process,
            "session": self._session}
        return attribute
class StateMachine(BaseGraph):
    """
    State-machine execution mode.
    """

    def run_next(self):
        """
        Perform one state transition.

        .. Note:: In the state-machine model each state returns a definite
            successor state after processing, which is executed directly; if
            the returned state does not exist, an exception is raised.

        :return: no return
        :rtype: None
        :raise ECheckFailed: check failed
        :raise EUnknownNode: unknown node
        """
        state = self.get_node(self._current_node)
        # a finished, non-reentrant state must not be executed again
        if not state.reentrance and self._nodes_process[state.name]:
            raise exception.ECheckFailed(
                "node:{} is finished and not reentrance".format(state.name))
        ret = state.check(self._session, self._current_node,
                          self._nodes_process)
        log.info("node {} check ret:{}".format(self._current_node, ret))
        if ret:
            self._nodes_process[state.name] = True
            current_state = state.process(self._session, self._current_node,
                                          self._nodes_process)
            log.info("node process finished, next node:{}".format(
                current_state))
            if current_state == self._ARK_NODE_END:
                # sentinel: the whole flow is done
                self._current_node = current_state
                self._status = self.Status.FINISHED
                return
            elif current_state not in self._nodes_process:
                raise exception.EUnknownNode(
                    "return state[{}] unkown".format(current_state))
            else:
                self._current_node = current_state
        else:
            raise exception.ECheckFailed(
                "node:{} check failed".format(state.name))
class DependencyFlow(BaseGraph):
    """
    Dependency-flow (workflow) execution mode.
    """

    def run_next(self):
        """
        Perform one state transition.

        .. Note:: In the workflow model the successor of a finished state is
            not fixed (a state may only *suggest* the next one to run), so
            every node defines its own ``check`` method; the first node whose
            check passes is the one processed.

        .. Note:: Traversal starts from the node suggested by the previous
            transition (or, if none was suggested, from the node after the
            current one), wrapping around the node list, to improve hit rate.

        :return: no return
        :rtype: None
        """
        node = self.get_node(self._current_node)
        index = self._nodes.index(node)
        # list(range(...)) keeps this working on both Python 2 and 3; the
        # original bare range(...).extend(...) breaks on Python 3, where
        # range is a lazy object without an extend() method.
        index_list = list(range(index, len(self._nodes)))
        index_list.extend(range(0, index))
        for i in index_list:
            node = self._nodes[i]
            if not node.reentrance and self._nodes_process[node.name]:
                # finished, non-reentrant nodes are skipped
                continue
            else:
                ret = node.check(self._session, self._current_node,
                                 self._nodes_process)
                log.info("node {} check ret:{}".format(self._current_node, ret))
                if ret:
                    self._nodes_process[node.name] = True
                    current_node = node.process(
                        self._session, self._current_node, self._nodes_process)
                    log.info("node process finished, suggest next "
                             "node:{}".format(current_node))
                    if current_node == self._ARK_NODE_END:
                        self._status = self.Status.FINISHED
                    elif current_node not in self._nodes_process:
                        # no valid suggestion: continue from the next node
                        self._current_node = self._nodes[
                            (i + 1) % len(self._nodes)].name
                    else:
                        self._current_node = current_node
                    return
                else:
                    continue
class Node(object):
    """
    Base class for state machine nodes.
    """

    def __init__(self, name, reentrance=False):
        """
        Initializer.

        :param str name: node name
        :param bool reentrance: whether the node is re-entrant, i.e. whether
            executing it is idempotent (repeatable); defaults to False
        """
        self.__name = name
        self.__reentrance = reentrance

    @property
    def name(self):
        """
        Node name accessor.

        :return: node name
        :rtype: str
        """
        return self.__name

    @property
    def reentrance(self):
        """
        Whether the node is re-entrant, i.e. whether executing it is
        idempotent (repeatable). When the state machine advances (or when a
        migrated robot instance restores its running state) this flag decides
        whether the state may be executed again.

        .. Note:: As a rule of thumb, read operations are re-entrant while
            write operations are not; e.g. submitting a task is not
            re-entrant, while querying task status is. "Re-entrant or not"
            is therefore an important dimension when partitioning states.

        :return: True or False
        :rtype: bool
        """
        return self.__reentrance

    def check(self, session, current_node, nodes_process):
        """
        Node check interface.

        :param object session: state machine runtime information
        :param str current_node: current node
        :param dict nodes_process: per-node processing status
        :return: whether the check passed
        :rtype: bool
        :raises ENotImplement: interface not implemented
        """
        raise exception.ENotImplement()

    def process(self, session, current_node, nodes_process):
        """
        Node processing interface.

        :param object session: state machine runtime information
        :param str current_node: current node
        :param dict nodes_process: per-node processing status
        :return: name of the next node
        :raises ENotImplement: interface not implemented
        """
        raise exception.ENotImplement()
class State(Node):
    """
    Concrete node implementation for the state-machine model.
    """

    def check(self, session, current_node, nodes_process):
        """
        Node check. A state machine yields exactly one definite successor per
        transition, so a state is ready to run precisely when it is the
        current node; the check defaults to that comparison.

        :param object session: runtime session information
        :param str current_node: current node name
        :param dict nodes_process: per-node processing status
        :return: whether the check passed
        :rtype: bool
        """
        return current_node == self.name
| 27.599119 | 102 | 0.553312 |
07e62d47a5940ee4428f668904364bce26f29c7a | 867 | py | Python | app/app.py | cipheraxat/Deploy-Models-with-TensorFlow-Serving-and-Flask | 8786ec8a167f1f36a77af2c4375c2635316549ed | [
"MIT"
] | 1 | 2020-06-15T05:40:17.000Z | 2020-06-15T05:40:17.000Z | app/app.py | cipheraxat/Deploy-Models-with-TensorFlow-Serving-and-Flask | 8786ec8a167f1f36a77af2c4375c2635316549ed | [
"MIT"
] | null | null | null | app/app.py | cipheraxat/Deploy-Models-with-TensorFlow-Serving-and-Flask | 8786ec8a167f1f36a77af2c4375c2635316549ed | [
"MIT"
] | 1 | 2021-05-16T18:04:56.000Z | 2021-05-16T18:04:56.000Z | from flask import Flask, render_template, url_for, request, redirect
from flask_bootstrap import Bootstrap
import os
import inference
# Application setup: create the Flask app and attach Bootstrap styling.
app = Flask(__name__)
Bootstrap(app)


"""
Routes
"""
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render the upload form; on POST, save the uploaded image under
    ``static/`` and show the model's predicted class for it."""
    if request.method == 'POST':
        uploaded_file = request.files['file']
        if uploaded_file.filename != '':
            # basename() strips any directory components from the
            # client-supplied filename, preventing path traversal
            # outside the 'static' directory
            safe_name = os.path.basename(uploaded_file.filename)
            image_path = os.path.join('static', safe_name)
            uploaded_file.save(image_path)
            class_name = inference.get_prediction(image_path)
            print('CLASS NAME=', class_name)
            result = {
                'class_name': class_name,
                'image_path': image_path,
            }
            return render_template('show.html', result=result)
    # GET, or POST with no file selected
    return render_template('index.html')
if __name__ == '__main__':
    # Development server only; debug=True must not be used in production.
    app.run(debug=True)
| 27.09375 | 71 | 0.622837 |
67f471c3ef21146afbbccd4ad32fe21b5d1d7f6f | 1,025 | py | Python | auth_backend/resources/interfaces.py | ZhuoZhuoCrayon/bk-sops | d1475d53c19729915727ce7adc24e3226f15e332 | [
"Apache-2.0"
] | 1 | 2020-08-16T09:21:58.000Z | 2020-08-16T09:21:58.000Z | auth_backend/resources/interfaces.py | ZhuoZhuoCrayon/bk-sops | d1475d53c19729915727ce7adc24e3226f15e332 | [
"Apache-2.0"
] | null | null | null | auth_backend/resources/interfaces.py | ZhuoZhuoCrayon/bk-sops | d1475d53c19729915727ce7adc24e3226f15e332 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from __future__ import absolute_import, unicode_literals
from abc import ABCMeta
import six
class InstanceIterableResource(six.with_metaclass(ABCMeta, object)):
def count(self):
raise NotImplementedError()
def slice(self, start, end):
raise NotImplementedError()
| 37.962963 | 115 | 0.776585 |
e0cad48c382ff53262551491e095f15ea01e9884 | 22,724 | py | Python | Berkeley-Pacman-Project-3/utils.py | nassosanagn/Berkeley-Pacman-Projects | 392bff2d31ff4ca1785f46f9bc0d9933ac317ee7 | [
"MIT"
] | 1 | 2021-09-08T17:50:28.000Z | 2021-09-08T17:50:28.000Z | Berkeley-Pacman-Project-3/utils.py | nassosanagn/Berkeley-Pacman-Projects | 392bff2d31ff4ca1785f46f9bc0d9933ac317ee7 | [
"MIT"
] | null | null | null | Berkeley-Pacman-Project-3/utils.py | nassosanagn/Berkeley-Pacman-Projects | 392bff2d31ff4ca1785f46f9bc0d9933ac317ee7 | [
"MIT"
] | null | null | null | """Provides some utilities widely used by other modules"""
import bisect
import collections
import collections.abc
import functools
import heapq
import operator
import os.path
import random
from itertools import chain, combinations
from statistics import mean

import numpy as np
# ______________________________________________________________________________
# Functions on Sequences and Iterables
def sequence(iterable):
    """Converts iterable to sequence, if it is not already one."""
    if isinstance(iterable, collections.abc.Sequence):
        return iterable
    return tuple([iterable])
def remove_all(item, seq):
    """Return a copy of seq (str, set, or sequence) with all occurrences
    of item removed.

    Unlike bare ``set.remove``, a missing item is not an error; ``discard``
    keeps the set branch consistent with the tolerant string and list
    branches (the original raised KeyError when item was absent).
    """
    if isinstance(seq, str):
        return seq.replace(item, '')
    elif isinstance(seq, set):
        rest = seq.copy()
        rest.discard(item)  # tolerate an absent item, like the other branches
        return rest
    else:
        return [x for x in seq if x != item]
def unique(seq):
    """Remove duplicate elements from seq (hashable elements); the order
    of the result is arbitrary."""
    distinct = set(seq)
    return list(distinct)
def count(seq):
    """Count the items in seq that are interpreted as true (truthy)."""
    return sum(1 for item in seq if item)
def multimap(items):
    """Given (key, val) pairs, return {key: [val, ...], ...}."""
    grouped = {}
    for key, val in items:
        grouped.setdefault(key, []).append(val)
    return grouped
def multimap_items(mmap):
    """Yield every (key, val) pair stored in the multimap."""
    for key, vals in mmap.items():
        yield from ((key, val) for val in vals)
def product(numbers):
    """Return the product of the numbers, e.g. product([2, 3, 10]) == 60"""
    return functools.reduce(operator.mul, numbers, 1)
def first(iterable, default=None):
    """Return the first element of an iterable, or default if it is empty."""
    for item in iterable:
        return item
    return default
def is_in(elt, seq):
    """Similar to (elt in seq), but compares with 'is' (identity), not '=='."""
    for candidate in seq:
        if candidate is elt:
            return True
    return False
def mode(data):
    """Return the most common data item (any one of them on ties)."""
    (item, _count), = collections.Counter(data).most_common(1)
    return item
def power_set(iterable):
    """power_set([1,2,3]) --> (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)

    Note: the empty subset is deliberately excluded.
    """
    elements = list(iterable)
    subsets = []
    for size in range(1, len(elements) + 1):
        subsets.extend(combinations(elements, size))
    return subsets
def extend(s, var, val):
    """Copy dict s and extend it by setting var to val; return the copy."""
    copy = dict(s)
    copy[var] = val
    return copy
def flatten(seqs):
    """Concatenate a sequence of lists into a single flat list."""
    return [item for seq in seqs for item in seq]
# ______________________________________________________________________________
# argmin and argmax
def identity(x):
    """Identity function; the default key for the argmin/argmax helpers.

    A plain ``def`` instead of a named lambda (PEP 8 E731): same behavior,
    but with a proper ``__name__`` for tracebacks and picklability.
    """
    return x
def argmin_random_tie(seq, key=identity):
    """Return a minimum element of seq; ties are broken at random by
    shuffling before taking the minimum."""
    candidates = shuffled(seq)
    return min(candidates, key=key)
def argmax_random_tie(seq, key=identity):
"""Return an element with highest fn(seq[i]) score; break ties at random."""
return max(shuffled(seq), key=key)
def shuffled(iterable):
"""Randomly shuffle a copy of iterable."""
items = list(iterable)
random.shuffle(items)
return items
# ______________________________________________________________________________
# Statistical and mathematical functions
def histogram(values, mode=0, bin_function=None):
    """Return a list of (value, count) pairs, summarizing the input values.
    Sorted by increasing value, or if mode=1, by decreasing count (count
    ties broken by decreasing value). If bin_function is given, map it
    over values first."""
    if bin_function:
        values = map(bin_function, values)
    bins = collections.Counter(values)
    if mode:
        return sorted(bins.items(), key=lambda pair: (pair[1], pair[0]), reverse=True)
    return sorted(bins.items())
def dot_product(x, y):
    """Return the sum of the element-wise product of vectors x and y."""
    total = 0
    for xi, yi in zip(x, y):
        total += xi * yi
    return total
def element_wise_product(x, y):
    """Element-wise (Hadamard) product of two equal-length vectors,
    returned as a NumPy array."""
    assert len(x) == len(y)  # shapes must agree
    return np.multiply(x, y)
def matrix_multiplication(x, *y):
    """Matrix-multiply x by each of the matrices in *y, left to right.
    With no extra matrices, x itself is returned."""
    return functools.reduce(np.matmul, y, x)
def vector_add(a, b):
    """Component-wise addition of two vectors, returned as a tuple."""
    return tuple(ai + bi for ai, bi in zip(a, b))
def scalar_vector_product(x, y):
    """Return the vector y scaled by the scalar x, as a NumPy array."""
    return np.multiply(x, y)


def probability(p):
    """Return True with probability p (p in [0, 1])."""
    return random.uniform(0.0, 1.0) < p
def weighted_sample_with_replacement(n, seq, weights):
    """Pick n samples from seq at random, with replacement, with the
    probability of each element in proportion to its corresponding
    weight."""
    draw = weighted_sampler(seq, weights)
    return [draw() for _ in range(n)]


def weighted_sampler(seq, weights):
    """Return a zero-argument function that samples seq weighted by weights."""
    cumulative = []
    running = 0
    for w in weights:
        running = running + w
        cumulative.append(running)
    # Draw a point on the cumulative-weight line and locate its bucket.
    return lambda: seq[bisect.bisect(cumulative, random.uniform(0, cumulative[-1]))]
def weighted_choice(choices):
    """A weighted version of random.choice over (choice, weight) pairs;
    returns the selected (choice, weight) pair."""
    # NOTE: should be replaced by random.choices if we port to Python 3.6
    total = sum(weight for _, weight in choices)
    r = random.uniform(0, total)
    cumulative = 0
    for choice, weight in choices:
        cumulative += weight
        if cumulative >= r:
            return choice, weight
def rounder(numbers, d=4):
    """Round a single number, or any (possibly nested) sequence of numbers,
    to d decimal places, preserving the container type."""
    if isinstance(numbers, (int, float)):
        return round(numbers, d)
    container_type = type(numbers)  # list, set, tuple, etc.
    return container_type(rounder(n, d) for n in numbers)
def num_or_str(x):  # TODO: rename as `atom`
    """Convert a string to an int if possible, else a float, else return
    it stripped of surrounding whitespace."""
    for convert in (int, float):
        try:
            return convert(x)
        except ValueError:
            pass
    return str(x).strip()
# L2 (straight-line) distance between two equal-length vectors.
def euclidean_distance(x, y):
    return np.sqrt(sum((_x - _y) ** 2 for _x, _y in zip(x, y)))
# L1 (taxicab) distance.
def manhattan_distance(x, y):
    return sum(abs(_x - _y) for _x, _y in zip(x, y))
# Number of positions at which corresponding elements differ.
def hamming_distance(x, y):
    return sum(_x != _y for _x, _y in zip(x, y))
# Binary cross-entropy: x holds targets, y holds predicted probabilities
# (each y element must lie strictly in (0, 1) or np.log diverges).
def cross_entropy_loss(x, y):
    return (-1.0 / len(x)) * sum(_x * np.log(_y) + (1 - _x) * np.log(1 - _y) for _x, _y in zip(x, y))
# Mean of squared element-wise differences.
def mean_squared_error_loss(x, y):
    return (1.0 / len(x)) * sum((_x - _y) ** 2 for _x, _y in zip(x, y))
# Root-mean-square error (sqrt of ms_error below).
def rms_error(x, y):
    return np.sqrt(ms_error(x, y))
def ms_error(x, y):
    return mean((_x - _y) ** 2 for _x, _y in zip(x, y))
# Mean absolute error.
def mean_error(x, y):
    return mean(abs(_x - _y) for _x, _y in zip(x, y))
# Fraction of positions at which the elements differ.
def mean_boolean_error(x, y):
    return mean(_x != _y for _x, _y in zip(x, y))
def normalize(dist):
    """Scale the numbers in dist so that they sum to 1.0.
    Dicts are normalized in place (and returned); sequences yield a new list."""
    if isinstance(dist, dict):
        total = sum(dist.values())
        for key in dist:
            dist[key] /= total
            assert 0 <= dist[key] <= 1  # probabilities must be between 0 and 1
        return dist
    total = sum(dist)
    return [n / total for n in dist]
def random_weights(min_value, max_value, num_weights):
    """Draw num_weights uniform samples from [min_value, max_value]."""
    return [random.uniform(min_value, max_value) for _ in range(num_weights)]


def sigmoid(x):
    """Logistic sigmoid activation: 1 / (1 + e**-x)."""
    return 1 / (1 + np.exp(-x))


def sigmoid_derivative(value):
    """Derivative of the sigmoid, expressed in terms of its output value."""
    return value * (1 - value)


def elu(x, alpha=0.01):
    """Exponential linear unit activation."""
    if x > 0:
        return x
    return alpha * (np.exp(x) - 1)


def elu_derivative(value, alpha=0.01):
    """Derivative of elu."""
    if value > 0:
        return 1
    return alpha * np.exp(value)


def tanh(x):
    """Hyperbolic-tangent activation."""
    return np.tanh(x)


def tanh_derivative(value):
    """Derivative of tanh, expressed in terms of its output value."""
    return 1 - (value ** 2)


def leaky_relu(x, alpha=0.01):
    """Leaky rectified linear unit activation."""
    if x > 0:
        return x
    return alpha * x


def leaky_relu_derivative(value, alpha=0.01):
    """Derivative of leaky_relu."""
    if value > 0:
        return 1
    return alpha


def relu(x):
    """Rectified linear unit activation: max(0, x)."""
    return max(0, x)


def relu_derivative(value):
    """Derivative of relu (0 at the kink, matching the original convention)."""
    if value > 0:
        return 1
    return 0


def step(x):
    """Heaviside step activation: 1 for x >= 0, else 0."""
    if x >= 0:
        return 1
    return 0


def gaussian(mean, st_dev, x):
    """Probability density of x under a normal distribution N(mean, st_dev**2)."""
    return 1 / (np.sqrt(2 * np.pi) * st_dev) * np.e ** (-0.5 * (float(x - mean) / st_dev) ** 2)
def linear_kernel(x, y=None):
    """Linear kernel: the Gram matrix of dot products x @ y.T (y defaults to x)."""
    y = x if y is None else y
    return np.dot(x, y.T)


def polynomial_kernel(x, y=None, degree=2.0):
    """Polynomial kernel: (1 + x @ y.T) ** degree (y defaults to x)."""
    y = x if y is None else y
    return (1.0 + np.dot(x, y.T)) ** degree


def rbf_kernel(x, y=None, gamma=None):
    """Radial-basis function kernel (aka squared-exponential kernel)."""
    y = x if y is None else y
    if gamma is None:
        gamma = 1.0 / x.shape[1]  # 1.0 / n_features
    # ||xi - yj||**2 expanded as -2*xi.yj + ||xi||**2 + ||yj||**2.
    squared_distances = (-2.0 * np.dot(x, y.T) +
                         np.sum(x * x, axis=1).reshape((-1, 1)) + np.sum(y * y, axis=1).reshape((1, -1)))
    return np.exp(-gamma * squared_distances)
# ______________________________________________________________________________
# Grid Functions
orientations = EAST, NORTH, WEST, SOUTH = [(1, 0), (0, 1), (-1, 0), (0, -1)]
turns = LEFT, RIGHT = (+1, -1)


def turn_heading(heading, inc, headings=orientations):
    """Rotate heading by inc positions within headings, wrapping around."""
    return headings[(headings.index(heading) + inc) % len(headings)]


def turn_right(heading):
    """Heading after a 90-degree clockwise turn."""
    return turn_heading(heading, RIGHT)


def turn_left(heading):
    """Heading after a 90-degree counter-clockwise turn."""
    return turn_heading(heading, LEFT)
def distance(a, b):
    """The straight-line distance between two (x, y) points."""
    (ax, ay), (bx, by) = a, b
    return np.hypot((ax - bx), (ay - by))


def distance_squared(a, b):
    """The square of the straight-line distance between two (x, y) points."""
    (ax, ay), (bx, by) = a, b
    return (ax - bx) ** 2 + (ay - by) ** 2
# ______________________________________________________________________________
# Misc Functions
class injection:
    """Dependency injection of temporary values for global functions/classes/etc.
    E.g., `with injection(DataBase=MockDataBase): ...`"""
    def __init__(self, **kwds):
        # Names (keys) and replacement values to patch into this module's globals.
        self.new = kwds
    def __enter__(self):
        # Save the current bindings, then install the replacements.
        # Raises KeyError if a name is not already a global of this module.
        self.old = {v: globals()[v] for v in self.new}
        globals().update(self.new)
    def __exit__(self, type, value, traceback):
        # Restore the saved bindings (also on exception; nothing is suppressed).
        globals().update(self.old)
def memoize(fn, slot=None, maxsize=32):
    """Memoize fn: make it remember the computed value for any argument list.
    If slot is specified, the result is cached as that attribute on the
    first argument; otherwise functools.lru_cache (bounded by maxsize)
    caches by the full argument list."""
    if slot:
        def memoized_fn(obj, *args):
            if hasattr(obj, slot):
                return getattr(obj, slot)
            val = fn(obj, *args)
            setattr(obj, slot, val)
            return val
        return memoized_fn

    @functools.lru_cache(maxsize=maxsize)
    def memoized_fn(*args):
        return fn(*args)
    return memoized_fn
def name(obj):
    """Try to find some reasonable name for the object: its 'name' attribute,
    its __name__, its class's __name__, or finally str(obj)."""
    for attr in ('name', '__name__'):
        candidate = getattr(obj, attr, 0)
        if candidate:
            return candidate
    return getattr(getattr(obj, '__class__', 0), '__name__', 0) or str(obj)
def isnumber(x):
    """Is x a number? (Duck-typed: anything supporting __int__.)"""
    return hasattr(x, '__int__')


def issequence(x):
    """Is x an ordered sequence (list, tuple, str, ...)?"""
    return isinstance(x, collections.abc.Sequence)
def print_table(table, header=None, sep=' ', numfmt='{}'):
    """Print a list of lists as a table, so that columns line up nicely.
    header, if specified, will be printed as the first row.
    numfmt is the format for all numbers; you might want e.g. '{:.2f}'.
    (If you want different formats in different columns,
    don't use print_table.) sep is the separator between columns.

    NOTE(review): when header is given, it is inserted into the caller's
    table list in place — the argument is mutated.
    """
    # Column justification is chosen from the types in the FIRST data row:
    # numbers right-justify, everything else left-justifies.
    justs = ['rjust' if isnumber(x) else 'ljust' for x in table[0]]
    if header:
        table.insert(0, header)
    table = [[numfmt.format(x) if isnumber(x) else x for x in row]
             for row in table]
    # Width of each column = longest stringified cell in that column.
    sizes = list(map(lambda seq: max(map(len, seq)), list(zip(*[map(str, row) for row in table]))))
    for row in table:
        print(sep.join(getattr(str(x), j)(size) for (j, size, x) in zip(justs, sizes, row)))
def open_data(name, mode='r'):
    """Open the file `name` from the 'aima-data' directory next to this module."""
    aima_root = os.path.dirname(__file__)
    aima_file = os.path.join(aima_root, 'aima-data', name)
    return open(aima_file, mode=mode)
def failure_test(algorithm, tests):
    """Grade the given algorithm by the fraction of tests it fails.
    Most algorithms have arbitrary output on correct execution, which is
    difficult to check, but many output something particular on failure
    (e.g. False or None). tests is a list of (values, failure_output)
    pairs; the returned score is the mean of the per-test failures (0 is
    a perfect score)."""
    failures = [int(algorithm(x) != y) for x, y in tests]
    return mean(failures)
# ______________________________________________________________________________
# Expressions
# See https://docs.python.org/3/reference/expressions.html#operator-precedence
# See https://docs.python.org/3/reference/datamodel.html#special-method-names
class Expr:
    """A mathematical expression with an operator and 0 or more arguments.
    op is a str like '+' or 'sin'; args are Expressions.
    Expr('x') or Symbol('x') creates a symbol (a nullary Expr).
    Expr('-', x) creates a unary; Expr('+', x, 1) creates a binary."""
    def __init__(self, op, *args):
        # op is normalized to str; args is the tuple of sub-expressions.
        self.op = str(op)
        self.args = args
    # Operator overloads: each Python operator on an Expr builds a new Expr
    # tree node rather than evaluating anything.
    def __neg__(self):
        return Expr('-', self)
    def __pos__(self):
        return Expr('+', self)
    def __invert__(self):
        return Expr('~', self)
    def __add__(self, rhs):
        return Expr('+', self, rhs)
    def __sub__(self, rhs):
        return Expr('-', self, rhs)
    def __mul__(self, rhs):
        return Expr('*', self, rhs)
    def __pow__(self, rhs):
        return Expr('**', self, rhs)
    def __mod__(self, rhs):
        return Expr('%', self, rhs)
    def __and__(self, rhs):
        return Expr('&', self, rhs)
    def __xor__(self, rhs):
        return Expr('^', self, rhs)
    def __rshift__(self, rhs):
        return Expr('>>', self, rhs)
    def __lshift__(self, rhs):
        return Expr('<<', self, rhs)
    def __truediv__(self, rhs):
        return Expr('/', self, rhs)
    def __floordiv__(self, rhs):
        return Expr('//', self, rhs)
    def __matmul__(self, rhs):
        return Expr('@', self, rhs)
    def __or__(self, rhs):
        """Allow both P | Q, and P |'==>'| Q."""
        # A non-Expression rhs (e.g. the string '==>') starts the two-step
        # infix-operator protocol; see PartialExpr.
        if isinstance(rhs, Expression):
            return Expr('|', self, rhs)
        else:
            return PartialExpr(rhs, self)
    # Reverse operator overloads: used when the left operand is a plain
    # number, e.g. 1 + x.
    def __radd__(self, lhs):
        return Expr('+', lhs, self)
    def __rsub__(self, lhs):
        return Expr('-', lhs, self)
    def __rmul__(self, lhs):
        return Expr('*', lhs, self)
    def __rdiv__(self, lhs):
        return Expr('/', lhs, self)
    def __rpow__(self, lhs):
        return Expr('**', lhs, self)
    def __rmod__(self, lhs):
        return Expr('%', lhs, self)
    def __rand__(self, lhs):
        return Expr('&', lhs, self)
    def __rxor__(self, lhs):
        return Expr('^', lhs, self)
    def __ror__(self, lhs):
        return Expr('|', lhs, self)
    def __rrshift__(self, lhs):
        return Expr('>>', lhs, self)
    def __rlshift__(self, lhs):
        return Expr('<<', lhs, self)
    def __rtruediv__(self, lhs):
        return Expr('/', lhs, self)
    def __rfloordiv__(self, lhs):
        return Expr('//', lhs, self)
    def __rmatmul__(self, lhs):
        return Expr('@', lhs, self)
    def __call__(self, *args):
        """Call: if 'f' is a Symbol, then f(0) == Expr('f', 0)."""
        if self.args:
            raise ValueError('Can only do a call for a Symbol, not an Expr')
        else:
            return Expr(self.op, *args)
    # Equality and repr
    def __eq__(self, other):
        """'x == y' evaluates to True or False; does not build an Expr."""
        return isinstance(other, Expr) and self.op == other.op and self.args == other.args
    def __lt__(self, other):
        # Ordering is lexicographic on the printed form (for stable sorts).
        return isinstance(other, Expr) and str(self) < str(other)
    def __hash__(self):
        # Consistent with __eq__: combines op and args hashes.
        return hash(self.op) ^ hash(self.args)
    def __repr__(self):
        op = self.op
        args = [str(arg) for arg in self.args]
        if op.isidentifier():  # f(x) or f(x, y)
            return '{}({})'.format(op, ', '.join(args)) if args else op
        elif len(args) == 1:  # -x or -(x + 1)
            return op + args[0]
        else:  # (x - y)
            opp = (' ' + op + ' ')
            return '(' + opp.join(args) + ')'
# An 'Expression' is either an Expr or a Number.
# Symbol is not an explicit type; it is any Expr with 0 args.
Number = (int, float, complex)
Expression = (Expr, Number)
def Symbol(name):
    """A Symbol is just an Expr with no args."""
    return Expr(name)
def symbols(names):
    """Return a tuple of Symbols; names is a comma/whitespace delimited str."""
    # Commas are normalized to spaces, so 'x, y' and 'x y' are equivalent.
    return tuple(Symbol(name) for name in names.replace(',', ' ').split())
def subexpressions(x):
    """Yield the subexpressions of an Expression (including x itself)."""
    # Pre-order traversal: the node first, then each argument's subtree.
    yield x
    if isinstance(x, Expr):
        for arg in x.args:
            yield from subexpressions(arg)
def arity(expression):
    """The number of sub-expressions in this expression."""
    if isinstance(expression, Expr):
        return len(expression.args)
    else:  # expression is a number
        return 0
# For operators that are not defined in Python, we allow new InfixOps:
class PartialExpr:
    """Given 'P |'==>'| Q, first form PartialExpr('==>', P), then combine with Q."""
    def __init__(self, op, lhs):
        # op: the infix operator string (e.g. '==>'); lhs: the left operand.
        self.op, self.lhs = op, lhs
    def __or__(self, rhs):
        # Second half of the |'op'| protocol: fold in the right operand.
        return Expr(self.op, self.lhs, rhs)
    def __repr__(self):
        return "PartialExpr('{}', {})".format(self.op, self.lhs)
def expr(x):
    """Shortcut to create an Expression. x is a str in which:
    - identifiers are automatically defined as Symbols.
    - ==> is treated as an infix |'==>'|, as are <== and <=>.
    If x is already an Expression, it is returned unchanged. Example:
    >>> expr('P & Q ==> Q')
    ((P & Q) ==> Q)
    """
    # SECURITY NOTE: this eval()s the (rewritten) input string; undefined
    # identifiers become Symbols via defaultkeydict. Never pass untrusted
    # strings to this function.
    return eval(expr_handle_infix_ops(x), defaultkeydict(Symbol)) if isinstance(x, str) else x
infix_ops = '==> <== <=>'.split()


def expr_handle_infix_ops(x):
    """Given a str, return a new str with ==> replaced by |'==>'|, etc.
    >>> expr_handle_infix_ops('P ==> Q')
    "P |'==>'| Q"
    """
    for op in infix_ops:
        x = x.replace(op, '|{!r}|'.format(op))
    return x
class defaultkeydict(collections.defaultdict):
    """Like defaultdict, but the default_factory is a function of the key.
    >>> d = defaultkeydict(len); d['four']
    4
    """
    def __missing__(self, key):
        # Compute, cache, and return the default for this particular key.
        value = self.default_factory(key)
        self[key] = value
        return value
class hashabledict(dict):
    """A dict that can be used where a hashable object is required.

    NOTE(review): despite the class name, the hash is a constant (1), so
    every instance lands in the same hash bucket and lookups fall back on
    dict equality. This keeps the hash stable while the dict mutates, at
    the cost of hash-table performance.
    """
    def __hash__(self):
        return 1
# ______________________________________________________________________________
# Queues: Stack, FIFOQueue, PriorityQueue
# Stack and FIFOQueue are implemented as list and collection.deque
# PriorityQueue is implemented here
class PriorityQueue:
    """A queue whose pop() returns the item with minimum f(x) (order='min',
    the default) or maximum f(x) (order='max') first.
    Also supports dict-like lookup of an item's priority, membership
    tests, and deletion by item."""
    def __init__(self, order='min', f=lambda x: x):
        if order == 'min':
            self.f = f
        elif order == 'max':  # item with max f(x)
            self.f = lambda x: -f(x)  # will be popped first
        else:
            raise ValueError("Order must be either 'min' or 'max'.")
        self.heap = []
    def append(self, item):
        """Insert item at its correct position."""
        heapq.heappush(self.heap, (self.f(item), item))
    def extend(self, items):
        """Insert each of the given items at its correct position."""
        for each in items:
            self.append(each)
    def pop(self):
        """Pop and return the item with the best f(x) value
        (min or max depending on the order)."""
        if not self.heap:
            raise Exception('Trying to pop from empty PriorityQueue.')
        return heapq.heappop(self.heap)[1]
    def __len__(self):
        """Number of items currently stored."""
        return len(self.heap)
    def __contains__(self, key):
        """Return True if key equals some stored item."""
        return any(item == key for _, item in self.heap)
    def __getitem__(self, key):
        """Return the first priority value associated with key.
        Raises KeyError if key is not present."""
        for value, item in self.heap:
            if item == key:
                return value
        raise KeyError(str(key) + " is not in the priority queue")
    def __delitem__(self, key):
        """Delete the first occurrence of key."""
        for index, (_, item) in enumerate(self.heap):
            if item == key:
                del self.heap[index]
                heapq.heapify(self.heap)  # restore the heap invariant
                return
        raise KeyError(str(key) + " is not in the priority queue")
# ______________________________________________________________________________
# Useful Shorthands
class Bool(int):
    """Just like `bool`, except values display as 'T' and 'F' instead of 'True' and 'False'."""
    def __repr__(self):
        return 'T' if self else 'F'
    __str__ = __repr__


T = Bool(True)
F = Bool(False)
2c90676f6e33b2b16a53c96163796fceb506f952 | 2,142 | py | Python | pytorch_lightning/accelerators/gpu.py | Flash-321/pytorch-lightning | cdb6f979a062a639a6d709a0e1915a07d5ed50f6 | [
"Apache-2.0"
] | 2 | 2021-06-25T08:42:32.000Z | 2021-06-25T08:49:33.000Z | pytorch_lightning/accelerators/gpu.py | Flash-321/pytorch-lightning | cdb6f979a062a639a6d709a0e1915a07d5ed50f6 | [
"Apache-2.0"
] | null | null | null | pytorch_lightning/accelerators/gpu.py | Flash-321/pytorch-lightning | cdb6f979a062a639a6d709a0e1915a07d5ed50f6 | [
"Apache-2.0"
] | null | null | null | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import torch
import pytorch_lightning as pl
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.utilities.exceptions import MisconfigurationException
_log = logging.getLogger(__name__)
class GPUAccelerator(Accelerator):
    """Accelerator for GPU devices."""
    def setup_environment(self) -> None:
        # Validate that the configured root device really is a CUDA device,
        # then make it the current device for this process.
        super().setup_environment()
        if "cuda" not in str(self.root_device):
            raise MisconfigurationException(f"Device should be GPU, got {self.root_device} instead")
        torch.cuda.set_device(self.root_device)
    def setup(self, trainer: "pl.Trainer", model: "pl.LightningModule") -> None:
        """
        Raises:
            MisconfigurationException:
                If the selected device is not GPU.
        """
        # NOTE(review): the MisconfigurationException documented above is
        # actually raised in setup_environment(), not here.
        self.set_nvidia_flags(trainer.local_rank)
        return super().setup(trainer, model)
    def on_train_start(self) -> None:
        # clear cache before training
        torch.cuda.empty_cache()
    @staticmethod
    def set_nvidia_flags(local_rank: int) -> None:
        # set the correct cuda visible devices (using pci order)
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
        # Default CUDA_VISIBLE_DEVICES to every detected GPU if unset.
        all_gpu_ids = ",".join([str(x) for x in range(torch.cuda.device_count())])
        devices = os.getenv("CUDA_VISIBLE_DEVICES", all_gpu_ids)
        _log.info(f"LOCAL_RANK: {local_rank} - CUDA_VISIBLE_DEVICES: [{devices}]")
    def teardown(self) -> None:
        # Move optimizer state back to CPU so GPU memory is freed after fit.
        super().teardown()
        self._move_optimizer_state(torch.device("cpu"))
| 36.305085 | 100 | 0.702614 |
414bb1f2f4907948f108f843e1e22b416c0e89f0 | 7,081 | py | Python | examples/tests/test_loading_mithun_factverification.py | mithunpaul08/transformers | 55d5e0a1d88f0922dc2af3be140e077850c66fee | [
"Apache-2.0"
] | null | null | null | examples/tests/test_loading_mithun_factverification.py | mithunpaul08/transformers | 55d5e0a1d88f0922dc2af3be140e077850c66fee | [
"Apache-2.0"
] | null | null | null | examples/tests/test_loading_mithun_factverification.py | mithunpaul08/transformers | 55d5e0a1d88f0922dc2af3be140e077850c66fee | [
"Apache-2.0"
] | null | null | null | #how to run this file:
#first make sure you are pointing to the expected config file in CONFIG_FILE_TO_TEST_WITH
# if from command line, uncomment the line ./run_tests.sh inside mithun_scripts/run_all.sh and
# from command line do ` bash run_all.sh --epochs_to_run 1 --machine_to_run_on laptop `
#if from IDE like pycharm, run config test_examples_mithun_factverification
#to create a new debug config in pycharm:go to Run edit configurations, add python test, select pytest, then select this file
from unittest import TestCase
import configparser
from dataclasses import dataclass, field
import logging
import os
import sys
import logging
import os
import sys
from dataclasses import dataclass, field
from typing import Callable, Dict, Optional
import git
import numpy as np
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer, EvalPrediction, GlueDataset
from transformers import GlueDataTrainingArguments as DataTrainingArguments
from transformers import (
HfArgumentParser,
Trainer,
TrainingArguments,
glue_compute_metrics,
glue_output_modes,
glue_tasks_num_labels,
set_seed,
)
import math
from transformers import (
HfArgumentParser,
TrainingArguments,
)
from transformers import GlueDataTrainingArguments as DataTrainingArguments
import git
CONFIG_FILE_TO_TEST_WITH="config_combined_cased_load_and_test_trained_model_legendary_voice_1016.py"
SRC_DIRS = [
os.path.join(os.path.dirname(__file__), dirname)
for dirname in ["text-generation", "../text-classification", "language-modeling", "question-answering"]
]
sys.path.extend(SRC_DIRS)
if SRC_DIRS is not None:
import load_trained_model_predict
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
@dataclass
class ModelArguments:
"""
Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
"""
model_name_or_path: str = field(
metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
)
config_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
)
tokenizer_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
)
cache_dir: Optional[str] = field(
default=None, metadata={"help": "Where do you want to store the pretrained models downloaded from s3"}
)
def get_git_info():
    """Return identifying info (id, sha, branch, short sha) for the
    enclosing git repository as a dict of strings.

    Raises git.InvalidGitRepositoryError when not run inside a repo.
    """
    repo = git.Repo(search_parent_directories=True)
    # BUG FIX: the original line ended with a stray trailing comma, which
    # made repo_sha a 1-tuple instead of the sha string expected by
    # rev_parse (and by the "repo_sha" consumers).
    repo_sha = str(repo.head.object.hexsha)
    repo_short_sha = str(repo.git.rev_parse(repo_sha, short=6))
    repo_infos = {
        "repo_id": str(repo),
        "repo_sha": repo_sha,
        "repo_branch": str(repo.active_branch),
        "repo_short_sha": repo_short_sha
    }
    return repo_infos
logger = logging.getLogger(__name__)
def read_and_merge_config_entries():
    """Read CONFIG_FILE_TO_TEST_WITH and flatten every section's entries
    into one command-line-style string of '--key value' tokens."""
    parser = configparser.ConfigParser()
    parser.read(CONFIG_FILE_TO_TEST_WITH)
    assert not len(parser.sections()) == 0
    tokens = []
    for section in parser.sections():
        for key, value in parser.items(section):
            # Boolean config entries of value "True" are emitted as a bare
            # --key flag (no explicit value, until we find a way to pass it
            # as a bool); everything else is emitted as --key value with
            # surrounding double quotes stripped.
            tokens.append("--" + key)
            if value != "True":
                tokens.append(str(value).replace("\"", ""))
    return " ".join(tokens)
def test_run_loading_model():
configs=read_and_merge_config_entries()
print(f"value of configs is {configs}")
configs_split = configs.split()
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
model_args, data_args, training_args = parser.parse_args_into_dataclasses(args=configs_split)
git_details = get_git_info()
log_file_name = git_details['repo_short_sha'] + "_" + (training_args.task_type) + "_" + (
training_args.subtask_type) + "_" + str(model_args.model_name_or_path).replace("-",
"_") + "_" + data_args.task_name + ".log"
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(configs)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO if training_args.local_rank in [-1, 0] else logging.WARN,
filename=log_file_name,
filemode='w'
)
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
logger.warning(
"Process rank: %s, device: %s, n_gpu: %s, distributed training: %s, 16-bits training: %s",
training_args.local_rank,
training_args.device,
training_args.n_gpu,
bool(training_args.local_rank != -1),
training_args.fp16,
)
logger.info("Training/evaluation parameters %s", training_args)
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)
dev_partition_evaluation_result,test_partition_evaluation_result = load_trained_model_predict.run_loading_and_testing( model_args, data_args, training_args)
accuracy_dev_partition = dev_partition_evaluation_result['eval_acc']['in_domain_acc']
fnc_score_test_partition = test_partition_evaluation_result['eval_acc']['cross_domain_fnc_score']
accuracy_test_partition = test_partition_evaluation_result['eval_acc']['cross_domain_acc']
#logger.info(f"value of accuracy_dev_partition={accuracy_dev_partition}")
logger.info(f"value of fnc_score_test_partition={fnc_score_test_partition}")
logger.info(f"value of accuracy_test_partition={accuracy_test_partition}")
# check if the training meets minimum accuracy. note that in laptop we run on a toy data set of size 16 and
# in hpc (high performance computing server) we test on 100 data points. so the threshold accuracy to check
# is different in each case
assert training_args.fever_in_domain_accuracy_on_toy_data_17_datapoints != 1.0
assert training_args.fever_cross_domain_accuracy_on_toy_data_17_datapoints != 1.0
assert training_args.fever_cross_domain_fncscore_on_toy_data_17_datapoints != 1.0
assert accuracy_dev_partition == training_args.fever_in_domain_accuracy_on_toy_data_17_datapoints
assert accuracy_test_partition == training_args.fever_cross_domain_accuracy_on_toy_data_17_datapoints
assert fnc_score_test_partition == training_args.fever_cross_domain_fncscore_on_toy_data_17_datapoints
logger.info("done with fact verification related testing . going to exit")
| 41.409357 | 164 | 0.715436 |
2a79108c44f8b1dbaea335c70717763655725dc2 | 1,808 | py | Python | tests/catalyst/utils/test_config.py | tadejsv/catalyst | 2553ce8fd7cecc025ad88819aea73faf8abb229b | [
"Apache-2.0"
] | 206 | 2018-10-05T19:16:47.000Z | 2019-01-19T21:10:41.000Z | tests/catalyst/utils/test_config.py | tadejsv/catalyst | 2553ce8fd7cecc025ad88819aea73faf8abb229b | [
"Apache-2.0"
] | 20 | 2018-10-07T06:30:49.000Z | 2019-01-17T17:26:15.000Z | tests/catalyst/utils/test_config.py | tadejsv/catalyst | 2553ce8fd7cecc025ad88819aea73faf8abb229b | [
"Apache-2.0"
] | 22 | 2018-10-06T12:34:08.000Z | 2019-01-10T16:00:48.000Z | # flake8: noqa
import io
import json
import numpy as np
# from catalyst.dl.scripts._misc import parse_config_args
from catalyst.utils.config import _load_ordered_yaml
# def test_parse_config_args():
# configuration = {
# "stages": {"one": "uno", "two": "dos", "three": "tres"},
# "key": {"value": "key2"},
# }
# parser = argparse.ArgumentParser()
# parser.add_argument("--command")
# args, uargs = parser.parse_known_args(
# [
# "--command",
# "run",
# "--path=test.yml:str",
# "--stages/zero=cero:str",
# "-C=like:str",
# ]
# )
# configuration, args = parse_config_args(
# config=configuration, args=args, unknown_args=uargs
# )
# assert args.command == "run"
# assert args.path == "test.yml"
# assert configuration.get("stages") is not None
# assert "zero" in configuration["stages"]
# assert configuration["stages"]["zero"] == "cero"
# assert configuration.get("args") is not None
# assert configuration["args"]["path"] == "test.yml"
# assert configuration["args"]["C"] == "like"
# assert configuration["args"]["command"] == "run"
# for key, value in args._get_kwargs():
# v = configuration["args"].get(key)
# assert v is not None
# assert v == value
def test_parse_numbers():
    """Check that _load_ordered_yaml parses int, float, scientific-notation
    and negative numbers to (approximately) their original values."""
    configuration = {
        "a": 1,
        "b": 20,
        "c": 303e5,
        "d": -4,
        "e": -50,
        "f": -666e7,
        "g": 0.35,
        "h": 7.35e-5,
        "k": 8e-10,
    }
    # JSON is a subset of YAML, so the dumped buffer is valid YAML input.
    buffer = io.StringIO()
    json.dump(configuration, buffer)
    buffer.seek(0)
    yaml_config = _load_ordered_yaml(buffer)
    for key, item in configuration.items():
        assert np.isclose(yaml_config[key], item)
| 26.202899 | 66 | 0.554204 |
918ddb2d2dadf102a6712184d0957e7212e79b49 | 49 | py | Python | gym_puddle/envs/__init__.py | netanelbi/gym-puddle | 39ab79433d14cadc911f1f7732782b60e690578d | [
"MIT"
] | 9 | 2018-05-09T14:07:08.000Z | 2021-08-20T07:30:27.000Z | gym_puddle/envs/__init__.py | netanelbi/gym-puddle | 39ab79433d14cadc911f1f7732782b60e690578d | [
"MIT"
] | null | null | null | gym_puddle/envs/__init__.py | netanelbi/gym-puddle | 39ab79433d14cadc911f1f7732782b60e690578d | [
"MIT"
] | 2 | 2020-03-19T14:01:42.000Z | 2021-02-17T11:42:12.000Z | from gym_puddle.envs.puddle_env import PuddleEnv
| 24.5 | 48 | 0.877551 |
a11f03a62a6ad93bd15aeb392268fb2be54b75dc | 397 | py | Python | codelieche/wsgi.py | codelieche/codelieche.com | 8f18a9f4064af81a6dd0203fbaa138565065fff5 | [
"MIT"
] | 2 | 2017-06-11T16:41:48.000Z | 2017-06-14T00:32:27.000Z | codelieche/wsgi.py | codelieche/codelieche.com | 8f18a9f4064af81a6dd0203fbaa138565065fff5 | [
"MIT"
] | 13 | 2020-02-11T21:33:40.000Z | 2022-03-11T23:12:16.000Z | codelieche/wsgi.py | codelieche/codelieche.com | 8f18a9f4064af81a6dd0203fbaa138565065fff5 | [
"MIT"
] | null | null | null | """
WSGI config for codelieche project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'codelieche.settings')
application = get_wsgi_application()
| 23.352941 | 78 | 0.788413 |
96054a3e6a883334fd0f9289f9d4a9bd56ae5bfa | 862 | py | Python | z3py/examples/list2.py | rainoftime/rainoftime.github.io | d6316f153db0ed96309d81a5f32af9887c6f1a9a | [
"MIT"
] | 1 | 2021-08-01T07:28:03.000Z | 2021-08-01T07:28:03.000Z | z3py/examples/list2.py | rainoftime/rainoftime.github.io | d6316f153db0ed96309d81a5f32af9887c6f1a9a | [
"MIT"
] | null | null | null | z3py/examples/list2.py | rainoftime/rainoftime.github.io | d6316f153db0ed96309d81a5f32af9887c6f1a9a | [
"MIT"
] | null | null | null | from z3 import *
# Create a solver.
s = Solver()
List = Datatype('List')
# Declare constructors.
List.declare('cons', ('car', IntSort()), ('cdr', List))
List.declare('nil')
# Create the datatype.
List = List.create()
# Create shorthands.
cons = List.cons
car = List.car
cdr = List.cdr
nil = List.nil
# Create an uninterpreted function 'atom'.
atom = Function('atom', List, BoolSort())
# Assert axioms for atom.
x = Const('x', List)
s.add(ForAll([x], Implies(Not(atom(x)), cons(car(x), cdr(x)) == x)))
x = Int('x')
y = Const('x', List)
s.add(ForAll([x, y], Not(atom(cons(x, y)))))
# Construct the example formula.
x = Const('x', List)
y = Const('y', List)
f = Function('f', List, List)
# Add assertions.
s.add(car(x) == car(y),
cdr(x) == cdr(y),
f(x) != f(y),
Not(atom(x)),
Not(atom(y)))
# Check satisfiability.
print s.check()
| 18.73913 | 68 | 0.605568 |
845289e258f4daf27b60f226cd7a19c2d312ecf2 | 235 | py | Python | Model/Critic/critic.py | swergio/Agent_A2CGAN | 2b284e8a9edae2c7bb177979701f114447631c76 | [
"MIT"
] | 3 | 2017-12-09T08:43:15.000Z | 2018-09-26T05:33:55.000Z | Model/Critic/critic.py | swergio/Agent_A2CGAN | 2b284e8a9edae2c7bb177979701f114447631c76 | [
"MIT"
] | 1 | 2021-09-07T22:05:54.000Z | 2021-09-07T22:05:54.000Z | Model/Critic/critic.py | swergio/Agent_A2CGAN | 2b284e8a9edae2c7bb177979701f114447631c76 | [
"MIT"
] | null | null | null | import tensorflow as tf
from Model.Utility.layers import fc
def Critic(X, reuse = False):
    """Linear value-function head.

    Applies a single fully-connected unit with an identity activation to the
    input features, inside the "valuefun" variable scope.

    Args:
        X: batch of input feature tensors.
        reuse: whether to reuse variables in the "valuefun" scope.

    Returns:
        A pair ``(v0, vf)``: ``vf`` is the raw fc output (one unit per
        example — presumably shape (batch, 1); confirm against `fc`), and
        ``v0`` is its first column, i.e. the per-example value estimate.
    """
    with tf.variable_scope("valuefun", reuse=reuse):
        value_out = fc(X, 'v', 1, act=lambda z: z)
        value_scalar = value_out[:, 0]
    return value_scalar, value_out
31d05a0f30de55416a9b6d8923de2f0c3de61948 | 8,678 | py | Python | tensorflow_datasets/scripts/download_and_prepare.py | Erik-Tran/datasets | f30d8b975c02e1947f8b74fb790abb9d3ba89f64 | [
"Apache-2.0"
] | 1 | 2020-10-11T19:15:49.000Z | 2020-10-11T19:15:49.000Z | tensorflow_datasets/scripts/download_and_prepare.py | Erik-Tran/datasets | f30d8b975c02e1947f8b74fb790abb9d3ba89f64 | [
"Apache-2.0"
] | null | null | null | tensorflow_datasets/scripts/download_and_prepare.py | Erik-Tran/datasets | f30d8b975c02e1947f8b74fb790abb9d3ba89f64 | [
"Apache-2.0"
] | 1 | 2022-03-14T16:17:53.000Z | 2022-03-14T16:17:53.000Z | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Script to call download_and_prepare on DatasetBuilder.
Standalone script to generate specific dataset(s). This can be
used if you want to separate download/generation of dataset from actual usage.
By default, the dataset is generated in the default location
(~/tensorflow_datasets), which the same as when calling `tfds.load()`.
Instructions:
```
python -m tensorflow_datasets.scripts.download_and_prepare \
--datasets=cifar10
```
If you have your dataset defined outside of `tensorflow_datasets`, use
`--module_import="path.to.my.dataset_module"` to have your Python module
containing your `DatasetBuilder` definition imported.
"""
import importlib
import os
import pdb
import time
from absl import app
from absl import flags
from absl import logging
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
import termcolor
# Command-line flags controlling which datasets are generated and where the
# artifacts (downloads, extractions, prepared data) are placed.
FLAGS = flags.FLAGS
DEFAULT_DATA_DIR = os.path.expanduser(os.path.join("~", "tensorflow_datasets"))

# Dataset-selection flags
flags.DEFINE_string("datasets", "",
                    "Comma separated list of datasets to build, defaults to all"
                    "registered builders.")
flags.DEFINE_string("exclude_datasets", "",
                    "Comma separated list of datasets to exclude,"
                    "(no download, no prepare).")
flags.DEFINE_multi_string(
    "module_import", None,
    "Modules to import. Use this when your DatasetBuilder is defined outside "
    "of tensorflow_datasets so that it is registered. Multiple imports can "
    "be passed by calling the flag multiple times, or using coma separated "
    "values.")

flags.DEFINE_integer(
    "builder_config_id", None,
    "If given 1 dataset with BUILDER_CONFIGS, id of config to build.")

flags.DEFINE_boolean(
    "experimental_latest_version", False,
    "Set to true to builder the latest version available, even if not default.")

# Path flags
flags.DEFINE_string("data_dir", DEFAULT_DATA_DIR, "Where to place the data.")
flags.DEFINE_string("download_dir", None, "Where to place downloads.")
flags.DEFINE_string("extract_dir", None, "Where to extract files.")
flags.DEFINE_string(
    "manual_dir", None,
    "Directory where dataset have manually been downloaded / extracted.")
flags.DEFINE_string("checksums_dir", None,
                    "For external datasets, specify the location of the "
                    "dataset checksums.")
flags.DEFINE_boolean(
    "add_name_to_manual_dir", False, "If true, append the dataset name to the "
    "`manual_dir`")

# Generation flags
default_compute_stats = tfds.download.ComputeStatsMode.SKIP
flags.DEFINE_enum(
    "compute_stats",
    default_compute_stats.value,
    [e.value for e in tfds.download.ComputeStatsMode],
    "Whether to compute or not the dynamic statistics.")
flags.DEFINE_integer(
    "max_examples_per_split", None,
    "optional max number of examples to write into each split (for testing).")

# Beam flags
flags.DEFINE_list(
    "beam_pipeline_options", [],
    "A (comma-separated) list of flags to pass to `PipelineOptions` when "
    "preparing with Apache Beam. Example: "
    "`--beam_pipeline_options=job_name=my-job,project=my-project`")

# Development flags
flags.DEFINE_boolean("register_checksums", False,
                     "If True, store size and checksum of downloaded files.")
flags.DEFINE_boolean(
    "force_checksums_validation",
    False, "If True, raise an error if the checksums are not found.")

# Debug flags
flags.DEFINE_boolean("debug", False,
                     "If True, will drop into debugger after data generation")
flags.DEFINE_boolean("debug_start", False,
                     "If True, will drop into debugger on startup")
flags.DEFINE_boolean("sleep_start", False,
                     "If True, will sleep on startup; useful for ssh")

flags.DEFINE_boolean("disable_tqdm", False, "If True, disable tqdm.")
def download_config():
  """Build a `tfds.download.DownloadConfig` from the command-line flags."""
  # TODO(b/116270825): Add flag to force extraction / preparation.
  config_kwargs = dict(
      extract_dir=FLAGS.extract_dir,
      manual_dir=FLAGS.manual_dir,
      compute_stats=FLAGS.compute_stats,
      download_mode=tfds.download.GenerateMode.REUSE_DATASET_IF_EXISTS,
      max_examples_per_split=FLAGS.max_examples_per_split,
      register_checksums=FLAGS.register_checksums,
      force_checksums_validation=FLAGS.force_checksums_validation,
  )
  return tfds.download.DownloadConfig(**config_kwargs)
def download_and_prepare(builder):
  """Generate data for a given dataset."""
  logging.info("download_and_prepare for dataset %s...", builder.info.full_name)

  prepare_config = download_config()

  if isinstance(builder, tfds.core.BeamBasedBuilder):
    beam = tfds.core.lazy_imports.apache_beam
    # TODO(b/129149715): Restore compute stats. Currently skipped because not
    # beam supported.
    prepare_config.compute_stats = tfds.download.ComputeStatsMode.SKIP
    beam_flags = ["--%s" % opt for opt in FLAGS.beam_pipeline_options]
    prepare_config.beam_options = (
        beam.options.pipeline_options.PipelineOptions(flags=beam_flags))

  if FLAGS.add_name_to_manual_dir:
    prepare_config.manual_dir = os.path.join(prepare_config.manual_dir,
                                             builder.name)

  builder.download_and_prepare(
      download_dir=FLAGS.download_dir,
      download_config=prepare_config,
  )
  termcolor.cprint(str(builder.info.as_proto), attrs=["bold"])

  if FLAGS.debug:
    # Materialize the train split and drop into the debugger for inspection.
    dataset = builder.as_dataset(split=tfds.Split.TRAIN)
    pdb.set_trace()
    del dataset
def import_modules(modules):
  """Import every module named in `modules`; entries may be comma separated."""
  module_names = (name for entry in modules for name in entry.split(","))
  for module_name in module_names:
    importlib.import_module(module_name)
def main(_):
  """Script entry point: resolve flags and build the requested dataset(s)."""
  if FLAGS.module_import:
    import_modules(FLAGS.module_import)

  if FLAGS.debug_start:
    pdb.set_trace()
  if FLAGS.sleep_start:
    # Sleep for 3 hours so a developer can attach first (see flag help: ssh).
    time.sleep(60*60*3)

  if FLAGS.disable_tqdm:
    logging.info("Disabling tqdm.")
    tfds.disable_progress_bar()

  if FLAGS.checksums_dir:
    tfds.download.add_checksums_dir(FLAGS.checksums_dir)

  # An empty --datasets means "every registered builder".
  datasets_to_build = set(FLAGS.datasets and FLAGS.datasets.split(",")
                          or tfds.list_builders())
  datasets_to_build -= set(FLAGS.exclude_datasets.split(","))

  # Only pass the version kwargs when required. Otherwise, `version=None`
  # overwrite the version parsed from the name.
  # `tfds.builder('my_dataset:1.2.0', version=None)`
  if FLAGS.experimental_latest_version:
    version_kwarg = {"version": "experimental_latest"}
  else:
    version_kwarg = {}

  logging.info("Running download_and_prepare for dataset(s):\n%s",
               "\n".join(datasets_to_build))
  builders = {
      name: tfds.builder(name, data_dir=FLAGS.data_dir, **version_kwarg)
      for name in datasets_to_build
  }

  if FLAGS.builder_config_id is not None:
    # Requesting a single config of a single dataset
    if len(builders) > 1:
      raise ValueError(
          "--builder_config_id can only be used when building a single dataset")
    builder = builders[list(builders.keys())[0]]

    if not builder.BUILDER_CONFIGS:
      raise ValueError(
          "--builder_config_id can only be used with datasets with configs")

    if FLAGS.builder_config_id >= len(builder.BUILDER_CONFIGS):
      # Out-of-range config id: silently does nothing — presumably so sharded
      # jobs can over-shoot the config count. TODO confirm intent.
      return
    config = builder.BUILDER_CONFIGS[FLAGS.builder_config_id]
    logging.info("Running download_and_prepare for config: %s", config.name)
    builder_for_config = tfds.builder(
        builder.name, data_dir=FLAGS.data_dir, config=config, **version_kwarg)
    download_and_prepare(builder_for_config)
  else:
    for name, builder in builders.items():
      if builder.BUILDER_CONFIGS and "/" not in name:
        # If builder has multiple configs, and no particular config was
        # requested, then compute all.
        for config in builder.BUILDER_CONFIGS:
          builder_for_config = tfds.builder(
              builder.name,
              data_dir=FLAGS.data_dir,
              config=config,
              **version_kwarg)
          download_and_prepare(builder_for_config)
      else:
        # If there is a slash in the name, then user requested a specific
        # dataset configuration.
        download_and_prepare(builder)
if __name__ == "__main__":
  # Enable TF2 behavior before any dataset building happens.
  tf.enable_v2_behavior()
  # absl parses the command-line flags and then calls main(argv).
  app.run(main)
| 35.711934 | 80 | 0.718253 |
e9b41c20e0f2e7968b82873b61c276d7a0c92cd8 | 39,673 | py | Python | python_modules/dagster/dagster/core/execution/api.py | chasleslr/dagster | 88907f9473fb8e7a9b1af9a0a8b349d42f4b8153 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster/dagster/core/execution/api.py | chasleslr/dagster | 88907f9473fb8e7a9b1af9a0a8b349d42f4b8153 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster/dagster/core/execution/api.py | chasleslr/dagster | 88907f9473fb8e7a9b1af9a0a8b349d42f4b8153 | [
"Apache-2.0"
] | null | null | null | import sys
from contextlib import contextmanager
from typing import Any, Dict, FrozenSet, Iterator, List, Optional, Tuple, Union
from dagster import check
from dagster.core.definitions import IPipeline, PipelineDefinition
from dagster.core.definitions.pipeline import PipelineSubsetDefinition
from dagster.core.definitions.pipeline_base import InMemoryPipeline
from dagster.core.errors import DagsterExecutionInterruptedError, DagsterInvariantViolationError
from dagster.core.events import DagsterEvent
from dagster.core.execution.context.system import SystemPipelineExecutionContext
from dagster.core.execution.plan.execute_plan import inner_plan_execution_iterator
from dagster.core.execution.plan.plan import ExecutionPlan
from dagster.core.execution.resolve_versions import resolve_memoized_execution_plan
from dagster.core.execution.retries import Retries
from dagster.core.instance import DagsterInstance, is_memoized_run
from dagster.core.selector import parse_items_from_selection, parse_step_selection
from dagster.core.storage.mem_io_manager import InMemoryIOManager
from dagster.core.storage.pipeline_run import PipelineRun, PipelineRunStatus
from dagster.core.system_config.objects import EnvironmentConfig
from dagster.core.telemetry import log_repo_stats, telemetry_wrapper
from dagster.core.utils import str_format_set
from dagster.utils import merge_dicts
from dagster.utils.error import serializable_error_info_from_exc_info
from dagster.utils.interrupts import capture_interrupts
from .context_creation_pipeline import (
ExecutionContextManager,
PipelineExecutionContextManager,
PlanExecutionContextManager,
scoped_pipeline_context,
)
from .results import PipelineExecutionResult
## Brief guide to the execution APIs
# | function name | operates over | sync | supports | creates new PipelineRun |
# | | | | reexecution | in instance |
# | --------------------------- | ------------------ | ----- | ----------- | ----------------------- |
# | execute_pipeline_iterator | IPipeline | async | no | yes |
# | execute_pipeline | IPipeline | sync | no | yes |
# | execute_run_iterator | PipelineRun | async | (1) | no |
# | execute_run | PipelineRun | sync | (1) | no |
# | execute_plan_iterator | ExecutionPlan | async | (2) | no |
# | execute_plan | ExecutionPlan | sync | (2) | no |
# | reexecute_pipeline | IPipeline | sync | yes | yes |
# | reexecute_pipeline_iterator | IPipeline | async | yes | yes |
#
# Notes on reexecution support:
# (1) The appropriate bits must be set on the PipelineRun passed to this function. Specifically,
# parent_run_id and root_run_id must be set and consistent, and if a solids_to_execute or
# step_keys_to_execute are set they must be consistent with the parent and root runs.
# (2) As for (1), but the ExecutionPlan passed must also agree in all relevant bits.
def execute_run_iterator(
    pipeline: IPipeline, pipeline_run: PipelineRun, instance: DagsterInstance
) -> Iterator[DagsterEvent]:
    """Asynchronously execute an existing PipelineRun, yielding events as they occur.

    Unlike ``execute_pipeline_iterator``, this does NOT create a new PipelineRun
    in the instance: the run must already exist and be in the NOT_STARTED or
    STARTING state (see the execution-API table at the top of this module).
    """
    check.inst_param(pipeline, "pipeline", IPipeline)
    check.inst_param(pipeline_run, "pipeline_run", PipelineRun)
    check.inst_param(instance, "instance", DagsterInstance)

    if pipeline_run.status == PipelineRunStatus.CANCELED:
        # This can happen if the run was force-terminated while it was starting
        def gen_execute_on_cancel():
            # One-event generator: report the cancellation instead of executing.
            yield instance.report_engine_event(
                "Not starting execution since the run was canceled before execution could start",
                pipeline_run,
            )

        return gen_execute_on_cancel()

    check.invariant(
        pipeline_run.status == PipelineRunStatus.NOT_STARTED
        or pipeline_run.status == PipelineRunStatus.STARTING,
        desc="Pipeline run {} ({}) in state {}, expected NOT_STARTED or STARTING".format(
            pipeline_run.pipeline_name, pipeline_run.run_id, pipeline_run.status
        ),
    )

    if pipeline_run.solids_to_execute:
        pipeline_def = pipeline.get_definition()
        if isinstance(pipeline_def, PipelineSubsetDefinition):
            # Already subset: the run's solids must match the existing subset.
            check.invariant(
                pipeline_run.solids_to_execute == pipeline.solids_to_execute,
                "Cannot execute PipelineRun with solids_to_execute {solids_to_execute} that conflicts "
                "with pipeline subset {pipeline_solids_to_execute}.".format(
                    pipeline_solids_to_execute=str_format_set(pipeline.solids_to_execute),
                    solids_to_execute=str_format_set(pipeline_run.solids_to_execute),
                ),
            )
        else:
            # when `execute_run_iterator` is directly called, the sub pipeline hasn't been created
            # note that when we receive the solids to execute via PipelineRun, it won't support
            # solid selection query syntax
            pipeline = pipeline.subset_for_execution_from_existing_pipeline(
                pipeline_run.solids_to_execute
            )

    execution_plan = create_execution_plan(
        pipeline,
        run_config=pipeline_run.run_config,
        mode=pipeline_run.mode,
        step_keys_to_execute=pipeline_run.step_keys_to_execute,
    )

    # raise_on_error=False: errors surface as failure events in the stream.
    return iter(
        ExecuteRunWithPlanIterable(
            execution_plan=execution_plan,
            iterator=pipeline_execution_iterator,
            execution_context_manager=PipelineExecutionContextManager(
                execution_plan=execution_plan,
                pipeline_run=pipeline_run,
                instance=instance,
                run_config=pipeline_run.run_config,
                raise_on_error=False,
            ),
        )
    )
def execute_run(
    pipeline: IPipeline,
    pipeline_run: PipelineRun,
    instance: DagsterInstance,
    raise_on_error: bool = False,
) -> PipelineExecutionResult:
    """Executes an existing pipeline run synchronously.

    Synchronous version of execute_run_iterator.

    Args:
        pipeline (IPipeline): The pipeline to execute.
        pipeline_run (PipelineRun): The run to execute
        instance (DagsterInstance): The instance in which the run has been created.
        raise_on_error (Optional[bool]): Whether or not to raise exceptions when they occur.
            Defaults to ``False``.

    Returns:
        PipelineExecutionResult: The result of the execution.
    """
    # A bare PipelineDefinition is rejected: callers must wrap it (e.g. in
    # InMemoryPipeline) so the run can be handed off to other processes.
    if isinstance(pipeline, PipelineDefinition):
        raise DagsterInvariantViolationError(
            "execute_run requires an IPipeline but received a PipelineDefinition "
            "directly instead. To support hand-off to other processes provide a "
            "ReconstructablePipeline which can be done using reconstructable(). For in "
            "process only execution you can use InMemoryPipeline."
        )

    check.inst_param(pipeline, "pipeline", IPipeline)
    check.inst_param(pipeline_run, "pipeline_run", PipelineRun)
    check.inst_param(instance, "instance", DagsterInstance)

    if pipeline_run.status == PipelineRunStatus.CANCELED:
        # Run was force-terminated before execution could start: report and abort.
        message = "Not starting execution since the run was canceled before execution could start"
        instance.report_engine_event(
            message,
            pipeline_run,
        )
        raise DagsterInvariantViolationError(message)

    check.invariant(
        pipeline_run.status == PipelineRunStatus.NOT_STARTED
        or pipeline_run.status == PipelineRunStatus.STARTING,
        desc="Pipeline run {} ({}) in state {}, expected NOT_STARTED or STARTING".format(
            pipeline_run.pipeline_name, pipeline_run.run_id, pipeline_run.status
        ),
    )
    pipeline_def = pipeline.get_definition()
    if pipeline_run.solids_to_execute:
        if isinstance(pipeline_def, PipelineSubsetDefinition):
            # Already subset: the run's solids must match the existing subset.
            check.invariant(
                pipeline_run.solids_to_execute == pipeline.solids_to_execute,
                "Cannot execute PipelineRun with solids_to_execute {solids_to_execute} that "
                "conflicts with pipeline subset {pipeline_solids_to_execute}.".format(
                    pipeline_solids_to_execute=str_format_set(pipeline.solids_to_execute),
                    solids_to_execute=str_format_set(pipeline_run.solids_to_execute),
                ),
            )
        else:
            # when `execute_run` is directly called, the sub pipeline hasn't been created
            # note that when we receive the solids to execute via PipelineRun, it won't support
            # solid selection query syntax
            pipeline = pipeline.subset_for_execution_from_existing_pipeline(
                pipeline_run.solids_to_execute
            )

    execution_plan = create_execution_plan(
        pipeline,
        run_config=pipeline_run.run_config,
        mode=pipeline_run.mode,
        step_keys_to_execute=pipeline_run.step_keys_to_execute,
    )

    if is_memoized_run(pipeline_run.tags):
        # Memoized runs re-plan to skip steps whose outputs are already stored.
        execution_plan = resolve_memoized_execution_plan(execution_plan)

    _execute_run_iterable = ExecuteRunWithPlanIterable(
        execution_plan=execution_plan,
        iterator=pipeline_execution_iterator,
        execution_context_manager=PipelineExecutionContextManager(
            execution_plan=execution_plan,
            pipeline_run=pipeline_run,
            instance=instance,
            run_config=pipeline_run.run_config,
            raise_on_error=raise_on_error,
        ),
    )
    # Draining the iterable runs the pipeline to completion.
    event_list = list(_execute_run_iterable)
    pipeline_context = _execute_run_iterable.pipeline_context

    # workaround for mem_io_manager to work in reconstruct_context, e.g. result.result_for_solid
    # in-memory values dict will get lost when the resource is re-initiated in reconstruct_context
    # so instead of re-initiating every single resource, we pass the resource instances to
    # reconstruct_context directly to avoid re-building from resource def.
    resource_instances_to_override = {}
    if pipeline_context:  # None if we have a pipeline failure
        for (
            key,
            resource_instance,
        ) in pipeline_context.scoped_resources_builder.resource_instance_dict.items():
            if isinstance(resource_instance, InMemoryIOManager):
                resource_instances_to_override[key] = resource_instance

    return PipelineExecutionResult(
        pipeline.get_definition(),
        pipeline_run.run_id,
        event_list,
        # Deferred context reconstruction used by result accessors
        # (e.g. result_for_solid); receives overridden resources at call time.
        lambda hardcoded_resources_arg: scoped_pipeline_context(
            execution_plan,
            pipeline_run.run_config,
            pipeline_run,
            instance,
            intermediate_storage=pipeline_context.intermediate_storage,
            resource_instances_to_override=hardcoded_resources_arg,
        ),
        resource_instances_to_override=resource_instances_to_override,
    )
def execute_pipeline_iterator(
    pipeline: Union[PipelineDefinition, IPipeline],
    run_config: Optional[dict] = None,
    mode: Optional[str] = None,
    preset: Optional[str] = None,
    tags: Optional[Dict[str, Any]] = None,
    solid_selection: Optional[List[str]] = None,
    instance: Optional[DagsterInstance] = None,
) -> Iterator[DagsterEvent]:
    """Execute a pipeline iteratively.

    Rather than package up the result of running a pipeline into a single object, like
    :py:func:`execute_pipeline`, this function yields the stream of events resulting from pipeline
    execution.

    This is intended to allow the caller to handle these events on a streaming basis in whatever
    way is appropriate.

    Parameters:
        pipeline (Union[IPipeline, PipelineDefinition]): The pipeline to execute.
        run_config (Optional[dict]): The environment configuration that parametrizes this run,
            as a dict.
        mode (Optional[str]): The name of the pipeline mode to use. You may not set both ``mode``
            and ``preset``.
        preset (Optional[str]): The name of the pipeline preset to use. You may not set both
            ``mode`` and ``preset``.
        tags (Optional[Dict[str, Any]]): Arbitrary key-value pairs that will be added to pipeline
            logs.
        solid_selection (Optional[List[str]]): A list of solid selection queries (including single
            solid names) to execute. For example:
            - ['some_solid']: select "some_solid" itself.
            - ['*some_solid']: select "some_solid" and all its ancestors (upstream dependencies).
            - ['*some_solid+++']: select "some_solid", all its ancestors, and its descendants
                (downstream dependencies) within 3 levels down.
            - ['*some_solid', 'other_solid_a', 'other_solid_b+']: select "some_solid" and all its
                ancestors, "other_solid_a" itself, and "other_solid_b" and its direct child solids.
        instance (Optional[DagsterInstance]): The instance to execute against. If this is ``None``,
            an ephemeral instance will be used, and no artifacts will be persisted from the run.

    Returns:
        Iterator[DagsterEvent]: The stream of events resulting from pipeline execution.
    """
    with ephemeral_instance_if_missing(instance) as execute_instance:
        # Normalize/validate args (handles preset expansion, solid selection).
        (
            pipeline,
            run_config,
            mode,
            tags,
            solids_to_execute,
            solid_selection,
        ) = _check_execute_pipeline_args(
            pipeline=pipeline,
            run_config=run_config,
            mode=mode,
            preset=preset,
            tags=tags,
            solid_selection=solid_selection,
        )

        # Unlike execute_run_iterator, this creates a fresh PipelineRun.
        pipeline_run = execute_instance.create_run_for_pipeline(
            pipeline_def=pipeline.get_definition(),
            run_config=run_config,
            mode=mode,
            solid_selection=solid_selection,
            solids_to_execute=solids_to_execute,
            tags=tags,
        )

        return execute_run_iterator(pipeline, pipeline_run, execute_instance)
@contextmanager
def ephemeral_instance_if_missing(
    instance: Optional[DagsterInstance],
) -> Iterator[DagsterInstance]:
    """Yield *instance* unchanged, or a fresh ephemeral instance when it is falsy."""
    if instance:
        yield instance
        return
    with DagsterInstance.ephemeral() as scratch_instance:
        yield scratch_instance
def execute_pipeline(
    pipeline: Union[PipelineDefinition, IPipeline],
    run_config: Optional[dict] = None,
    mode: Optional[str] = None,
    preset: Optional[str] = None,
    tags: Optional[Dict[str, Any]] = None,
    solid_selection: Optional[List[str]] = None,
    instance: Optional[DagsterInstance] = None,
    raise_on_error: bool = True,
) -> PipelineExecutionResult:
    """Execute a pipeline synchronously.

    Users will typically call this API when testing pipeline execution, or running standalone
    scripts.

    Parameters:
        pipeline (Union[IPipeline, PipelineDefinition]): The pipeline to execute.
        run_config (Optional[dict]): The environment configuration that parametrizes this run,
            as a dict.
        mode (Optional[str]): The name of the pipeline mode to use. You may not set both ``mode``
            and ``preset``.
        preset (Optional[str]): The name of the pipeline preset to use. You may not set both
            ``mode`` and ``preset``.
        tags (Optional[Dict[str, Any]]): Arbitrary key-value pairs that will be added to pipeline
            logs.
        instance (Optional[DagsterInstance]): The instance to execute against. If this is ``None``,
            an ephemeral instance will be used, and no artifacts will be persisted from the run.
        raise_on_error (Optional[bool]): Whether or not to raise exceptions when they occur.
            Defaults to ``True``, since this is the most useful behavior in test.
        solid_selection (Optional[List[str]]): A list of solid selection queries (including single
            solid names) to execute. For example:
            - ['some_solid']: select "some_solid" itself.
            - ['*some_solid']: select "some_solid" and all its ancestors (upstream dependencies).
            - ['*some_solid+++']: select "some_solid", all its ancestors, and its descendants
                (downstream dependencies) within 3 levels down.
            - ['*some_solid', 'other_solid_a', 'other_solid_b+']: select "some_solid" and all its
                ancestors, "other_solid_a" itself, and "other_solid_b" and its direct child solids.

    Returns:
        :py:class:`PipelineExecutionResult`: The result of pipeline execution.

    For the asynchronous version, see :py:func:`execute_pipeline_iterator`.
    """
    with ephemeral_instance_if_missing(instance) as execute_instance:
        # Delegate to the telemetry-wrapped helper so usage stats are recorded.
        return _logged_execute_pipeline(
            pipeline,
            instance=execute_instance,
            run_config=run_config,
            mode=mode,
            preset=preset,
            tags=tags,
            solid_selection=solid_selection,
            raise_on_error=raise_on_error,
        )
@telemetry_wrapper
def _logged_execute_pipeline(
    pipeline: Union[IPipeline, PipelineDefinition],
    instance: DagsterInstance,
    run_config: Optional[dict] = None,
    mode: Optional[str] = None,
    preset: Optional[str] = None,
    tags: Optional[Dict[str, Any]] = None,
    solid_selection: Optional[List[str]] = None,
    raise_on_error: bool = True,
) -> PipelineExecutionResult:
    """Telemetry-wrapped body of :py:func:`execute_pipeline`.

    Validates arguments, records repo stats, creates a new PipelineRun on the
    instance, then executes it synchronously via :py:func:`execute_run`.
    """
    check.inst_param(instance, "instance", DagsterInstance)
    # Normalize/validate args (handles preset expansion, solid selection).
    (
        pipeline,
        run_config,
        mode,
        tags,
        solids_to_execute,
        solid_selection,
    ) = _check_execute_pipeline_args(
        pipeline=pipeline,
        run_config=run_config,
        mode=mode,
        preset=preset,
        tags=tags,
        solid_selection=solid_selection,
    )

    log_repo_stats(instance=instance, pipeline=pipeline, source="execute_pipeline")

    pipeline_run = instance.create_run_for_pipeline(
        pipeline_def=pipeline.get_definition(),
        run_config=run_config,
        mode=mode,
        solid_selection=solid_selection,
        solids_to_execute=solids_to_execute,
        tags=tags,
    )

    return execute_run(pipeline, pipeline_run, instance, raise_on_error=raise_on_error)
def reexecute_pipeline(
    pipeline: Union[IPipeline, PipelineDefinition],
    parent_run_id: str,
    run_config: Optional[dict] = None,
    step_selection: Optional[List[str]] = None,
    mode: Optional[str] = None,
    preset: Optional[str] = None,
    tags: Optional[Dict[str, Any]] = None,
    instance: DagsterInstance = None,
    raise_on_error: bool = True,
) -> PipelineExecutionResult:
    """Reexecute an existing pipeline run.

    Users will typically call this API when testing pipeline reexecution, or running standalone
    scripts.

    Parameters:
        pipeline (Union[IPipeline, PipelineDefinition]): The pipeline to execute.
        parent_run_id (str): The id of the previous run to reexecute. The run must exist in the
            instance.
        run_config (Optional[dict]): The environment configuration that parametrizes this run,
            as a dict.
        step_selection (Optional[List[str]]): A list of step selection queries (including single
            step keys) to execute. For example:
            - ['some_solid']: select the execution step "some_solid" itself.
            - ['*some_solid']: select the step "some_solid" and all its ancestors
                (upstream dependencies).
            - ['*some_solid+++']: select the step "some_solid", all its ancestors,
                and its descendants (downstream dependencies) within 3 levels down.
            - ['*some_solid', 'other_solid_a', 'other_solid_b+']: select
                "some_solid" and all its ancestors, "other_solid_a" itself, and
                "other_solid_b" and its direct child execution steps.
        mode (Optional[str]): The name of the pipeline mode to use. You may not set both ``mode``
            and ``preset``.
        preset (Optional[str]): The name of the pipeline preset to use. You may not set both
            ``mode`` and ``preset``.
        tags (Optional[Dict[str, Any]]): Arbitrary key-value pairs that will be added to pipeline
            logs.
        instance (Optional[DagsterInstance]): The instance to execute against. If this is ``None``,
            an ephemeral instance will be used, and no artifacts will be persisted from the run.
        raise_on_error (Optional[bool]): Whether or not to raise exceptions when they occur.
            Defaults to ``True``, since this is the most useful behavior in test.

    Returns:
        :py:class:`PipelineExecutionResult`: The result of pipeline execution.

    For the asynchronous version, see :py:func:`reexecute_pipeline_iterator`.
    """
    check.opt_list_param(step_selection, "step_selection", of_type=str)

    check.str_param(parent_run_id, "parent_run_id")

    with ephemeral_instance_if_missing(instance) as execute_instance:
        (pipeline, run_config, mode, tags, _, _) = _check_execute_pipeline_args(
            pipeline=pipeline,
            run_config=run_config,
            mode=mode,
            preset=preset,
            tags=tags,
        )

        parent_pipeline_run = execute_instance.get_run_by_id(parent_run_id)
        check.invariant(
            parent_pipeline_run,
            "No parent run with id {parent_run_id} found in instance.".format(
                parent_run_id=parent_run_id
            ),
        )

        # resolve step selection DSL queries using parent execution plan snapshot
        if step_selection:
            # NOTE: builds a fresh full plan here (the iterator variant instead
            # reads the stored execution-plan snapshot of the parent run).
            full_plan = create_execution_plan(pipeline, parent_pipeline_run.run_config, mode)
            step_keys = parse_items_from_selection(step_selection)
            # resolve execution plan with any resolved dynamic step keys
            resolved_plan = full_plan.build_subset_plan(step_keys)
            # parse selection using all step deps
            step_keys_to_execute = parse_step_selection(
                resolved_plan.get_all_step_deps(), step_selection
            )
        else:
            step_keys_to_execute = None

        # The new run inherits solid selection/subset from the parent run and
        # records parent/root lineage for the reexecution chain.
        pipeline_run = execute_instance.create_run_for_pipeline(
            pipeline_def=pipeline.get_definition(),
            run_config=run_config,
            mode=mode,
            tags=tags,
            solid_selection=parent_pipeline_run.solid_selection,
            solids_to_execute=parent_pipeline_run.solids_to_execute,
            # convert to frozenset https://github.com/dagster-io/dagster/issues/2914
            step_keys_to_execute=list(step_keys_to_execute) if step_keys_to_execute else None,
            root_run_id=parent_pipeline_run.root_run_id or parent_pipeline_run.run_id,
            parent_run_id=parent_pipeline_run.run_id,
        )

        return execute_run(pipeline, pipeline_run, execute_instance, raise_on_error=raise_on_error)
def reexecute_pipeline_iterator(
    pipeline: Union[IPipeline, PipelineDefinition],
    parent_run_id: str,
    run_config: Optional[dict] = None,
    step_selection: Optional[List[str]] = None,
    mode: Optional[str] = None,
    preset: Optional[str] = None,
    tags: Optional[Dict[str, Any]] = None,
    instance: DagsterInstance = None,
) -> Iterator[DagsterEvent]:
    """Reexecute a pipeline iteratively.

    Rather than package up the result of running a pipeline into a single object, like
    :py:func:`reexecute_pipeline`, this function yields the stream of events resulting from pipeline
    reexecution.

    This is intended to allow the caller to handle these events on a streaming basis in whatever
    way is appropriate.

    Parameters:
        pipeline (Union[IPipeline, PipelineDefinition]): The pipeline to execute.
        parent_run_id (str): The id of the previous run to reexecute. The run must exist in the
            instance.
        run_config (Optional[dict]): The environment configuration that parametrizes this run,
            as a dict.
        step_selection (Optional[List[str]]): A list of step selection queries (including single
            step keys) to execute. For example:
            - ['some_solid']: select the execution step "some_solid" itself.
            - ['*some_solid']: select the step "some_solid" and all its ancestors
                (upstream dependencies).
            - ['*some_solid+++']: select the step "some_solid", all its ancestors,
                and its descendants (downstream dependencies) within 3 levels down.
            - ['*some_solid', 'other_solid_a', 'other_solid_b+']: select
                "some_solid" and all its ancestors, "other_solid_a" itself, and
                "other_solid_b" and its direct child execution steps.
        mode (Optional[str]): The name of the pipeline mode to use. You may not set both ``mode``
            and ``preset``.
        preset (Optional[str]): The name of the pipeline preset to use. You may not set both
            ``mode`` and ``preset``.
        tags (Optional[Dict[str, Any]]): Arbitrary key-value pairs that will be added to pipeline
            logs.
        instance (Optional[DagsterInstance]): The instance to execute against. If this is ``None``,
            an ephemeral instance will be used, and no artifacts will be persisted from the run.

    Returns:
        Iterator[DagsterEvent]: The stream of events resulting from pipeline reexecution.
    """
    check.opt_list_param(step_selection, "step_selection", of_type=str)

    check.str_param(parent_run_id, "parent_run_id")

    with ephemeral_instance_if_missing(instance) as execute_instance:
        (pipeline, run_config, mode, tags, _, _) = _check_execute_pipeline_args(
            pipeline=pipeline,
            run_config=run_config,
            mode=mode,
            preset=preset,
            tags=tags,
            solid_selection=None,
        )
        parent_pipeline_run = execute_instance.get_run_by_id(parent_run_id)
        check.invariant(
            parent_pipeline_run,
            "No parent run with id {parent_run_id} found in instance.".format(
                parent_run_id=parent_run_id
            ),
        )

        # resolve step selection DSL queries using parent execution plan snapshot
        if step_selection:
            # NOTE: unlike reexecute_pipeline, this reads the stored snapshot
            # of the parent run's plan instead of rebuilding the plan.
            parent_execution_plan_snapshot = execute_instance.get_execution_plan_snapshot(
                parent_pipeline_run.execution_plan_snapshot_id
            )
            step_keys_to_execute = parse_step_selection(
                parent_execution_plan_snapshot.step_deps, step_selection
            )
        else:
            step_keys_to_execute = None

        # The new run inherits solid selection/subset from the parent run and
        # records parent/root lineage for the reexecution chain.
        pipeline_run = execute_instance.create_run_for_pipeline(
            pipeline_def=pipeline.get_definition(),
            run_config=run_config,
            mode=mode,
            tags=tags,
            solid_selection=parent_pipeline_run.solid_selection,
            solids_to_execute=parent_pipeline_run.solids_to_execute,
            # convert to frozenset https://github.com/dagster-io/dagster/issues/2914
            step_keys_to_execute=list(step_keys_to_execute) if step_keys_to_execute else None,
            root_run_id=parent_pipeline_run.root_run_id or parent_pipeline_run.run_id,
            parent_run_id=parent_pipeline_run.run_id,
        )

        return execute_run_iterator(pipeline, pipeline_run, execute_instance)
def execute_plan_iterator(
    execution_plan: ExecutionPlan,
    pipeline_run: PipelineRun,
    instance: DagsterInstance,
    retries: Optional[Retries] = None,
    run_config: Optional[dict] = None,
) -> Iterator[DagsterEvent]:
    """Yield the stream of events produced by executing an already-built execution plan.

    Args:
        execution_plan (ExecutionPlan): The plan to execute.
        pipeline_run (PipelineRun): The run record that the emitted events belong to.
        instance (DagsterInstance): The instance to execute against.
        retries (Optional[Retries]): Retry policy; defaults to retries disabled.
        run_config (Optional[dict]): Run configuration; defaults to an empty dict.

    Returns:
        Iterator[DagsterEvent]: The stream of events from plan execution.
    """
    # Validate and normalize all arguments up front so bad input fails fast.
    check.inst_param(execution_plan, "execution_plan", ExecutionPlan)
    check.inst_param(pipeline_run, "pipeline_run", PipelineRun)
    check.inst_param(instance, "instance", DagsterInstance)
    retries = check.opt_inst_param(retries, "retries", Retries, Retries.disabled_mode())
    run_config = check.opt_dict_param(run_config, "run_config")

    # Build the context manager and iterable separately for readability;
    # raise_on_error=False so failures surface as events rather than exceptions.
    context_manager = PlanExecutionContextManager(
        retries=retries,
        execution_plan=execution_plan,
        run_config=run_config,
        pipeline_run=pipeline_run,
        instance=instance,
        raise_on_error=False,
    )
    run_iterable = ExecuteRunWithPlanIterable(
        execution_plan=execution_plan,
        iterator=inner_plan_execution_iterator,
        execution_context_manager=context_manager,
    )
    return iter(run_iterable)
def execute_plan(
    execution_plan: ExecutionPlan,
    instance: DagsterInstance,
    pipeline_run: PipelineRun,
    run_config: Optional[Dict] = None,
    retries: Optional[Retries] = None,
) -> List[DagsterEvent]:
    """This is the entry point of dagster-graphql executions. For the dagster CLI entry point, see
    execute_pipeline() above.

    Eagerly drains the plan-execution event stream and returns it as a list.
    """
    # Argument validation mirrors execute_plan_iterator, which does the real work.
    check.inst_param(execution_plan, "execution_plan", ExecutionPlan)
    check.inst_param(instance, "instance", DagsterInstance)
    check.inst_param(pipeline_run, "pipeline_run", PipelineRun)
    run_config = check.opt_dict_param(run_config, "run_config")
    check.opt_inst_param(retries, "retries", Retries)

    event_stream = execute_plan_iterator(
        execution_plan=execution_plan,
        run_config=run_config,
        pipeline_run=pipeline_run,
        instance=instance,
        retries=retries,
    )
    return list(event_stream)
def _check_pipeline(pipeline: Union[PipelineDefinition, IPipeline]) -> IPipeline:
    """Coerce a bare PipelineDefinition into an IPipeline (backcompat) and validate the type."""
    # Accepting a raw definition is a backwards-compatibility affordance; wrap it in-memory.
    wrapped = InMemoryPipeline(pipeline) if isinstance(pipeline, PipelineDefinition) else pipeline
    check.inst_param(wrapped, "pipeline", IPipeline)
    return wrapped
def create_execution_plan(
    pipeline: Union[IPipeline, PipelineDefinition],
    run_config: Optional[dict] = None,
    mode: Optional[str] = None,
    step_keys_to_execute: Optional[List[str]] = None,
) -> ExecutionPlan:
    """Build an ExecutionPlan for ``pipeline`` without executing it.

    ``mode`` defaults to the pipeline's default mode name; ``step_keys_to_execute``
    optionally restricts the plan to a subset of steps.
    """
    pipeline = _check_pipeline(pipeline)
    pipeline_def = pipeline.get_definition()
    check.inst_param(pipeline_def, "pipeline_def", PipelineDefinition)

    run_config = check.opt_dict_param(run_config, "run_config", key_type=str)
    check.opt_list_param(step_keys_to_execute, "step_keys_to_execute", of_type=str)
    # The mode default depends on the pipeline definition, so it is resolved here.
    mode = check.opt_str_param(mode, "mode", default=pipeline_def.get_default_mode_name())

    env_config = EnvironmentConfig.build(pipeline_def, run_config, mode=mode)
    return ExecutionPlan.build(
        pipeline, env_config, mode=mode, step_keys_to_execute=step_keys_to_execute
    )
def pipeline_execution_iterator(
    pipeline_context: SystemPipelineExecutionContext, execution_plan: ExecutionPlan
) -> Iterator[DagsterEvent]:
    """A complete execution of a pipeline. Yields pipeline start, success,
    and failure events.

    Args:
        pipeline_context (SystemPipelineExecutionContext): fully-initialized execution context.
        execution_plan (ExecutionPlan): the plan to hand to the context's executor.
    """
    check.inst_param(pipeline_context, "pipeline_context", SystemPipelineExecutionContext)
    check.inst_param(execution_plan, "execution_plan", ExecutionPlan)

    yield DagsterEvent.pipeline_start(pipeline_context)

    # State used by the ``finally`` block to decide which single terminal event to emit.
    pipeline_exception_info = None
    pipeline_canceled_info = None
    failed_steps = []
    generator_closed = False
    try:
        for event in pipeline_context.executor.execute(pipeline_context, execution_plan):
            if event.is_step_failure:
                failed_steps.append(event.step_key)
            yield event
    except GeneratorExit:
        # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
        # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
        generator_closed = True
        pipeline_exception_info = serializable_error_info_from_exc_info(sys.exc_info())
        raise
    except (KeyboardInterrupt, DagsterExecutionInterruptedError):
        # Interruption: the finally block decides whether this was a user-initiated
        # cancellation or an unexpected termination.
        pipeline_canceled_info = serializable_error_info_from_exc_info(sys.exc_info())
        raise
    except Exception:  # pylint: disable=broad-except
        pipeline_exception_info = serializable_error_info_from_exc_info(sys.exc_info())
        raise  # finally block will run before this is re-raised
    finally:
        # Exactly one terminal event is chosen here: canceled > failure > step failure > success.
        if pipeline_canceled_info:
            # Re-read the run record to check for a user-initiated cancellation request.
            reloaded_run = pipeline_context.instance.get_run_by_id(pipeline_context.run_id)
            if reloaded_run and reloaded_run.status == PipelineRunStatus.CANCELING:
                event = DagsterEvent.pipeline_canceled(pipeline_context, pipeline_canceled_info)
            else:
                event = DagsterEvent.pipeline_failure(
                    pipeline_context,
                    "Execution was interrupted unexpectedly. "
                    "No user initiated termination request was found, treating as failure.",
                    pipeline_canceled_info,
                )
        elif pipeline_exception_info:
            event = DagsterEvent.pipeline_failure(
                pipeline_context,
                "An exception was thrown during execution.",
                pipeline_exception_info,
            )
        elif failed_steps:
            event = DagsterEvent.pipeline_failure(
                pipeline_context,
                "Steps failed: {}.".format(failed_steps),
            )
        else:
            event = DagsterEvent.pipeline_success(pipeline_context)
        # Can't yield from a generator that is being closed (GeneratorExit was raised).
        if not generator_closed:
            yield event
class ExecuteRunWithPlanIterable:
    """Utility class to consolidate execution logic.

    This is a class and not a function because, e.g., in constructing a `scoped_pipeline_context`
    for `PipelineExecutionResult`, we need to pull out the `pipeline_context` after we're done
    yielding events. This broadly follows a pattern we make use of in other places,
    cf. `dagster.utils.EventGenerationManager`.
    """

    def __init__(self, execution_plan, iterator, execution_context_manager):
        # ``iterator`` is called as iterator(execution_plan=..., pipeline_context=...)
        # and is expected to yield events.
        self.execution_plan = check.inst_param(execution_plan, "execution_plan", ExecutionPlan)
        self.iterator = check.callable_param(iterator, "iterator")
        self.execution_context_manager = check.inst_param(
            execution_context_manager, "execution_context_manager", ExecutionContextManager
        )
        # Populated during __iter__; exposed so callers can inspect the context afterwards.
        self.pipeline_context = None

    def __iter__(self):
        # Since interrupts can't be raised at arbitrary points safely, delay them until designated
        # checkpoints during the execution.
        # To be maximally certain that interrupts are always caught during an execution process,
        # you can safely add an additional `with capture_interrupts()` at the very beginning of the
        # process that performs the execution.
        with capture_interrupts():
            yield from self.execution_context_manager.prepare_context()
            self.pipeline_context = self.execution_context_manager.get_context()
            generator_closed = False
            try:
                if self.pipeline_context:  # False if we had a pipeline init failure
                    yield from self.iterator(
                        execution_plan=self.execution_plan,
                        pipeline_context=self.pipeline_context,
                    )
            except GeneratorExit:
                # Shouldn't happen, but avoid runtime-exception in case this generator gets GC-ed
                # (see https://amir.rachum.com/blog/2017/03/03/generator-cleanup/).
                generator_closed = True
                raise
            finally:
                # Always tear down the context; suppress event yields if we were closed,
                # since yielding from a closing generator would raise.
                for event in self.execution_context_manager.shutdown_context():
                    if not generator_closed:
                        yield event
def _check_execute_pipeline_args(
    pipeline: Union[PipelineDefinition, IPipeline],
    run_config: Optional[dict],
    mode: Optional[str],
    preset: Optional[str],
    tags: Optional[Dict[str, Any]],
    solid_selection: Optional[List[str]] = None,
) -> Tuple[
    IPipeline,
    Optional[dict],
    Optional[str],
    Dict[str, Any],
    FrozenSet[str],
    Optional[List[str]],
]:
    """Validate and normalize the arguments shared by the pipeline execution entry points.

    Enforces that ``mode`` and ``preset`` are mutually exclusive, merges preset-supplied
    run_config/solid_selection/mode/tags with explicitly passed values (erroring on
    disagreement), resolves the final mode, and subsets the pipeline when a solid
    selection is in effect.

    Returns:
        Tuple of (pipeline, run_config, mode, tags, solids_to_execute, solid_selection).
    """
    pipeline = _check_pipeline(pipeline)
    pipeline_def = pipeline.get_definition()
    check.inst_param(pipeline_def, "pipeline_def", PipelineDefinition)

    run_config = check.opt_dict_param(run_config, "run_config")
    check.opt_str_param(mode, "mode")
    check.opt_str_param(preset, "preset")
    check.invariant(
        not (mode is not None and preset is not None),
        "You may set only one of `mode` (got {mode}) or `preset` (got {preset}).".format(
            mode=mode, preset=preset
        ),
    )

    tags = check.opt_dict_param(tags, "tags", key_type=str)
    check.opt_list_param(solid_selection, "solid_selection", of_type=str)

    if preset is not None:
        pipeline_preset = pipeline_def.get_preset(preset)

        if pipeline_preset.run_config is not None:
            # An explicit run_config may only restate the preset's config exactly.
            check.invariant(
                (not run_config) or (pipeline_preset.run_config == run_config),
                "The environment set in preset '{preset}' does not agree with the environment "
                "passed in the `run_config` argument.".format(preset=preset),
            )
            run_config = pipeline_preset.run_config

        # load solid_selection from preset
        if pipeline_preset.solid_selection is not None:
            check.invariant(
                solid_selection is None or solid_selection == pipeline_preset.solid_selection,
                "The solid_selection set in preset '{preset}', {preset_subset}, does not agree with "
                "the `solid_selection` argument: {solid_selection}".format(
                    preset=preset,
                    preset_subset=pipeline_preset.solid_selection,
                    solid_selection=solid_selection,
                ),
            )
            solid_selection = pipeline_preset.solid_selection

        # An explicit mode, if given, must match the preset's mode.
        check.invariant(
            mode is None or mode == pipeline_preset.mode,
            "Mode {mode} does not agree with the mode set in preset '{preset}': "
            "('{preset_mode}')".format(preset=preset, preset_mode=pipeline_preset.mode, mode=mode),
        )
        mode = pipeline_preset.mode

        tags = merge_dicts(pipeline_preset.tags, tags)

    if mode is not None:
        if not pipeline_def.has_mode_definition(mode):
            raise DagsterInvariantViolationError(
                (
                    "You have attempted to execute pipeline {name} with mode {mode}. "
                    "Available modes: {modes}"
                ).format(
                    name=pipeline_def.name,
                    mode=mode,
                    modes=pipeline_def.available_modes,
                )
            )
    else:
        # No explicit mode: only single-mode pipelines may fall back to their default mode.
        if pipeline_def.is_multi_mode:
            raise DagsterInvariantViolationError(
                (
                    "Pipeline {name} has multiple modes (Available modes: {modes}) and you have "
                    "attempted to execute it without specifying a mode. Set "
                    "mode property on the PipelineRun object."
                ).format(name=pipeline_def.name, modes=pipeline_def.available_modes)
            )
        mode = pipeline_def.get_default_mode_name()

    # Pipeline-level tags are the base; preset/explicit tags (already merged) win.
    tags = merge_dicts(pipeline_def.tags, tags)

    # generate pipeline subset from the given solid_selection
    if solid_selection:
        pipeline = pipeline.subset_for_execution(solid_selection)

    return (
        pipeline,
        run_config,
        mode,
        tags,
        pipeline.solids_to_execute,
        solid_selection,
    )
| 43.263904 | 103 | 0.666146 |
bac9e9198ab5b7e6a57a206c78f0ae3f927c87ca | 44,551 | py | Python | lib/galaxy/managers/base.py | crashGoBoom/galaxy | bb395a277839465dd490e1cc8c45dd17bddacb23 | [
"CC-BY-3.0"
] | null | null | null | lib/galaxy/managers/base.py | crashGoBoom/galaxy | bb395a277839465dd490e1cc8c45dd17bddacb23 | [
"CC-BY-3.0"
] | null | null | null | lib/galaxy/managers/base.py | crashGoBoom/galaxy | bb395a277839465dd490e1cc8c45dd17bddacb23 | [
"CC-BY-3.0"
] | null | null | null | """
Keeps the older BaseController security and fetching methods and also
defines a base ModelManager, ModelSerializer, and ModelDeserializer.
ModelManagers are used for operations on models that occur outside the scope of
a single model object, such as:
- object creation
- object lookup
- interactions between 2+ objects of different model classes
(Since these were to replace model Mixins from
web/framework/base/controller.py the rule of thumb used there also generally
has been applied here: if it uses the trans or sa_session, put it in a manager
and not the model.)
ModelSerializers allow flexible conversion of model objects to dictionaries.
They control what keys are sent, how values are simplified, can remap keys,
and allow both predefined and user controlled key sets.
ModelDeserializers control how a model validates and process an incoming
attribute change to a model object.
"""
# TODO: it may be there's a better way to combine the above three classes
# such as: a single flat class, serializers being singletons in the manager, etc.
# instead of the three separate classes. With no 'apparent' perfect scheme
# I'm opting to just keep them separate.
import datetime
import logging
import re
from typing import Callable, Dict, List, Optional, Set, Type
import routes
import sqlalchemy
from sqlalchemy.orm.scoping import scoped_session
from galaxy import exceptions
from galaxy import model
from galaxy.model import tool_shed_install
from galaxy.structured_app import BasicApp, MinimalManagerApp
from galaxy.util import namedtuple
log = logging.getLogger(__name__)

# Lightweight pair describing a parsed filter: ``filter_type`` tells ModelManager
# how to apply it ('orm' clause, 'orm_function' called with the model class, or
# post-query 'function' - see ModelManager._split_filters); ``filter`` is the
# clause or callable itself.
# NOTE: ``namedtuple`` here is imported from galaxy.util, not collections.
parsed_filter = namedtuple("ParsedFilter", "filter_type filter")
# ==== accessors from base/controller.py
def security_check(trans, item, check_ownership=False, check_accessible=False):
    """
    Security checks for an item: checks if (a) user owns item or (b) item
    is accessible to user. This is a generic method for dealing with objects
    uniformly from the older controller mixin code - however whenever possible
    the managers for a particular model should be used to perform security
    checks.
    """
    # Admins can see and manage everything - skip all further checks.
    if trans.user_is_admin:
        return item

    if check_ownership:
        # Ownership requires a logged-in user who is the item's owner.
        current_user = trans.user
        if not current_user:
            raise exceptions.ItemOwnershipException("Must be logged in to manage Galaxy items", type='error')
        if item.user != current_user:
            raise exceptions.ItemOwnershipException("%s is not owned by the current user" % item.__class__.__name__, type='error')

    if check_accessible:
        library_classes = (
            trans.app.model.LibraryFolder,
            trans.app.model.LibraryDatasetDatasetAssociation,
            trans.app.model.LibraryDataset,
        )
        if type(item) in library_classes:
            # Library items are governed by the security agent's role-based access.
            can_access = trans.app.security_agent.can_access_library_item(
                trans.get_current_user_roles(), item, trans.user
            )
            if not can_access:
                raise exceptions.ItemAccessibilityException("%s is not accessible to the current user" % item.__class__.__name__, type='error')
        elif (item.user != trans.user) and (not item.importable) and (trans.user not in item.users_shared_with_dot_users):
            # Sharable items are accessible to the owner, when importable, or when
            # the user appears in the item's shared-with list.
            raise exceptions.ItemAccessibilityException("%s is not accessible to the current user" % item.__class__.__name__, type='error')
    return item
def get_class(class_name):
    """
    Return the model class object that the string ``class_name`` denotes.

    :raises exceptions.MessageException: if no such class exists on the model module.
    """
    # ToolShedRepository lives in the tool_shed_install model module, not galaxy.model.
    if class_name == 'ToolShedRepository':
        return tool_shed_install.ToolShedRepository
    if not hasattr(model, class_name):
        raise exceptions.MessageException("Item class '%s' not available." % class_name)
    return getattr(model, class_name)
def decode_id(app, id):
    """
    Decode an encoded database id via the app's security helper.

    :raises exceptions.MalformedId: if the id cannot be decoded.
    """
    # note: use str - occasionally a fully numeric id will be placed in post body and parsed as int via JSON
    # resulting in error for valid id
    id_str = str(id)
    try:
        return app.security.decode_id(id_str)
    except (ValueError, TypeError):
        msg = "Malformed id ( %s ) specified, unable to decode" % id_str
        raise exceptions.MalformedId(msg, id=id_str)
def get_object(trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None):
    """
    Convenience method to get a model object with the specified checks. This is
    a generic method for dealing with objects uniformly from the older
    controller mixin code - however whenever possible the managers for a
    particular model should be used to load objects.
    """
    decoded_id = decode_id(trans.app, id)
    try:
        item_class = get_class(class_name)
        assert item_class is not None
        item = trans.sa_session.query(item_class).get(decoded_id)
        assert item is not None
    except Exception:
        # Any lookup failure (unknown class, bad id, missing row) is reported uniformly.
        log.exception(f"Invalid {class_name} id ( {id} ) specified.")
        raise exceptions.MessageException(f"Invalid {class_name} id ( {id} ) specified", type="error")

    if check_ownership or check_accessible:
        security_check(trans, item, check_ownership, check_accessible)

    # ``deleted`` of None skips the deletion-state check entirely.
    if deleted is True and not item.deleted:
        raise exceptions.ItemDeletionException(
            '%s "%s" is not deleted' % (class_name, getattr(item, 'name', id)), type="warning"
        )
    if deleted is False and item.deleted:
        raise exceptions.ItemDeletionException(
            '%s "%s" is deleted' % (class_name, getattr(item, 'name', id)), type="warning"
        )
    return item
# =============================================================================
def munge_lists(listA, listB):
    """
    Combine two values into a single list, where either value may be
    None, a bare (non-list) value, or a list. A None argument yields the
    other argument unchanged.
    """
    # TODO: there's nothing specifically filter or model-related here - move to util
    if listA is None:
        return listB
    if listB is None:
        return listA
    normalized_a = listA if isinstance(listA, list) else [listA]
    normalized_b = listB if isinstance(listB, list) else [listB]
    return normalized_a + normalized_b
# -----------------------------------------------------------------------------
class ModelManager:
    """
    Base class for all model/resource managers.

    Provides common queries and CRUD operations as a (hopefully) light layer
    over the ORM.
    """
    # subclasses set model_class to the SQLAlchemy model they manage, and
    # foreign_key_name to the attribute used when associating other models with it
    model_class: type = object
    foreign_key_name: str
    app: BasicApp

    def __init__(self, app: BasicApp):
        self.app = app

    def session(self) -> scoped_session:
        """Return the app's scoped SQLAlchemy session."""
        return self.app.model.context

    def _session_setattr(self, item, attr, val, fn=None, flush=True):
        # Set ``attr`` on ``item`` (delegating to ``fn`` if given), add the item
        # to the session, optionally flush, and return the item.
        if fn:
            fn(item, attr, val)
        else:
            setattr(item, attr, val)

        self.session().add(item)
        if flush:
            self.session().flush()
        return item

    # .... query foundation wrapper
    def query(self, eagerloads=True, **kwargs):
        """
        Return a basic query from model_class, filters, order_by, and limit and offset.

        Set eagerloads to False to disable them for this query.
        """
        query = self.session().query(self.model_class)
        # joined table loading
        if eagerloads is False:
            query = query.enable_eagerloads(False)
        return self._filter_and_order_query(query, **kwargs)

    def _filter_and_order_query(self, query, filters=None, order_by=None, limit=None, offset=None, **kwargs):
        # Apply filters, ordering, then limit/offset to the given query.
        # TODO: not a lot of functional cohesion here
        query = self._apply_orm_filters(query, filters)
        query = self._apply_order_by(query, order_by)
        query = self._apply_orm_limit_offset(query, limit, offset)
        return query

    # .... filters
    def _apply_orm_filters(self, query, filters):
        """
        Add any filters to the given query.
        """
        if filters is None:
            return query

        if not isinstance(filters, list):
            filters = [filters]
        # note: implicit AND
        for filter in filters:
            query = query.filter(filter)
        return query

    def _munge_filters(self, filtersA, filtersB):
        """
        Combine two lists into a single list.

        (While allowing them to be None, non-lists, or lists.)
        """
        return munge_lists(filtersA, filtersB)

    # .... order, limit, and offset
    def _apply_order_by(self, query, order_by):
        """
        Return the query after adding the order_by clauses.

        Use the manager's default_order_by if order_by is None.
        """
        if order_by is None:
            return query.order_by(*self._default_order_by())

        if isinstance(order_by, (list, tuple)):
            return query.order_by(*order_by)
        return query.order_by(order_by)

    def _default_order_by(self):
        """
        Returns a tuple of columns for the default order when getting multiple models.
        """
        return (self.model_class.create_time, )

    def _apply_orm_limit_offset(self, query, limit, offset):
        """
        Return the query after applying the given limit and offset (if not None).
        """
        if limit is not None:
            query = query.limit(limit)
        if offset is not None:
            query = query.offset(offset)
        return query

    # .... query resolution
    def one(self, **kwargs):
        """
        Sends kwargs to build the query and returns one and only one model.
        """
        query = self.query(**kwargs)
        return self._one_with_recast_errors(query)

    def _one_with_recast_errors(self, query):
        """
        Call sqlalchemy's one and recast errors to serializable errors if any.

        :raises exceptions.ObjectNotFound: if no model is found
        :raises exceptions.InconsistentDatabase: if more than one model is found
        """
        # overridden to raise serializable errors
        try:
            return query.one()
        except sqlalchemy.orm.exc.NoResultFound:
            raise exceptions.ObjectNotFound(self.model_class.__name__ + ' not found')
        except sqlalchemy.orm.exc.MultipleResultsFound:
            raise exceptions.InconsistentDatabase('found more than one ' + self.model_class.__name__)

    def _one_or_none(self, query):
        """
        Return the object if found, None if it's not.

        :raises exceptions.InconsistentDatabase: if more than one model is found
        """
        try:
            return self._one_with_recast_errors(query)
        except exceptions.ObjectNotFound:
            return None

    # NOTE: at this layer, all ids are expected to be decoded and in int form
    def by_id(self, id, **kwargs):
        """
        Gets a model by primary id.
        """
        id_filter = self.model_class.id == id
        return self.one(filters=id_filter, **kwargs)

    # .... multirow queries
    def list(self, filters=None, order_by=None, limit=None, offset=None, **kwargs):
        """
        Returns all objects matching the given filters
        """
        # list becomes a way of applying both filters generated in the orm (such as .user ==)
        # and functional filters that aren't currently possible using the orm (such as instance calculated values
        # or annotations/tags). List splits those two filters and applies limits/offsets
        # only after functional filters (if any) using python.
        orm_filters, fn_filters = self._split_filters(filters)
        if not fn_filters:
            # if no fn_filtering required, we can use the 'all orm' version with limit offset
            return self._orm_list(filters=orm_filters, order_by=order_by,
                                  limit=limit, offset=offset, **kwargs)

        # fn filters will change the number of items returnable by limit/offset - remove them here from the orm query
        query = self.query(filters=orm_filters, order_by=order_by, limit=None, offset=None, **kwargs)
        items = query.all()

        # apply limit, offset after SQL filtering
        items = self._apply_fn_filters_gen(items, fn_filters)
        return list(self._apply_fn_limit_offset_gen(items, limit, offset))

    def _split_filters(self, filters):
        """
        Splits `filters` into a tuple of two lists:
        a list of filters to be added to the SQL query
        and a list of functional filters to be applied after the SQL query.
        """
        orm_filters, fn_filters = ([], [])
        if filters is None:
            return (orm_filters, fn_filters)

        if not isinstance(filters, list):
            filters = [filters]
        for filter_ in filters:
            if not hasattr(filter_, 'filter_type'):
                # a raw SQLAlchemy clause, not a parsed_filter namedtuple
                orm_filters.append(filter_)
            elif filter_.filter_type == 'function':
                fn_filters.append(filter_.filter)
            elif filter_.filter_type == 'orm_function':
                # orm_function filters are callables invoked with the model class
                # to produce the actual clause
                orm_filters.append(filter_.filter(self.model_class))
            else:
                orm_filters.append(filter_.filter)
        return (orm_filters, fn_filters)

    def _orm_list(self, query=None, **kwargs):
        """
        Sends kwargs to build the query return all models found.
        """
        query = query or self.query(**kwargs)
        return query.all()

    def _apply_fn_filters_gen(self, items, filters):
        """
        If all the filter functions in `filters` return True for an item in `items`,
        yield that item.
        """
        # cpu-expensive
        for item in items:
            filter_results = [f(item) for f in filters]
            if all(filter_results):
                yield item

    def _apply_fn_limit_offset_gen(self, items, limit, offset):
        """
        Iterate over `items` and begin yielding items after
        `offset` number of items and stop when we've yielded
        `limit` number of items.
        """
        # change negative limit, offset to None
        if limit is not None and limit < 0:
            limit = None
        if offset is not None and offset < 0:
            offset = None

        yielded = 0
        for i, item in enumerate(items):
            if offset is not None and i < offset:
                continue
            if limit is not None and yielded >= limit:
                break
            yield item
            yielded += 1

    def by_ids(self, ids, filters=None, **kwargs):
        """
        Returns an in-order list of models with the matching ids in `ids`.
        """
        if not ids:
            return []
        ids_filter = parsed_filter("orm", self.model_class.id.in_(ids))
        found = self.list(filters=self._munge_filters(ids_filter, filters), **kwargs)
        # TODO: this does not order by the original 'ids' array
        # ...could use get (supposedly since found are in the session, the db won't be hit twice)
        # return map( self.session().query( self.model_class ).get, ids )
        # ...could implement own version here - slow?
        return self._order_items_by_id(ids, found)

    def _order_items_by_id(self, ids, items):
        """
        Given a list of (unique) ids and a list of items having an 'id' attribute,
        return items that have the given ids in that order.

        If an id in ids is not found or if an item in items doesn't have a given
        id, they will not be in the returned list.
        """
        ID_ATTR_NAME = 'id'
        # TODO:?? aside from sqlalx.get mentioned above, I haven't seen an in-SQL way
        # to make this happen. This may not be the most efficient way either.
        # NOTE: that this isn't sorting by id - this is matching the order in items to the order in ids
        # move items list into dict by id
        item_dict = {}
        for item in items:
            item_id = getattr(item, ID_ATTR_NAME, None)
            if item_id:
                item_dict[item_id] = item
        # pull from map in order of ids
        in_order = []
        for id in ids:
            if id in item_dict:
                in_order.append(item_dict[id])
        return in_order

    def create(self, flush=True, *args, **kwargs):
        """
        Generically create a new model.
        """
        # NOTE(review): with ``flush`` declared before ``*args``, a caller's first
        # positional argument would bind to ``flush`` - model fields should be
        # passed as keywords; consider reordering to `*args, flush=True`. TODO confirm callers.
        # override in subclasses
        item = self.model_class(*args, **kwargs)
        self.session().add(item)
        if flush:
            self.session().flush()
        return item

    def copy(self, item, **kwargs):
        """
        Clone or copy an item.
        """
        # intentionally abstract - subclasses implement model-specific copying
        raise exceptions.NotImplemented('Abstract method')

    def update(self, item, new_values, flush=True, **kwargs):
        """
        Given a dictionary of new values, update `item` and return it.

        ..note: NO validation or deserialization occurs here.
        """
        self.session().add(item)
        for key, value in new_values.items():
            # silently skips keys that aren't attributes of the item
            if hasattr(item, key):
                setattr(item, key, value)
        if flush:
            self.session().flush()
        return item

    def associate(self, associate_with, item, foreign_key_name=None):
        """
        Generically associate `item` with `associate_with` based on `foreign_key_name`.
        """
        foreign_key_name = foreign_key_name or self.foreign_key_name
        setattr(associate_with, foreign_key_name, item)
        return item

    def _foreign_key(self, associated_model_class, foreign_key_name=None):
        # Return the foreign-key attribute on the associated model class.
        foreign_key_name = foreign_key_name or self.foreign_key_name
        return getattr(associated_model_class, foreign_key_name)

    def query_associated(self, associated_model_class, item, foreign_key_name=None):
        """
        Generically query other items that have been associated with this `item`.
        """
        foreign_key = self._foreign_key(associated_model_class, foreign_key_name=foreign_key_name)
        return self.session().query(associated_model_class).filter(foreign_key == item)

    # a rename of sql DELETE to differentiate from the Galaxy notion of mark_as_deleted
    # def destroy( self, item, **kwargs ):
    #     return item
# ---- code for classes that use one *main* model manager
# TODO: this may become unnecessary if we can access managers some other way (class var, app, etc.)
class HasAModelManager:
    """
    Mixin for objects (serializers, deserializers, filter parsers, etc.) that
    operate around one main model class and need a manager for it.
    """

    #: the class used to create this serializer's generically accessible model_manager
    model_manager_class: Type[object]
    # examples where this doesn't really work are ConfigurationSerializer (no manager)
    # and contents (2 managers)

    def __init__(self, app: MinimalManagerApp, manager=None, **kwargs):
        # An explicitly passed manager wins; otherwise it is resolved lazily.
        self._manager = manager

    @property
    def manager(self):
        """Return an appropriate manager if it exists, instantiate if not."""
        # PRECONDITION: assumes self.app is assigned elsewhere
        if self._manager:
            return self._manager
        # TODO: pass this serializer to it
        # this will error for unset model_manager_class'es
        self._manager = self.app[self.model_manager_class]
        return self._manager
# ==== SERIALIZERS/to_dict,from_dict
class ModelSerializingError(exceptions.InternalServerError):
    """Thrown when request model values can't be serialized"""
class ModelDeserializingError(exceptions.ObjectAttributeInvalidException):
    """Thrown when an incoming value isn't usable by the model
    (bad type, out of range, etc.)
    """
class SkipAttribute(Exception):
    """
    Raise this inside a serializer to prevent the returned dictionary from having
    the associated key or value for this attribute.
    """
class ModelSerializer(HasAModelManager):
    """
    Turns models into JSONable dicts.

    Maintains a map of requestable keys and the Callable() serializer functions
    that should be called for those keys.
    E.g. { 'x' : lambda item, key: item.x, ... }

    Note: if a key to serialize is not listed in the Serializer.serializable_keyset
    or serializers, it will not be returned.

    To serialize call:
        my_serializer = MySerializer( app )
        ...
        keys_to_serialize = [ 'id', 'name', 'attr1', 'attr2', ... ]
        item_dict = MySerializer.serialize( my_item, keys_to_serialize )
    """
    #: 'service' to use for getting urls - use class var to allow overriding when testing
    url_for = staticmethod(routes.url_for)

    # name of the view used when no view/keys are requested (see serialize_to_view)
    default_view: Optional[str]
    # named lists of keys that can be serialized together
    views: Dict[str, List[str]]

    def __init__(self, app: MinimalManagerApp, **kwargs):
        """
        Set up serializer map, any additional serializable keys, and views here.
        """
        super().__init__(app, **kwargs)
        self.app = app

        # a list of valid serializable keys that can use the default (string) serializer
        # this allows us to: 'mention' the key without adding the default serializer
        # TODO: we may want to eventually error if a key is requested
        # that is in neither serializable_keyset or serializers
        self.serializable_keyset: Set[str] = set()
        # a map of dictionary keys to the functions (often lambdas) that create the values for those keys
        self.serializers: Dict[str, Callable] = {}
        # add subclass serializers defined there
        self.add_serializers()
        # update the keyset by the serializers (removing the responsibility from subclasses)
        self.serializable_keyset.update(self.serializers.keys())

        # views are collections of serializable attributes (a named array of keys)
        # inspired by model.dict_{view}_visible_keys
        self.views = {}
        self.default_view = None

    def add_serializers(self):
        """
        Register a map of attribute keys -> serializing functions that will serialize
        the attribute.
        """
        self.serializers.update({
            'id': self.serialize_id,
            'create_time': self.serialize_date,
            'update_time': self.serialize_date,
        })

    def add_view(self, view_name, key_list, include_keys_from=None):
        """
        Add the list of serializable attributes `key_list` to the serializer's
        view dictionary under the key `view_name`.

        If `include_keys_from` is a proper view name, extend `key_list` by
        the list in that view.
        """
        # set() de-duplicates keys shared between key_list and the included view
        key_list = list(set(key_list + self.views.get(include_keys_from, [])))
        self.views[view_name] = key_list
        self.serializable_keyset.update(key_list)
        return key_list

    def serialize(self, item, keys, **context):
        """
        Serialize the model `item` to a dictionary.

        Given model `item` and the list `keys`, create and return a dictionary
        built from each key in `keys` that also exists in `serializers` and
        values of calling the keyed/named serializers on item.
        """
        # TODO: constrain context to current_user/whos_asking when that's all we need (trans)
        returned = {}
        for key in keys:
            # check both serializers and serializable keys
            if key in self.serializers:
                try:
                    returned[key] = self.serializers[key](item, key, **context)
                except SkipAttribute:
                    # dont add this key if the deserializer threw this
                    pass
            elif key in self.serializable_keyset:
                returned[key] = self.default_serializer(item, key, **context)
            # ignore bad/unreg keys
        return returned

    def skip(self, msg='skipped'):
        """
        To be called from inside a serializer to skip it.

        Handy for config checks, information hiding, etc.
        """
        raise SkipAttribute(msg)

    def _remap_from(self, original_key):
        # Return the serializer registered for ``original_key`` so another key
        # can reuse it; raises KeyError if the key is unknown.
        if original_key in self.serializers:
            return self.serializers[original_key]

        if original_key in self.serializable_keyset:
            return lambda i, k, **c: self.default_serializer(i, original_key, **c)

        raise KeyError('serializer not found for remap: ' + original_key)

    def default_serializer(self, item, key, **context):
        """
        Serialize the `item`'s attribute named `key`.
        """
        # TODO:?? point of change but not really necessary?
        return getattr(item, key)

    # serializers for common galaxy objects
    def serialize_date(self, item, key, **context):
        """
        Serialize a date attribute of `item`.
        """
        date = getattr(item, key)
        return date.isoformat() if date is not None else None

    def serialize_id(self, item, key, **context):
        """
        Serialize an id attribute of `item`.
        """
        id = getattr(item, key)
        # Note: it may not be best to encode the id at this layer
        return self.app.security.encode_id(id) if id is not None else None

    def serialize_type_id(self, item, key, **context):
        """
        Serialize an type-id for `item`.
        """
        TYPE_ID_SEP = '-'
        type_id = getattr(item, key)
        if type_id is None:
            return None

        split = type_id.split(TYPE_ID_SEP, 1)
        # NOTE(review): assumes type_id has the form '<type>-<id>'; a value without
        # the separator would raise IndexError here - TODO confirm upstream format.
        # Note: it may not be best to encode the id at this layer
        return TYPE_ID_SEP.join((split[0], self.app.security.encode_id(split[1])))

    # serializing to a view where a view is a predefined list of keys to serialize
    def serialize_to_view(self, item, view=None, keys=None, default_view=None, **context):
        """
        Use a predefined list of keys (the string `view`) and any additional keys
        listed in `keys`.

        The combinations can be:
            `view` only: return those keys listed in the named view
            `keys` only: return those keys listed
            no `view` or `keys`: use the `default_view` if any
            `view` and `keys`: combine both into one list of keys
        """
        # TODO: default view + view makes no sense outside the API.index context - move default view there
        all_keys = []
        keys = keys or []
        # chose explicit over concise here
        if view:
            if keys:
                all_keys = self._view_to_keys(view) + keys
            else:
                all_keys = self._view_to_keys(view)
        else:
            if keys:
                all_keys = keys
            elif default_view:
                all_keys = self._view_to_keys(default_view)

        return self.serialize(item, all_keys, **context)

    def _view_to_keys(self, view=None):
        """
        Converts a known view into a list of keys.

        :raises ModelSerializingError: if the view is not listed in `self.views`.
        """
        if view is None:
            view = self.default_view
        if view not in self.views:
            raise ModelSerializingError('unknown view', view=view, available_views=self.views)
        # return a copy so callers can't mutate the registered view
        return self.views[view][:]
class ModelDeserializer(HasAModelManager):
    """
    Converts an incoming serialized dict into values that can be directly
    assigned to an item's attributes, and assigns them.
    """
    # TODO:?? a larger question is: which should be first? Deserialize then validate - or - validate then deserialize?

    def __init__(self, app: MinimalManagerApp, validator=None, **kwargs):
        """
        Set up the deserializer registry and the validator sub-object.
        """
        super().__init__(app, **kwargs)
        self.app = app
        # registry: attribute key -> deserializing callable
        self.deserializers: Dict[str, Callable] = {}
        self.deserializable_keyset: Set[str] = set()
        self.add_deserializers()
        # a sub object that can validate incoming values
        self.validate = validator or ModelValidator(self.app)

    def add_deserializers(self):
        """
        Register a map of attribute keys -> functions that will deserialize data
        into attributes to be assigned to the item.
        """
        # to be overridden in subclasses

    def deserialize(self, item, data, flush=True, **context):
        """
        Convert the incoming serialized dict `data` into attribute values and
        assign them to `item`; return the dict of deserialized values.
        """
        # TODO: constrain context to current_user/whos_asking when that's all we need (trans)
        sa_session = self.app.model.context
        deserialized = {}
        for key in data:
            deserializer = self.deserializers.get(key)
            if deserializer is None:
                # !important: don't error on unregistered keys -- many clients send extras
                continue
            deserialized[key] = deserializer(item, key, data[key], **context)
        # TODO:?? add and flush here or in manager?
        if flush and deserialized:
            sa_session.add(item)
            sa_session.flush()
        return deserialized

    # ... common deserializers for primitives
    def default_deserializer(self, item, key, val, **context):
        """
        Assign `val` to `item.key` when it differs from the current value;
        return `val` in either case.
        """
        # TODO: sets the item attribute to value (this may not work in all instances)
        if hasattr(item, key):
            if getattr(item, key) != val:
                setattr(item, key, val)
        return val

    def deserialize_basestring(self, item, key, val, convert_none_to_empty=False, **context):
        """Validate `val` as a string (optionally mapping None to '') and assign it."""
        if convert_none_to_empty and val is None:
            checked = ''
        else:
            checked = self.validate.basestring(key, val)
        return self.default_deserializer(item, key, checked, **context)

    def deserialize_bool(self, item, key, val, **context):
        """Validate `val` as a bool and assign it."""
        return self.default_deserializer(item, key, self.validate.bool(key, val), **context)

    def deserialize_int(self, item, key, val, min=None, max=None, **context):
        """Validate `val` as an int within [min, max] and assign it."""
        return self.default_deserializer(item, key, self.validate.int_range(key, val, min, max), **context)

    # def deserialize_date( self, item, key, val ):
    #     #TODO: parse isoformat date into date object

    # ... common deserializers for Galaxy
    def deserialize_genome_build(self, item, key, val, **context):
        """
        Make sure `val` is a valid dbkey and assign it.
        """
        return self.default_deserializer(item, key, self.validate.genome_build(key, val), **context)
class ModelValidator(HasAModelManager):
    """
    Inspects a dictionary (generally meant to be a set of new/updated values
    for the model) and raises an error if a value is not acceptable.
    """

    def __init__(self, app, *args, **kwargs):
        super().__init__(app, **kwargs)
        self.app = app

    def type(self, key, val, types):
        """
        Check `val` against the type (or tuple of types) in `types`.

        :raises exceptions.RequestParameterInvalidException: if not an instance.
        """
        if isinstance(val, types):
            return val
        msg = 'must be a type: %s' % (str(types))
        raise exceptions.RequestParameterInvalidException(msg, key=key, val=val)

    # validators for primitives and compounds of primitives
    def basestring(self, key, val):
        """Must be a string."""
        return self.type(key, val, (str,))

    def bool(self, key, val):
        """Must be a bool."""
        return self.type(key, val, bool)

    def int(self, key, val):
        """Must be an int."""
        return self.type(key, val, int)

    def nullable_basestring(self, key, val):
        """
        Must be a basestring or None.
        """
        # isinstance accepts nested tuples of types, so this checks str-or-NoneType
        return self.type(key, val, ((str,), type(None)))

    def int_range(self, key, val, min=None, max=None):
        """
        Must be a int between min and max.
        """
        checked = self.type(key, val, int)
        if min is not None and checked < min:
            raise exceptions.RequestParameterInvalidException("less than minimum", key=key, val=checked, min=min)
        if max is not None and checked > max:
            raise exceptions.RequestParameterInvalidException("greater than maximum", key=key, val=checked, max=max)
        return checked

    def basestring_list(self, key, val):
        """
        Must be a list of basestrings.
        """
        # TODO: compound types get complicated quickly -- find a different way if this grows
        elements = self.type(key, val, list)
        return [self.basestring(key, element) for element in elements]

    # validators for Galaxy
    def genome_build(self, key, val):
        """
        Must be a valid base_string.

        Note: no checking against installation's ref list is done as many
        data sources consider this an open field.
        """
        # data source sites like UCSC can set genome builds to non-local names,
        # so only string validation (not a ref-list lookup) is performed here;
        # None falls back to the unknown-build marker
        if val is None:
            return '?'
        return self.basestring(key, val)

    # def slug( self, item, key, val ):
    #     """validate slug"""
    #     pass
# def slug( self, item, key, val ):
# """validate slug"""
# pass
# ==== Building query filters based on model data
class ModelFilterParser(HasAModelManager):
    """
    Converts string tuples (partially converted query string params) of
    attr, op, val into either:
    - ORM based filters (filters that can be applied by the ORM at the SQL
      level) or
    - functional filters (filters that use derived values or values not
      within the SQL tables)
    These filters can then be applied to queries.

    This abstraction allows 'smarter' application of limit and offset at either the
    SQL level or the generator/list level based on the presence of functional
    filters. In other words, if no functional filters are present, limit and offset
    may be applied at the SQL level. If functional filters are present, limit and
    offset need to be applied at the list level.

    These might safely be replaced in the future by creating SQLAlchemy
    hybrid properties or more thoroughly mapping derived values.
    """
    # ??: this class kindof 'lives' in both the world of the controllers/param-parsing and to models/orm
    # (as the model informs how the filter params are parsed)
    # I have no great idea where this 'belongs', so it's here for now

    #: model class whose columns are allowlisted for ORM filtering
    model_class: type
    parsed_filter = parsed_filter
    orm_filter_parsers: Dict[str, Dict]
    fn_filter_parsers: Dict[str, Dict]

    def __init__(self, app: MinimalManagerApp, **kwargs):
        """
        Set up the ORM and functional filter parser maps.
        """
        super().__init__(app, **kwargs)
        self.app = app
        #: regex for testing/dicing iso8601 date strings, with optional time and ms, but allowing only UTC timezone
        # NOTE(review): the char class [T| ] also accepts a literal '|' as the
        # date/time separator -- looks accidental; kept for compatibility
        self.date_string_re = re.compile(r'^(\d{4}\-\d{2}\-\d{2})[T| ]{0,1}(\d{2}:\d{2}:\d{2}(?:\.\d{1,6}){0,1}){0,1}Z{0,1}$')
        # dictionary containing parsing data for ORM/SQLAlchemy-based filters
        # ..note: although kind of a pain in the ass and verbose, opt-in/allowlisting allows more control
        # over potentially expensive queries
        self.orm_filter_parsers = {}
        #: dictionary containing parsing data for functional filters - applied after a query is made
        self.fn_filter_parsers = {}
        # set up both of the above
        self._add_parsers()

    def _add_parsers(self):
        """
        Set up, extend, or alter `orm_filter_parsers` and `fn_filter_parsers`.
        """
        # note: these are the default filters for all models
        # BUGFIX: single-op allowlists must be one-element tuples; ('in') is just
        # the string 'in', so `op in allowed_ops` became a substring test that
        # also accepted the bogus ops 'i' and 'n'
        self.orm_filter_parsers.update({
            # (prob.) applicable to all models
            'id': {'op': ('in',)},
            'encoded_id': {'column': 'id', 'op': ('in',), 'val': self.parse_id_list},
            # dates can be directly passed through the orm into a filter (no need to parse into datetime object)
            'extension': {'op': ('eq', 'like', 'in')},
            'create_time': {'op': ('le', 'ge', 'lt', 'gt'), 'val': self.parse_date},
            'update_time': {'op': ('le', 'ge', 'lt', 'gt'), 'val': self.parse_date},
        })

    def parse_filters(self, filter_tuple_list):
        """
        Parse string 3-tuples (attr, op, val) into orm or functional filters.
        """
        # TODO: allow defining the default filter op in this class (and not 'eq' in base/controller.py)
        parsed = []
        for (attr, op, val) in filter_tuple_list:
            filter_ = self.parse_filter(attr, op, val)
            parsed.append(filter_)
        return parsed

    def parse_filter(self, attr, op, val):
        """
        Attempt to parse filter as a custom/fn filter, then an orm filter, and
        if neither work - raise an error.

        :raises exceptions.RequestParameterInvalidException: if no functional or orm
        filter can be parsed.
        """
        try:
            # check for a custom filter
            fn_filter = self._parse_fn_filter(attr, op, val)
            if fn_filter is not None:
                return fn_filter
            # if no custom filter found, try to make an ORM filter
            # note: have to use explicit is None here, bool( sqlalx.filter ) == False
            orm_filter = self._parse_orm_filter(attr, op, val)
            if orm_filter is not None:
                return orm_filter
        # by convention, assume most val parsers raise ValueError
        except ValueError as val_err:
            raise exceptions.RequestParameterInvalidException('unparsable value for filter',
                column=attr, operation=op, value=val, ValueError=str(val_err))
        # if neither of the above work, raise an error with how-to info
        # TODO: send back all valid filter keys in exception for added user help
        raise exceptions.RequestParameterInvalidException('bad filter', column=attr, operation=op)

    # ---- fn filters
    def _parse_fn_filter(self, attr, op, val):
        """
        Attempt to parse a non-ORM filter function; return None when `attr`
        or `op` is not registered in `fn_filter_parsers`.
        """
        # fn_filter_list is a dict: fn_filter_list[ attr ] = { 'opname1' : opfn1, 'opname2' : opfn2, etc. }
        # attr, op is a nested dictionary pointing to the filter fn
        attr_map = self.fn_filter_parsers.get(attr, None)
        if not attr_map:
            return None
        allowed_ops = attr_map.get('op')
        # allowed ops is a map here, op => fn
        filter_fn = allowed_ops.get(op, None)
        if not filter_fn:
            return None
        # parse the val from string using the 'val' parser if present (otherwise, leave as string)
        val_parser = attr_map.get('val', None)
        if val_parser:
            val = val_parser(val)
        # curry/partial and fold the val in there now
        return self.parsed_filter(filter_type="function", filter=lambda i: filter_fn(i, val))

    # ---- ORM filters
    def _parse_orm_filter(self, attr, op, val):
        """
        Attempt to parse a ORM-based filter.

        Using SQLAlchemy, this would yield a sql.elements.BinaryExpression.
        """
        # orm_filter_list is a dict: orm_filter_list[ attr ] = <list of allowed ops>
        column_map = self.orm_filter_parsers.get(attr, None)
        if not column_map:
            # no column mapping (not allowlisted)
            return None
        if callable(column_map):
            return self.parsed_filter(filter_type="orm_function", filter=column_map(attr, op, val))
        # attr must be an allowlisted column by attr name or by key passed in column_map
        # note: column_map[ 'column' ] takes precedence
        if 'column' in column_map:
            attr = column_map['column']
        column = self.model_class.table.columns.get(attr)
        if column is None:
            # could be a property (hybrid_property, etc.) - assume we can make a filter from it
            column = getattr(self.model_class, attr)
        if column is None:
            # no orm column
            return None
        # op must be allowlisted: contained in the list orm_filter_list[ attr ][ 'op' ]
        allowed_ops = column_map.get('op')
        if op not in allowed_ops:
            return None
        op = self._convert_op_string_to_fn(column, op)
        if not op:
            return None
        # parse the val from string using the 'val' parser if present (otherwise, leave as string)
        val_parser = column_map.get('val', None)
        if val_parser:
            val = val_parser(val)
        orm_filter = op(val)
        return self.parsed_filter(filter_type="orm", filter=orm_filter)

    #: these are the easier/shorter string equivalents to the python operator fn names that need '__' around them
    UNDERSCORED_OPS = ('lt', 'le', 'eq', 'ne', 'ge', 'gt')

    def _convert_op_string_to_fn(self, column, op_string):
        """
        Convert the query string filter op shorthand into actual ORM usable
        function names, then return the ORM function.
        """
        # correct op_string to usable function key
        fn_name = op_string
        if op_string in self.UNDERSCORED_OPS:
            fn_name = '__' + op_string + '__'
        elif op_string == 'in':
            fn_name = 'in_'
        # get the column fn using the op_string and error if not a callable attr
        # TODO: special case 'not in' - or disallow?
        op_fn = getattr(column, fn_name, None)
        if not op_fn or not callable(op_fn):
            return None
        return op_fn

    # ---- preset fn_filters: dictionaries of standard filter ops for standard datatypes
    def string_standard_ops(self, key):
        """Standard 'eq'/'contains' functional ops over the string attribute `key`."""
        return {
            'op': {
                'eq': lambda i, v: v == getattr(i, key),
                'contains': lambda i, v: v in getattr(i, key),
            }
        }

    # --- more parsers! yay!
    # TODO: These should go somewhere central - we've got ~6 parser modules/sections now
    def parse_bool(self, bool_string):
        """
        Parse a boolean from a string.
        """
        # Be strict here to remove complexity of options (but allow already parsed).
        if bool_string in ('True', True):
            return True
        if bool_string in ('False', False):
            return False
        raise ValueError('invalid boolean: ' + str(bool_string))

    def parse_id_list(self, id_list_string, sep=','):
        """
        Split `id_list_string` at `sep` and decode each encoded id.
        """
        # TODO: move id decoding out
        id_list = [self.app.security.decode_id(id_) for id_ in id_list_string.split(sep)]
        return id_list

    def parse_int_list(self, int_list_string, sep=','):
        """
        Split `int_list_string` at `sep` and parse as ints.
        """
        int_list = [int(v) for v in int_list_string.split(sep)]
        return int_list

    def parse_date(self, date_string):
        """
        Reformats a string containing either seconds from epoch or an iso8601 formated
        date string into a new date string usable within a filter query.

        Seconds from epoch can be a floating point value as well (i.e containing ms).
        """
        # assume it's epoch if no date separator is present
        try:
            epoch = float(date_string)
            datetime_obj = datetime.datetime.fromtimestamp(epoch)
            return datetime_obj.isoformat(sep=' ')
        except ValueError:
            pass
        match = self.date_string_re.match(date_string)
        if match:
            date_string = ' '.join(group for group in match.groups() if group)
            return date_string
        raise ValueError('datetime strings must be in the ISO 8601 format and in the UTC')

    def raise_filter_err(self, attr, op, val, msg):
        """Raise the standard invalid-filter-parameter exception."""
        raise exceptions.RequestParameterInvalidException(msg, column=attr, operation=op, val=val)
def is_valid_slug(slug):
    """Return True iff `slug` is a valid slug (one or more of a-z, 0-9, '-')."""
    VALID_SLUG_RE = re.compile(r"^[a-z0-9\-]+$")
    # return a real bool (the docstring promises true/false, not a Match object)
    return VALID_SLUG_RE.match(slug) is not None
| 39.114135 | 143 | 0.632556 |
7297791db05ab40bf0827824367abd990f8158d1 | 12,139 | py | Python | src/ref/WGAN_CNN_CNN_DISCRETE/Critic.py | chychen/nba_scrip_generation | 942df59cc0426aa30b54a0e09c0f646aa8fd4f18 | [
"MIT"
] | 1 | 2020-07-09T09:00:09.000Z | 2020-07-09T09:00:09.000Z | src/ref/WGAN_CNN_CNN_DISCRETE/Critic.py | chychen/bball_defensive_strategies_generation | 942df59cc0426aa30b54a0e09c0f646aa8fd4f18 | [
"MIT"
] | null | null | null | src/ref/WGAN_CNN_CNN_DISCRETE/Critic.py | chychen/bball_defensive_strategies_generation | 942df59cc0426aa30b54a0e09c0f646aa8fd4f18 | [
"MIT"
] | null | null | null | """
modeling
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import shutil
import numpy as np
import tensorflow as tf
from tensorflow.contrib import rnn
from tensorflow.contrib import layers
from utils_cnn import Norm
class C_MODEL(object):
    """
    WGAN critic (discriminator): per-frame CNN features -> stacked BLSTM ->
    per-frame scalar scores averaged over time, trained with the gradient
    penalty from the 'improved-wgan' paper.
    """
    def __init__(self, config, graph):
        """ TO build up the graph
        Inputs
        ------
        config :
            * batch_size : mini batch size
            * log_dir : path to save training summary
            * learning_rate : adam's learning rate
            * hidden_size : number of hidden units in LSTM
            * rnn_layers : number of stacked LSTM
            * seq_length : length of LSTM
            * num_features : dimensions of input feature
            * latent_dims : dimensions of latent feature
            * penalty_lambda = gradient penalty's weight, ref from paper of 'improved-wgan'
        graph :
            tensorflow default graph
        """
        self.normer = Norm()
        # hyper-parameters
        self.batch_size = config.batch_size
        self.log_dir = config.log_dir
        self.learning_rate = config.learning_rate
        self.hidden_size = config.hidden_size
        self.rnn_layers = config.rnn_layers
        self.seq_length = config.seq_length
        self.num_features = config.num_features
        self.latent_dims = config.latent_dims
        self.penalty_lambda = config.penalty_lambda
        self.if_log_histogram = config.if_log_histogram
        # steps
        self.__global_steps = tf.train.get_or_create_global_step(graph=graph)
        self.__D_steps = 0
        # data
        # NOTE(review): placeholders are [batch, seq, PLAYERS, COLS, ROWS]
        # court-grid tensors -- confirm layout against utils_cnn.Norm
        self.__G_samples = tf.placeholder(dtype=tf.float32, shape=[
            self.batch_size, self.seq_length, self.normer.PLAYERS, self.normer.COLS, self.normer.ROWS], name='G_samples')
        self.__X = tf.placeholder(dtype=tf.float32, shape=[
            self.batch_size, self.seq_length, self.normer.PLAYERS, self.normer.COLS, self.normer.ROWS], name='real_data')
        # adversarial learning : wgan
        self.__build_wgan()
        # summary
        self.__summary_D_op = tf.summary.merge(tf.get_collection('D'))
        self.__summary_D_valid_op = tf.summary.merge(
            tf.get_collection('D_valid'))
        self.D_summary_writer = tf.summary.FileWriter(
            self.log_dir + 'D')
        self.D_valid_summary_writer = tf.summary.FileWriter(
            self.log_dir + 'D_valid')
    def __build_wgan(self):
        # builds the critic loss (WGAN with gradient penalty) and the Adam
        # train op over the 'D'-scoped trainable variables
        with tf.name_scope('WGAN'):
            D_real = self.inference(self.__X, seq_len=None)
            __D_fake = self.inference(
                self.__G_samples, seq_len=None, reuse=True)
            # loss function
            self.__D_loss, F_real, F_fake, grad_pen = self.__D_loss_fn(
                self.__X, self.__G_samples, __D_fake, D_real, self.penalty_lambda)
            theta_D = self.__get_var_list()
            with tf.name_scope('D_optimizer') as scope:
                D_optimizer = tf.train.AdamOptimizer(
                    learning_rate=self.learning_rate, beta1=0.5, beta2=0.9)
                D_grads = tf.gradients(self.__D_loss, theta_D)
                D_grads = list(zip(D_grads, theta_D))
                self.__D_train_op = D_optimizer.apply_gradients(
                    grads_and_vars=D_grads, global_step=self.__global_steps)
            # logging
            for grad, var in D_grads:
                self.__summarize(var.name, grad, collections='D',
                                 postfix='gradient')
            tf.summary.scalar('D_loss', self.__D_loss,
                              collections=['D', 'D_valid'])
            tf.summary.scalar('F_real', F_real, collections=['D'])
            tf.summary.scalar('F_fake', F_fake, collections=['D'])
            tf.summary.scalar('grad_pen', grad_pen, collections=['D'])
    def __summarize(self, name, value, collections, postfix=''):
        """ Helper to create summaries for activations.
        Creates a summary that provides a histogram of activations.
        Creates a summary that measures the sparsity of activations.
        Args
        ----
        name : string
        value : Tensor
        collections : list of string
        postfix : string
        Returns
        -------
        nothing
        """
        # histogram summaries are opt-in because they are expensive to log
        if self.if_log_histogram:
            tensor_name = name + '/' + postfix
            tf.summary.histogram(tensor_name,
                                 value, collections=collections)
            # tf.summary.scalar(tensor_name + '/sparsity',
            #                   tf.nn.zero_fraction(x), collections=collections)
    def __get_var_list(self):
        """ to get both Generator's and Discriminator's trainable variables
        and add trainable variables into histogram
        """
        # only variables whose names start with 'D' (the critic scope) are kept
        trainable_V = tf.trainable_variables()
        theta_D = []
        for _, v in enumerate(trainable_V):
            if v.name.startswith('D'):
                theta_D.append(v)
                self.__summarize(v.op.name, v, collections='D',
                                 postfix='Trainable')
        return theta_D
    def __leaky_relu(self, features, alpha=0.7):
        # leaky ReLU with an unusually large negative slope (0.7)
        return tf.maximum(features, alpha * features)
    def __lstm_cell(self):
        return rnn.LSTMCell(self.hidden_size, use_peepholes=True, initializer=None,
                            forget_bias=1.0, state_is_tuple=True,
                            # activation=self.__leaky_relu, cell_clip=2,
                            activation=tf.nn.tanh, reuse=tf.get_variable_scope().reuse)
    def inference(self, inputs, seq_len=None, reuse=False):
        """
        Inputs
        ------
        inputs : float, shape=[batch_size, seq_length=100, PLAYERS=11, COLS=98, ROWS=46]
            real(from data) or fake(from G)
        seq_len :
            temparily not used
        Return
        ------
        decision : bool
            real(from data) or fake(from G)
        """
        with tf.variable_scope('D', reuse=reuse) as scope:
            # unstack, axis=1 -> [batch, time, feature]
            # (debugging prints of tensor shapes were left in by the author)
            print(inputs)
            # move the player dimension last so conv2d treats it as channels
            inputs = tf.transpose(inputs, perm=[0, 1, 3, 4, 2])
            print(inputs)
            inputs = tf.unstack(inputs, num=self.seq_length, axis=1)
            blstm_input = []
            output_list = []
            for time_step in range(self.seq_length):
                # the same conv stack is reused (shared weights) for every frame
                with tf.variable_scope('conv') as scope:
                    if time_step > 0:
                        tf.get_variable_scope().reuse_variables()
                    filters_list = [32, 64, 128, 256]
                    next_input = inputs[time_step]
                    for i in range(len(filters_list)):
                        with tf.variable_scope('conv' + str(i)) as scope:
                            conv = layers.conv2d(
                                inputs=next_input,
                                num_outputs=filters_list[i],
                                kernel_size=[5, 5],
                                stride=2,
                                padding='SAME',
                                activation_fn=tf.nn.relu,
                                weights_initializer=layers.xavier_initializer(
                                    uniform=False),
                                weights_regularizer=None,
                                biases_initializer=tf.zeros_initializer(),
                                reuse=scope.reuse,
                                scope=scope
                            )
                            next_input = conv
                    with tf.variable_scope('fc') as scope:
                        flat_input = layers.flatten(next_input)
                        fc = layers.fully_connected(
                            inputs=flat_input,
                            num_outputs=self.hidden_size,
                            activation_fn=tf.nn.relu,
                            weights_initializer=layers.xavier_initializer(
                                uniform=False),
                            biases_initializer=tf.zeros_initializer(),
                            reuse=scope.reuse,
                            scope=scope
                        )
                        blstm_input.append(fc)
            with tf.variable_scope('stack_blstm') as scope:
                stack_blstm, _, _ = rnn.stack_bidirectional_rnn(
                    cells_fw=[self.__lstm_cell()
                              for _ in range(self.rnn_layers)],
                    cells_bw=[self.__lstm_cell()
                              for _ in range(self.rnn_layers)],
                    inputs=blstm_input,
                    dtype=tf.float32,
                    sequence_length=seq_len
                )
            with tf.variable_scope('output') as scope:
                # one shared scalar projection applied per time step
                for i, out_blstm in enumerate(stack_blstm):
                    if i > 0:
                        tf.get_variable_scope().reuse_variables()
                    with tf.variable_scope('fc') as scope:
                        fc = layers.fully_connected(
                            inputs=out_blstm,
                            num_outputs=1,
                            activation_fn=self.__leaky_relu,
                            weights_initializer=layers.xavier_initializer(
                                uniform=False),
                            biases_initializer=tf.zeros_initializer(),
                            reuse=scope.reuse,
                            scope=scope
                        )
                        output_list.append(fc)
            # stack, axis=1 -> [batch, time, feature]
            decisions = tf.stack(output_list, axis=1)
            print('decisions', decisions)
            # average the per-frame scores into one critic score per sample
            decision = tf.reduce_mean(decisions, axis=1)
            print('decision', decision)
            return decision
    def __D_loss_fn(self, __X, __G_sample, D_fake, D_real, penalty_lambda):
        """ D loss
        """
        # WGAN-GP critic loss: E[D(fake)] - E[D(real)] + lambda * gradient penalty
        with tf.name_scope('D_loss') as scope:
            # grad_pen, base on paper (Improved WGAN)
            epsilon = tf.random_uniform(
                [self.batch_size, 1, 1, 1, 1], minval=0.0, maxval=1.0)
            __X_inter = epsilon * __X + (1.0 - epsilon) * __G_sample
            grad = tf.gradients(
                self.inference(__X_inter, seq_len=None, reuse=True), [__X_inter])[0]
            print(grad)
            sum_ = tf.reduce_sum(tf.square(grad), axis=[1, 2, 3, 4])
            print(sum_)
            grad_norm = tf.sqrt(sum_)
            grad_pen = penalty_lambda * tf.reduce_mean(
                tf.square(grad_norm - 1.0))
            f_fake = tf.reduce_mean(D_fake)
            f_real = tf.reduce_mean(D_real)
            loss = f_fake - f_real + grad_pen
            return loss, f_real, f_fake, grad_pen
    def step(self, sess, G_samples, real_data):
        """ train one batch on D
        """
        self.__D_steps += 1
        feed_dict = {self.__G_samples: G_samples,
                     self.__X: real_data}
        loss, global_steps, _ = sess.run(
            [self.__D_loss, self.__global_steps, self.__D_train_op], feed_dict=feed_dict)
        if not self.if_log_histogram or self.__D_steps % 500 == 0:  # % 500 to save space
            summary = sess.run(self.__summary_D_op, feed_dict=feed_dict)
            # log
            self.D_summary_writer.add_summary(
                summary, global_step=global_steps)
        return loss, global_steps
    def D_log_valid_loss(self, sess, G_samples, real_data):
        """ one batch valid loss
        """
        # evaluation only: no train op is run here
        feed_dict = {self.__G_samples: G_samples,
                     self.__X: real_data}
        loss, global_steps = sess.run(
            [self.__D_loss, self.__global_steps], feed_dict=feed_dict)
        if not self.if_log_histogram or self.__D_steps % 500 == 0:  # % 500 to save space
            summary = sess.run(self.__summary_D_valid_op, feed_dict=feed_dict)
            # log
            self.D_valid_summary_writer.add_summary(
                summary, global_step=global_steps)
        return loss
| 43.199288 | 121 | 0.539583 |
b60237c6854b0d0a83a7b795c3afddfbe7334676 | 8,161 | py | Python | front-end/testsuite-python-lib/Python-3.0/Lib/distutils/command/build_clib.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | [
"MIT"
] | 1 | 2020-11-26T18:53:46.000Z | 2020-11-26T18:53:46.000Z | front-end/testsuite-python-lib/Python-3.0/Lib/distutils/command/build_clib.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | [
"MIT"
] | 1 | 2015-10-29T20:51:31.000Z | 2015-10-29T20:51:31.000Z | front-end/testsuite-python-lib/Python-3.0/Lib/distutils/command/build_clib.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | [
"MIT"
] | 1 | 2019-04-11T11:27:01.000Z | 2019-04-11T11:27:01.000Z | """distutils.command.build_clib
Implements the Distutils 'build_clib' command, to build a C/C++ library
that is included in the module distribution and needed by an extension
module."""
__revision__ = "$Id: build_clib.py 58495 2007-10-16 18:12:55Z guido.van.rossum $"
# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files. Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste. Sigh.
import os
from distutils.core import Command
from distutils.errors import *
from distutils.sysconfig import customize_compiler
from distutils import log
def show_compilers():
    """Print the list of available compiler types."""
    # defer the import so distutils.ccompiler is only loaded when needed
    from distutils.ccompiler import show_compilers as _show_compilers
    _show_compilers()
class build_clib(Command):

    description = "build C/C++ libraries used by Python extensions"

    user_options = [
        ('build-clib', 'b',
         "directory to build C/C++ libraries to"),
        ('build-temp', 't',
         "directory to put temporary build by-products"),
        ('debug', 'g',
         "compile with debugging information"),
        ('force', 'f',
         "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c',
         "specify the compiler type"),
        ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None,
         "list available compilers", show_compilers),
        ]

    def initialize_options(self):
        """Set every option to its pre-'finalize' default."""
        self.build_clib = None
        self.build_temp = None

        # List of libraries to build
        self.libraries = None

        # Compilation options for all libraries
        self.include_dirs = None
        self.define = None
        self.undef = None
        self.debug = None
        self.force = 0
        self.compiler = None

    def finalize_options(self):
        """Fill in unset options from the 'build' command and validate them."""
        # This might be confusing: both build-clib and build-temp default
        # to build-temp as defined by the "build" command.  This is because
        # I think that C libraries are really just temporary build
        # by-products, at least from the point of view of building Python
        # extensions -- but I want to keep my options open.
        self.set_undefined_options('build',
                                   ('build_temp', 'build_clib'),
                                   ('build_temp', 'build_temp'),
                                   ('compiler', 'compiler'),
                                   ('debug', 'debug'),
                                   ('force', 'force'))

        self.libraries = self.distribution.libraries
        if self.libraries:
            self.check_library_list(self.libraries)

        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # XXX same as for build_ext -- what about 'self.define' and
        # 'self.undef' ?

    def run(self):
        """Set up the compiler and build every configured library."""
        if not self.libraries:
            return

        # Yech -- this is cut 'n pasted from build_ext.py!
        from distutils.ccompiler import new_compiler
        self.compiler = new_compiler(compiler=self.compiler,
                                     dry_run=self.dry_run,
                                     force=self.force)
        customize_compiler(self.compiler)

        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)

        self.build_libraries(self.libraries)

    def check_library_list(self, libraries):
        """Ensure that the list of libraries (presumably provided as a
           command option 'libraries') is valid, i.e. it is a list of
           2-tuples, where the tuples are (library_name, build_info_dict).
           Raise DistutilsSetupError if the structure is invalid anywhere;
           just returns otherwise."""
        if not isinstance(libraries, list):
            raise DistutilsSetupError(
                  "'libraries' option must be a list of tuples")

        for lib in libraries:
            # NOTE(review): 'and' only rejects non-tuples whose len() != 2, so a
            # wrong-length tuple slips past this check -- kept for compatibility
            if not isinstance(lib, tuple) and len(lib) != 2:
                raise DistutilsSetupError(
                      "each element of 'libraries' must a 2-tuple")

            # BUGFIX: this check was inverted ('if isinstance(...)'), so every
            # *valid* string library name raised DistutilsSetupError
            if not isinstance(lib[0], str):
                raise DistutilsSetupError(
                      "first element of each tuple in 'libraries' "
                      "must be a string (the library name)")
            if '/' in lib[0] or (os.sep != '/' and os.sep in lib[0]):
                raise DistutilsSetupError("bad library name '%s': "
                      "may not contain directory separators" % lib[0])

            if not isinstance(lib[1], dict):
                raise DistutilsSetupError(
                      "second element of each tuple in 'libraries' "
                      "must be a dictionary (build info)")

    def get_library_names(self):
        """Return the names of all configured libraries, or None if there are none."""
        # Assume the library list is valid -- 'check_library_list()' is
        # called from 'finalize_options()', so it should be!
        if not self.libraries:
            return None

        lib_names = []
        for (lib_name, build_info) in self.libraries:
            lib_names.append(lib_name)
        return lib_names

    def get_source_files(self):
        """Return all source filenames, validating each library's build info."""
        self.check_library_list(self.libraries)
        filenames = []
        for (lib_name, build_info) in self.libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                      "in 'libraries' option (library '%s'), "
                      "'sources' must be present and must be "
                      "a list of source filenames" % lib_name)
            filenames.extend(sources)
        return filenames

    def build_libraries(self, libraries):
        """Compile the sources of each library and archive them into a static lib."""
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                      "in 'libraries' option (library '%s'), "
                      "'sources' must be present and must be "
                      "a list of source filenames" % lib_name)
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(sources,
                                            output_dir=self.build_temp,
                                            macros=macros,
                                            include_dirs=include_dirs,
                                            debug=self.debug)

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(objects, lib_name,
                                            output_dir=self.build_clib,
                                            debug=self.debug)
7f2852ce37516cea085533915c38c89d579d5b8e | 2,794 | py | Python | modules/runtime/scenario/scenario_generation/scenario_generation.py | Lizhu-Chen/bark | fad029f658e462eb1772c28c2c0971faf5176dc1 | [
"MIT"
] | null | null | null | modules/runtime/scenario/scenario_generation/scenario_generation.py | Lizhu-Chen/bark | fad029f658e462eb1772c28c2c0971faf5176dc1 | [
"MIT"
] | null | null | null | modules/runtime/scenario/scenario_generation/scenario_generation.py | Lizhu-Chen/bark | fad029f658e462eb1772c28c2c0971faf5176dc1 | [
"MIT"
] | 1 | 2020-08-12T17:09:05.000Z | 2020-08-12T17:09:05.000Z | # Copyright (c) 2020 Julian Bernhard, Klemens Esterle, Patrick Hart and
# Tobias Kessler
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
import pickle
import os
from modules.runtime.commons.parameters import ParameterServer
class ScenarioGeneration:
def __init__(self, params=None, num_scenarios=None, random_seed=1000):
self._params = params
self._current_scenario_idx = 0
self._random_seed = random_seed
if params is None:
self._params = ParameterServer()
else:
self._params = params
self.initialize_params(self._params)
self._scenario_list = self.create_scenarios(params, num_scenarios)
def initialize_params(self, params):
pass
@property
def params(self):
return self._params
def get_next_scenario(self):
if self._current_scenario_idx >= self.num_scenarios:
self._current_scenario_idx = 0
#print("Resetting scenario index to zero")
scenario = self.get_scenario(self._current_scenario_idx)
scenario_idx = self._current_scenario_idx
self._current_scenario_idx += 1
return scenario, scenario_idx
def get_num_scenarios(self):
return len(self._scenario_list)
def get_scenario(self, idx):
return self._scenario_list[idx].copy()
def __iter__(self):
self._current_iter_idx=0
return self
def __next__(self):
if self._current_iter_idx < self.get_num_scenarios():
scenario = self.get_scenario(self._current_iter_idx)
idx = self._current_iter_idx
self._current_iter_idx += 1
return scenario, idx
else:
raise StopIteration
def create_scenarios(self, params, num_scenarios):
""" Creates a list of scenario class instances which should be
deterministically reproducible given the random seed,
the params and the number of scenarios
Arguments:
params {[bark.common.ParameterServer]} -- [provides additional parameters]
num_scenarios {[int]} -- [how many scenarios should be created]
random_seed {[unsigned int]} -- [seed used to make scenario generation
based on random factors reproducible]
Returns:
scenario_list {[a list of instances of type scenario class]} --
[each scenario in this list defines one initial world condition]
"""
return None
@property
def num_scenarios(self):
return len(self._scenario_list)
def dump_scenario_list(self, filename):
with open(filename, "wb") as file:
# print("SAVE PATH:", os.path.abspath(filename))
pickle.dump(self._scenario_list, file)
def load_scenario_list(self, filename):
with open(filename, "rb") as file:
self._scenario_list = pickle.load(file)
| 31.75 | 82 | 0.702935 |
b6e993cb460a6f2f10de8d5fff9797e715e8cdd0 | 4,424 | py | Python | datasets/austin_bikeshare/bikeshare_stations/bikeshare_stations_dag.py | renovate-bot/public-datasets-pipelines | d2b5e527d9d2dcc8e01f5209e7b9409dfe2b62a8 | [
"Apache-2.0"
] | 90 | 2021-04-09T19:20:19.000Z | 2022-03-31T16:03:14.000Z | datasets/austin_bikeshare/bikeshare_stations/bikeshare_stations_dag.py | renovate-bot/public-datasets-pipelines | d2b5e527d9d2dcc8e01f5209e7b9409dfe2b62a8 | [
"Apache-2.0"
] | 125 | 2021-04-19T20:33:26.000Z | 2022-03-30T21:45:49.000Z | datasets/austin_bikeshare/bikeshare_stations/bikeshare_stations_dag.py | renovate-bot/public-datasets-pipelines | d2b5e527d9d2dcc8e01f5209e7b9409dfe2b62a8 | [
"Apache-2.0"
] | 54 | 2021-04-29T23:17:36.000Z | 2022-03-31T05:15:23.000Z | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow import DAG
from airflow.contrib.operators import gcs_to_bq, kubernetes_pod_operator
# Arguments applied to every task in this DAG.
default_args = {
    "owner": "Google",
    "depends_on_past": False,
    "start_date": "2021-03-01",
}

# Daily pipeline: transform the Austin bikeshare stations CSV in a
# Kubernetes pod, then load the result from GCS into BigQuery.
with DAG(
    dag_id="austin_bikeshare.bikeshare_stations",
    default_args=default_args,
    max_active_runs=1,
    schedule_interval="@daily",
    catchup=False,
    default_view="graph",
) as dag:

    # Run CSV transform within kubernetes pod
    austin_bikeshare_stations_transform_csv = kubernetes_pod_operator.KubernetesPodOperator(
        task_id="austin_bikeshare_stations_transform_csv",
        name="bikeshare_stations",
        namespace="composer",
        service_account_name="datasets",
        image_pull_policy="Always",
        # Container image is resolved from an Airflow variable at runtime.
        image="{{ var.json.austin_bikeshare.container_registry.run_csv_transform_kub }}",
        env_vars={
            "SOURCE_URL": "https://data.austintexas.gov/api/views/qd73-bsdg/rows.csv",
            "SOURCE_FILE": "files/data.csv",
            "TARGET_FILE": "files/data_output.csv",
            "TARGET_GCS_BUCKET": "{{ var.value.composer_bucket }}",
            "TARGET_GCS_PATH": "data/austin_bikeshare/bikeshare_stations/data_output.csv",
            "PIPELINE_NAME": "bikeshare_stations",
            # JSON-encoded list of output column names, in order.
            "CSV_HEADERS": '["station_id","name","status","address","alternate_name","city_asset_number","property_type","number_of_docks","power_type","footprint_length","footprint_width","notes","council_district","modified_date"]',
            # JSON-encoded mapping of source column names to output names.
            "RENAME_MAPPINGS": '{"Kiosk ID": "station_id","Kiosk Name": "name","Kiosk Status": "status","Address": "address","Alternate Name": "alternate_name","City Asset Number": "city_asset_number","Property Type": "property_type","Number of Docks": "number_of_docks","Power Type": "power_type","Footprint Length": "footprint_length","Footprint Width": "footprint_width","Notes": "notes","Council District": "council_district","Modified Date": "modified_date"}',
        },
        resources={"request_memory": "4G", "request_cpu": "1"},
    )

    # Task to load CSV data to a BigQuery table
    load_austin_bikeshare_stations_to_bq = (
        gcs_to_bq.GoogleCloudStorageToBigQueryOperator(
            task_id="load_austin_bikeshare_stations_to_bq",
            bucket="{{ var.value.composer_bucket }}",
            source_objects=["data/austin_bikeshare/bikeshare_stations/data_output.csv"],
            source_format="CSV",
            destination_project_dataset_table="austin_bikeshare.bikeshare_stations",
            skip_leading_rows=1,
            # Each run fully replaces the table contents.
            write_disposition="WRITE_TRUNCATE",
            schema_fields=[
                {"name": "station_id", "type": "INTEGER", "mode": "NULLABLE"},
                {"name": "name", "type": "STRING", "mode": "NULLABLE"},
                {"name": "status", "type": "STRING", "mode": "NULLABLE"},
                {"name": "address", "type": "STRING", "mode": "NULLABLE"},
                {"name": "alternate_name", "type": "STRING", "mode": "NULLABLE"},
                {"name": "city_asset_number", "type": "INTEGER", "mode": "NULLABLE"},
                {"name": "property_type", "type": "STRING", "mode": "NULLABLE"},
                {"name": "number_of_docks", "type": "INTEGER", "mode": "NULLABLE"},
                {"name": "power_type", "type": "STRING", "mode": "NULLABLE"},
                {"name": "footprint_length", "type": "INTEGER", "mode": "NULLABLE"},
                {"name": "footprint_width", "type": "FLOAT", "mode": "NULLABLE"},
                {"name": "notes", "type": "STRING", "mode": "NULLABLE"},
                {"name": "council_district", "type": "INTEGER", "mode": "NULLABLE"},
                {"name": "modified_date", "type": "TIMESTAMP", "mode": "NULLABLE"},
            ],
        )
    )

    # The transform must finish before the BigQuery load starts.
    austin_bikeshare_stations_transform_csv >> load_austin_bikeshare_stations_to_bq
934a5b0cd2567ccf419b56afd3c12a081ee3b3db | 7,278 | py | Python | src/data_preprocessing.py | harbenml/Kaggle-Ttitanic | fcce8eca3484a5a46e16af439cc070b3720e6a8b | [
"MIT"
] | 1 | 2020-07-14T10:02:59.000Z | 2020-07-14T10:02:59.000Z | src/data_preprocessing.py | harbenml/Kaggle-Ttitanic | fcce8eca3484a5a46e16af439cc070b3720e6a8b | [
"MIT"
] | null | null | null | src/data_preprocessing.py | harbenml/Kaggle-Ttitanic | fcce8eca3484a5a46e16af439cc070b3720e6a8b | [
"MIT"
] | null | null | null | """This module preprocesses the data for subsequent training and prediction steps.
The following procedure is applied:
1. Load the raw data.
2. Clean data by imputing missing values.
3. Create new features.
4. Encode features.
5. Export processed data and enocders.
"""
import pandas as pd
from sklearn import preprocessing
from typing import List, Tuple
# This dictionary is taken from
# https://medium.com/datadriveninvestor/start-with-kaggle-a-comprehensive-guide-to-solve-the-titanic-challenge-8ac5815b0473
# Maps the raw honorifics found in passenger names onto a small set of
# consolidated title categories (Officer/Royalty/Mr/Mrs/Miss/Master).
Title_Dictionary = {
    "Capt": "Officer",
    "Col": "Officer",
    "Major": "Officer",
    "Jonkheer": "Royalty",
    "Don": "Royalty",
    "Dona": "Royalty",
    "Sir": "Royalty",
    "Dr": "Officer",
    "Rev": "Officer",
    "the Countess": "Royalty",
    "Mme": "Mrs",
    "Mlle": "Miss",
    "Ms": "Mrs",
    "Mr": "Mr",
    "Mrs": "Mrs",
    "Miss": "Miss",
    "Master": "Master",
    "Lady": "Royalty",
}
class DataPreprocessing:
def __init__(self, train_data_path: str = "", test_data_path: str = ""):
self.train = pd.DataFrame()
self.num_train_samples: int
self.test = pd.DataFrame()
self.full_data = pd.DataFrame()
self.train_data_path = train_data_path
self.test_data_path = test_data_path
self.target_col = str
def run_preprocessing(self):
# load data
self.get_data()
self.target_col = "Survived"
self.full_data = self.combine_train_test_set(
self.train, self.test, self.target_col
)
# data cleaning and feature engineering
self.create_new_features()
self.clean_data()
# label encoding of categorical features
features, num_cols = self.get_features(self.full_data)
self.full_data, _ = self.label_encode_features(
self.full_data, features, num_cols
)
# split and export data
self.train, self.test = self.split_train_test_set(
self.full_data, self.target_col
)
self.export_data()
def get_data(self):
"""Loads the data ,if paths are specified."""
if self.train_data_path:
self.train = pd.read_csv(self.train_data_path)
self.num_train_samples = len(self.train)
print("Train data loaded.")
if self.test_data_path:
self.test = pd.read_csv(self.test_data_path)
print("Test data loaded.")
def export_data(self):
self.train.to_csv("data/processed/train.csv", index=False)
self.test.to_csv("data/processed/test.csv", index=False)
def create_new_features(self):
# title and surname
self.get_titles_and_surnames()
# family size
self.get_family_size()
pass
def clean_data(self):
self.clean_missing_fare()
self.clean_missing_age()
self.clean_missing_embark()
self.clean_missing_cabin()
@staticmethod
def combine_train_test_set(
train: pd.DataFrame, test: pd.DataFrame, target_col
) -> pd.DataFrame:
"""Create dummy targets in test set and merge train and test set for feature engineering"""
test.loc[:, target_col] = -1
full_data = pd.concat([train, test]).reset_index(drop=True)
return full_data
@staticmethod
def split_train_test_set(
data: pd.DataFrame, target_col
) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""Split full_data into training and test data"""
train = data[data[target_col] != -1].reset_index(drop=True)
test = data[data[target_col] == -1].reset_index(drop=True)
test = test.drop(target_col, axis=1)
return train, test
def get_titles_and_surnames(self):
"""Get surname and title from name column."""
surnames, titles = self._extract_features_from_names(self.full_data["Name"])
# Replace Name column with the newly generated features
self.full_data["Title"] = titles
self.full_data["Surname"] = surnames
self.full_data.drop("Name", axis=1, inplace=True)
@staticmethod
def _extract_features_from_names(names: List[str]) -> Tuple[List[str], List[str]]:
surnames = [name.split(",")[0] for name in names]
remainder = [name.split(",")[1].strip() for name in names]
titles = pd.Series([x.split(".")[0] for x in remainder])
# Reduce the number of titles.
titles = titles.map(Title_Dictionary).to_list()
return surnames, titles
def get_family_size(self):
"""Create new feature Family Size."""
self.full_data["Family_Size"] = (
self.full_data["SibSp"] + self.full_data["Parch"] + 1
)
def clean_missing_fare(self):
"""Replace the missing value with the median Fare value of Pclass 3."""
median_fare_pclass3 = self.full_data[self.full_data["Pclass"] == 3][
"Fare"
].median()
self.full_data["Fare"].fillna(median_fare_pclass3, inplace=True)
def clean_missing_age(self):
"""Replace NaNs in Age column with the median value."""
median_age = self.full_data["Age"].median()
self.full_data["Age"].fillna(median_age, inplace=True)
def clean_missing_embark(self):
"""Replace the NaNs with 'S'."""
self.full_data["Embarked"].fillna("S", inplace=True)
def clean_missing_cabin(self):
"""Take only the first letter of cabin values and replace the NaNs with 'NONE' string."""
cabins = self.full_data.loc[~self.full_data["Cabin"].isnull(), "Cabin"]
self.full_data.loc[~self.full_data["Cabin"].isnull(), "Cabin"] = cabins.map(
lambda x: x[0]
)
self.full_data["Cabin"] = self.full_data["Cabin"].fillna("NONE").astype(str)
@staticmethod
def get_features(df: pd.DataFrame) -> (List[str], List[str]):
# list of numerical columns
num_cols = ["Age", "Fare", "Parch", "SibSp", "Family_Size"]
# exclude the targets and PassengerId
excluded_cols = ["Survived", "PassengerId"]
# define features
features = [f for f in df.columns if f not in excluded_cols]
return features, num_cols
@staticmethod
def label_encode_features(
df: pd.DataFrame, features: List[str], num_cols: List[str]
) -> pd.DataFrame:
"""For all categorical features, encode the categories to numerical values"""
label_encoders = {}
for col in features:
if col not in num_cols:
lbl = preprocessing.LabelEncoder()
lbl.fit(df[col].values.tolist())
df.loc[:, col] = lbl.transform(df[col].values.tolist())
label_encoders[col] = lbl
return df, label_encoders
@staticmethod
def fill_na_with_none(
df: pd.DataFrame, features: List[str], num_cols: List[str]
) -> pd.DataFrame:
"""For each column, replace NaN values with NONE"""
for col in features:
if col not in num_cols:
df.loc[:, col] = df[col].astype(str).fillna("NONE")
return df
if __name__ == "__main__":
obj = DataPreprocessing(
train_data_path="data/raw/train.csv", test_data_path="data/raw/test.csv"
)
obj.run_preprocessing()
| 34.657143 | 123 | 0.624073 |
00a63f271a43fb1a3868f87e60cb750652e847fc | 7,439 | py | Python | construct/expr.py | nrrpinto/construct | cfc980c6edfbe33c56015b736f59fb3155b51317 | [
"MIT"
] | 629 | 2015-01-06T03:01:56.000Z | 2022-03-23T13:13:26.000Z | construct/expr.py | nrrpinto/construct | cfc980c6edfbe33c56015b736f59fb3155b51317 | [
"MIT"
] | 897 | 2015-02-28T15:46:06.000Z | 2022-03-30T08:19:13.000Z | construct/expr.py | nrrpinto/construct | cfc980c6edfbe33c56015b736f59fb3155b51317 | [
"MIT"
] | 151 | 2015-01-08T16:36:24.000Z | 2022-03-10T16:59:49.000Z | import operator
# Python 3 removed operator.div; alias it to true division so the
# opnames table (and the expression classes) work on both 2 and 3.
if not hasattr(operator, "div"):
    operator.div = operator.truediv

# Maps an operator function to the symbol printed by the expression
# classes' __repr__/__str__.  Note and_/or_ are the *bitwise* operators
# but are rendered as the keywords "and"/"or".
opnames = {
    operator.add : "+",
    operator.sub : "-",
    operator.mul : "*",
    operator.div : "/",
    operator.floordiv : "//",
    operator.mod : "%",
    operator.pow : "**",
    operator.xor : "^",
    operator.lshift : "<<",
    operator.rshift : ">>",
    operator.and_ : "and",
    operator.or_ : "or",
    operator.not_ : "not",
    operator.neg : "-",
    operator.pos : "+",
    operator.contains : "in",
    operator.gt : ">",
    operator.ge : ">=",
    operator.lt : "<",
    operator.le : "<=",
    operator.eq : "==",
    operator.ne : "!=",
}
class ExprMixin(object):
    """Operator-overloading mixin that builds lazy expression trees.

    Every overloaded operator returns a UniExpr/BinExpr node instead of
    computing a value, so an expression like ``this.x + 1`` can be
    evaluated later against a context object.  NOTE: ``==`` and ``!=``
    therefore return expression nodes, not booleans.
    """

    # --- binary operators, self on the left-hand side ---
    def __add__(self, other):
        return BinExpr(operator.add, self, other)
    def __sub__(self, other):
        return BinExpr(operator.sub, self, other)
    def __mul__(self, other):
        return BinExpr(operator.mul, self, other)
    def __floordiv__(self, other):
        return BinExpr(operator.floordiv, self, other)
    def __truediv__(self, other):
        return BinExpr(operator.div, self, other)
    # Legacy Python 2 ``/`` falls back to floor division here.
    __div__ = __floordiv__
    def __mod__(self, other):
        return BinExpr(operator.mod, self, other)
    def __pow__(self, other):
        return BinExpr(operator.pow, self, other)
    def __xor__(self, other):
        return BinExpr(operator.xor, self, other)
    def __rshift__(self, other):
        return BinExpr(operator.rshift, self, other)
    def __lshift__(self, other):
        return BinExpr(operator.lshift, self, other)
    def __and__(self, other):
        return BinExpr(operator.and_, self, other)
    def __or__(self, other):
        return BinExpr(operator.or_, self, other)

    # --- reflected binary operators, self on the right-hand side ---
    def __radd__(self, other):
        return BinExpr(operator.add, other, self)
    def __rsub__(self, other):
        return BinExpr(operator.sub, other, self)
    def __rmul__(self, other):
        return BinExpr(operator.mul, other, self)
    def __rfloordiv__(self, other):
        return BinExpr(operator.floordiv, other, self)
    def __rtruediv__(self, other):
        return BinExpr(operator.div, other, self)
    __rdiv__ = __rfloordiv__
    def __rmod__(self, other):
        return BinExpr(operator.mod, other, self)
    def __rpow__(self, other):
        return BinExpr(operator.pow, other, self)
    def __rxor__(self, other):
        return BinExpr(operator.xor, other, self)
    def __rrshift__(self, other):
        return BinExpr(operator.rshift, other, self)
    def __rlshift__(self, other):
        return BinExpr(operator.lshift, other, self)
    def __rand__(self, other):
        return BinExpr(operator.and_, other, self)
    def __ror__(self, other):
        return BinExpr(operator.or_, other, self)

    # --- unary operators ---
    def __neg__(self):
        return UniExpr(operator.neg, self)
    def __pos__(self):
        return UniExpr(operator.pos, self)
    def __invert__(self):
        # ``~expr`` builds a logical-not node (operator.not_), not a
        # bitwise inversion.
        return UniExpr(operator.not_, self)
    __inv__ = __invert__

    # --- comparisons (all return expression nodes) ---
    def __contains__(self, other):
        return BinExpr(operator.contains, self, other)
    def __gt__(self, other):
        return BinExpr(operator.gt, self, other)
    def __ge__(self, other):
        return BinExpr(operator.ge, self, other)
    def __lt__(self, other):
        return BinExpr(operator.lt, self, other)
    def __le__(self, other):
        return BinExpr(operator.le, self, other)
    def __eq__(self, other):
        return BinExpr(operator.eq, self, other)
    def __ne__(self, other):
        return BinExpr(operator.ne, self, other)

    # --- pickling support ---
    def __getstate__(self):
        # Collect both __dict__ attributes and any __slots__ attributes
        # declared anywhere in the class hierarchy.
        attrs = {}
        if hasattr(self, "__dict__"):
            attrs.update(self.__dict__)
        slots = []
        c = self.__class__
        while c is not None:
            if hasattr(c, "__slots__"):
                slots.extend(c.__slots__)
            c = c.__base__
        for name in slots:
            if hasattr(self, name):
                attrs[name] = getattr(self, name)
        return attrs

    def __setstate__(self, attrs):
        for name, value in attrs.items():
            setattr(self, name, value)
class UniExpr(ExprMixin):
    """A deferred unary operation applied to a single operand."""

    def __init__(self, op, operand):
        self.op = op
        self.operand = operand

    def __repr__(self):
        return "%s %r" % (opnames[self.op], self.operand)

    def __str__(self):
        return "%s %s" % (opnames[self.op], self.operand)

    def __call__(self, obj, *args):
        # A callable operand is evaluated against the context first;
        # anything else is used as-is.
        value = self.operand
        if callable(value):
            value = value(obj)
        return self.op(value)
class BinExpr(ExprMixin):
    """A deferred binary operation combining a left and right operand."""

    def __init__(self, op, lhs, rhs):
        self.op = op
        self.lhs = lhs
        self.rhs = rhs

    def __repr__(self):
        return "(%r %s %r)" % (self.lhs, opnames[self.op], self.rhs)

    def __str__(self):
        return "(%s %s %s)" % (self.lhs, opnames[self.op], self.rhs)

    def __call__(self, obj, *args):
        # Resolve each side independently: callables are evaluated
        # against the context, plain values pass straight through.
        left = self.lhs(obj) if callable(self.lhs) else self.lhs
        right = self.rhs(obj) if callable(self.rhs) else self.rhs
        return self.op(left, right)
class Path(ExprMixin):
    """A lazy item-access path evaluated against a context object.

    ``Path("this")`` is a root; both attribute access and indexing on a
    Path build a child node, and calling the chain performs the
    corresponding ``obj[field]`` lookups (attribute syntax also maps to
    item access -- see __call__).
    """

    def __init__(self, name, field=None, parent=None):
        # Double-underscore names are mangled (_Path__name etc.), so
        # __getattr__ below is never triggered for them.
        self.__name = name
        self.__field = field
        self.__parent = parent

    def __repr__(self):
        if self.__parent is None:
            return self.__name
        else:
            return "%s[%r]" % (self.__parent, self.__field)

    def __str__(self):
        if self.__parent is None:
            return self.__name
        else:
            return "%s[%r]" % (self.__parent, self.__field)

    def __call__(self, obj, *args):
        # The root returns the context itself; child nodes index into
        # their parent's result.
        if self.__parent is None:
            return obj
        else:
            return self.__parent(obj)[self.__field]

    def __getfield__(self):
        return self.__field

    def __getattr__(self, name):
        # Called only for names not found normally: any plain attribute
        # access extends the path with a new child node.
        return Path(self.__name, name, self)

    def __getitem__(self, name):
        return Path(self.__name, name, self)
class Path2(ExprMixin):
    """A lazy indexing path whose root evaluates to the *second*
    positional argument passed at call time."""

    def __init__(self, name, index=None, parent=None):
        self.__name = name
        self.__index = index
        self.__parent = parent

    def __repr__(self):
        if self.__parent is None:
            return self.__name
        return "%r[%r]" % (self.__parent, self.__index)

    def __call__(self, *args):
        # Root: hand back args[1]; child: index into the parent's result.
        if self.__parent is None:
            return args[1]
        return self.__parent(*args)[self.__index]

    def __getitem__(self, index):
        return Path2(self.__name, index, self)
class FuncPath(ExprMixin):
    """Wraps a callable (e.g. ``len``) so it can be applied lazily to an
    expression: ``len_(expr)`` first *binds* the operand, and calling the
    bound result with a context evaluates the operand and applies the
    function.
    """

    def __init__(self, func, operand=None):
        self.__func = func
        self.__operand = operand

    def __repr__(self):
        if self.__operand is None:
            return "%s_" % (self.__func.__name__)
        else:
            return "%s_(%r)" % (self.__func.__name__, self.__operand)

    def __str__(self):
        if self.__operand is None:
            return "%s_" % (self.__func.__name__)
        else:
            return "%s_(%s)" % (self.__func.__name__, self.__operand)

    def __call__(self, operand, *args):
        if self.__operand is None:
            # Unbound: bind a callable operand for later evaluation; a
            # non-callable operand is returned unchanged (the function is
            # NOT applied to plain values here).
            return FuncPath(self.__func, operand) if callable(operand) else operand
        else:
            # Bound: evaluate the stored operand against `operand` (the
            # context), then apply the wrapped function.
            return self.__func(self.__operand(operand) if callable(self.__operand) else self.__operand)
# Ready-made expression roots.
this = Path("this")      # evaluates to the context object passed at call time
obj_ = Path("obj_")
list_ = Path2("list_")   # evaluates to the second positional call argument
# Lazily-applied builtins (see FuncPath).
len_ = FuncPath(len)
sum_ = FuncPath(sum)
min_ = FuncPath(min)
max_ = FuncPath(max)
abs_ = FuncPath(abs)
| 28.945525 | 103 | 0.602769 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.