index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
16,100 | 8fc74d5c33cd7a5ee6cd3320ad7e6726fd7c5b71 | from django.urls import path
from .views import(
    entry_list_view,
    entry_create_view
)
# URL namespace: reverse these as 'entries:entry-list' / 'entries:entry-create'.
app_name = 'entries'
# Routes: list all entries, and create a new entry.
urlpatterns = [
    path('entries/', entry_list_view, name='entry-list'),
    path('entries/create/', entry_create_view, name='entry-create')
]
16,101 | a69426a7b2695d1ea5ff4e9b387ad148b2d28bcc | import numpy as np
from data import *
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import Dataset
from transformers import (
    InputFeatures,
    AdamW,
    AutoConfig,
    AutoTokenizer,
    AutoModelForTokenClassification
)
# Pretrained RoBERTa fine-tuned for SemEval time-expression (TIMEX) tagging.
model_name = 'clulab/roberta-timex-semeval'
config = AutoConfig.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name, config=config, use_fast=True)
# Two copies of the same checkpoint: `fix_model` stays frozen as the
# pseudo-label source; `train_model` only donates its classifier head.
fix_model = AutoModelForTokenClassification.from_pretrained(model_name, config=config)
train_model = AutoModelForTokenClassification.from_pretrained(model_name, config=config)
fix_model.cuda()
# Only the classification head is trained (wrapped inside DefinedDistance below).
trainable_cls = train_model.classifier
trainable_cls.train().cuda()
class DefinedDistance(nn.Module):
    """Learned scalar distance between two token representations.

    Both inputs pass through the shared (trainable) classifier head `fc1`
    (65 tag logits each), are ReLU'd, concatenated to a 130-dim vector,
    and reduced to a single score by `fc2`.
    """
    def __init__(self):
        super().__init__()
        # Shared classifier head taken from the module-level train_model.
        self.fc1 = trainable_cls
        self.fc2 = nn.Linear(130, 1)

    def forward(self, vec_1, vec_2):
        left = F.relu(self.fc1(vec_1))
        right = F.relu(self.fc1(vec_2))
        combined = torch.cat((left, right))
        return self.fc2(combined)
difdis = DefinedDistance().cuda()
difdis.train()
optimizer = AdamW(difdis.parameters(), lr = 5e-5)
epoch = 3
for e in range(epoch):
    for batch in target_train_dl:  # dataloader provided by `from data import *`
        optimizer.zero_grad()
        input_ids = batch[0]
        attention_mask = batch[1]
        input_ids = input_ids.cuda()
        attention_mask = attention_mask.cuda()
        # Count of non-padding tokens in this (batch-size-1) sequence.
        attention_mask_note = attention_mask.squeeze(0)
        attention_mask_note = torch.nonzero(attention_mask_note)
        attention_mask_note = len(attention_mask_note)
        # Frozen encoder: per-token hidden states, then logits over 65 tags.
        fc1_lbcorr = fix_model.roberta.forward(input_ids, attention_mask)[0].squeeze(0)
        fc1_lbcorr_list = fc1_lbcorr[:attention_mask_note]
        logits = fix_model.classifier.forward(fc1_lbcorr).view(-1, 65)
        logits_list = logits[:attention_mask_note]
        m = nn.Softmax(dim=1)
        after_softmax = m(logits_list)
        pseudo_label = torch.argmax(after_softmax, dim=1)
        same_labels = torch.Tensor([]).cuda()
        diff_labels = torch.Tensor([]).cuda()
        # TODO: add the 0.1 filter (translated from the original Korean note).
        # Normalized entropy per token = confidence filter: keep tokens < 0.1.
        entropy = torch.sum(-after_softmax*torch.log(after_softmax+1e-10), dim=1, keepdim=True)
        entropy_norm = entropy / np.log(after_softmax.size(1))
        entropy_norm = entropy_norm.squeeze(1)
        entropy_norm_list = entropy_norm[:attention_mask_note]
        over_threshold = (entropy_norm_list < 0.1).nonzero(as_tuple=True)[0]
        fc1_lbcorr_list = fc1_lbcorr_list[over_threshold]
        # All ordered pairs of confident tokens: equal pseudo-labels should get
        # low distance, unequal high.
        # NOTE(review): recovering each token's index by value-equality against
        # fc1_lbcorr is O(n^2) and assumes row uniqueness; also torch.mean of an
        # empty tensor is NaN, so a batch with no same- (or diff-) label pairs
        # produces a NaN loss -- confirm intended handling.
        for i in fc1_lbcorr_list:
            for j in fc1_lbcorr_list:
                if pseudo_label[(fc1_lbcorr == i).nonzero(as_tuple=True)[0][0]] == pseudo_label[(fc1_lbcorr == j).nonzero(as_tuple=True)[0][0]]:
                    same_labels = torch.cat((same_labels, difdis.forward(i, j)))
                else:
                    diff_labels = torch.cat((diff_labels, difdis.forward(i, j)))
        loss_1 = torch.mean(same_labels)
        loss_2 = torch.mean(diff_labels)
        # Contrastive objective: shrink same-label distances, grow diff-label ones.
        loss = loss_1 - loss_2
        loss.backward()
        optimizer.step()
# Persist the whole module object (not just its state_dict).
torch.save({
    'model':difdis
}, './difdis_2')
|
16,102 | 3e2745d156b21819d6fd057676a39aa73383ba31 | # -*- coding: utf-8 -*-
from scrapy_redis.spiders import RedisSpider
from scrapy import FormRequest
from spiderframe.download import you_get_download
class VideoBaseSpider(RedisSpider):
    """Redis-fed spider: pops video page URLs from the `video_bilibili_link`
    key and hands each response URL to the you-get downloader."""
    name = 'video_base'
    redis_key = 'video_bilibili_link'
    # start_urls = ["https://v.qq.com/x/cover/24bvvcald9bq5kz/p0387cjmac7.html"]
    custom_settings = {
        # NOTE(review): hard-coded Redis endpoint with an empty password --
        # consider moving these into settings / environment configuration.
        'REDIS_HOST': '123.56.11.156',
        'REDIS_PORT': 8888,
        'REDIS_PARAMS': {
            'password': '',
            'db': 0
        },
    }
    # Browser-like headers so the site serves the normal HTML page.
    headers = {
        "Host": "search.bilibili.com",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:70.0) Gecko/20100101 Firefox/70.0",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Connection": "keep-alive",
        "Cache-Control": "max-age=0",
        "TE": "Trailers",
        # "Cookie":"INTVER=1; _uuid=83B988FE-7888-58DE-B8EB-E35E0414D76E16344infoc; buvid3=B009CAB2-83B4-4166-8118-7680C3B19210155820infoc; arrange=matrix; CURRENT_FNVAL=16; LIVE_BUVID=AUTO4115754250295499; stardustvideo=1; sid=9ieqnc9g; rpdid=|(k|Y~RkY|RY0J'ul~lRk~k|~"
    }
    def make_requests_from_url(self, url):
        # dont_filter so re-queued URLs are always fetched again.
        return FormRequest(url, dont_filter=True,headers=self.headers)
    def parse(self, response):
        # try:
        #     if "/sf?pd=" not in response.url:
        # Download the video behind this page URL (blocking call).
        you_get_download(response.url)
        # except Exception as e:
        #     with open("err.txt","w+",encoding='utf-8') as f:
        #         f.write(response.url)
|
16,103 | 5565f1e97b00da0f6c9fb0aa750cad4a762da7fc | # -*- coding: utf-8 -*-
"""Untitled16.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1w7qOuuGMSa-n9TVJkPg7PPUjpL2XBTl1
"""
import nltk
nltk.download('punkt')
from nltk.tokenize import sent_tokenize
text = "Hello, you are reading about Tokenization. There are many forms of Tokenizers."
sent_tokenize(text)
import nltk.data
newsent= "Tokenization is a way of separating a piece of text into smaller units called tokens.Here, tokens can be either words, characters, or subwords. Hence, tokenization can be broadly classified into 3 types – word, character, and subword (n-gram characters) tokenization.For example, consider the sentence:Never give up.The most common way of forming tokens is based on space. Assuming space as a delimiter, the tokenization of the sentence results in 3 tokens – Never-give-up. As each token is a word, it becomes an example of Word tokenization.Similarly, tokens can be either characters or subwords. For example, let us consider “smarter”:Character tokens: s-m-a-r-t-e-rSubword tokens: smart-er.But then is this necessary? Do we really need tokenization to do all of this?"
# Loading PunktSentenceTokenizer using English pickle file
tokenizer = nltk.data.load('tokenizers/punkt/PY3/english.pickle')
tokenizer.tokenize(newsent)
import nltk.data
spanish_tokenizer = nltk.data.load('tokenizers/punkt/PY3/spanish.pickle')
text1 = 'Hola amigo. Estoy bien.'
spanish_tokenizer.tokenize(text1)
from nltk.tokenize import word_tokenize
text = "Hello, you are reading about Tokenization. There are many forms of Tokenizers."
word_tokenize(text)
from nltk.tokenize import TreebankWordTokenizer
tokenizer = TreebankWordTokenizer()
tokenizer.tokenize(text)
from nltk.tokenize import WordPunctTokenizer
tokenizer = WordPunctTokenizer()
tokenizer.tokenize("Let's see how it's working.")
from nltk.tokenize import RegexpTokenizer
tokenizer = RegexpTokenizer("[\w']+")
text = "Hello, you are reading about Tokenization. There are many forms of Tokenizers."
tokenizer.tokenize(text)
from nltk.tokenize import regexp_tokenize
text = "Hello, you are reading about Tokenization. There are many forms of Tokenizers."
regexp_tokenize(text, "[\w']+")
from nltk.tokenize import TweetTokenizer
tknzr = TweetTokenizer(strip_handles=True, reduce_len=True)
s1 = '@remy: This is waaaaayyyy too much for you!!!!!!'
tknzr.tokenize(s1)
from nltk.tokenize import SExprTokenizer
SExprTokenizer().tokenize('(a b (c d)) e f (g)')
|
# Read a count t, then t integers (one per line); print the sum of the
# positive ones.
# Fix: ported from Python 2 -- `xrange` and the `print` statement do not
# exist in Python 3, and py2's `input()` eval'd its input, so explicit
# int() conversion is now required.
t = int(input())
s = 0
for _ in range(t):
    a = int(input())
    if a > 0:
        s += a
print(s)
|
def solution(name):
    """Joystick problem: minimum number of moves to spell *name*.

    Vertical cost per letter: min(steps down from 'A', steps up via 'Z',
    where going up costs one extra step for the 'A'->'Z' wrap).

    Horizontal cost: the cursor starts at index 0 and only non-'A' positions
    need visiting.  For every prefix end `idx` (skipping the run of 'A's
    that follows it) consider:
      * plain left-to-right sweep,
      * go right to `idx`, backtrack, then wrap left over the trailing run,
      * go left (wrapping) first, then come back and go right to `idx`.
    """
    answer = 0
    move_count = len(name) - 1  # baseline: straight left-to-right sweep
    for idx, char in enumerate(name):
        # Up/down presses needed for this letter.
        answer += min(ord(char) - ord('A'), ord('Z') - ord(char) + 1)
        # Skip the run of 'A's after idx; they never need to be visited.
        next_idx = idx + 1
        while next_idx < len(name) and name[next_idx] == 'A':
            next_idx += 1
        # Right first, then double back and wrap left past the 'A'-run.
        move_count = min(move_count, idx + idx + (len(name) - next_idx))
        # Bug fix: also consider wrapping left first, then returning right
        # (e.g. "BBBAAAB": 8 moves total, the original answered 9).
        move_count = min(move_count, (len(name) - next_idx) * 2 + idx)
    return answer + move_count
name = "JAN"
print(solution(name))
|
16,106 | 777b5d845afa6f30d145ae458a35b783d467eeb9 | from .city_info import dp
__all__ = ["dp"]
|
16,107 | 843295adf31d9f35df953b84a6371e32b943b121 | """
api pages related to events
"""
from . import log, db_conn
from . import utils
from datetime import datetime
#** Variables **#
# Mutable event form fields shared by event_create and event_update.
_event_args = ['title', 'description', 'start_date', 'end_date']
#** Functions **#
def event_create(req):
    """Handle PUT: create an event from the form data; respond with its id
    (as {'event_id': ...}) or with {'errors': [...]} on failure."""
    try:
        utils.assert_keys(req.form, ['creator_id'] + _event_args)
        payload = {'event_id': db_conn.event_new(**req.form)}
    except Exception as e:
        payload = {'errors': [str(e)]}
    return req.Response(json=payload)
def event_exists(req):
    """Handle GET: report whether the event named in the URL exists.

    db_conn.event_exists is assumed to raise when the event is missing;
    its return value is intentionally ignored -- TODO confirm that contract.
    """
    event_id = req.match_dict['event_id']
    try:
        db_conn.event_exists(event_id)
        payload = {'exists': True}
    except Exception as e:
        payload = {'errors': [str(e)]}
    return req.Response(json=payload)
def event_delete(req):
    """Handle DELETE: remove the event named in the URL; report success or errors."""
    event_id = req.match_dict['event_id']
    try:
        db_conn.event_delete(event_id)
        payload = {'deleted': True}
    except Exception as e:
        payload = {'errors': [str(e)]}
    return req.Response(json=payload)
def event_summary(req):
    """Handle GET: return the event summary with lower-cased keys and all
    values stringified, or an errors payload."""
    event_id = req.match_dict['event_id']
    try:
        summary = db_conn.event_summary(event_id)
        payload = {key.lower(): str(value) for key, value in summary.items()}
    except Exception as e:
        payload = {'errors': [str(e)]}
    return req.Response(json=payload)
def event_update(req):
    """POST => update values for specific fields as given.

    Bug fix: the event id parsed from the URL was never forwarded to
    db_conn.event_update, so the backend could not know which event to
    modify.  It is now passed as the first argument, mirroring
    event_delete/event_summary.
    TODO(review): confirm db_conn.event_update's exact signature for the id.
    """
    event_id = req.match_dict['event_id']
    try:
        data = utils.find_keys(req.form, _event_args)
        db_conn.event_update(event_id, **data)
        json = {'updated': True}
    except Exception as e:
        json = {'errors': [str(e)]}
    return req.Response(json=json)
|
16,108 | 309867959391ab7c2dc68bda034b11bb3821c625 | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 10 11:36:26 2019
@author: fmna
"""
def fx(cx,x,cy,y,cz,z,cw,w,b):
    """Solve row  cx*x + cy*y + cz*z + cw*w = b  for x."""
    return (b-(cy*y+cz*z+cw*w))/cx
def fy(cx,x,cy,y,cz,z,cw,w,b):
    """Solve row  cx*x + cy*y + cz*z + cw*w = b  for y."""
    return (b-(cx*x+cz*z+cw*w))/cy
def fz(cx,x,cy,y,cz,z,cw,w,b):
    """Solve row  cx*x + cy*y + cz*z + cw*w = b  for z."""
    return (b-(cy*y+cx*x+cw*w))/cz
def fw(cx,x,cy,y,cz,z,cw,w,b):
    """Solve row  cx*x + cy*y + cz*z + cw*w = b  for w."""
    return (b-(cy*y+cz*z+cx*x))/cw
def gaussJacobi(numItr):
    """Run `numItr` Jacobi iterations on the fixed 4x4 system

        4.5x -  y   -  z   +  w   =  1
        -x   + 4.5y +  z   -  w   = -1
        -x   + 2y   + 4.5z -  w   = -1
        2x   -  y   -  z   + 4.5w =  0

    starting from (0.25, 0.25, 0.25, 0.25).  The matrix is strictly
    diagonally dominant, so the iteration converges.  Each iterate is
    printed (preserving the original behavior).

    Improvement: the final (x, y, z, w) is now also returned -- previously
    the function returned None and the result was unreachable to callers.
    """
    x = 0.25
    y = 0.25
    z = 0.25
    w = 0.25
    for i in range(numItr):
        # Jacobi: all four updates use only the previous iterate's values.
        xn = fx(4.5, x, -1, y, -1, z, 1, w, 1)
        yn = fy(-1, x, 4.5, y, 1, z, -1, w, -1)
        zn = fz(-1, x, 2, y, 4.5, z, -1, w, -1)
        wn = fw(2, x, -1, y, -1, z, 4.5, w, 0)
        x = xn
        y = yn
        z = zn
        w = wn
        print("x: ", x)
        print("y: ", y)
        print("z: ", z)
        print("w: ", w)
        print("")
    return x, y, z, w
# Insert double quotes into string s around the slices s[a:b] and s[c:d],
# where the four cut indices come from one whitespace-separated input line.
s=input()
a,b,c,d=map(int,input().split())
# When a == 0 the first slice s[:a] is empty, so the else branch emits the
# leading quote directly; both branches produce the same text in that case.
if(a!=0):
    print(s[:a]+'"'+s[a:b]+'"'+s[b:c]+'"'+s[c:d]+'"'+s[d:])
else:
    print('"'+s[:b]+'"'+s[b:c]+'"'+s[c:d]+'"'+s[d:])
16,110 | 92dd1185619ed6366271ecc9b4b5791d6f91c34b | import collections
def fillCommand(command):
    """Frame *command* between pipe characters, e.g. 'J' -> '|J|'."""
    return "".join(("|", command, "|"))
class Commands(object):
    # Single-letter wire commands, pre-wrapped in '|' delimiters
    # (see fillCommand for the framing convention).
    join = "|J|"
    leave = "|L|"
    chat = "|C|"
    nick = "|N|"
    setPos = "|P|"      # set player position
    setFrame = "|F|"    # set sprite frame
    setAnim = "|A|"     # set animation name
    setSpells = "|S|"
class HXeption(Exception):
    """Protocol-level error; str() shows the repr of the offending value."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
def unifyData(data):
    """Serialize *data* for the wire: stringify it and append the '/' packet
    delimiter.

    Bug fix: the original only returned a value when *data* was iterable, so
    sending a non-iterable (e.g. an int frame number via sendFrame) silently
    yielded None and crashed on string concatenation.  All values are now
    stringified; the assert guards against payloads that would corrupt the
    '/'-based packet framing.
    """
    text = str(data)
    assert "/" not in text
    return text + "/"
def unifyID(id):
    """Frame an identifier between '!' markers; *id* must not contain '!'."""
    assert "!" not in id
    return "".join(("!", str(id), "!"))
PCK_DELI = "/"
class SocketHandler(object):
    """Buffers a raw socket and splits the incoming byte stream into
    '/'-delimited packets.  (Python 2 code: uses xrange and print statements.)"""
    def __init__(self, sock):
        self.buffer = ""
        self.sock = sock
    def read(self):
        """Read once from the socket.

        Returns a list of complete packets, or None when no full packet is
        buffered yet.  Raises HXeption when recv() returns an empty string
        (peer disconnected)."""
        stream = self.sock.recv(1024)
        if len(stream) > 0:
            self.buffer += stream
            if PCK_DELI in stream:
                # Pop every complete packet off the front of the buffer; any
                # trailing partial packet stays buffered for the next read.
                packetCount = self.buffer.count(PCK_DELI)
                totalPackets = []
                for x in xrange(packetCount):
                    packet, self.buffer = self.buffer.split(PCK_DELI, 1)
                    totalPackets.append(packet)
                return totalPackets
        else:
            print "Disconnected"
            raise HXeption("Disconnected")
        return None
    def close(self):
        self.sock.close()
class SocketHandlerServer(SocketHandler):
    """Server side: frames outgoing messages as '!sender!payload/'."""
    def __init__(self, sock):
        SocketHandler.__init__(self,sock)
    def transmitMessage(self, sender, data):
        # unifyData appends the '/' packet delimiter; unifyID wraps the
        # sender id in '!' markers.
        data = unifyData(data)
        sender = unifyID(sender)
        packet = sender+data
        print "sending data: |"+packet+"|"
        self.sock.send(packet)
class SocketHandlerClient(SocketHandler):
    """Client-side socket wrapper.

    Registers a nickname on construction and serializes chat, position,
    frame and animation updates using the Commands wire protocol.
    """
    def __init__(self, sock, nick):
        SocketHandler.__init__(self, sock)
        self.nick = nick
        self.registerNick(self.nick)
    def writeToChat(self, data):
        """Send a chat message."""
        self.sock.send(Commands.chat + unifyData(data))
    def registerNick(self, nick):
        """Announce our nickname to the server."""
        self.sock.send(Commands.nick + unifyData(nick))
    #============================================= data parsing
    def sendPos(self, pos):
        """Send an (x, y) position encoded as 'x$y'."""
        encoded = str(pos[0]) + "$" + str(pos[1])
        self.sock.send(Commands.setPos + unifyData(encoded))
    def parsePos(self, data):
        """Decode an 'x$y' payload back into an (x, y) tuple of ints."""
        parts = data.split("$")
        return (int(parts[0]), int(parts[1]))
    def sendFrame(self, frame):
        """Send the current sprite frame number."""
        self.sock.send(Commands.setFrame + unifyData(frame))
    def parseFrame(self, data):
        """Decode a frame payload into an int."""
        return int(data)
    def sendAnim(self, anim):
        """Send the current animation name."""
        self.sock.send(Commands.setAnim + unifyData(anim))
    def parseAnim(self, data):
        """Animation payloads are used verbatim."""
        return data
|
16,111 | ef446cb7c01d4c54e4f475789259f3e1093b54fd | import numpy as np
from netCDF4 import Dataset
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import sys
import os
def filt1(X, yvals, xvals, ny, nx):
    """ Spatially filters the first two dimensions of array X into windows of size nx*ny. I.E. nx+1 and ny+1 are the number of grid points contained in each window, in each direction.

    Returns (Xnew, Y, yvals, xvals): the input and coordinate arrays trimmed
    to the region covered by whole windows, plus the block-averaged field Y.
    """
    ylen = X.shape[0]
    xlen = X.shape[1]
    # Number of complete windows that fit in each direction.
    yflen = (ylen-1)//ny
    xflen = (xlen-1)//nx
    Y = np.zeros((X.shape))
    #Y = Y[0:yflen,0:xflen,]
    # Trim all arrays to the area covered by complete windows.
    ymax = ny*yflen+1
    xmax = nx*xflen+1
    Y = Y[0:ymax,0:xmax,]
    Xnew = X[0:ymax,0:xmax,]
    yvals = yvals[0:ymax,0:xmax,]
    xvals = xvals[0:ymax,0:xmax,]
    # Window edges overlap by one grid point, so count how many windows each
    # point belongs to and divide at the end.
    counter = np.zeros((Y.shape))
    for i in range(xflen):
        xmin = nx*i
        xmax = nx*(i+1)+1
        for j in range(yflen):
            ymin = ny*j
            ymax = ny*(j + 1)+1
            #print((xmin,xmax), (ymin,ymax))
            Y[ymin:ymax,xmin:xmax,] = Y[ymin:ymax,xmin:xmax,] + np.mean(X[ymin:ymax,xmin:xmax,], axis=(0,1))
            counter[ymin:ymax,xmin:xmax,] = counter[ymin:ymax,xmin:xmax,] + 1
    Y = Y/counter #We take the average of the points that appear more than once
    return Xnew, Y, yvals, xvals
def filt2(X, yvals, xvals, ny, nx):
    """Different type of spatial filter which returns the average over the neighbours within a window of size nx*ny.

    NOTE(review): dofilter2's signature is (X, ny, nx), but the first two
    calls below pass (nx, ny) / (nx%2, ny%2) while the coordinate calls use
    (ny%2, nx%2).  For non-square windows this asymmetry looks like swapped
    arguments -- confirm the intended window orientation before relying on it.
    """
    Y = dofilter2(X,nx,ny)
    Xnew = dofilter2(X,nx%2,ny%2)
    xvalsnew = dofilter2(xvals,ny%2,nx%2)
    yvalsnew = dofilter2(yvals,ny%2,nx%2)
    return Xnew, Y, yvalsnew, xvalsnew
def dofilter2(X,ny,nx):
    """Moving-window mean over the first two axes of X.

    Each output point is the mean of its (roughly (ny+1) x (nx+1))
    neighbourhood, clipped at the array edges.  For odd ny/nx the output grid
    is shifted by one point (the leading row/column is dropped).
    """
    Y = np.zeros((X.shape))
    Y = Y[(ny%2):,(nx%2):,] #If ny or nx is odd then we need to shift the grid on which Y is defined
    print("X.shape=",X.shape)
    print("Y.shape=",Y.shape)
    for i in range(Y.shape[1]):
        # Crude progress printout every ~10% of columns.
        # NOTE(review): Y.shape[1]//10 is 0 for arrays narrower than 10
        # columns, which makes this modulo raise ZeroDivisionError --
        # confirm inputs are always wide enough.
        if (i%(Y.shape[1]//10)==0): print(str(100*i/Y.shape[1]) + '%')
        if nx%2==0:
            xmin = max(0, i-nx//2)
            xmax = min(Y.shape[1], i+1+nx//2)
        else:
            xmin = max(0, i-(nx-1)//2)
            xmax = min(Y.shape[1], i+1+(nx+1)//2)
        for j in range(Y.shape[0]):
            if ny%2==0:
                ymin = max(0, j-ny//2)
                ymax = min(Y.shape[0], j+1+ny//2)
            else:
                ymin = j-(ny-1)//2
                ymax = j+1 + (ny+1)//2
            # Clamp the window to the array bounds (no-op for the even branch).
            ymin = max(0, ymin)
            ymax = min(Y.shape[0], ymax )
            Y[j,i,] = np.mean( X[ymin:ymax,xmin:xmax,] , axis = (0,1) )
    return Y
def project(X,ny,nx):
    """Smooth (for odd parity shifts) and trim X to the coarse grid implied by
    an (ny, nx) window.

    NOTE(review): the nx-even branch slices rows with ny (`X[ny//2: -ny//2,]`)
    rather than columns with nx, and both early `Xnew` assignments are
    overwritten by the dofilter2 call below -- the first two branches appear
    to be dead/buggy code kept only for their ny2/nx2 side effects.  Confirm
    intent before relying on this for even window sizes.
    """
    if (ny%2==0):
        Xnew = X[ny//2: -ny//2,]
        ny2 = ny//2
    else:
        ny2=(ny-1)//2
    if (nx%2==0):
        Xnew = X[ny//2: -ny//2,]
        nx2 = nx//2
    else:
        nx2 = (nx-1)//2
    Xnew = dofilter2(X,ny%2,nx%2)
    if ny!=1:
        Xnew = Xnew[ny2:-ny2,]
    if nx!=1:
        Xnew = Xnew[:,nx2:-nx2,]
    return Xnew
def detrend(X):
    """Remove the bilinear trend defined by X's first/last rows and columns.

    The output is one smaller than X along each of the first two axes
    (trailing axes are preserved) and is float-typed.  For an exactly
    (bi)linear field the result is identically zero.
    """
    last_row = X.shape[0] - 1
    last_col = X.shape[1] - 1
    out = np.zeros(X.shape)[1:, 1:, ]
    cross = X[last_row, last_col, ] - X[last_row, 0, ] - X[0, last_col, ] + X[0, 0, ]
    for col in range(0, last_col):
        for row in range(0, last_row):
            # Subtract the row trend, the column trend, and add back the
            # doubly-subtracted corner (cross) term.
            out[row, col, ] = (
                X[row, col, ]
                - (X[last_row, col, ] - X[0, col, ]) * row / last_row
                - (X[row, last_col, ] - X[row, 0, ]) * col / last_col
                + cross * row * col / (last_row * last_col)
            )
    return out
def errico(Y,dx,dy):
    """1-D spectral binning of a 2-D (or 3-D) field (Errico 1985, p. 1555).

    Sums |Y[p, q]| (rescaled) into annular wavenumber bins and returns
    (S, K): the binned spectrum (per level for 3-D input) and the physical
    wavenumbers of the bins.

    Fixes relative to the original:
      * math.ceil/math.floor were used but `math` was never imported
        (NameError at runtime) -- replaced with int(np.ceil/np.floor).
      * the progress printout divided by kmax//10, which is 0 whenever
        kmax < 10 (ZeroDivisionError) -- now guarded.
      * the bare `except:` around the S allocation is narrowed to NameError
        (`nz` is only defined for 3-D input).
      * removed the unused locals p0/q0.
    """
    nxm1 = Y.shape[1]
    nym1 = Y.shape[0]
    if len(Y.shape)==3:
        nz = Y.shape[2]
    else:
        print("")
    # Mean grid spacings and resulting domain extents.
    DX = np.mean(dx, axis=(0,1))
    DY = np.mean(dy, axis=(0,1))
    ap = DX*nxm1
    aq = DY*nym1
    A = max(ap,aq)
    # Bin count follows the direction with the larger extent.
    if abs(A-ap)<0.00000000001:
        kmax = (nym1+1)//2
    else:
        kmax = (nxm1+1)//2
    try:
        S = np.zeros((kmax,nz))   # 3-D input: one spectrum per level
    except NameError:
        S = np.zeros((kmax))
    Yrescaled = Y/(nxm1*nym1) #This scaling just keeps the numbers from getting too large
    for k in range(0,kmax):
        if kmax >= 10 and k%(kmax//10)==0: print(str(100*k/kmax) + '%')
        qmax = int(np.ceil(min(nxm1, aq*(k+0.5)/A)))
        for q in range(0,qmax):
            # Radial band [k-0.5, k+0.5) expressed in p for this q.
            pmin = ((k-0.5)/A)**2- (q/aq)**2
            if pmin < 0:
                pmin = 0
            else:
                pmin = ap*np.sqrt(pmin)
            pmax = ap*np.sqrt(((k+0.5)/A)**2- (q/aq)**2)
            pmin = max(int(np.floor(pmin)),0)
            pmax = min(int(np.ceil(pmax)),nym1)
            for p in range(pmin,pmax):
                if abs( A*np.sqrt( (p/ap)**2 + (q/aq)**2 ) - k ) < 0.5:
                    S[k,] = S[k,] + np.absolute(Yrescaled[p,q,])
    K = np.array(range(0,kmax))
    K = 2*np.pi*K/A
    return S, K
def delta(y):
    """Forward difference along the first axis: out[n] = y[n+1] - y[n].

    The result has one fewer entry along axis 0 than y and is float-typed
    (allocated with np.zeros) regardless of y's dtype.
    """
    out = np.zeros(y.shape)[1:, ]  # first axis shrinks by one
    for row in range(0, out.shape[0]):
        out[row, ] = y[row + 1, ] - y[row, ]
    return out
def deg2km(x, y):
    """Convert lon/lat degrees to kilometres on a sphere of radius 6371 km.

    x (longitude) is scaled by the cosine of the latitude so that east-west
    distances shrink toward the poles; y (latitude) scales uniformly.
    """
    radius = 6371
    to_rad = np.pi / 180
    xnew = x * radius * np.cos(y * to_rad) * to_rad
    ynew = y * radius * to_rad
    return xnew, ynew
def deletefile(filename_full):
    """Delete *filename_full* if it exists, logging progress to stdout.

    Existence is probed by opening the file, so a file that exists but is
    unreadable (e.g. a permissions issue) is reported as missing -- an
    intentional best-effort behavior preserved here.
    """
    try:
        f = open(filename_full, 'r')
        f.close()
    except:
        print("File " + filename_full + " does not exist")
    else:
        print('Removing file ' + filename_full + '...')
        try:
            os.remove(filename_full)
            print('File removed.')
        except:
            print("Could not remove file " + filename_full)
def gettimes(datapath, date):
    """List the time-stamps of WRF output files for *date* in *datapath*.

    Matches files named 'wrfout_xhka_<date>.t<time>.nc' and returns the
    <time> portions in directory-listing order.
    """
    prefix = "wrfout_xhka_"
    marker = prefix + date + ".t"
    return [
        fname.replace(marker, "").replace(".nc", "")
        for fname in os.listdir(datapath)
        if fname.startswith(marker) and fname.endswith(".nc")
    ]
|
16,112 | a43a5679f1999e87f02bf98c3ac6b89ab7447601 | #!/usr/bin/env python3
import sys, utils, random # import the modules we will need
utils.check_version((3,7)) # make sure we are running at least Python 3.7
utils.clear() # clear the screen
print('Greetings!')
# Candidate colors; one is chosen at random each round.
colors = ['red','orange','yellow','green','blue','violet','purple']
play_again = '' # player's answer to "play again?"; empty so the loop runs at least once
best_count = sys.maxsize # fewest guesses in any round so far; start impossibly high
while (play_again != 'n' and play_again != 'no'): # repeat rounds until the player declines
    match_color = random.choice(colors) # this round's secret color
    count = 0 # guesses made this round
    color = '' # most recent guess
    while (color != match_color): # keep prompting until the guess matches
        color = input("\nWhat is my favorite color? ") # '\n' prints a blank line before the prompt
        color = color.lower().strip() # normalise: lower-case, surrounding whitespace removed
        count += 1 # one more guess recorded
        if (color == match_color):
            print('Correct!')
        else:
            print('Sorry, try again. You have guessed {guesses} times.'.format(guesses=count))
    print('\nYou guessed it in {0} tries!'.format(count))
    if (count < best_count): # new personal record?
        print('This was your best guess so far!')
        best_count = count
    play_again = input("\nWould you like to play again? ").lower().strip() # normalise the answer too
print('Thanks for playing!')
16,113 | 73345fc83a8b3a2362559b4c0da3de7ee28a5b3c | from typing import List
class Solution:
    def furthestBuilding(self, heights: List[int], bricks: int, ladders: int) -> int:
        """Return the furthest building index reachable from index 0.

        Moving to a taller building costs either that many bricks or one
        ladder; moving level or downward is free.

        Greedy strategy: keep the `ladders` largest climbs seen so far on
        ladders (tracked in a min-heap) and pay every smaller climb with
        bricks.  This replaces the original exponential branching recursion
        with an equivalent O(n log ladders) algorithm that returns the same
        answers for every input.
        """
        import heapq  # local import keeps the module's import surface unchanged
        ladder_climbs = []  # min-heap of the climbs currently assigned to ladders
        for i in range(len(heights) - 1):
            climb = heights[i + 1] - heights[i]
            if climb <= 0:
                continue  # walking down or level costs nothing
            heapq.heappush(ladder_climbs, climb)
            if len(ladder_climbs) > ladders:
                # Demote the smallest ladder-climb to bricks.
                bricks -= heapq.heappop(ladder_climbs)
                if bricks < 0:
                    return i  # cannot afford this climb; stop before it
        return len(heights) - 1
if __name__ == "__main__":
print(Solution().furthestBuilding(heights=[4, 2, 7, 6, 9, 14, 12], bricks=5, ladders=1)) # 4
print(Solution().furthestBuilding(heights=[4, 12, 2, 7, 3, 18, 20, 3, 19], bricks=10, ladders=2)) # 7
print(Solution().furthestBuilding(heights=[14, 3, 19, 3], bricks=17, ladders=0)) # 3
|
16,114 | 0ac3b42e8b044c4a091531ad94306de4e3a3ed99 | import numpy as np
import time
import pygame
from pygame.color import THECOLORS
from constants import *
import globals
from geometry import *
# ___ _ _
# | _ \___ _ _ __| |___ _ _(_)_ _ __ _
# | / -_) ' \/ _` / -_) '_| | ' \/ _` |
# |_|_\___|_||_\__,_\___|_| |_|_||_\__, |
# |___/
def set_up_screen(pause=0.75):
    """Initialise pygame, create the shared screen surface and pause briefly."""
    pygame.init()
    globals.screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
    # clock = pygame.time.Clock()
    globals.screen.set_alpha(None)
    time.sleep(pause)
def draw_int_tuples(int_tuples: List[Tuple[int, int]],
                    color=THECOLORS['yellow']):
    """Draw a 1-px polygon outline through the given integer points."""
    pygame.draw.polygon(globals.screen, color, int_tuples, 1)
def draw_collinear_point_and_param(
        u=Vec2d(10, SCREEN_HEIGHT - 10 - 1),
        v=Vec2d(SCREEN_WIDTH - 10 - 1, SCREEN_HEIGHT - 10 - 1),
        p=Vec2d(SCREEN_WIDTH / 2 + DEMO_STEPS - 1,
                SCREEN_HEIGHT / 2 + (DEMO_STEPS - 1) / 2),
        point_color=THECOLORS['white'],
        line_color=THECOLORS['cyan']):
    """Circle point p and draw the segment from p to its projection q on
    the line through u and v.

    NOTE(review): the Vec2d defaults are evaluated once at import time and
    shared across calls -- fine while Vec2d is treated as immutable here.
    """
    dont_fill_bit = 0
    q, t = collinear_point_and_parameter(u, v, p)
    pygame.draw.circle(globals.screen, point_color, p.int_tuple, SPOT_RADIUS,
                       dont_fill_bit)
    # pygame.draw.line(screen, point_color, q.int_tuple, q.int_tuple)
    pygame.draw.line(globals.screen, line_color, p.int_tuple, q.int_tuple)
def draw_vector(p0: Vec2d, p1: Vec2d, color):
    """Draw the segment p0->p1 with a filled dot at the head p1."""
    pygame.draw.line(globals.screen, color, p0.int_tuple, p1.int_tuple)
    pygame.draw.circle(globals.screen, color, p1.int_tuple, SPOT_RADIUS, 0)
def draw_centered_arrow(loc, vel):
    """Blit a semi-transparent arrow at *loc*, scaled by |vel| and rotated to
    point along *vel*."""
    arrow_surface = globals.screen.copy()
    arrow_surface.set_alpha(175)
    # Arrow silhouette in local, untransformed coordinates.
    arrow_pts = (
        (0, 100),
        (0, 200),
        (200, 200),
        (200, 300),
        (300, 150),
        (200, 0),
        (200, 100))
    speed = vel.length
    # Pipeline: scale with speed -> centre on centroid -> rotate to heading
    # -> translate to loc.
    sps = [scale_seq(p, speed / 4, 1 / 4) for p in arrow_pts]
    ctr = centroid_seq(sps)
    cps = [translate_seq(p, -ctr[0], -ctr[1]) for p in sps]
    angle = np.arctan2(vel[1], vel[0])
    c = np.cos(angle)
    s = np.sin(angle)
    qs = [rotate_seq(p, c, s) for p in cps]
    ps = [translate_seq(p, loc[0], loc[1]) for p in qs]
    pygame.draw.polygon(
        arrow_surface,
        THECOLORS['white'], # (0, 0, 0),
        ps,
        0)
    # ns = pygame.transform.rotate(arrow_surface, -angle)
    globals.screen.blit(
        source=arrow_surface,
        dest=((0, 0))) # ((loc - Vec2d(0, 150)).int_tuple))
def screen_cage():
    """Corner points of the screen rectangle, in draw order."""
    return [SCREEN_TL, SCREEN_BL, SCREEN_BR, SCREEN_TR]
def draw_cage():
    """Outline the screen border in green."""
    draw_int_tuples([p.int_tuple for p in screen_cage()], THECOLORS['green'])
def clear_screen(color=THECOLORS['black']):
    """Fill the whole screen with *color* (default black)."""
    globals.screen.fill(color)
|
16,115 | d91ef47e5cb23abcc1ca40894d05fcb0aaaeba9d | from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from django.contrib.auth.models import PermissionsMixin
from django.db import models
class UserManager(BaseUserManager):
    """Manager for a user model keyed by email instead of a username."""

    def _create_user(
        self, email, password, is_superuser=False, **extra_fields
    ):
        """Normalize the email, hash the password, persist and return the user."""
        normalized = self.normalize_email(email)
        user = self.model(
            email=normalized, is_superuser=is_superuser, **extra_fields
        )
        user.set_password(password)
        user.save()
        return user

    def create_user(self, email, password, **extra_fields):
        """Create a regular (non-superuser) account."""
        return self._create_user(email, password, **extra_fields)

    def create_superuser(self, email, password, **extra_fields):
        """Create a superuser account."""
        return self._create_user(
            email, password, is_superuser=True, **extra_fields
        )
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model authenticated by email (no username field)."""
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    # Unique login identifier (see USERNAME_FIELD below).
    email = models.EmailField(
        max_length=150, null=False, blank=False, unique=True
    )
    # NOTE(review): an IntegerField with a hard-coded default is fragile for
    # phone numbers (drops leading zeros and '+'); a CharField is customary.
    phone_number = models.IntegerField(
        null=False, blank=False, default=702260027
    )
    address_line_1 = models.CharField(
        max_length=255, null=False, blank=False, default=""
    )
    address_line_2 = models.CharField(max_length=255, null=True, blank=True)
    objects = UserManager()
    REQUIRED_FIELDS = []
    USERNAME_FIELD = "email"
    def __str__(self):
        return f"{self.first_name} {self.last_name}"
|
16,116 | 14d766c6de3decd0ebc48156845cb9db1d0533eb | # Code generated by `stubgen`. DO NOT EDIT.
from kubernetes.client.configuration import Configuration as Configuration
from typing import Any
class V1NodeSpec:
    """Typed stub for the kubernetes client's V1NodeSpec model.

    NOTE(review): this file is generated by `stubgen` (see header) -- do not
    hand-edit; regenerate instead.  `pod_cid_rs` mirrors the client's
    auto-derived attribute name for the `podCIDRs` field.
    """
    openapi_types: Any
    attribute_map: Any
    local_vars_configuration: Any
    discriminator: Any
    def __init__(self, config_source: Any | None = ..., external_id: Any | None = ..., pod_cidr: Any | None = ..., pod_cid_rs: Any | None = ..., provider_id: Any | None = ..., taints: Any | None = ..., unschedulable: Any | None = ..., local_vars_configuration: Any | None = ...) -> None: ...
    @property
    def config_source(self): ...
    @config_source.setter
    def config_source(self, config_source) -> None: ...
    @property
    def external_id(self): ...
    @external_id.setter
    def external_id(self, external_id) -> None: ...
    @property
    def pod_cidr(self): ...
    @pod_cidr.setter
    def pod_cidr(self, pod_cidr) -> None: ...
    @property
    def pod_cid_rs(self): ...
    @pod_cid_rs.setter
    def pod_cid_rs(self, pod_cid_rs) -> None: ...
    @property
    def provider_id(self): ...
    @provider_id.setter
    def provider_id(self, provider_id) -> None: ...
    @property
    def taints(self): ...
    @taints.setter
    def taints(self, taints) -> None: ...
    @property
    def unschedulable(self): ...
    @unschedulable.setter
    def unschedulable(self, unschedulable) -> None: ...
    def to_dict(self): ...
    def to_str(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
|
16,117 | fc4f0c049f84070bec6be84a0870cb1f5134935d | # -*- coding:utf-8 -*-
'''
Created on 2017.7.7
@author: Administrator
'''
from framework.get_testsuite import TestSuites
from framework.excelparser import ExcelParser
from framework.get_teststeps import TestSteps
class KeyCases(object):
    """Build keyword-driven test step tuples from an Excel sheet.

    Python 2 code: relies on str.decode(), which does not exist on
    Python 3 str objects.
    """
    # Final keyword list to execute; shape (translated from the original
    # Chinese note): [(u'type', u'id=>kw'), (u'search', u"xpath=>//*[@id='su']"), ...]
    def key_case_list(self):
        filePath = r"D:\je_workspace\PythonCase\src\excel\testcase.xlsx"
        sheetName = "PageElements"
        data = ExcelParser(filePath, sheetName)
        keycase_list = data.dict_data()
        # Test-step table for the case, e.g. ['enter search', 'click search', ...]
        do_testkey = TestSteps().do_teststeps()
        do_keylist = []
        for i in range(len(keycase_list)):
            # Dictionary keys are the sheet's Chinese column headers:
            # '页面元素' = page element, '关键字' = keyword,
            # '元素定位表达式' = element locator expression.
            if keycase_list[i]['页面元素'.decode('utf-8')] in do_testkey:
                do_keylist.append((keycase_list[i]['关键字'.decode('utf-8')], keycase_list[i]['元素定位表达式'.decode('utf-8')], keycase_list[i]['页面元素'.decode('utf-8')]))
        return do_keylist
    # All keywords from the Excel sheet: [('type', 'id=>kw', <page element>), ...]
    # NOTE(review): near-duplicate of key_case_list -- only the workbook
    # extension differs (.xls vs .xlsx); consider extracting a shared helper.
    def key_case_list_all(self):
        filePath = r"D:\je_workspace\PythonCase\src\excel\testcase.xls"
        sheetName = "PageElements"
        data = ExcelParser(filePath, sheetName)
        keycase_list = data.dict_data()
        # Test-step table for the case (same source as above).
        do_testkey = TestSteps().do_teststeps()
        do_keylist = []
        for i in range(len(keycase_list)):
            if keycase_list[i]['页面元素'.decode('utf-8')] in do_testkey:
                do_keylist.append((keycase_list[i]['关键字'.decode('utf-8')], keycase_list[i]['元素定位表达式'.decode('utf-8')], keycase_list[i]['页面元素'.decode('utf-8')]))
        return do_keylist
    pass
#print KeyCases().key_case_list_all()
#print KeyCases().key_case_list_all()[0][2]
#print len(KeyCases().key_case_list_all())
#print range(3) |
16,118 | d9460b40d7d2a6b8ffdc9a188e2914b454dfb76c | # -*- coding: utf-8 -*-
"""
Created on Mon Feb 8 08:33:54 2016
@author: hedi
"""
import numpy as np
import sys
from time import time
import os
from sim_model import m_sim_mean
import theano
from itertools import product
import json
from joblib import Parallel, delayed
fwrite = sys.stdout.write  # shorthand writer for progress messages on stdout
dtype = theano.config.floatX  # Theano's configured float precision, used for all embeddings
def run_gristep(parameters, Xim_train, Xtxt_train):
    """Train one grid-search configuration and log progress to
    gridsearch/<model_name>.txt.

    *parameters* is (lr_img, lr_txt, K, batch_size).  Early stopping with a
    patience schedule tracks the best (lowest) test median rank.  Python 2
    code: uses xrange and str.translate(None, ...).
    """
    lr_img, lr_txt, K, batch_size = parameters
    model = m_sim_mean.create(dim_img, dim_txt, dim_multi)
    # Model name: parameter values with '.' stripped, joined by '_'.
    model_name = '_'.join(str(p).translate(None, '.') for p in parameters)
    f = open('gridsearch/%s.txt' % model_name, 'w')
    train_idxs = np.arange(n_train)
    min_rank = np.Inf
    # Early-stopping bookkeeping: improving past imp_threshold extends patience.
    patience = 3
    imp_threshold = 0.9
    patience_increase = 1.8
    fwrite("Starting %s...\n" % model_name)
    f.write("%s\n\n" % model_name)
    f.flush()
    sys.stdout.flush()
    for epoch in xrange(30):
        # Evaluate before each training pass; rank is the median retrieval rank.
        test_rank = model.test(Xim_test, Xtxt_test, epoch, saving_path=False)
        f.write('\tTest median rank = %d\n' % test_rank)
        f.flush()
        if test_rank < min_rank:
            if test_rank < imp_threshold * min_rank:
                patience = max(patience, epoch*patience_increase)
            min_rank = test_rank
        if patience<epoch:
            break
        f.write('Epoch: %d, W_im = %f, W_txt = %f\n' % (epoch,(model.W_img**2).mean(),
                (model.W_txt**2).mean()))
        f.flush()
        # Shuffle training pairs in lockstep before each epoch.
        np.random.shuffle(train_idxs)
        Xim_train = Xim_train[train_idxs]
        Xtxt_train = Xtxt_train[train_idxs]
        tic = time()
        model.train(Xim_train, Xtxt_train, K,lr_img,lr_txt, batch_size=batch_size)
        toc = time()-tic
        f.write('\tTime = %fs\n' % toc)
        f.flush()
    # Persist the configuration and outcome as one JSON line.
    res = {"lr_img":lr_img,"lr_txt":lr_txt,"K":K, "batch_size":batch_size, "epochs":epoch, "min_rank":min_rank}
    f.write(json.dumps(res))
    f.flush()
    f.close()
    fwrite("%s : %d epochs, %d median rank\n" % (model_name, epoch, min_rank))
    sys.stdout.flush()
if __name__=='__main__':
    np.random.seed(1234)
    which_set = 'r'
    # `np.load(...).all()` unpickles a dict stored inside a 0-d object array.
    data = np.load(which_set+'_similarity_data').all()
    d = data[data.keys()[0]]  # (Python 2: dict.keys() returns a list)
    dim_img = d['image_emb'].shape[0]
    dim_txt = d['text_emb'].shape[1]
    dim_multi = 150
    image_embeddings = []
    text_embeddings = []
    product_ids = []
    product_ids = data.keys()
    # Pop entries while collecting embeddings so memory stays bounded.
    for idx in product_ids:
        d = data.pop(idx)
        text_embeddings.append(d['text_emb'].mean(axis=0).astype(dtype))
        image_embeddings.append(d['image_emb'].astype(dtype))
    del data
    n_data = len(image_embeddings)
    n_data = 200  # NOTE(review): hard-coded cap overrides the real dataset size
    product_ids = np.array(product_ids[:n_data])
    image_embeddings = np.array(image_embeddings[:n_data])
    text_embeddings = np.array(text_embeddings[:n_data])
    # Shuffle images, texts and ids in lockstep.
    indexes = np.arange(n_data)
    np.random.shuffle(indexes)
    image_embeddings = image_embeddings[indexes]
    text_embeddings = text_embeddings[indexes]
    product_ids = product_ids[indexes]
    # 70/30 train/test split.
    n_train = int(0.7*n_data)
    Xim_train = image_embeddings[:n_train]
    # NOTE(review): slicing from n_train+1 skips sample n_train entirely --
    # presumably this should be [n_train:]; confirm before relying on it.
    Xim_test = image_embeddings[n_train+1:]
    Xtxt_train = text_embeddings[:n_train]
    Xtxt_test = text_embeddings[n_train+1:]
    product_ids_train = product_ids[:n_train]
    product_ids_test = product_ids[n_train + 1 :]
    n_test = Xim_test.shape[0]
    # Hyper-parameter grid; one parallel job per combination.
    lr_imgs = [0.001, 0.01, 0.1, 1., 10.]
    lr_txts = [0.001, 0.01, 0.1, 1., 10.]
    Ks = [5, 10, 15]
    batch_sizes = [16, 32, 64]
    iter_params = product(lr_imgs, lr_txts, Ks, batch_sizes)
    Parallel(n_jobs=10)(delayed(run_gristep)(parameters, Xim_train, Xtxt_train) for parameters in iter_params)
16,119 | b457242f77b487f6d9eaeb8ea1d4d00b2f2caecc | import pandas as pd
import sys
sys.path.insert(1, sys.path[0] + '/../../../library')
import loglib
# CLI: exposure sumstats, outcome sumstats, PLINK clump file, reference panel, output prefix
f1, f2, clumped, g1k, out = sys.argv[1:]
logger = loglib.get_logger(out)
logger.info(f"reading {f1}")
# FIX: use raw strings for the regex separators -- a bare '\s+' is an invalid
# escape sequence and raises SyntaxWarning on Python 3.12+ (SyntaxError later).
df1 = pd.read_csv(f1, sep=r'\s+')
logger.info(f"{len(df1)} snps read\n")
df_clumped = pd.read_csv(clumped, sep=r'\s+')
# keep only the clumped (approximately independent) SNPs
df1 = df1[df1.SNP.isin(df_clumped.SNP)]
logger.info(f"{len(df1)} snps from {f1} in clumped\n")
logger.info(f"reading {f2}")
df2 = pd.read_csv(f2, sep=r'\s+')
logger.info(f"{len(df2)} snps read\n")
# harmonise column names: exposure (bzx*) vs outcome (bzy*) statistics
df1 = df1.rename(columns={'A1': 'a1', 'A2': 'a2', 'FRQ': 'freq', 'OR': 'bzx', 'SE': 'bzx_se', 'N': 'bzx_n', 'P': 'bzx_pval'})
df2 = df2.rename(columns={'A1': 'a1', 'A2': 'a2', 'FRQ': 'freqy', 'OR': 'bzy', 'SE': 'bzy_se', 'N': 'bzy_n', 'P': 'bzy_pval'})
df = pd.merge(df1, df2, on=['CHR', 'POS', 'a1', 'a2'])
logger.info(f"{f1} and {f2} merged. {len(df)} snps.\n")
logger.info(f"reading {g1k}")
g1kf = pd.read_csv(g1k, sep=r'\s+', header=None, names=['CHR', 'SNP', 'GPOS', 'POS', 'A1', 'A2'])
logger.info(f"{len(g1kf)} snps read\n")
# drop SNPs whose allele pair does not match the reference panel
m = pd.merge(df, g1kf, left_on='SNP_x', right_on='SNP')
mis = m[~(((m.A1 == m.a1) | (m.A1 == m.a2)) & ((m.A2 == m.a1) | (m.A2 == m.a2)))]
df = df[~df.SNP_x.isin(mis.SNP)]
logger.info(f"removed snps with mismatched alleles. {len(df)} snps remaining\n")
df = df.drop(['CHR', 'POS', 'SNP_y', 'freqy'], axis=1)
df = df.rename(columns={'SNP_x': 'SNP'})
logger.info(f"Writing snps to {out}.sumstats and {out}.allele")
df.to_csv(f"{out}.sumstats", sep='\t', index=False)
df[['SNP', 'a1', 'a2']].to_csv(f"{out}.allele", sep='\t', index=False)
|
import pandas as pd
from itertools import combinations
import requests
import json
from math import trunc
from datetime import datetime, timedelta
import re
from collections import Counter
from itertools import chain
from DatabaseAccess.Connector import Connector
import DatabaseAccess.sql_requests as sql
import bdd_management
import algorithms
import visualization
# List of keys generated to use the API
#API_KEY = '6cf28a3a-59c3-4c82-8cbf-8fa5e64b01da'
#API_KEY = '3fd6041b-beda-4a79-9f1a-09bc263a1dfd'
#API_KEY = 'd3f69ecb-68f5-477e-b1bb-d58208f936c5'
#API_KEY = '78cc6f8e-68d6-450d-89d0-8a085b6c5af5'
API_KEY = 'b84ebebd-476c-4204-b195-7ffeb67043e7'
#API_KEY = 'cc3bc7b1-4c27-4176-aefd-15017c363178'
#API_KEY = '57f195e9-78a9-4fd7-a10c-312f0502d659'
# constants
API_NAVITIA = "https://api.sncf.com/v1/coverage/sncf/journeys?key={3}&from=admin:fr:{0}&to=admin:fr:{1}&datetime={2}&count=20"
all_waypoints = None
def datetime_str_to_datetime_str(datetime_str, fromFormat="%Y%m%dT%H%M%S", toFormat="%d/%m/%Y à %H:%M"):
    """Re-format a datetime string from one pattern into another.

    Args:
        datetime_str (str): input datetime text
        fromFormat (str, optional): pattern the input is parsed with. Defaults to "%Y%m%dT%H%M%S".
        toFormat (str, optional): pattern the result is rendered with. Defaults to "%d/%m/%Y à %H:%M".

    Returns:
        str: the same instant rendered with *toFormat*
    """
    parsed = datetime.strptime(datetime_str, fromFormat)
    return parsed.strftime(toFormat)
def store_section(db_connector, description, geo_point_from, geo_point_to, section_type, duration=None, co2=None):
    """store trip section information in db
    Args:
        db_connector (psycopg 'connection'): connection instance to the database. Used to keep all requests in the same transaction
        description (str): trip section resume
        geo_point_from (str): start city coord (lat;long)
        geo_point_to (str): end city coord (lat;long)
        section_type (str): type of trip section: 'INFO', 'SECTION', 'SUB_SECTION' or 'DELAY'
        duration (int, optional): duration of the travel. Defaults to None.
        co2 (float, optional): co2 emission for the travel. Defaults to None.
    """
    # sub-entries are visually indented in the console output
    indentation = ''
    if section_type == 'DELAY' or section_type == 'SUB_SECTION':
        indentation = ' -> '
    print(indentation + description)
    # persisted inside the caller's transaction -- no commit here
    db_connector.execute_nonquery(sql.SQL_INSERT_FRENCH_TRIP_SECTION, [
        geo_point_from, geo_point_to, description, section_type, duration, co2])
def save_trip_section(db_connector, all_waypoints, from_city_insee, to_city_insee, best_travel):
    """Format one journey leg, print it and store it in the database.

    Args:
        db_connector (psycopg 'connection'): connection instance to the database. Used to keep all requests in the same transaction
        all_waypoints (pd.DataFrame): prefecture referential (col 0 = insee code, col 2 = name, col 3 = gps coord)
        from_city_insee (str): departure city insee code
        to_city_insee (str): arrival city insee code
        best_travel (dict): Navitia journey selected for this leg
    """
    def _lookup(insee, column):
        # single-row lookup in the referential dataframe
        return all_waypoints.loc[all_waypoints[0] == insee].values[0][column]

    from_city_name = _lookup(from_city_insee, 2)
    to_city_name = _lookup(to_city_insee, 2)
    # BUG fix: from_city_gps was previously looked up with to_city_insee
    # (copy/paste error), so every sub-section carried identical start and
    # end coordinates.
    from_city_gps = _lookup(from_city_insee, 3)
    to_city_gps = _lookup(to_city_insee, 3)
    store_section(db_connector, 'Voyage de {} à {}. Départ le {} - Arrivée le {} après {} transferts '.format(from_city_name, to_city_name, datetime_str_to_datetime_str(best_travel['departure_date_time']), datetime_str_to_datetime_str(best_travel['arrival_date_time']), best_travel['nb_transfers']),
                  None,
                  None,
                  'SECTION',
                  best_travel['duration'],
                  best_travel['co2_emission']["value"]
                  )
    for section in best_travel['sections']:
        if 'from' in section:
            if not section['type'] == 'crow_fly':
                # nasty misspelling in the Navitia payload: 'transfer_type'
                if not 'transfer_type' in section or not section['transfer_type'] == 'walking':
                    store_section(db_connector, '{} - {} ({})'.format(section['from']['name'], section['to']['name'], section['display_informations']['physical_mode']),
                                  from_city_gps,
                                  to_city_gps,
                                  'SUB_SECTION')
            # else: initial crow_fly section, not used
        else:
            store_section(db_connector, 'Waiting {} minutes'.format(section['duration']/60),
                          None,
                          None,
                          'DELAY')
def run_travel_optimisation(trip_start_date, is_min_co2_search = False, is_force_compute = False):
    """run the treatment to find the best optimized trip
    Args:
        trip_start_date (datetime): trip start date in format "%Y%m%dT%H%M%S"
        is_min_co2_search (bool): whether optimisation is based on co2 emission instead of duration
        is_force_compute (bool): force the re-calculation of trips between all prefectures (very slow)
    """
    waypoint_co2 = {}
    waypoint_durations = {}
    # get all prefectures referential
    db_connector = Connector()
    with db_connector:
        results = db_connector.execute_query(sql.SQL_GET_ALL_PREFECTURE)
        all_waypoints = pd.DataFrame(results.fetchall())
    # check whether prefecture-to-prefecture journeys were already computed
    db_connector = Connector()
    with db_connector:
        saved_waypoints = db_connector.execute_query(sql.SQL_GET_WAYPOINTS)
    # when pre-computing optimal journeys, use the current date
    travel_date = datetime.now().strftime("%Y%m%dT%H%M%S")
    bad_waypoints = []
    if saved_waypoints.rowcount > 0 and not is_force_compute:
        print("le référentiel des voyage existe déjà")
    else:
        try:
            bdd_management.truncate_journey()
            # query Navitia for every prefecture pair and average duration/CO2
            for (from_city, to_city) in combinations(all_waypoints[0].values, 2):
                try:
                    if int(from_city) in bad_waypoints or int(to_city) in bad_waypoints:
                        continue
                    route = requests.get(API_NAVITIA.format(
                        int(from_city), int(to_city), travel_date, API_KEY))
                    response = json.loads(route.text)
                    mid_duration = 0
                    mid_co2 = 0
                    for journey in response["journeys"]:
                        mid_duration += journey["duration"]
                        mid_co2 += journey["co2_emission"]["value"]
                    # frozenset key: the pair is direction-independent
                    waypoint_co2[frozenset([from_city, to_city])
                                 ] = mid_co2/len(response["journeys"])
                    waypoint_durations[frozenset(
                        [from_city, to_city])] = mid_duration/len(response["journeys"])
                except Exception as e:
                    # NOTE(review): `response` is unbound here if requests.get
                    # itself raised -- confirm before relying on this handler
                    print("Error with finding the route between %s and %s : %s" %
                          (from_city, to_city, response["error"]["message"]))
                    # remember cities Navitia cannot resolve so we skip them later
                    if 'no destination point' == response["error"]["message"]:
                        bad_waypoints.append(int(to_city))
                    if 'no origin point' == response["error"]["message"]:
                        bad_waypoints.append(int(from_city))
                    for bad_insee_code in re.findall('The entry point: admin:fr:([0-9]+) is not valid', response["error"]["message"]):
                        if not int(bad_insee_code) in bad_waypoints:
                            bad_waypoints.append(int(bad_insee_code))
            # store the point-to-point journeys (prefecture to prefecture)
            db_connector = Connector()
            with db_connector:
                for (waypoint1, waypoint2) in waypoint_co2.keys():
                    waypoint = [waypoint1,
                                waypoint2,
                                str(waypoint_co2[frozenset([waypoint1, waypoint2])]),
                                str(int(waypoint_durations[frozenset([waypoint1, waypoint2])]))]
                    db_connector.execute_nonquery(sql.SQL_INSERT_WAYPOINT, waypoint)
                # commit the unit journeys to the db
                db_connector.commit()
            # store the prefectures that could not be resolved (no station)
            print(bad_waypoints)
            db_connector = Connector()
            with db_connector:
                for bad_city in bad_waypoints:
                    db_connector.execute_nonquery(
                        sql.SQL_INSERT_CITY_WITHOUT_STATION, str(bad_city))
                #db_connector.commit()
        except Exception as e:
            print('Erreur durant la génération des trajets de préfecture en préfecture. Rollback effectué')
    # reload the (possibly pre-existing) pairwise costs from the database
    waypoint_co2 = {}
    waypoint_durations = {}
    processed_waypoints = set()
    db_connector = Connector()
    with db_connector:
        waypoints = db_connector.execute_query(sql.SQL_GET_WAYPOINTS)
        for row in waypoints:
            waypoint_co2[frozenset([int(row[0]), int(row[1])])] = row[2]
            waypoint_durations[frozenset([int(row[0]), int(row[1])])] = row[3]
            processed_waypoints.update([row[0], row[1]])
    # solve the tour with a genetic algorithm over the chosen cost metric
    travel_results = algorithms.run_genetic_algorithm(waypoints = list(processed_waypoints), is_min_co2_search = is_min_co2_search, generations=300, population_size=100 )
    # take most represented trip order
    journey_groups = Counter(chain(*travel_results))
    top_journeys = journey_groups.most_common(1)[0][0]
    print('Le voyage le plus représentatif est :')
    print(top_journeys)
    # compute real travel times for the most optimised journey
    print('Départ du calcul du voyage le %s' %
          (datetime_str_to_datetime_str(trip_start_date)))
    travel_date = trip_start_date
    db_connector = Connector()
    with db_connector:
        try:
            # clear the table holding the journey information
            bdd_management.truncate_roadtrip()
            for i in range(len(top_journeys)-1):
                try:
                    from_city_insee = top_journeys[i]
                    to_city_insee = top_journeys[i+1]
                    route = requests.get(API_NAVITIA.format(
                        int(from_city_insee), int(to_city_insee), travel_date, API_KEY))
                    travels = json.loads(route.text)
                    # inspect the received journeys to pick the best fit
                    best_travel = travels["journeys"][0]
                    for travel in travels["journeys"]:
                        if is_min_co2_search and float(best_travel['co2_emission']['value']) > float(travel['co2_emission']['value']):
                            best_travel = travel
                        if best_travel['arrival_date_time'] > travel['arrival_date_time']:
                            best_travel = travel
                    # persist leg 'i' to the database
                    save_trip_section(db_connector, all_waypoints, from_city_insee, to_city_insee, best_travel)
                    # the next leg must depart after this leg's arrival
                    travel_date = best_travel['arrival_date_time']
                except Exception as e:
                    print("!! Erreur durant le calcul du trajet entre '%s' et '%s'" %
                          (from_city_insee, to_city_insee))
            # write the journey summary
            resume = db_connector.execute_query(sql.SQL_GET_C02_CONSUMPTION_RESUME)
            resume = resume.fetchone()
            resume_description = """Début du voyage le {} . Arrivée le {}.
            Le voyage à durée {} pour un total de {:d} kgeC""".format(
                datetime_str_to_datetime_str(trip_start_date),
                datetime_str_to_datetime_str(travel_date),
                str(timedelta(seconds=resume[0])) ,
                trunc( resume[1]/1000))
            store_section(db_connector, resume_description, None, None, 'INFO', resume[0], resume[1])
            db_connector.commit()
        except Exception as e:
            db_connector.rollback()
            print('Erreur durant la création du voyage. rollback effectué!!!')
    print('print map with road-trip data')
    visualization.generate_visualization()
    print('Travel complete. Have nive trip!!!')
def fact(n):
    """Return n! computed recursively; defined for n >= 0."""
    # base case n <= 1 covers 0! == 1 and stops the recursion
    # (the original tested n == 1 only, so fact(0) recursed forever)
    if n <= 1:
        return 1
    return n * fact(n - 1)
#ans=fact(5);
#print(ans)
def factorialList(a, b):
    """Return {n: n!} for every n in range(a, b).

    Side effect kept from the original: the mapping is also published as the
    module-level global ``l``.
    """
    global l
    l = {}
    for n in range(a, b):
        l[n] = fact(n)
    return l
ans=factorialList(5,8)
print(ans)
print(ans[6]);
|
def pandigital(n):
    """True when n's digits are all distinct with 9 as the largest and 1 as the smallest."""
    digits = str(n)
    if len(set(digits)) != len(digits):
        return False
    return max(digits) == '9' and min(digits) == '1'
# Project Euler 38: find the largest 1-9 pandigital number formed by
# concatenating num*1, num*2, ... until at least 9 digits are reached.
m = 0
for num in range(9, 10000):
    s = ''
    c = 1
    while True:
        # append the next concatenated product
        s += str(num*c)
        c += 1
        if len(s) >= 9:
            break
    if pandigital(int(s)):
        if int(s) > m:
            m = int(s)
print(m)
from urllib import parse
import requests
def get_fragments(reference):
    """Return the fragments for a given reference, using the resolver service.

    See the API docs for the resolver service at:
    https://alpha.nationalarchives.gov.uk/idresolver/apidocs/
    The commit history for this file also has a version that does this parsing
    without any external dependency.

    :param reference: a reference
    :return: a dict representing the fragments
    :rtype: dict
    """
    lookup_url = "https://alpha.nationalarchives.gov.uk/idresolver/lookup/%s" % parse.quote(reference.strip())
    payload = requests.get(lookup_url).json()
    fragments = {}
    try:
        canonical = payload['canonical'][0]
        fragments['reference'] = canonical['catalogue_ref']
        fragments['letter_code'] = canonical['letter_code']
        series = canonical['path']['Series']
        # series is rendered as "<letter code> <series>" when present
        fragments['series'] = '%s %s' % (canonical['letter_code'], series) if series else ''
    except KeyError:
        fragments['error'] = 'There was a KeyError'
    return fragments
|
def unpack(tupple_list):
    """Print each tuple's elements flattened into a list, reusing one buffer."""
    flattened = []
    print("LIST OF TUPPLES")
    print("----------------")
    print(tupple_list)
    print("-------------------------------------")
    for tpl in tupple_list:
        flattened.extend(tpl)
        print("TUPPPLE IN LIST------------>")
        print(flattened)
        flattened.clear()
if __name__=="__main__":
    # demo data: three tuples of names / ids
    t='yogendra','katewa','dharmesh'
    t1=14112,527235,623328
    t2='ravi','amit','abhishek'
    tupple_list=[t,t1,t2]
    unpack(tupple_list)
|
# mark counting with conditional assignment operators ( +=, *=, -=, /= ) / score according to the chosen case
# running total for the order
bill = 0
# normalise the answer to lower-case so 'S' and 's' match
size = input("plz choose a size,S,M,L").lower()
if size == 's':
    bill += 15
elif size == 'm':
    bill += 20
else:
    # any other input (including typos) is priced as large
    bill += 25
print(bill)
|
"""
Adapt some Python types to the core protocol.
"""
from core import call, primitive_vtables
def install():
    """Register vtables so plain Python values speak the core message protocol."""
    primitive_vtables[type(None)] = {}  # None answers no messages
    primitive_vtables[bool] = bool_vtable
    for t in num_types: primitive_vtables[t] = num_vtable
    for t in str_types: primitive_vtables[t] = str_vtable
    primitive_vtables[list] = list_vtable
    primitive_vtables[tuple] = tuple_vtable
# NOTE: Python 2 module -- `unicode` and `long` do not exist on Python 3
str_types = (str, unicode)
num_types = (int, long, float)
# Messages understood by booleans.  Each handler has the continuation-passing
# signature (receiver, args, k); Python-2 tuple parameters unpack args inline.
bool_vtable = {
    ('if-so', 'if-not'): lambda rcvr, args, k: call(args[not rcvr], ('run',), (), k),
    # short-circuit: the thunk only runs when the receiver does not decide
    ('||',): lambda rcvr, (thunk,), k: (k, rcvr) if rcvr else call(thunk, ('run',), (), k),
    ('&&',): lambda rcvr, (thunk,), k: call(thunk, ('run',), (), k) if rcvr else (k, rcvr),
}
def as_bool(thing):
    """Pass *thing* through if it is a genuine bool; otherwise fail loudly."""
    assert isinstance(thing, bool), "Not a claim: %r" % (thing,)
    return thing
# Messages understood by numbers; arithmetic coerces the argument via as_number.
num_vtable = {
    ('+',): lambda rcvr, (other,), k: (k, rcvr + as_number(other)),
    ('*',): lambda rcvr, (other,), k: (k, rcvr * as_number(other)),
    ('-',): lambda rcvr, (other,), k: (k, rcvr - as_number(other)),
    ('=',): lambda rcvr, (other,), k: (k, rcvr == other), # XXX object method
    ('<',): lambda rcvr, (other,), k: (k, rcvr < other),
}
def as_number(thing):
    """Return *thing* if it is an int/long/float; otherwise raise AssertionError."""
    if isinstance(thing, num_types):
        return thing
    assert False, "Not a number: %r" % (thing,)
# Sequence types
# Shared CPS handlers for str/list/tuple: each returns (k, value) directly or
# tail-calls into `call` with the continuation k.
def find_default(rcvr, (other, default), k):
    # like `find`, but runs the `default` thunk instead of raising when absent
    try:
        return k, rcvr.index(other)
    except ValueError:
        return call(default, ('run',), (), k)
def has(rcvr, (other,), k): return (k, other in rcvr)
def at(rcvr, (i,), k): return (k, rcvr[i])
def find(rcvr, (other,), k): return (k, rcvr.index(other))
def size(rcvr, _, k): return (k, len(rcvr))
def add(rcvr, (other,), k): return (k, rcvr + other)
def eq(rcvr, (other,), k): return (k, rcvr == other)
def lt(rcvr, (other,), k): return (k, rcvr < other)
def as_string(thing):
    """Return *thing* if it is a str/unicode; otherwise raise AssertionError."""
    if isinstance(thing, str_types):
        return thing
    assert False, "Not a string: %r" % (thing,)
# Message table shared by every sequence type (strings, lists, tuples).
sequence_vtable = {
    ('has',): has,
    ('at',): at,
    ('find',): find,
    ('find', 'default',): find_default,
    ('size',): size,
    ('++',): add,
    ('=',): eq,
    ('<',): lt,
}
str_vtable = sequence_vtable
list_vtable = sequence_vtable # XXX sure you want this?
tuple_vtable = sequence_vtable
|
'''
This program should be able to define the search problem with clearly labeled components that point to the state, actions, transition function and goal test.
Define a problem in terms of state, actions, transition function, and goal test
TileProblem labels the components (variables, methods) that point to these elements
'''
class TileProblem:
    """Sliding-tile puzzle search problem: state, actions, transition and goal test."""

    def __init__(self, new, current=None, action=""):
        self.state = new          # current board; 0 marks the blank cell
        self.prevState = current  # parent state (for path reconstruction)
        for i in range(len(new)):
            for j in range(len(new[i])):
                if new[i][j] == 0:
                    self.emptyCell = [i, j]
        self.prevAction = action
        self.h = 0  # heuristic estimate
        self.g = 0  # path cost so far
        self.f = 0  # f = g + h

    # Actions that can be taken
    def actions(self):
        """Return the legal blank moves from this state ('U','D','L','R')."""
        moves = []
        i = self.emptyCell[0]
        j = self.emptyCell[1]
        if i > 0:
            moves.append('U')
        if i < len(self.state) - 1:
            moves.append('D')
        if j > 0:
            moves.append('L')
        if j < len(self.state[0]) - 1:
            moves.append('R')
        return moves

    # Transition Function
    def transition(self, move):
        """Return a NEW board with the blank moved in direction *move*.

        Invalid moves print a warning and return an unchanged copy.
        """
        move = move.upper()
        # deep-copy the board so the current state is never mutated
        copy = [row[:] for row in self.state]
        i = self.emptyCell[0]
        j = self.emptyCell[1]
        if move == 'L' and j > 0:
            copy[i][j], copy[i][j-1] = copy[i][j-1], 0
        # BUG fix: the bounds below previously used the wrong dimension.
        # 'R' tested len(copy)-1 (row count) instead of the row length, and
        # 'D' tested i < len(copy[0]) which let i+1 index past the last row
        # and raise IndexError when the blank was on the bottom row.
        elif move == 'R' and j < len(copy[0]) - 1:
            copy[i][j], copy[i][j+1] = copy[i][j+1], 0
        elif move == 'U' and i > 0:
            copy[i][j], copy[i-1][j] = copy[i-1][j], 0
        elif move == 'D' and i < len(copy) - 1:
            copy[i][j], copy[i+1][j] = copy[i+1][j], 0
        else:
            print("Invalid Move")
        return copy

    # Return true if at goal state
    def goalState(self):
        """True when tiles read 1..n in row-major order with the blank last."""
        prev = 0
        for i in range(len(self.state)):
            for j in range(len(self.state[i])):
                # blank in the bottom-right corner completes the goal
                if i == len(self.state)-1 and j == len(self.state[i])-1 and self.state[i][j] == 0:
                    return True
                if self.state[i][j] != prev + 1:
                    return False
                prev = self.state[i][j]
        return True

    def __eq__(self, other):
        # nodes compare by f = g + h so they order correctly in a frontier
        return (self.g + self.h) == (other.g + other.h)

    def __gt__(self, other):
        return (self.g + self.h) > (other.g + other.h)
|
# Print the name of the module that's running
print(f"Running: {__name__}")
def pprint_dict(header, d):
    """Pretty-print dict *d* between rule lines, under a banner labelled *header*."""
    print('\n--------------------------------------')
    print(f'***** pprint_dict of {header} *****')
    for key, value in d.items():
        print("\t", key, value)
    print('--------------------------------------\n')
# Print the dict of the current scope's global variables
pprint_dict('module1.globals', globals())
print(f'{"*" * 10} End of {__name__} {"*" * 10}')
# Modules have an execution space any they execute
|
from pyecharts.charts import ThemeRiver
import pyecharts.options as opts
import pandas as pd
# load the sales data, indexed by date
data = pd.read_csv('E:\python\sale_amount.csv',index_col='date')
series = data.columns.values
# flatten to ThemeRiver triples: [date, value, series-name]
data_list = []
for se in series:
    for x,y in zip(data[se].index,data[se].values):
        data_list.append([x,int(y),se])
# render the theme river; the single axis is typed 'time'
wc = ThemeRiver(init_opts=opts.InitOpts(height='600px'))\
    .add(series_name=series, data=data_list, singleaxis_opts=opts.SingleAxisOpts(type_='time'))\
    .render()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Test model in other period than modcel uses
"""
import numpy as np
from prints import print_realtest_results
from user import backtesting_details
from matrix_manip import calc_min_variance_portfolio, calc_portfolio_stdev
from plots import plot_points
def realtest(r,stds,mean_returns,covar,start2,end2,tickers):
    """
    Takes in 2d array of weights of all the calculated efficient portfolios,
    applies them to a set of different dates for purpose of testing predictability
    """
    # Get allocation weights of minimum variance portfolio for r during model time period
    weights = calc_min_variance_portfolio(mean_returns,covar,r)
    # Get data during backtesting time period
    # (imported locally, presumably to avoid a circular import -- TODO confirm)
    from find_optimized_portfolio import process_data
    stds_new,mean_returns_new,covar_new,tickers,start2,end2=process_data(start2,end2,tickers)
    # Dot backtesting period's returns with model's weights to get the portfolio's return
    # during backtest period
    realtest_r = weights @ mean_returns_new
    # Get stdev during backtest period
    realtest_std=calc_portfolio_stdev(covar_new,weights)
    return realtest_r,realtest_std, weights, tickers, start2, end2
def realtest_frontier(stds,mean_returns,covar,start1,end1,tickers):
    """Back-test the frontier: for each user-chosen target return, evaluate the
    model-period min-variance weights on the back-test period, then print and
    plot the results."""
    # Interact with user to choose a predictability test region (dates) and a range of returns
    rs,start2,end2 = backtesting_details(start1,end1)
    count = len(rs)
    realtest_rs = np.zeros(count)
    realtest_stds = np.zeros(count)
    # gets min variance portfolio stats for list of desired returns, rs
    for i in range(count):
        realtest_r,realtest_std,weights,tickers,start2,end2=realtest(rs[i],stds,mean_returns,covar,start2,end2,tickers)
        print_realtest_results(realtest_r,realtest_std,rs[i],weights,tickers,start1,end1,start2,end2,i)
        # Input returns and stdevs of each portfolio into list
        realtest_rs[i]=realtest_r
        realtest_stds[i]=realtest_std
    # print and plot results of backtest
    plot_points(realtest_stds,realtest_rs,rs)
|
import numpy as np
import pytest
import autocti as ac
from autocti import exc
def test__trail_size_to_array_edge():
    """trail_size_to_array_edge = gap between the last region's end and the array edge."""
    # single region ending at pixel 3 of a 5-pixel array -> 2 trailing pixels
    layout = ac.Layout1D(shape_1d=(5,), region_list=[ac.Region1D(region=(0, 3))])
    assert layout.trail_size_to_array_edge == 2
    layout = ac.Layout1D(shape_1d=(7,), region_list=[ac.Region1D(region=(0, 3))])
    assert layout.trail_size_to_array_edge == 4
    # with several regions only the last one matters (ends at 14 of 15 -> 1)
    layout = ac.Layout1D(
        shape_1d=(15,),
        region_list=[
            ac.Region1D(region=(0, 2)),
            ac.Region1D(region=(5, 8)),
            ac.Region1D(region=(11, 14)),
        ],
    )
    assert layout.trail_size_to_array_edge == 1
    layout = ac.Layout1D(
        shape_1d=(20,),
        region_list=[
            ac.Region1D(region=(0, 2)),
            ac.Region1D(region=(5, 8)),
            ac.Region1D(region=(11, 14)),
        ],
    )
    assert layout.trail_size_to_array_edge == 6
|
import argparse
import datetime
import os
from cryptography import x509
from cryptography.hazmat import backends
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
def create_rsa_private_key(key_size=2048, public_exponent=65537):
    """Generate a new RSA private key with the given modulus size and public exponent."""
    return rsa.generate_private_key(
        public_exponent=public_exponent,
        key_size=key_size,
        backend=backends.default_backend()
    )
def create_self_signed_certificate(subject_name, private_key, days_valid=365):
    """Build a self-signed CA certificate (issuer == subject) over *private_key*.

    Args:
        subject_name: common name to embed in the certificate
        private_key: RSA key that both signs the certificate and is certified
        days_valid: validity window in days, starting now
    """
    subject = x509.Name([
        x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, u"Test, Inc."),
        x509.NameAttribute(x509.NameOID.COMMON_NAME, subject_name)
    ])
    certificate = x509.CertificateBuilder().subject_name(
        subject
    ).issuer_name(
        subject  # self-signed: issuer is the subject itself
    ).public_key(
        private_key.public_key()
    ).serial_number(
        x509.random_serial_number()
    ).add_extension(
        x509.BasicConstraints(ca=True, path_length=None), critical=True  # mark as a CA
    ).not_valid_before(
        datetime.datetime.utcnow()
    ).not_valid_after(
        datetime.datetime.utcnow() + datetime.timedelta(days=days_valid)
    ).sign(private_key, hashes.SHA256(), backends.default_backend())
    return certificate
def create_certificate(
    subject_name, private_key, signing_certificate, signing_key, days_valid=365, client_auth=False
):
    """Build a leaf certificate for *private_key*, signed by the given CA.

    Args:
        subject_name: common name of the new certificate
        private_key: key pair being certified
        signing_certificate: CA certificate providing the issuer name
        signing_key: CA private key that signs the result
        days_valid: validity window in days, starting now
        client_auth: when True, add the TLS client-auth extended key usage
    """
    subject = x509.Name([
        x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, u"Test, Inc."),
        x509.NameAttribute(x509.NameOID.COMMON_NAME, subject_name)
    ])
    builder = x509.CertificateBuilder().subject_name(
        subject
    ).issuer_name(
        signing_certificate.subject
    ).public_key(
        private_key.public_key()
    ).serial_number(
        x509.random_serial_number()
    ).not_valid_before(
        datetime.datetime.utcnow()
    ).not_valid_after(
        datetime.datetime.utcnow() + datetime.timedelta(days=days_valid)
    )
    if client_auth:
        builder = builder.add_extension(
            x509.ExtendedKeyUsage([x509.ExtendedKeyUsageOID.CLIENT_AUTH]),
            critical=True
        )
    certificate = builder.sign(
        signing_key,
        hashes.SHA256(),
        backends.default_backend()
    )
    return certificate
def create_root_certificate(root_ca_cn):
    """Return (key, self-signed CA certificate) for a new root CA."""
    root_key = create_rsa_private_key()
    return root_key, create_self_signed_certificate(root_ca_cn, root_key)
def create_server_certificate(server_cert_cn, root_cert, root_key):
    """Return (key, certificate) for a server certificate signed by the root CA."""
    server_key = create_rsa_private_key()
    return server_key, create_certificate(server_cert_cn, server_key, root_cert, root_key)
def create_client_certificate(client_cn, root_cert, root_key):
    """Return (key, certificate) for a client-auth certificate signed by the root CA."""
    client_key = create_rsa_private_key()
    return client_key, create_certificate(client_cn, client_key, root_cert, root_key, client_auth=True)
def write_key_to_file(key, path):
    """Serialise *key* to *path* as an unencrypted PKCS#8 PEM file."""
    pem = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption()
    )
    with open(path, 'wb') as f:
        f.write(pem)
def write_cert_to_file(cert, path):
    """Serialise *cert* to *path* as a PEM file."""
    pem = cert.public_bytes(serialization.Encoding.PEM)
    with open(path, 'wb') as f:
        f.write(pem)
def setup_certs(root_ca_cn, server_cert_cn, clients, destination):
    """Create a root CA, a server certificate and one client certificate per CN,
    writing every key/certificate as a PEM file into *destination*.

    Args:
        root_ca_cn: common name for the root CA
        server_cert_cn: common name for the server certificate
        clients: iterable of client common names, or None for no clients
        destination: output directory; falsy means ../certificates next to this file
    """
    if not destination:
        destination = os.path.join(os.path.dirname(__file__), '..', 'certificates')
    os.makedirs(destination, exist_ok=True)  # don't fail on a fresh checkout
    root_key, root_certificate = create_root_certificate(root_ca_cn)
    server_key, server_certificate = create_server_certificate(server_cert_cn, root_certificate, root_key)
    # BUG fix: the final zip() crashed with TypeError when clients was None,
    # even though the generation loop already guarded with (clients or []).
    clients = clients or []
    client_certs = [create_client_certificate(client, root_certificate, root_key) for client in clients]
    write_cert_to_file(root_certificate, os.path.join(destination, 'root_certificate.pem'))
    write_key_to_file(root_key, os.path.join(destination, 'root_key.pem'))
    write_cert_to_file(server_certificate, os.path.join(destination, 'server_certificate.pem'))
    write_key_to_file(server_key, os.path.join(destination, 'server_key.pem'))
    for client_cn, (client_key, client_cert) in zip(clients, client_certs):
        write_cert_to_file(client_cert, os.path.join(destination, f'client_certificate_{client_cn}.pem'))
        write_key_to_file(client_key, os.path.join(destination, f'client_key_{client_cn}.pem'))
if __name__ == '__main__':
    # CLI: `setup --root_cn X --server_cn Y -cl cn1 cn2 [--destination DIR]`
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(help='sub-command help', dest='action')
    parser_setup = subparsers.add_parser('setup', help='Setup certificates')
    parser_setup.add_argument('--root_cn', help='Common name of ROOT CA', required=True)
    parser_setup.add_argument('--server_cn', help='Common name of Server Certificate', required=True)
    parser_setup.add_argument('-cl', '--client_list', nargs='+', help='Client certificates CN', required=True)
    parser_setup.add_argument(
        '--destination', help='Destination directory for writing certificates, defaults to certificates directory'
    )
    args = parser.parse_args()
    if args.action == 'setup':
        setup_certs(args.root_cn, args.server_cn, args.client_list, args.destination)
    else:
        # no (or unknown) sub-command: show usage instead of failing
        parser.print_help()
|
#!/usr/bin/python3
#-*-coding:UTF-8-*-
# __init__ is the constructor; it runs automatically right after the object is created
class FooBar(object):
    """Minimal demo: __init__ runs automatically when the object is created."""

    def __init__(self, value):
        # stash the constructor argument on the instance
        self.name = value
f=FooBar('xls')
print(f.name)
class Foobar(object):
    """Demo of an instance attribute exposed through a method."""

    def __init__(self, name):
        self.name = name

    def foo(self):
        # simply echo the stored name back
        return self.name
fo=Foobar('zyp')
print(fo.foo())
|
#!/usr/bin/env -S PATH="${PATH}:/usr/local/bin" python3
import requests
def ping():
    # Fire-and-forget GET at the app's root URL (keeps the Heroku dyno awake);
    # the response is intentionally discarded.
    requests.get('https://notion-heroku.herokuapp.com/')
class Pilha:
    """A simple LIFO stack (Portuguese: "pilha") backed by a Python list.

    BUG fix: the original mixed up two attribute names -- ``self.items`` in
    ``__init__`` but ``self._itens`` in ``isVazio``/``push`` -- so every push
    raised AttributeError. All methods now share ``self.items``.
    """

    def __init__(self):
        self.items = []

    def isVazio(self):
        # original contract kept: returns the string "Vazio" when empty,
        # None otherwise
        if len(self.items) == 0:
            return "Vazio"

    def push(self, item):
        # original semantics kept: duplicates are silently ignored
        if item not in self.items:
            self.items.append(item)

    def pop(self):
        return self.items.pop()

    def peek(self):
        return self.items[len(self.items)-1]

    def lenght(self):  # (sic) misspelled name kept so existing callers don't break
        return len(self.items)
pilha = Pilha()
|
"""
@Author: Robbe Heirman
@Version: 0.01
@Description:
Main python module for SushiGo!
Contains the GameLoop and event handler.
"""
import os
import sys
from typing import List, Tuple
import pygame
from source.model.deck import Deck
from source.model.player import Player
from source.view.entity_view import EntityView
from source.view.hand_view import HandView
from source.view.picked_cards_view import PickedCardsView
SCREEN_WIDTH = 700
SCREEN_HEIGHT = 700
def is_clicked(views: List[EntityView], mouse_pos: Tuple[int, int]) -> EntityView:
    """
    Dispatch a click: return view.clicked() for the first view whose rect
    contains *mouse_pos*; None when nothing was hit.
    """
    for candidate in views:
        if candidate.rect.collidepoint(mouse_pos):
            return candidate.clicked()
def main():
    """
    Main function of our program: set up pygame, deal a hand, run the game loop.
    """
    # call to OS for positioning window
    os.environ['SDL_VIDEO_WINDOW_POS'] = "%d,%d" % (0, 25)
    # Initialization block
    pygame.init()  # Initialize pygame module
    screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))  # initialize screen
    # Testing
    # model_card = m_card.Card(m_card.CardType.TEMPURA)
    # view_card = v_card.CardView(screen, model_card)
    # deal the player a 10-card booster pack and build the three views
    deck = Deck()
    player = Player()
    b_pack = deck.generate_booster(10)
    player.booster_pack = b_pack
    hand_view = HandView(screen, (0, SCREEN_HEIGHT - SCREEN_HEIGHT / 5), (SCREEN_WIDTH, SCREEN_HEIGHT / 5), player)
    pick_crds = PickedCardsView(screen, (0, 0), (SCREEN_WIDTH, SCREEN_HEIGHT / 5), player, 0)
    pick_crds2 = PickedCardsView(screen, (0, 0), (SCREEN_WIDTH, SCREEN_HEIGHT / 5), player, 180)
    # Game loop
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit()
            elif event.type == pygame.MOUSEBUTTONUP:
                # route the click to whichever view was hit
                is_clicked([hand_view, pick_crds, pick_crds2], pygame.mouse.get_pos())
        # redraw everything each frame
        screen.fill((0, 0, 0))
        hand_view.draw()
        pick_crds.draw()
        pick_crds2.draw()
        pygame.display.flip()
if __name__ == "__main__":
main()
|
import unittest
from unittest.mock import call
from gitopscli.git_api import GitProvider, GitRepoApi, GitRepoApiFactory
from gitopscli.commands.create_pr_preview import CreatePrPreviewCommand, CreatePreviewCommand
from .mock_mixin import MockMixin
DUMMY_GIT_HASH = "5f65cfa04c66444fcb756d6d7f39304d1c18b199"
class CreatePrPreviewCommandTest(MockMixin, unittest.TestCase):
    def setUp(self):
        """Wire mocks for CreatePreviewCommand and the git repo API before each test."""
        self.init_mock_manager(CreatePrPreviewCommand)
        # replace the inner CreatePreviewCommand but keep its real Args class
        self.create_preview_command_mock = self.monkey_patch(CreatePreviewCommand)
        self.create_preview_command_mock.Args = CreatePreviewCommand.Args
        self.create_preview_command_mock.return_value = self.create_preview_command_mock
        self.create_preview_command_mock.register_callbacks.return_value = None
        self.create_preview_command_mock.execute.return_value = None
        # fake repo API: branch name derives from the PR id, fixed head hash
        self.git_repo_api_mock = self.create_mock(GitRepoApi)
        self.git_repo_api_mock.get_pull_request_branch.side_effect = lambda pr_id: f"BRANCH_OF_PR_{pr_id}"
        self.git_repo_api_mock.get_branch_head_hash.return_value = DUMMY_GIT_HASH
        self.git_repo_api_mock.add_pull_request_comment.return_value = None
        self.git_repo_api_factory_mock = self.monkey_patch(GitRepoApiFactory)
        self.git_repo_api_factory_mock.create.return_value = self.git_repo_api_mock
        # forbid any interaction not explicitly configured above
        self.seal_mocks()
def test_create_pr_preview(self):
args = CreatePrPreviewCommand.Args(
username="USERNAME",
password="PASSWORD",
git_user="GIT_USER",
git_email="GIT_EMAIL",
organisation="ORGA",
repository_name="REPO",
git_provider=GitProvider.GITHUB,
git_provider_url="URL",
pr_id=4711,
parent_id=42,
)
CreatePrPreviewCommand(args).execute()
callbacks = self.create_preview_command_mock.register_callbacks.call_args.kwargs
deployment_already_up_to_date_callback = callbacks["deployment_already_up_to_date_callback"]
deployment_updated_callback = callbacks["deployment_updated_callback"]
deployment_created_callback = callbacks["deployment_created_callback"]
assert self.mock_manager.method_calls == [
call.GitRepoApiFactory.create(args, "ORGA", "REPO"),
call.GitRepoApi.get_pull_request_branch(4711),
call.GitRepoApi.get_branch_head_hash("BRANCH_OF_PR_4711"),
call.CreatePreviewCommand(
CreatePreviewCommand.Args(
username="USERNAME",
password="PASSWORD",
git_user="GIT_USER",
git_email="GIT_EMAIL",
organisation="ORGA",
repository_name="REPO",
git_provider=GitProvider.GITHUB,
git_provider_url="URL",
git_hash=DUMMY_GIT_HASH,
preview_id="BRANCH_OF_PR_4711",
)
),
call.CreatePreviewCommand.register_callbacks(
deployment_already_up_to_date_callback=deployment_already_up_to_date_callback,
deployment_updated_callback=deployment_updated_callback,
deployment_created_callback=deployment_created_callback,
),
call.CreatePreviewCommand.execute(),
]
self.mock_manager.reset_mock()
deployment_already_up_to_date_callback(
"The version `5f65cfa04c66444fcb756d6d7f39304d1c18b199` has already been deployed. Access it here: https://my-route.baloise.com"
)
assert self.mock_manager.method_calls == [
call.GitRepoApi.add_pull_request_comment(
4711,
f"The version `{DUMMY_GIT_HASH}` has already been deployed. "
"Access it here: https://my-route.baloise.com",
42,
)
]
self.mock_manager.reset_mock()
deployment_updated_callback(
"Preview environment updated to version `5f65cfa04c66444fcb756d6d7f39304d1c18b199`. Access it here: https://my-route.baloise.com"
)
assert self.mock_manager.method_calls == [
call.GitRepoApi.add_pull_request_comment(
4711,
f"Preview environment updated to version `{DUMMY_GIT_HASH}`. "
"Access it here: https://my-route.baloise.com",
42,
)
]
self.mock_manager.reset_mock()
deployment_created_callback(
"New preview environment created for version `5f65cfa04c66444fcb756d6d7f39304d1c18b199`. Access it here: https://my-route.baloise.com"
)
assert self.mock_manager.method_calls == [
call.GitRepoApi.add_pull_request_comment(
4711,
f"New preview environment created for version `{DUMMY_GIT_HASH}`. "
"Access it here: https://my-route.baloise.com",
42,
)
]
|
16,138 | 8fbc3f0142863bcf8ffd81bb112b571657253b96 | #!/usr/bin/python
import sys
from random import randint
sys.stderr.write("Started mapper.\n");
subject = "god"
max_timestamp = 24*7
def main(argv):
    # Hadoop-streaming mapper (Python 2): pass through every "@"-prefixed
    # stdin line that mentions `subject`, appending a "#######" separator and
    # a random hour-of-week timestamp for downstream bucketing.
    # NOTE(review): randint is inclusive on both ends, so the timestamp range
    # is 0..max_timestamp = 0..168 (169 values for a 168-hour week) — confirm
    # whether randint(0, max_timestamp - 1) was intended.
    for line in sys.stdin:
        try:
            if line.startswith("@") and line.find(subject)!=-1:
                print line.rstrip()+"#######"+str(randint(0,max_timestamp))
        except Exception, e:
            # Best-effort mapper: skip any line that fails to process.
            continue
# Entry point: run the mapper over stdin when invoked directly.
if __name__ == "__main__":
    main(sys.argv)
16,139 | 80ab1900db9c0269a0dd2f4a87dead55ef3ba729 | #!/usr/bin/env python
"""
hopefully doesn't mess anything up too badly
Significant inspiration was taken from:
https://github.com/python-poetry/poetry/blob/c967a4a5abc6a0edd29c57eca307894f6e1c4f16/install-poetry.py
Steps:
- Ensure dependencies (git)
- Download repository
- Run dotdrop from the repo
"""
import os
import sys
from contextlib import asynccontextmanager
from pathlib import Path
from shutil import which
from subprocess import (
PIPE,
STDOUT,
CalledProcessError,
CompletedProcess,
Popen,
TimeoutExpired,
run,
)
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING, Dict, List, Optional, Union
from unittest.mock import patch
from urllib.request import urlopen
trio = None
if TYPE_CHECKING:
from io import BufferedWriter
from typing import AsyncIterator, List, Tuple, Union
import trio
from trio import MemoryReceiveChannel, MemorySendChannel, Process
WINDOWS = sys.platform.startswith(("win", "cygwin")) or (
sys.platform == "cli" and os.name == "nt"
)
UNIX = sys.platform.startswith(("linux", "freebsd", "openbsd"))
MACOS = sys.platform.startswith("darwin")
if WINDOWS:
import winreg
def win_get_user_env(name: str) -> Optional[str]:
    """Read a per-user environment variable straight from the Windows registry.

    Raises NotImplementedError on non-Windows platforms (winreg is only
    imported when WINDOWS is true).
    """
    if not WINDOWS:
        raise NotImplementedError(
            "can only update environment variables on Windows for now"
        )
    # HKCU\Environment holds the user-level variables; KEY_ALL_ACCESS matches
    # the access level the rest of this script requests elsewhere.
    with winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) as root, \
            winreg.OpenKey(root, "Environment", 0, winreg.KEY_ALL_ACCESS) as key:
        value, _value_type = winreg.QueryValueEx(key, name)
    return value
# pylint: disable=too-many-instance-attributes,too-many-arguments
class Expect:
    """
    Manages running a process as a subprocess, and communicating with it, while
    echoing its output
    """

    # From:
    # https://github.com/mawillcockson/dotfiles/blob/08e973f122b66ceadb009379dfed018a4b9e4eea/trio_watch_and_copy_demo.py
    # Which is inspired by:
    # https://github.com/python-trio/trio/blob/v0.19.0/trio/_subprocess.py#L587-L643
    def __init__(
        self,
        process: "Process",
        printer_send_channel: "MemorySendChannel[bytes]",
        printer_receive_channel: "MemoryReceiveChannel[bytes]",
        notifier_send_channel: "MemorySendChannel[bytes]",
        opened_notifier_receive_channel: "MemoryReceiveChannel[bytes]",
        print_buffer: "BufferedWriter" = sys.stdout.buffer, # type: ignore
    ):
        # The printer channel carries raw output chunks to printer(); the
        # notifier channel carries zero-byte "something was printed" pings
        # to expect().
        self.process = process
        self.printer_send_channel = printer_send_channel
        self.printer_receive_channel = printer_receive_channel
        self.notifier_send_channel = notifier_send_channel
        self.opened_notifier_receive_channel = opened_notifier_receive_channel
        self.print_buffer = print_buffer
        # Accumulates everything the process has written to stdout so far.
        self.stdout: bytes = b""
        self.response_sent = False

    # NOTE: may be able to be combined with copier_recorder()
    async def printer(
        self,
    ) -> None:
        "echoes the process' output, dropping data if necessary"
        if not self.process:
            raise Exception("missing process; was this called inside a with statement?")
        async with self.printer_receive_channel:
            async for chunk in self.printer_receive_channel:
                try:
                    self.print_buffer.write(chunk)
                except BlockingIOError:
                    # A full (non-blocking) output buffer means the chunk is
                    # dropped from the echo — it is still kept in self.stdout.
                    pass
                self.print_buffer.flush()

    async def copier_recorder(
        self,
    ) -> None:
        """
        records the process' stdout, and mirrors it to printer()
        also sends notifications to expect() every time the process prints
        something
        """
        if not self.process:
            raise Exception("missing process; was this called inside a with statement?")
        assert (
            self.process.stdout is not None
        ), "process must be opened with stdout=PIPE and stderr=STDOUT"
        async with self.process.stdout, self.printer_send_channel, self.notifier_send_channel:
            async for chunk in self.process.stdout:
                # print(f"seen chunk: '{chunk!r}'", flush=True) # debug
                self.stdout += chunk
                await self.printer_send_channel.send(chunk)
                # send notification
                # if it's full, that's fine: if expect() is run, it'll see
                # there's a "pending" notification and check stdout, then wait
                # for another notification
                try:
                    self.notifier_send_channel.send_nowait(b"")
                except trio.WouldBlock:
                    pass
                except trio.BrokenResourceError as err:
                    print(f"cause '{err.__cause__}'")
                    raise err

    async def expect(
        self,
        watch_for: bytes,
        respond_with: bytes,
    ) -> None:
        """
        called inside Expect.open_process()'s with block to watch for, and
        respond to, the process' output
        """
        if not self.process:
            raise Exception("missing process; was this called inside a with statement?")
        assert self.process.stdin is not None, "process must be opened with stdin=PIPE"
        # NOTE: This could be improved to show which responses were sent, and which
        # weren't
        self.response_sent = False
        # Clone the notifier so several expect() calls can listen in parallel.
        async with self.opened_notifier_receive_channel.clone() as notifier_receive_channel:
            # print("expect --> opened notifier channel", flush=True) # debug
            async for _ in notifier_receive_channel:
                # print("expect --> received chunk notification", flush=True) # debug
                if not self.response_sent and watch_for in self.stdout:
                    # print("expect --> sending response...", flush=True) # debug
                    await self.process.stdin.send_all(respond_with)
                    self.response_sent = True
                    # print("expect --> response sent", flush=True) # debug

    @classmethod
    @asynccontextmanager
    async def open_process(
        # NOTE(review): mutable default argument ({}); it is only read here
        # (never mutated), so it works, but a None sentinel would be safer.
        cls, args: "Union[str, List[str]]", env_additions: Dict[str, str] = {}
    ) -> "AsyncIterator[Expect]":
        """
        entry point for using Expect()
        opens the process, opens a nursery, and starts the copier and printer
        this waits until the process is finished, so wrapping in a
        trio.move_on_after() is good to use as a timeout
        """
        # Printer channel buffers one chunk; notifier channel is unbuffered
        # (send_nowait simply drops pings when nobody is waiting).
        printer_channels: (
            "Tuple[MemorySendChannel[bytes], MemoryReceiveChannel[bytes]]"
        ) = trio.open_memory_channel(1)
        printer_send_channel, printer_receive_channel = printer_channels
        notifier_channels: (
            "Tuple[MemorySendChannel[bytes], MemoryReceiveChannel[bytes]]"
        ) = trio.open_memory_channel(0)
        notifier_send_channel, notifier_receive_channel = notifier_channels
        async with notifier_receive_channel:
            # The subprocess inherits os.environ plus env_additions.
            with patch.dict("os.environ", values=env_additions) as patched_env:
                async with await trio.open_process(
                    args, stdin=PIPE, stdout=PIPE, stderr=STDOUT, env=patched_env
                ) as process:
                    async with trio.open_nursery() as nursery:
                        expect = cls(
                            process=process,
                            printer_send_channel=printer_send_channel,
                            printer_receive_channel=printer_receive_channel,
                            notifier_send_channel=notifier_send_channel,
                            opened_notifier_receive_channel=notifier_receive_channel,
                        )
                        nursery.start_soon(expect.copier_recorder)
                        nursery.start_soon(expect.printer)
                        yield expect
                        # print("waiting for process") # debug
                        await expect.process.wait()
class Bootstrapper:
    """Stage-1 bootstrapper: gets pip, virtualenv, and trio working using
    only the standard library, then hands off to Installer (see main()).

    NOTE(review): these class-level dict/path attributes act as defaults;
    UPDATED_ENVIRONMENT in particular is a class-level mutable dict shared
    by all instances unless reassigned — confirm single-instance use.
    """
    UPDATED_ENVIRONMENT: Dict[str, str] = {}
    SHELL: Optional[Path] = None
    _SCOOP_INSTALLED = False
    _PIP_INSTALLED = False
    TEMP_DIR: Optional[Path] = None
    PIP_DIR: Optional[Path] = None
    VIRTUALENV_INSTALL_DIR: Optional[Path] = None
    VENV_DIR: Optional[Path] = None
    CACHE_DIR: Optional[Path] = None
    PYTHON_EXECUTABLE: str = sys.executable

    def __init__(self, temp_dir: Path) -> None:
        # Locate PowerShell so shell() can run commands through it.
        if WINDOWS:
            powershell_str = which("powershell")
            powershell_path = Path(powershell_str).resolve()
            if not (powershell_str and powershell_path.is_file()):
                raise FileNotFoundError(
                    f"powershell not found at '{powershell_str}' or '{powershell_path}'"
                )
            self.SHELL = powershell_path
        self.REPOSITORY_DIR = Path("~/projects/dotfiles/").expanduser().resolve()
        self.TEMP_DIR = temp_dir
        assert self.TEMP_DIR.is_dir()
        # Scratch directories for the bootstrap artifacts.
        self.PIP_DIR = self.TEMP_DIR / "pip"
        self.PIP_DIR.mkdir(exist_ok=True)
        self.VIRTUALENV_INSTALL_DIR = self.TEMP_DIR / "virtualenv"
        self.VIRTUALENV_INSTALL_DIR.mkdir(exist_ok=True)
        self.VENV_DIR = self.TEMP_DIR / "venv"
        self.VENV_DIR.mkdir(exist_ok=True)
        self.CACHE_DIR = self.TEMP_DIR / "cache"
        self.CACHE_DIR.mkdir(exist_ok=True)
        # Probe whether pip is already importable by this interpreter.
        self._PIP_INSTALLED = (
            self.cmd([self.PYTHON_EXECUTABLE, "-m", "pip", "--version"]).returncode == 0
        )
        # NOTE(review): this line clobbers the probe above and forces a pip
        # bootstrap every run — looks like a debug leftover; confirm intent.
        self._PIP_INSTALLED = False

    def cmd(self, args: List[str], stdin: str = "") -> CompletedProcess:
        """Run `args` (no shell), echo its output, and return the result.

        NOTE(review): `stdin=(stdin or PIPE)` passes a *str* to run()'s
        `stdin` parameter when provided; subprocess expects a file object /
        fd there — `input=stdin` is probably what was meant. Confirm.
        """
        print(f"running -> {args!r}")
        with patch.dict("os.environ", values=self.UPDATED_ENVIRONMENT) as patched_env:
            result = run(
                args,
                stdin=(stdin or PIPE),
                stderr=STDOUT,
                stdout=PIPE,
                check=False,
                env=patched_env,
            )
        print(result.stdout.decode() or "")
        return result

    def shell(self, code: str) -> CompletedProcess:
        """Run `code` through the configured shell (PowerShell on Windows),
        echoing stdout/stderr, with UPDATED_ENVIRONMENT applied if any."""
        print(f'shell -> "{code}"')
        if self.UPDATED_ENVIRONMENT:
            with patch.dict(
                "os.environ", values=self.UPDATED_ENVIRONMENT
            ) as patched_env:
                result = run(
                    code,
                    text=True,
                    capture_output=True,
                    check=False,
                    shell=True,
                    executable=str(self.SHELL) or None,
                    env=patched_env,
                )
        else:
            result = run(
                code,
                text=True,
                capture_output=True,
                check=False,
                shell=True,
                executable=str(self.SHELL) or None,
            )
        if result.stdout:
            print(result.stdout)
        if result.stderr:
            print(result.stderr)
        return result

    def main(self) -> None:
        """Create a fresh venv, install trio into the bootstrap path, then
        run Installer.main() under trio."""
        try:
            import virtualenv
        except ImportError:
            self.bootstrap_virtualenv()
            import virtualenv # isort:skip
        session = virtualenv.cli_run([str(self.VENV_DIR), "--clear", "--download"])
        if WINDOWS:
            venv_python = self.VENV_DIR / "Scripts" / "python.exe"
            venv_modules = self.VENV_DIR / "Lib" / "site-packages"
        else:
            raise NotImplementedError("only Windows supported right now")
        if not (venv_python and venv_python.is_file()):
            raise Exception(
                f"could not find a virtual environment python at '{venv_python}'"
            )
        assert venv_modules.is_dir(), f"missing directory '{venv_modules}'"
        # From here on, subprocesses use the venv's interpreter, and this
        # process can import packages installed into the venv.
        self.PYTHON_EXECUTABLE = str(venv_python)
        sys.path.insert(0, str(venv_modules))
        # Install trio
        self.pip(["install", "trio"])
        import trio as trio_module # isort:skip
        # Publish trio to the module-level placeholder declared at the top.
        global trio
        trio = trio_module
        installer = Installer(
            temp_dir=self.TEMP_DIR,
            repository_dir=self.REPOSITORY_DIR,
            shell=self.SHELL,
            venv_dir=self.VENV_DIR,
            cache_dir=self.CACHE_DIR,
            python_executable=self.PYTHON_EXECUTABLE,
            updated_environment=self.UPDATED_ENVIRONMENT,
        )
        trio.run(installer.main)

    def bootstrap_virtualenv(self) -> None:
        """pip-install virtualenv into a scratch dir and put it on sys.path."""
        if not self._PIP_INSTALLED:
            self.bootstrap_pip()
        self.VIRTUALENV_INSTALL_DIR.mkdir(exist_ok=True)
        self.pip(
            ["install", "virtualenv", "--target", str(self.VIRTUALENV_INSTALL_DIR)]
        )
        sys.path.insert(0, str(self.VIRTUALENV_INSTALL_DIR))
        import virtualenv # isort:skip

    def bootstrap_pip(self) -> None:
        """Download get-pip.py and install pip into PIP_DIR."""
        if self._PIP_INSTALLED:
            return
        # NOTE: On Windows, the SSL certificates for some reason aren't
        # available until a web request is made that absolutely requires
        # them
        # If it's a truly fresh install, then any urlopen() call to an
        # https:// url will fail with an SSL context error:
        # >> ssl.SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate
        self.shell("iwr -useb https://bootstrap.pypa.io")
        # https://pip.pypa.io/en/stable/installation/#get-pip-py
        get_pip_file = self.CACHE_DIR / "get_pip.py"
        get_pip_file.touch()
        with get_pip_file.open(mode="wb") as file:
            with urlopen("https://bootstrap.pypa.io/get-pip.py") as request:
                while request.peek(1):
                    file.write(request.read(8192))
        # NOTE: pip forces the --user flag on Microsoft Store Pythons:
        # https://stackoverflow.com/q/63783587
        self.cmd(
            [
                self.PYTHON_EXECUTABLE,
                str(get_pip_file),
                "--target",
                str(self.PIP_DIR),
                "--no-user",
            ]
        )
        sys.path.insert(0, str(self.PIP_DIR))
        # Causes Python to find the downloaded pip module
        self.UPDATED_ENVIRONMENT["PYTHONPATH"] = str(self.PIP_DIR)
        self._PIP_INSTALLED = True

    def pip(self, args: List[str]) -> None:
        """Run `python -m pip <args>` with the bootstrapped pip."""
        if not self._PIP_INSTALLED:
            self.bootstrap_pip()
        # NOTE: pip forces the --user flag on Microsoft Store Pythons:
        # https://stackoverflow.com/q/63783587
        self.cmd([self.PYTHON_EXECUTABLE, "-m", "pip", *args, "--no-user"])
class Installer:
    """Stage-2 installer: uses trio + Expect to install scoop/git/python on
    Windows, clone the dotfiles repository, and (eventually) run dotdrop.

    NOTE(review): UPDATED_ENVIRONMENT is a class-level mutable dict that
    __init__ updates in place — it is shared across all Installer instances.
    """
    SHELL: Optional[Path] = None
    PYTHON_EXECUTABLE: str = sys.executable
    UPDATED_ENVIRONMENT: Dict[str, str] = {}
    _SCOOP_INSTALLED: bool = False
    # How to render the command when echoing it, keyed by process_type.
    PROCESS_TYPES: Dict[str, str] = {
        "cmd": "{0!r}",
        "shell": '"{0}"',
        "pip": "{0}",
        "scoop": "{0}",
    }
    REPO_URL = "https://github.com/mawillcockson/dotfiles.git"

    def __init__(
        self,
        temp_dir: Path,
        repository_dir: Path,
        shell: Optional[Path] = None,
        venv_dir: Optional[Path] = None,
        cache_dir: Optional[Path] = None,
        python_executable: str = sys.executable,
        # NOTE(review): mutable default argument ({}); safe only because it
        # is read, not mutated — a None sentinel would be safer.
        updated_environment: Dict[str, str] = {},
    ) -> None:
        # Resolve PowerShell unless the caller already located it.
        if WINDOWS:
            if not shell:
                powershell_str = which("powershell")
                powershell_path = Path(powershell_str).resolve()
                if not (powershell_str and powershell_path.is_file()):
                    raise FileNotFoundError(
                        f"powershell not found at '{powershell_str}' or '{powershell_path}'"
                    )
                self.SHELL = powershell_path
            else:
                self.SHELL = shell
        self.REPOSITORY_DIR = repository_dir
        self.TEMP_DIR = temp_dir
        assert self.TEMP_DIR.is_dir()
        self.VENV_DIR = venv_dir or (self.TEMP_DIR / "venv")
        self.VENV_DIR.mkdir(exist_ok=True)
        self.CACHE_DIR = cache_dir or (self.TEMP_DIR / "cache")
        self.CACHE_DIR.mkdir(exist_ok=True)
        self.PYTHON_EXECUTABLE = python_executable
        self.UPDATED_ENVIRONMENT.update(updated_environment)

    async def cmd(
        self,
        args: List[str],
        check: bool = True,
        process_type: str = "cmd",
    ) -> "Expect":
        """Run `args` under Expect (echoed + recorded) and return the Expect.

        NOTE(review): CalledProcessError's real signature is
        (returncode, cmd, ...); raising it with just a message below would
        itself raise a TypeError — confirm and fix the call site.
        """
        args_str = self.PROCESS_TYPES.get(
            process_type, self.PROCESS_TYPES["cmd"]
        ).format(args)
        cmd_str = f"{process_type} -> {args_str}"
        print(cmd_str)
        async with Expect.open_process(
            args,
            env_additions=self.UPDATED_ENVIRONMENT,
        ) as expect:
            pass
        if check and expect.process.returncode != 0:
            raise CalledProcessError("returncode is not 0")
        return expect

    async def pip(self, args: List[str]) -> "Expect":
        # --no-user: pip forces --user on Microsoft Store Pythons otherwise.
        return await self.cmd(
            [self.PYTHON_EXECUTABLE, "-m", "pip", *args, "--no-user"],
            process_type="pip",
        )

    async def shell(
        self, code: str, check: bool = True, process_type: str = "shell"
    ) -> "Expect":
        # NOTE: "{shell} -c {script}" works with powershell, sh (bash, dash, etc), not sure about other platforms
        return await self.cmd(
            [str(self.SHELL), "-c", code], check=check, process_type=process_type
        )

    async def scoop(self, args: str) -> "Expect":
        """Run a scoop subcommand; only valid once scoop is installed."""
        if not (WINDOWS and self._SCOOP_INSTALLED):
            raise Exception(
                "not running scoop when not on Windows or scoop not installed"
            )
        return await self.shell(f"scoop {args}", check=True, process_type="scoop")

    async def install_scoop(self) -> None:
        """Install scoop (if needed) plus required apps and buckets."""
        if not WINDOWS:
            raise Exception("not installing scoop when not on Windows")
        # Check if scoop is already installed
        self.UPDATED_ENVIRONMENT["PATH"] = win_get_user_env("PATH")
        expect = await self.shell("scoop which scoop", check=False)
        self._SCOOP_INSTALLED = (
            "is not recognized as the name of" not in expect.stdout.decode()
            and expect.process.returncode == 0
        )
        if not self._SCOOP_INSTALLED:
            # Set PowerShell's Execution Policy
            args = [
                str(self.SHELL),
                "-c",
                "& {Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser}",
            ]
            print(f"running -> {args!r}")
            # Outer timeout bounds the whole prompt interaction; the inner
            # one bounds waiting for the confirmation prompt itself.
            with trio.move_on_after(7):
                async with Expect.open_process(
                    args, env_additions=self.UPDATED_ENVIRONMENT
                ) as expect:
                    with trio.move_on_after(2):
                        await expect.expect(
                            watch_for=b'(default is "N"):',
                            respond_with=b"A",
                        )
            # NOTE: don't have to check if the response was sent, because
            # sometimes the execution policy is set without ever sending a
            # response (i.e. if the execution policy was already set).
            # Instead, just check if the policy is set correctly.
            result = await self.cmd(
                [str(self.SHELL), "-c", "& {Get-ExecutionPolicy}"], check=False
            )
            if not "RemoteSigned" in result.stdout.decode():
                raise Exception("could not set PowerShell Execution Policy")
            # Install Scoop
            result = await self.cmd(
                [str(self.SHELL), "-c", "& {iwr -useb https://get.scoop.sh | iex}"]
            )
            stdout = result.stdout.decode().lower()
            if not (
                "scoop was installed successfully!" in stdout
                or "scoop is already installed" in stdout
            ):
                raise Exception("scoop was not installed")
            # Pick up the PATH entries the scoop installer just added.
            self.UPDATED_ENVIRONMENT["PATH"] = win_get_user_env("PATH")
            self._SCOOP_INSTALLED = True
        installed_apps = (await self.scoop("list")).stdout.decode()
        for requirement in ["aria2", "git", "python"]:
            if requirement in installed_apps:
                continue
            await self.scoop(f"install {requirement}")
        wanted_buckets = ["extras"]
        added_buckets = (await self.scoop("bucket list")).stdout.decode()
        for bucket in wanted_buckets:
            if bucket in added_buckets:
                continue
            await self.scoop(f"bucket add {bucket}")

    async def main(self) -> None:
        """Install dependencies, clone/update the dotfiles repo, run dotdrop."""
        # Install rest of dependencies
        if MACOS or UNIX:
            raise NotImplementedError("only Windows supported currently")
        if WINDOWS:
            # implicitly installs git as well
            await self.install_scoop()
        for dependency_check in (["git", "--version"], ["python", "--version"]):
            try:
                await self.cmd(dependency_check, check=True)
            except CalledProcessError as err:
                raise Exception(
                    f"dependency '{dependency_check!r}' was not found"
                ) from err
        ## Clone dotfiles repository
        self.REPOSITORY_DIR.mkdir(parents=True, exist_ok=True)
        # Check if there's an existing repository, and if that repository is clean
        # NOTE::FUTURE dulwich does not support submodules
        # https://github.com/dulwich/dulwich/issues/506
        repo_status = await self.cmd(["git", "-C", str(self.REPOSITORY_DIR), "status", "--porcelain"], check=False)
        if "not a git repository" in repo_status.stdout.decode().lower():
            await self.cmd(
                [
                    "git",
                    "clone",
                    "--recurse-submodules",
                    self.REPO_URL,
                    str(self.REPOSITORY_DIR),
                ]
            )
        # Three scenarios:
        # - Repo exists and is completely clean and up to date
        # - Repo exists and there are uncommitted changes
        # - Repo exists and there are un-pushed changes
        #
        # The last one can be helped with dulwich if issue 506 is resolved, or
        # complex git commands, like:
        # https://stackoverflow.com/a/6133968
        #
        # For now I'm saying "deal with it manually"
        # - Repo exists and there are changes
        # NOTE: optimistically try to pull in new upstream changes; could fail in numerous ways
        await self.cmd(["git", "-C", str(self.REPOSITORY_DIR), "pull", "--ff-only"])
        # Run dotdrop
        raise NotImplementedError("setup dotfiles")
if __name__ == "__main__":
    # Stage 1 (stdlib only): bootstrap pip -> virtualenv -> trio inside a
    # throw-away temp directory; Bootstrapper.main() then hands control to
    # Installer via trio.run().
    with TemporaryDirectory() as temp_dir:
        temp_dir_path = Path(temp_dir).resolve(strict=True)
        bootstrapper = Bootstrapper(temp_dir_path)
        bootstrapper.main()
        # import trio # isort:skip
        # installer = Installer(
        #     temp_dir=bootstrapper.TEMP_DIR,
        #     repository_dir=bootstrapper.REPOSITORY_DIR,
        #     shell=bootstrapper.SHELL,
        #     venv_dir=bootstrapper.VENV_DIR,
        #     cache_dir=bootstrapper.CACHE_DIR,
        #     python_executable=bootstrapper.PYTHON_EXECUTABLE,
        #     updated_environment=bootstrapper.UPDATED_ENVIRONMENT,
        # )
        # trio.run(installer.main)
|
16,140 | 727a542bb1e407a1e5b3405a60a764b4e38fab8d | import os
import torch
import numpy as np
import wandb
from ..train import train
from ..evaluate import evaluate
from chemprop.data import MoleculeDataLoader
from chemprop.utils import save_checkpoint
from chemprop.bayes.swag import SWAG
from chemprop.bayes_utils import scheduler_const
from torch.optim.lr_scheduler import OneCycleLR
def train_swag_pdts(
    model_core,
    train_data_loader,
    loss_func,
    scaler,
    features_scaler,
    args,
    save_dir,
    batch_no):
    """Fit a SWAG posterior around `model_core` and checkpoint the result.

    The core network is trained with SGD for `args.epochs_swag` epochs; once
    past the burn-in period (and while the epoch's average loss is below
    `args.loss_threshold`), each epoch's weights are folded into the SWAG
    moment estimates.  The `log_noise` parameter is frozen throughout.

    Returns the populated SWAG wrapper; also writes it to
    `<save_dir>/model_<batch_no>.pt`.
    """
    # SWAG tracks a diagonal variance unless a covariance matrix is requested.
    swag_model = SWAG(
        model_core,
        args,
        not args.cov_mat,  # no_cov_mat
        args.max_num_models,
        var_clamp=1e-30,
    )

    # SGD over encoder + ffn; the observation-noise parameter gets its own,
    # much smaller learning rate and no weight decay.
    optimizer = torch.optim.SGD(
        [
            {'params': model_core.encoder.parameters()},
            {'params': model_core.ffn.parameters()},
            {'params': model_core.log_noise, 'lr': args.lr_swag / 5 / 25, 'weight_decay': 0},
        ],
        lr=args.lr_swag / 25,
        weight_decay=args.weight_decay_swag,
        momentum=args.momentum_swag,
    )

    # First acquisition batch: cosine one-cycle warm-up / anneal.
    # Later batches: constant learning rate.
    if batch_no == 0:
        scheduler = OneCycleLR(
            optimizer,
            max_lr=[args.lr_swag, args.lr_swag, args.lr_swag / 5],
            epochs=args.epochs_swag,
            steps_per_epoch=-(-args.train_data_size // args.batch_size),  # ceil division
            pct_start=5 / args.epochs_swag,
            anneal_strategy='cos',
            cycle_momentum=False,
            div_factor=25.0,
            final_div_factor=1 / 25,
        )
    else:
        scheduler = scheduler_const([args.lr_swag])

    # Freeze the heteroscedastic noise term for the SWAG phase.
    for param_name, param in model_core.named_parameters():
        if param_name == 'log_noise':
            param.requires_grad = False

    print("----------SWAG training----------")
    step = 0
    for epoch in range(args.epochs_swag):
        print(f'SWAG epoch {epoch}')
        mean_loss, step = train(
            model=model_core,
            data_loader=train_data_loader,
            loss_func=loss_func,
            optimizer=optimizer,
            scheduler=scheduler,
            args=args,
            n_iter=step,
        )
        # Only collect weight snapshots once training has settled.
        if epoch >= args.burnin_swag and mean_loss < args.loss_threshold:
            swag_model.collect_model(model_core)
            print('***collection***')

    # Persist the SWAG model for this acquisition batch.
    save_checkpoint(os.path.join(save_dir, f'model_{batch_no}.pt'), swag_model, scaler, features_scaler, args)
    return swag_model
|
16,141 | 0492f73c068f883596e4efdcd29ff77b5f3e5a5d | n = int(input())
def count_scalene_triangles(sides):
    """Count index triples (i < j < k) whose three values can form a
    triangle with pairwise-distinct side lengths.

    A sorted triple (a, b, c) is a valid triangle iff a + b > c; since
    a <= b <= c, requiring a != b and b != c guarantees all three sides
    differ.  (The original chained `t[0]!=t[1]!=t[2]` only checked adjacent
    pairs and relied implicitly on the sort; this spells the test out.)
    """
    total = 0
    length = len(sides)
    for i in range(length):
        for j in range(i + 1, length):
            for k in range(j + 1, length):
                a, b, c = sorted((sides[i], sides[j], sides[k]))
                if a + b > c and a != b and b != c:
                    total += 1
    return total


if __name__ == "__main__":
    # `n` (read above) gives the expected element count, but len(sides) is
    # used directly so short input cannot cause an IndexError.  The unused
    # de-duplication set from the original was dead code and is removed.
    arr = list(map(int, input().split()))
    print(count_scalene_triangles(arr))
16,142 | b95e80915a30ca78df088fbf928dbf9677bd8793 | import cv2
import time
from imutils.video import VideoStream
import imutils
#fuction that should be run as a thread and that interrupts when motion is detected
def _frame_has_motion(referenceFrame, grayFrame, threshold, minArea):
    """Return True if grayFrame differs from referenceFrame by at least one
    contour with area >= minArea after binary-thresholding at `threshold`."""
    frameDiff = cv2.absdiff(referenceFrame, grayFrame)
    # Remove per-pixel differences below the threshold.
    frameThresh = cv2.threshold(frameDiff, threshold, 255, cv2.THRESH_BINARY)[1]
    # Dilate to close small holes between neighbouring moving pixels.
    frameThresh = cv2.dilate(frameThresh, None, iterations=2)
    frameContours = cv2.findContours(frameThresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    # NOTE(review): this index juggling matches the OpenCV 2 vs 3 return
    # shapes; OpenCV 4 reverted to a 2-tuple — imutils.grab_contours handles
    # every version if an upgrade is planned.
    frameContours = frameContours[0] if imutils.is_cv2() else frameContours[1]
    return any(cv2.contourArea(contour) >= minArea for contour in frameContours)


# function that should be run as a thread and that interrupts when motion is detected
def MotionDetectionThread(thresholdBackground, minAreaToBeDetectedBackground,
                          thresholdMotion, minAreaToBeDetectedMotion, frameUpdateFreq):
    """Poll the default webcam forever, reporting sustained motion (vs a
    periodically-refreshed reference frame) and objects left in the scene
    (vs the first frame captured).

    thresholdBackground / minAreaToBeDetectedBackground: pixel threshold and
        minimum contour area for the "something was left" comparison.
    thresholdMotion / minAreaToBeDetectedMotion: same, for motion detection.
    frameUpdateFreq: number of loop iterations between refreshes of the
        recent-motion reference frame.
    """
    videoStr = VideoStream(src=0).start()
    time.sleep(3)  # let the camera warm up before grabbing frames
    print("Created video stream")
    firstFrame = None             # background reference, captured once
    mostRecentMotionFrame = None  # reference refreshed every frameUpdateFreq iterations
    grayFrame = None
    frameUpdateCounter = 0
    consecutiveMotionDetected = 0
    consecutiveNoMotion = 0
    # Loop forever trying to detect motion
    while True:
        # BUG FIX: the counter was previously incremented twice per loop
        # (here and again at the bottom), so it only ever held odd values at
        # this check and `== frameUpdateFreq` never fired for even
        # frequencies — the reference frame was never refreshed.  Increment
        # once and use >= for safety.
        frameUpdateCounter += 1
        if frameUpdateCounter >= frameUpdateFreq:
            mostRecentMotionFrame = grayFrame
            frameUpdateCounter = 0
        currentFrame = videoStr.read()
        if currentFrame is None:
            print("Received empty frame")
            break
        # Resize, convert to gray scale and blur to smooth out and ease calculations
        currentFrame = imutils.resize(currentFrame, width=500)
        grayFrame = cv2.cvtColor(currentFrame, cv2.COLOR_BGR2GRAY)
        grayFrame = cv2.GaussianBlur(grayFrame, (21, 21), 0)
        if firstFrame is None:
            # First iteration: both references start as the current frame.
            firstFrame = grayFrame
            mostRecentMotionFrame = grayFrame
            continue
        # Motion = change vs the recently-updated reference frame.
        hasDiffToLast = _frame_has_motion(
            mostRecentMotionFrame, grayFrame, thresholdMotion, minAreaToBeDetectedMotion)
        # "Left behind" = change vs the very first frame.
        hasDiffToFirst = _frame_has_motion(
            firstFrame, grayFrame, thresholdBackground, minAreaToBeDetectedBackground)
        if hasDiffToLast:
            consecutiveMotionDetected += 1
            consecutiveNoMotion = 0
            if consecutiveMotionDetected > 40:
                print("MOTION DETECTED")
                consecutiveMotionDetected = 0
        else:
            consecutiveMotionDetected = 0
            consecutiveNoMotion += 1
            if consecutiveNoMotion > 40:
                print("NO MOTION")
                consecutiveNoMotion = 0
                if hasDiffToFirst:
                    print("SOMETHING WAS LEFT")
    videoStr.stop()


if __name__ == "__main__":
    # Guarded so importing this module no longer starts the camera loop.
    MotionDetectionThread(100, 500, 10, 50, 100)
|
16,143 | 4352bcf96aa60b4631a75d398f9f59bbb8ce1463 | import os
# CGI upload handler fragment (Python 2): echoes the uploaded file's name.
# NOTE(review): `_myfile` is not defined in this file — presumably a cgi
# FieldStorage field bound by the hosting framework; confirm before reuse.
print "uploading file %s" %_myfile.filename
# The copy-to-disk step below is currently disabled (kept as a bare string
# literal, so it is parsed but never executed).
"""
f = _myfile.file # file-like object
dest_name = os.path.basename(_myfile.filename)
out = open(dest_name,'wb')
import shutil
shutil.copyfileobj(f,out)
out.close()
"""
# Emit a back link for the browser.
print ' <a href="mianshi.pih"> Back to Index</a>'
|
16,144 | 21755abec5bd088e6292c2758cc13dfb87fa1235 | from flask import Flask, jsonify
import mysql.connector as mysql
service = Flask (__name__)
IS_ALIVE = "yes"
DEBUG = True
PAGE_LIMIT = 4
MYSQL_SERVER = "database"
MYSQL_USER = "root"
MYSQL_PASS = "admin"
MYSQL_BANCO = "vencendochefes"
def getConnectionDb():
    """Open and return a new connection to the application's MySQL database,
    using the module-level connection constants."""
    return mysql.connect(
        host=MYSQL_SERVER,
        user=MYSQL_USER,
        password=MYSQL_PASS,
        database=MYSQL_BANCO,
    )
def generateComments(commentResult):
    """Project a raw comment row (dict) onto the public comment payload,
    keeping only the _id, name, and comment fields."""
    wanted_fields = ("_id", "name", "comment")
    return {field: commentResult[field] for field in wanted_fields}
@service.route("/commentById/<int:id>")
def getCommentById(id):
    """Return all comments attached to the boss with the given id as JSON.

    Fixes over the original: the id is passed as a bound query parameter
    instead of being concatenated into the SQL text (defense in depth, even
    though the <int:id> route converter already restricts it), and the
    connection is closed when the request finishes instead of leaking.
    """
    connection = getConnectionDb()
    try:
        cursor = connection.cursor(dictionary=True)
        cursor.execute(
            "select comments.id as _id, comments.name as name, "
            "comments.comment as comment "
            "from comments join boss on comments.boss = boss.id "
            "where boss.id = %s",
            (id,),
        )
        comments = [generateComments(row) for row in cursor.fetchall()]
    finally:
        connection.close()
    return jsonify(comments)
if __name__ == "__main__":
    # Bind on all interfaces so the service is reachable from other
    # containers (the DB host "database" suggests docker-compose).
    # NOTE(review): port is passed as the string '5002' — Flask accepts it,
    # but an int is the conventional type.
    service.run(
        host='0.0.0.0',
        port='5002',
        debug=DEBUG
    )
|
16,145 | 48f3cd4f8e0db496367e9f1d91e8bd72f33e86b2 | import os
import h5py
import keras
from keras.models import Sequential, Model
from keras.layers import Dense, Flatten, Dropout, Input, merge, Activation
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.layers.recurrent import SimpleRNN, LSTM
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.wrappers import TimeDistributed
from keras.layers.normalization import BatchNormalization
from keras.optimizers import SGD
import keras.backend as K
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
import numpy as np
import keras_utils
from keras import backend as K
from keras.engine import Layer
from keras.layers.core import Lambda
#from keras.layers.core import Merge
from keras.layers import Merge
def splittensor(axis=1, ratio_split=1, id_split=0, **kwargs):
    """Build a Lambda layer that slices a 4-D tensor along `axis`.

    The axis is divided into `ratio_split` equal chunks and the layer
    returns chunk number `id_split` — used to route activations into
    AlexNet's two grouped-convolution towers.
    """
    def take_slice(X):
        width = X.shape[axis] // ratio_split
        lo = id_split * width
        hi = (id_split + 1) * width
        if axis == 0:
            return X[lo:hi, :, :, :]
        if axis == 1:
            return X[:, lo:hi, :, :]
        if axis == 2:
            return X[:, :, lo:hi, :]
        if axis == 3:
            return X[:, :, :, lo:hi]
        raise ValueError('This axis is not possible')

    def sliced_shape(input_shape):
        # Only the split axis shrinks; everything else passes through.
        shape = list(input_shape)
        shape[axis] = shape[axis] // ratio_split
        return tuple(shape)

    return Lambda(take_slice, output_shape=lambda input_shape: sliced_shape(input_shape), **kwargs)
def crosschannelnormalization(alpha=1e-4, k=2, beta=0.75, n=5, **kwargs):
    """
    This is the function used for cross channel normalization in the original
    Alexnet
    """
    # Local Response Normalization across channels:
    #   out = X / (k + alpha * sum over an n-channel window of X^2) ** beta
    # The [:, i:i + ch, :, :] indexing below shows this expects
    # channels-first tensors (batch, channels, rows, cols).
    def f(X):
        b, ch, r, c = X.shape
        half = n // 2
        square = K.square(X)
        # Pad the channel axis by `half` on each side: move channels last,
        # apply 2-D padding on that axis, then move them back.
        extra_channels = K.spatial_2d_padding(K.permute_dimensions(square, (0, 2, 3, 1))
                                              , (0, half))
        extra_channels = K.permute_dimensions(extra_channels, (0, 3, 1, 2))
        scale = k
        # Accumulate the squared activations over the n-channel sliding window.
        for i in range(n):
            scale += alpha * extra_channels[:, i:i + ch, :, :]
        scale = scale ** beta
        return X / scale
    # Normalization never changes the tensor shape.
    return Lambda(f, output_shape=lambda input_shape: input_shape, **kwargs)
def build_alexnet_orig(weights_path=None, heatmap=False, orig=False, mod=0):
    """Build the original (grouped, LRN-normalized) AlexNet in old Keras 1 API.

    weights_path: optional HDF5 weights to load (layer names below must
        match the weight file, so do not rename layers).
    heatmap: fully-convolutional variant accepting any spatial size.
    orig: keep the original 1000-way output head.
    mod: when 1 (and not orig), insert an extra 1024-unit dense layer
        before the 3-way output head.
    NOTE(review): Softmax4D (heatmap branch) is not among this file's
    visible imports — confirm it is defined elsewhere before using
    heatmap=True.
    """
    if heatmap:
        inputs = Input(shape=(3, None, None))
    else:
        inputs = Input(shape=(3, 227, 227))
    conv_1 = Convolution2D(96, 11, 11, subsample=(4, 4), activation='relu', name='conv_1')(inputs)
    conv_2 = MaxPooling2D((3, 3), strides=(2, 2))(conv_1)
    conv_2 = crosschannelnormalization(name='convpool_1')(conv_2)
    conv_2 = ZeroPadding2D((2, 2))(conv_2)
    # Grouped convolution: the channel axis is split in two and each half
    # gets its own conv tower, matching the original two-GPU AlexNet.
    #conv_2 = Convolution2D(256, 5, 5, activation='relu', name='conv_2')(conv_2)
    conv_2 = merge([
        Convolution2D(128, 5, 5, activation='relu', name='conv_2_' + str(i + 1))(
            splittensor(ratio_split=2, id_split=i)(conv_2)
        ) for i in range(2)], mode='concat', concat_axis=1, name='conv_2')
    conv_3 = MaxPooling2D((3, 3), strides=(2, 2))(conv_2)
    conv_3 = crosschannelnormalization()(conv_3)
    conv_3 = ZeroPadding2D((1, 1))(conv_3)
    conv_3 = Convolution2D(384, 3, 3, activation='relu', name='conv_3')(conv_3)
    conv_4 = ZeroPadding2D((1, 1))(conv_3)
    #conv_4 = Convolution2D(384, 3, 3, activation='relu', name='conv_4')(conv_4)
    conv_4 = merge([
        Convolution2D(192, 3, 3, activation='relu', name='conv_4_' + str(i + 1))(
            splittensor(ratio_split=2, id_split=i)(conv_4)
        ) for i in range(2)], mode='concat', concat_axis=1, name='conv_4')
    conv_5 = ZeroPadding2D((1, 1))(conv_4)
    #conv_5 = Convolution2D(256, 3, 3, activation='relu', name='conv_5')(conv_5)
    conv_5 = merge([
        Convolution2D(128, 3, 3, activation='relu', name='conv_5_' + str(i + 1))(
            splittensor(ratio_split=2, id_split=i)(conv_5)
        ) for i in range(2)], mode='concat', concat_axis=1, name='conv_5')
    dense_1 = MaxPooling2D((3, 3), strides=(2, 2), name='convpool_5')(conv_5)
    if heatmap:
        # Fully-convolutional head: dense layers become 1x1/6x6 convolutions.
        dense_1 = Convolution2D(4096, 6, 6, activation='relu', name='dense_1')(dense_1)
        dense_2 = Convolution2D(4096, 1, 1, activation='relu', name='dense_2')(dense_1)
        dense_3 = Convolution2D(1000, 1, 1, name='dense_3')(dense_2)
        prediction = Softmax4D(axis=1, name='softmax')(dense_3)
    else:
        dense_1 = Flatten(name='flatten')(dense_1)
        dense_1 = Dense(4096, activation='relu', name='dense_1')(dense_1)
        dense_2 = Dropout(0.5)(dense_1)
        dense_2 = Dense(4096, activation='relu', name='dense_2')(dense_2)
        dense_3 = Dropout(0.5)(dense_2)
        if orig:
            prediction = Dense(1000, name='out')(dense_3)
        else:
            if mod == 1:
                dense_3 = Dense(1024, activation='relu', name='dense_3')(dense_3)
            prediction = Dense(3, name='out')(dense_3)
    model = Model(input=inputs, output=prediction)
    if weights_path:
        model.load_weights(weights_path)
    return model
def build_alexnet(weights_path=None, heatmap=False, orig=False, num_out=3):
    """Build a single-tower AlexNet (no grouped convolutions, no LRN).

    Written against the legacy Keras 1.x API (``Convolution2D`` with
    ``subsample``) and channels-first inputs of shape (3, 227, 227).
    The commented-out grouped-convolution (``merge``) variants of the
    original were dropped; see ``build_alexnet_orig`` for that topology.

    :param weights_path: optional HDF5 weights file loaded before returning.
    :param heatmap: if True, build a fully-convolutional variant that accepts
        any spatial size and emits a per-location 1000-way softmax heatmap.
    :param orig: if True, keep the original 1000-way ImageNet output layer
        (linear -- no softmax applied here).
    :param num_out: 3 or 5 for a softmax head of that size; 1024 inserts an
        extra 1024-unit ReLU layer feeding a 3-way softmax head (mirrors
        ``build_alexnet_orig(mod=1)``).
    :return: the assembled Keras ``Model``.
    :raises ValueError: if ``num_out`` is not one of 3, 5 or 1024.
    """
    if heatmap:
        # Fully-convolutional: spatial dims unknown until inference time.
        inputs = Input(shape=(3, None, None))
    else:
        inputs = Input(shape=(3, 227, 227))
    conv_1 = Convolution2D(96, 11, 11, subsample=(4, 4), activation='relu', name='conv_1')(inputs)
    conv_2 = MaxPooling2D((3, 3), strides=(2, 2))(conv_1)
    conv_2 = ZeroPadding2D((2, 2))(conv_2)
    # Single 256-filter conv instead of the original two 128-filter groups.
    conv_2 = Convolution2D(256, 5, 5, activation='relu', name='conv_2')(conv_2)
    conv_3 = MaxPooling2D((3, 3), strides=(2, 2))(conv_2)
    conv_3 = ZeroPadding2D((1, 1))(conv_3)
    conv_3 = Convolution2D(384, 3, 3, activation='relu', name='conv_3')(conv_3)
    conv_4 = ZeroPadding2D((1, 1))(conv_3)
    conv_4 = Convolution2D(384, 3, 3, activation='relu', name='conv_4')(conv_4)
    conv_5 = ZeroPadding2D((1, 1))(conv_4)
    conv_5 = Convolution2D(256, 3, 3, activation='relu', name='conv_5')(conv_5)
    dense_1 = MaxPooling2D((3, 3), strides=(2, 2), name='convpool_5')(conv_5)
    if heatmap:
        # Dense layers expressed as convolutions so the head slides over
        # arbitrary spatial extents.
        dense_1 = Convolution2D(4096, 6, 6, activation='relu', name='dense_1')(dense_1)
        dense_2 = Convolution2D(4096, 1, 1, activation='relu', name='dense_2')(dense_1)
        dense_3 = Convolution2D(1000, 1, 1, name='dense_3')(dense_2)
        prediction = Softmax4D(axis=1, name='softmax')(dense_3)
    else:
        dense_1 = Flatten(name='flatten')(dense_1)
        dense_1 = Dense(4096, activation='relu', name='dense_1')(dense_1)
        dense_2 = Dropout(0.5)(dense_1)
        dense_2 = Dense(4096, activation='relu', name='dense_2')(dense_2)
        dense_3 = Dropout(0.5)(dense_2)
        if orig:
            prediction = Dense(1000, name='out')(dense_3)
        else:
            if num_out == 1024:
                # BUG FIX: the original added this layer but then left
                # `prediction` unbound (NameError at Model(...)). Mirror
                # build_alexnet_orig(mod=1): extra 1024-unit layer feeding
                # a 3-way head.
                dense_3 = Dense(1024, activation='relu', name='dense_3')(dense_3)
                prediction = Dense(3, activation='softmax', name='out')(dense_3)
            elif num_out == 5:
                prediction = Dense(5, activation='softmax', name='out_5')(dense_3)
            elif num_out == 3:
                prediction = Dense(3, activation='softmax', name='out')(dense_3)
            else:
                # BUG FIX: any other value previously fell through with
                # `prediction` unbound; fail loudly instead.
                raise ValueError('num_out must be 3, 5 or 1024 (got %r)' % (num_out,))
    model = Model(input=inputs, output=prediction)
    if weights_path:
        model.load_weights(weights_path)
    return model
if __name__ == '__main__':
    # Smoke check: build the default 3-class model and dump its layers.
    net = build_alexnet()
    print(net.summary())
|
16,146 | 72154fb956bee8de6298214e4f9a56d98a0cbffb | # coding=utf-8
"""
功能:
1.个人中心
index、user与detail
2.消息
new
3.订单商品
order、goods与list
"""
from flask import Flask
# 导入蓝图对象
from E10_OrderGoods import api
app = Flask(__name__)
# 第三步:注册蓝图对象到程序实例上
app.register_blueprint(api)
@app.route('/')
def indedx():
    # NOTE(review): name looks like a typo for "index". Left as-is because
    # renaming would change the Flask endpoint name used by url_for().
    return 'index'
@app.route('/user')
def user():
    """Placeholder for the personal-center user page."""
    return 'user'
@app.route('/detail')
def detail():
    """Placeholder for the personal-center detail page."""
    return 'detail'
if __name__ == '__main__':
    # BUG FIX: `print app.url_map` is Python 2 statement syntax and a
    # SyntaxError on Python 3; the call form works on both.
    # url_map lists every registered rule, including blueprint routes.
    print(app.url_map)
    app.run(debug=True)
|
16,147 | 26d355227957f21a3a7bcfb4c0e8321e14ee9956 | from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import numpy as np
from sklearn.metrics import classification_report, confusion_matrix
# Two checkpoints of the same fine-tuned VGG16: best accuracy vs lowest val loss.
model = load_model("vgg16_1_acc0.90.h5")
# NOTE(review): model_loss is only used by the commented-out evaluate block below.
model_loss = load_model("vgg16_1_bestloss.h5")
img_height, img_width = 150,150
#validation_data_dir = "MWI-Dataset-test"
validation_data_dir = "Weather_Dataset"
# NOTE(review): heavy augmentation (flips/zoom/shifts/rotation) on an
# evaluation set randomizes the reported metrics -- confirm this is
# intentional before trusting the numbers.
test_datagen = ImageDataGenerator(
    rescale=1./255,
    horizontal_flip=True,
    fill_mode="nearest",
    zoom_range=0.3,
    width_shift_range=0.3,
    height_shift_range=0.3,
    rotation_range=30)
# shuffle=False keeps generator order aligned with .classes below.
validation_generator = test_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(img_height, img_width),
    shuffle=False,
    class_mode="categorical")
""" scores = model.evaluate_generator(validation_generator, verbose=1)
print("With best accuracy model:", scores)
scores_loss = model_loss.evaluate_generator(validation_generator, verbose=1)
print("With lowest val_loss model:",scores_loss)
"""
# Predicted class = argmax over the softmax outputs.
preds = model.predict_generator(validation_generator, verbose=1)
Ypred = np.argmax(preds, axis=1)
Ytest = validation_generator.classes # shuffle=False in test_generator
print(classification_report(Ytest, Ypred, labels=None,
    target_names=["Haze", "Rainy", "Snowy", "Sunny"], digits=3))
|
16,148 | 540aca234b3ac84c561eea9a1098e2a2d3c734be |
# Advent of Code 2020 day 2, part 1: count passwords whose required
# letter occurs within the policy's min-max bounds.
# Each line looks like "1-3 a: abcde".
numright = 0
with open("adventofcode/day02.txt") as inpt:
    for line in inpt:
        fields = line.split(" ")
        bounds = fields[0].split("-")
        low = int(bounds[0])
        high = int(bounds[1])
        wanted = fields[1][0]
        occurrences = fields[2].count(wanted)
        if low <= occurrences <= high:
            numright += 1
print(numright) |
16,149 | be35d8206b48519bd21023667508b2b43256a319 | """
Author : HarperHao
TIME : 2020/12/9
FUNCTION: Base class for application forms
"""
from wtforms import Form
class BaseForm(Form):
    """Form base class exposing the first validation error message."""

    def get_error(self):
        # self.errors maps field name -> list of messages; pop one field
        # and return its first message.
        _field, messages = self.errors.popitem()
        return messages[0]
|
16,150 | 21d000b35d90d0e2ad789b55cf3108b665cf7e16 | """
1. 이중 for문으로 두개 뽑기
2. 뽑은 결과의 합이 <=200 이므로, 200짜리 배열 만들기
3. 배열에서 >=1 애들만 찾아서 넣기
"""
def solution(numbers):
    """Return the sorted, distinct sums of every two-element pick from *numbers*.

    Generalized from the original fixed-size tally array: the original
    indexed a 201-slot list and raised IndexError for any pair sum above
    200; a set imposes no upper bound and naturally de-duplicates.
    """
    pair_sums = set()
    for i in range(len(numbers)):
        for j in range(i + 1, len(numbers)):
            pair_sums.add(numbers[i] + numbers[j])
    return sorted(pair_sums)
numbers = [2,1,3,4,1]
# NOTE: this second assignment overwrites the sample above, so only
# [5,0,2,7] is actually exercised.
numbers = [5,0,2,7]
result = solution(numbers)
print(result) |
16,151 | d0d6df38c7706361c5bb157a1ce8760c6651e58e | import pandas as pd
from twitter_get_data import query_str
import os
# Remember the original working directory; restored via os.chdir(path) at
# the end of the script. query_str doubles as the per-query output folder.
path=os.getcwd()
os.chdir(query_str)
# NOTE(review): assumes a CSV with 'Sentiment_Type' and
# 'Emotion_Prediction' columns -- confirm against the producer script.
data=pd.read_csv('Final_Results_FB.csv',encoding='ISO-8859-1')
# Tally sentiment and emotion label frequencies.
# PERF FIX: the original looped over every row and evaluated
# data.loc[:, col][i] for each label test, re-slicing the DataFrame into a
# fresh Series on every access. One value_counts() pass per column gives
# the same counts. (Assumes the default RangeIndex from read_csv, which
# the original positional loop relied on as well.)
_sentiment_counts = data['Sentiment_Type'].value_counts()
data_pos = int(_sentiment_counts.get('Positive', 0))
data_neg = int(_sentiment_counts.get('Negative', 0))
data_neu = int(_sentiment_counts.get('Neutral', 0))
_emotion_counts = data['Emotion_Prediction'].value_counts()
data_happiness = int(_emotion_counts.get('happiness', 0))
data_sad = int(_emotion_counts.get('sadness', 0))
data_worry = int(_emotion_counts.get('worry', 0))
data_angry = int(_emotion_counts.get('anger', 0))
data_relief = int(_emotion_counts.get('relief', 0))
data_surprise = int(_emotion_counts.get('surprise', 0))
data_neutral = int(_emotion_counts.get('neutral', 0))
data_love = int(_emotion_counts.get('love', 0))
data_hate = int(_emotion_counts.get('hate', 0))
data_boredom = int(_emotion_counts.get('boredom', 0))
data_empty = int(_emotion_counts.get('empty', 0))
data_enthusiasm = int(_emotion_counts.get('enthusiasm', 0))
data_fun = int(_emotion_counts.get('fun', 0))
total_length = len(data)
# Sentiment shares, one line each.
for label, count in (('positive', data_pos),
                     ('negative', data_neg),
                     ('neutral', data_neu)):
    print(f"{count / total_length * 100}% of {label} tweets related to the hashtag")
# Emotion shares, in the same order as the original report.
for label, count in (('worry', data_worry),
                     ('sad', data_sad),
                     ('angry', data_angry),
                     ('happiness', data_happiness),
                     ('relief', data_relief),
                     ('surprise', data_surprise),
                     ('neutral', data_neutral),
                     ('love', data_love),
                     ('hate', data_hate),
                     ('boredom', data_boredom),
                     ('empty', data_empty),
                     ('enthusiasm', data_enthusiasm),
                     ('fun', data_fun)):
    print(f'{count / total_length * 100}% of "{label}" tweets related to the hashtag')
import numpy as np
import matplotlib.pyplot as plt

# Bar chart of sentiment counts.
top = [('Positive', data_pos), ('Negative', data_neg), ('Neutral', data_neu)]
labels, ys = zip(*top)
xs = np.arange(len(labels))
plt.bar(xs, ys)
plt.xticks(xs, labels)  # replace default x-ticks with category labels
plt.yticks(ys)
# NOTE(review): filename typo "Visualiztion" kept -- downstream consumers
# may already expect these exact names.
plt.savefig('FB_Visualiztion_Sentiment.png')
plt.show()

# Bar chart of emotion counts.
# BUG FIX: the original listed ('Sad', data_sad) twice and never plotted
# data_neutral; the duplicate is replaced with ('Neutral', data_neutral).
# (The unused `width` variable was also dropped.)
top = [('Worry', data_worry), ('Sad', data_sad), ('Angry', data_angry),
       ('Happy', data_happiness), ('Neutral', data_neutral), ('Boredom', data_boredom),
       ('Empty', data_empty), ('Enthusiasm', data_enthusiasm), ('Fun', data_fun),
       ('Surprise', data_surprise), ('Relief', data_relief), ('Hate', data_hate),
       ('Love', data_love)]
labels, ys = zip(*top)
xs = np.arange(len(labels))
plt.bar(xs, ys)
plt.xticks(xs, labels)
plt.yticks(ys)
plt.savefig('FB_Visualiztion_Emotion.png')
plt.show()
os.chdir(path) |
16,152 | ef25b654a855e2cafddcfe5719c442c5e637b49d | # -*- coding:utf-8 -*-
'''
https://www.nowcoder.com/practice/9e5e3c2603064829b0a0bbfca10594e9?tpId=117&&tqId=37846&&companyId=665&rp=1&ru=/company/home/code/665&qru=/ta/job-code-high/question-ranking
'''
'''
题目描述
假定你知道某只股票每一天价格的变动。
你最多可以同时持有一只股票。但你可以无限次的交易(买进和卖出均无手续费)。
请设计一个函数,计算你所能获得的最大收益。
示例1
输入
复制
[5,4,3,2,1]
返回值
复制
0
说明
由于每天股票都在跌,因此不进行任何交易最优。最大收益为0。
示例2
输入
复制
[1,2,3,4,5]
返回值
复制
4
说明
第一天买进,最后一天卖出最优。中间的当天买进当天卖出不影响最终结果。最大收益为4。
这道题看似困难, 实则简单. 至少最简单的方法的思路是很好理解的.
理论上, 只要没涨到最高就不卖. 一旦开始跌, 马上卖掉; 一旦跌到谷底马上买. 掌握上述理论, 就能获得最大化的利益. 是不是?
所以, 我们只要看当第i天的记录比前一天的高, 就在res里加上prices[i] - prices[i - 1]就行了.
自然就能求得最优解.
'''
class Solution:
    def maxProfit(self, prices):
        """Greedy optimum: bank every positive day-over-day price change.

        Buying at every local minimum and selling at every local maximum
        is equivalent to summing all positive consecutive differences.
        """
        profit = 0
        for yesterday, today in zip(prices, prices[1:]):
            gain = today - yesterday
            if gain > 0:
                profit += gain
        return profit
print(Solution().maxProfit([1, 5, 8, 4, 2, 3, 6, 4, 10, 7, 3, 2, 1, 6])) |
16,153 | e1f8424e0d08b91a96e8e8fa4e7271d7a13cf0a3 | import FWCore.ParameterSet.Config as cms
# Process every event in the input files (-1 = no limit).
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
# Primary and secondary input file lists; readFiles is populated below
# via readFiles.extend([...]).
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
# PoolSource reads the ROOT files named in readFiles; the vstring objects
# are shared by reference, so extending them later updates the source too.
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/002F2CE1-38BB-E611-AF9F-0242AC130005.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/009CE684-45BB-E611-A261-001E67E6F8FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/044FF9CC-42BB-E611-ACB0-0CC47AD98BC2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/06103109-48BB-E611-86BE-001E673968A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0843C79F-FCBD-E611-B38C-001E67A3F8A8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0881BCD8-8FBE-E611-8796-002590FD5A72.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/08E524F3-0ABC-E611-984F-141877639F59.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/08F5FD50-23BC-E611-A4C2-00259073E3DA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0A85AA82-45BB-E611-8ACD-001E674FB063.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0CA050B2-57BB-E611-8A7A-001E674FBA1D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0CC4E5F0-8EBE-E611-81A0-FA163E0546A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10104CB2-51BB-E611-BCDC-FA163E2D421C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10B1C835-51BB-E611-962E-0025901D08B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10F3C0E6-BDBD-E611-B15C-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/144E9DCA-3ABD-E611-B140-0025905B85EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/147E1208-0EBC-E611-8AB4-20CF307C9897.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16409928-3FBB-E611-B72C-002590E2F5CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16602767-48BB-E611-B7A6-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16BBACA8-FBBD-E611-BEC0-FA163E72410F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16D0A23B-0EBD-E611-A7D4-00266CFF090C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/184B6801-D9BC-E611-8E6A-00259073E52C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/1C48C9F3-58BB-E611-95E5-FA163E897AAE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/203922CF-19BD-E611-A4CB-002590D0AF54.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/245AB734-3DBB-E611-A2BE-0090FAA575B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/260BAF20-F9BD-E611-AB0D-141877411FCD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/269219CA-42BB-E611-9B4D-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/2A5DBC04-3CBB-E611-9C43-0CC47AA99436.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/2C22DEF7-8EBE-E611-9D17-0025905A497A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3048BFB6-48BD-E611-B2AE-FA163E7B239E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3232416A-4CBB-E611-9301-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3665009B-FFBD-E611-9358-0025905A610A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/38F82D20-42BB-E611-AA3B-002590747E14.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3C994553-4DBB-E611-829E-00259048BF92.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3CB1F166-3EBB-E611-BBAC-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3E96493A-F2BD-E611-B4D4-24BE05C6E561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/44E3620A-31BB-E611-B8EE-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/44F1F2FC-3BBB-E611-9596-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/48723720-30BC-E611-906D-0025905B855C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/4C6573B5-F8BC-E611-8B6D-0CC47A7C340E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/5A85E548-3FBB-E611-9AF8-001E674FCAE9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6064DD7D-27BC-E611-9269-FA163E3A554D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6079A2C9-5CBB-E611-9D23-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/64BFBA66-2ABC-E611-9884-02163E013C92.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/68FD6C45-4EBB-E611-8CE3-0CC47A7452D8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6AAB2667-DFBC-E611-BCE9-44A842CFCA0D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6C7AF5E2-51BB-E611-944C-0025905A60B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6CB42E13-4ABB-E611-B37A-B083FECFF6AB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E2754F6-49BB-E611-A8B6-00259074AE8A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E4F2DD7-3FBB-E611-A5F6-0CC47A13CD44.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E7B7470-FEBD-E611-9FD6-0CC47A78A446.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E8A3BE5-4CBB-E611-A86D-00259073E4E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/70F436E2-3DBB-E611-92D6-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7240B6CA-D6BC-E611-8854-B083FED04276.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/74368CB5-42BB-E611-B3D9-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/74390FDB-35BD-E611-932E-02163E013EF0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/76C36A06-48BB-E611-984D-0090FAA58204.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/78EB0B24-02BE-E611-B6ED-FA163E275D07.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7A70E7A0-52BB-E611-A35E-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7AA6AA1E-30BC-E611-8E7E-0025905A610A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7AF0E4F9-4EBB-E611-9B9B-0CC47A13D284.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7CE9BE91-0EBC-E611-A5DA-180373FF8446.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7CF35EDF-E8BC-E611-A47E-24BE05C488E1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7EDEC297-2EBE-E611-857F-0242AC130003.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/8044F84B-44BB-E611-8915-001E674440E2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/864E3740-E6BC-E611-AD01-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/88A401C7-48BB-E611-A057-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/88FE7E84-17BC-E611-B83A-001EC94BF93F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/90410DC3-0EBC-E611-AAC2-001E675A6AA9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9488E5A5-4ABB-E611-8F1A-0025905A60AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/94F362D7-5DBB-E611-AB61-FA163E508270.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/98E58784-40BB-E611-ACF5-0CC47AD98D0C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9A25B43C-3DBB-E611-917E-001E674FB149.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9E2603C9-4DBB-E611-A64D-001E674FBA1D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A09103A1-3ABB-E611-9459-001E6745764D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A29BDC40-47BB-E611-93D2-B083FED42A1A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A416944E-41BB-E611-9753-0CC47AD99144.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A86280DE-51BB-E611-B051-0025905A6118.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A8BEF298-12BD-E611-90EE-E41D2D08DE30.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC1C716C-51BB-E611-BA14-0025907D1D6C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC289CA3-4CBB-E611-83E8-001EC94BF6CA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC9BCBDD-19BC-E611-9B23-002590791DA2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B0050E8E-32BB-E611-B390-0025900E3508.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B08DA7E7-43BB-E611-993E-002590E2F5CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B64CF6DF-3CBB-E611-BB5A-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B6B969CA-4EBB-E611-AF09-FA163EC9E920.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B82DE45D-4DBB-E611-88CE-0025905B85FE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/BC7EA562-4BBB-E611-BE25-0025901D08B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/BEDB4181-97BE-E611-A59F-001E675A67BB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C0306F0A-8FBE-E611-9828-0025905A6132.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C0A1AF5B-44BB-E611-BD4B-0CC47AA992B4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C8F2E99E-3BBB-E611-9324-0090FAA59864.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CA4A3834-2BBB-E611-834E-90B11C2CA3F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CC245041-E2BC-E611-8171-00266CFCCB44.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CEDA35B7-15BC-E611-B1AD-001E67E6F819.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D21EA92D-07BC-E611-BD74-70106F4A93E8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D453B2C6-8FBE-E611-9644-141877411FCD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D4E99AD0-46BB-E611-BB98-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D62C98A8-08BC-E611-A9AC-B8CA3A70A410.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/DA92E40D-48BB-E611-B782-0025905A612E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/E2B687E9-3DBB-E611-943D-0025907253B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/E2E24C18-5ABB-E611-A1C2-FA163E0546A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/E8411037-15BE-E611-A222-002590D9D8B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/EAEF3807-51BB-E611-92CA-001E674440E2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/EC9F5605-4CBB-E611-B15D-002590A88800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/F463E2E0-4ABB-E611-ADEE-001E6745764D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/F8CFD45A-4BBB-E611-9F9E-001E67A40523.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/06C6936D-7EBC-E611-B990-0025905A60B4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/0C858DC7-3CBD-E611-AD67-0242AC130002.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/1A6F76CE-3BBD-E611-83FD-FA163E02238B.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/1C9D4463-20BE-E611-85E9-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/1CEA7D79-34BD-E611-8312-24BE05C33C81.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/2274F4B0-90BE-E611-833A-001E67DDC88A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/2E44EBA0-38BD-E611-B455-FA163E66032D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/34003670-98BD-E611-BB5A-001E67586A2F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/3AB85B19-57BD-E611-B55E-24BE05C6C741.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/3CD657F2-06BE-E611-8DDA-00259075D72E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/4A993269-E8BE-E611-ADF4-FA163E0EFB0F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/560F7D9C-7FBE-E611-BFD8-D067E5F910F5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/5C89D593-7EBD-E611-A808-0CC47A4D7654.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/6CA4DF87-5FBD-E611-9B94-A4BADB1E6B36.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/6CA9275C-C0BD-E611-B377-0025905B8566.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/7200B868-E8BE-E611-A7F9-FA163E702259.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/74104143-88BD-E611-86FF-0CC47A6C0716.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/746D6C2B-86BD-E611-84C8-001E67A42A71.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/7AD42ECF-E8BE-E611-8E74-B083FECFF2BF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/8A977455-4BBD-E611-A087-0025905B8606.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/8E66F610-E9BE-E611-A5DC-0CC47A57CCEA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/8EA28415-7CBC-E611-BD99-B083FED02AD1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/908E6B68-5DBD-E611-BB8F-02163E014939.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/927E9406-82BC-E611-B9D4-0CC47A4C8EBA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/92AD2277-E8BE-E611-A1A7-0CC47A4C8E64.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/9AC5A158-82BD-E611-81A7-0025905B856E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/9C342D41-0BBE-E611-B0AF-001E67E95A40.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/A6D46533-83BC-E611-951A-0025907D2502.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/B0545BAA-BDBD-E611-B355-0CC47A4C8EEA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/B0DB98A8-45BE-E611-8171-B083FED4239C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/B871CA8E-5EBD-E611-8C0F-003048FFD79E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/BAA9F89F-E8BE-E611-A262-0CC47AD98F6A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/CAEC4CA1-0EBE-E611-B31F-02163E00C8AF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/D6D2C6BC-9ABE-E611-A0FB-0CC47A745294.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/DCC635A5-77BD-E611-9430-14187740D279.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/E6D0DEEE-3EBD-E611-B8C3-002590E3A004.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/E6FE08F6-48BD-E611-A296-B083FED40671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/F4C2F65E-39BD-E611-8630-E0DB550BA718.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/FABC8385-E8BE-E611-B290-0025905AA9CC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/04AA3519-78BB-E611-BE3C-001E67E69879.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/04F715A6-08BC-E611-B9C2-001E67457A5D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/06B5DD90-8BBC-E611-A7F0-008CFA0527CC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/06CABBDA-3EBD-E611-9E1F-02163E00B58A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/06D9FFFF-1CBC-E611-8BAB-001E67DFFB86.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/0C744224-61BB-E611-9685-001E67E6F8AF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1019A18E-4EBB-E611-9A28-FA163E6CC06D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1023F365-48BB-E611-A2DC-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/10759B01-49BB-E611-BD64-00259048812E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/12723348-74BC-E611-9153-008CFA1980B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1E13B576-8FBC-E611-A831-6C3BE5B50170.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1E17922C-59BB-E611-806D-0CC47A78A3EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1EE8E7C8-B6BD-E611-9244-001E67586629.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2041BDC3-51BB-E611-B061-24BE05C44BB1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/24919263-BFBB-E611-9813-B8CA3A70BAC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2610F510-B1BB-E611-AE92-00259073E4E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2616102B-5DBB-E611-86E7-02163E012D2E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/262EC509-5CBB-E611-9D05-002590E7DE20.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/26D730F4-5CBB-E611-9F20-0025901AC3F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2A624B00-61BB-E611-83FC-E0CB4E1277DF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2AEC73E7-B7BB-E611-AC2E-549F3525DB98.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2C8ABFE0-E5BC-E611-BD07-848F69FD29DF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2CC5B687-5EBB-E611-AB4D-0CC47A13D110.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2E2BDEC8-47BB-E611-9247-0025905A60B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/30AE7047-57BB-E611-854C-002590494FA8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/343F470A-5BBB-E611-A1F7-0025905B85F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/365648D1-54BD-E611-B6F7-008CFA0A5818.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/36591B5B-05BC-E611-8151-0CC47A57D036.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/36FE3A3A-5DBB-E611-8566-002590494FA8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3C24D130-4FBB-E611-8B0F-0CC47AD98F64.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3E266B78-5ABB-E611-A814-001E674FCAE9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3E5CC689-5DBB-E611-8C42-0025905A6066.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3E8A1C73-8ABC-E611-9B5E-24BE05CEEB81.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/44DB4315-57BB-E611-995E-90B11C2CA3F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/469B9982-5EBB-E611-9769-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/46B827C3-98BD-E611-83DD-001E67E6F7F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/480070FD-B9BB-E611-AD45-002590E3A0D4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4840F306-6BBB-E611-BA80-FA163E71673B.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/487D6062-51BB-E611-886C-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4A7F614C-1BBC-E611-A31E-3417EBE6453D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4A858527-ADBB-E611-B14A-0025904AC2CC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4C6F5203-55BB-E611-AAF7-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4CDB60CF-54BB-E611-844C-001E675A6630.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4E7ADF82-AEBB-E611-80A0-001E67A40514.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4EADF3B2-4BBB-E611-851C-24BE05CE3C91.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/500C9FDC-51BB-E611-BCF8-F04DA2752F68.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/54C17AF5-58BB-E611-A76C-0CC47A7C34EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/54DFEB5B-4FBB-E611-93D4-1866DAEEB344.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/5634EE46-C9BB-E611-A3BE-20CF305B04D2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/5AD25019-57BB-E611-90E6-1418774118F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/5EA1A28D-4ABB-E611-ADD9-001E67E6F774.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/604C2313-C4BB-E611-8A16-02163E01308C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/60F0052B-58BB-E611-A737-0CC47AA98F9A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/62372BCA-54BB-E611-86FB-001E67457E7C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/626AEFCB-56BB-E611-99BB-002590E7E00A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/62E2233B-4FBB-E611-A733-0025905AA9F0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/68E69FBA-4EBB-E611-AAB6-24BE05CE3C91.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6A6B6985-BABB-E611-A8E2-0CC47A4D765A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6AE8E49B-47BB-E611-BB27-24BE05CECB81.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6C2DAC6D-53BB-E611-AF58-5065F381F1C1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6C657BC3-8BBC-E611-8748-0CC47A4D76D2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6C77BB1A-69BD-E611-9B17-047D7BD6DD56.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6EAB5B6E-55BB-E611-A77D-00259073E34C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/70B16FDD-C8BC-E611-91E7-001E67E71BFF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/721640BC-59BB-E611-B026-842B2B1814E3.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/72B03B93-72BD-E611-9645-00259021A39E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/760C07CF-A8BB-E611-B4AE-FA163E3C6237.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/765D3877-90BD-E611-B86C-0025905B85DE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/76F7FCAB-94BC-E611-AA4D-00259073E30E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/78405767-51BD-E611-AB62-141877639F59.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/7A883500-BABB-E611-A0C2-001E67DDBEDA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/7CE0E1E0-55BB-E611-853C-D4AE527EDBD4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/7EC34512-C0BD-E611-A475-002590D9D88C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/80616104-5BBB-E611-B899-0CC47A4C8E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8208268F-4EBB-E611-A943-24BE05C68671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8273D41F-63BB-E611-A1C4-02163E00ADA2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/82805D06-55BB-E611-864B-02163E012D2E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/82935CEA-D0BC-E611-AE9E-B083FED02AD1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/86B5F6E7-B4BB-E611-A22E-FA163EC14324.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/885AED4A-50BB-E611-B113-E0CB4E1277DF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/88A64B17-57BB-E611-BFCA-001E673986B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/88C17D9F-63BB-E611-AF7F-0CC47A537688.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8A5F6492-62BB-E611-AC86-002590494E18.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8CAA98C4-59BB-E611-871D-002590E7D7CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8E4600AE-5BBB-E611-9ABC-0CC47AD98F70.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/92233F29-C5BB-E611-AC46-20CF3027A613.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/98A6F3C0-54BB-E611-BA22-1418774126FB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/A4E9EDC3-F1BD-E611-8E1D-0025901D0C52.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/A6A1CA79-57BB-E611-8C5C-0025907D250C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/B098D6CC-95BD-E611-8126-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/B450D260-51BB-E611-8EBA-FA163E4625E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/BA4BF56A-86BC-E611-B038-0CC47A4D7628.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/BA5BFCCA-74BB-E611-9BC4-FA163E0D029A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C07F1E43-47BB-E611-89F3-0025908653C4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C4DF8DFA-5CBB-E611-ABE3-002590E3A222.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C6621154-64BB-E611-AA61-FA163E5E91D1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C845816C-9FBD-E611-BF21-001E674FBFC2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C86658F5-57BB-E611-B6C0-0CC47A4C8E14.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/CA8946A8-0EBC-E611-A208-0CC47A7E6BDE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/CAF439BC-51BB-E611-B2C6-001E673972F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/CEE671C4-83BC-E611-A0FA-0CC47AD98C86.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D256DBB1-5FBB-E611-ADDF-0025905B8580.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D29F6AD8-4CBB-E611-B0FA-24BE05C68671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D2DDB5C9-45BB-E611-AE58-0CC47AD98C88.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D2E83075-55BB-E611-9525-0025905B85F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D6940CC7-54BB-E611-B0F4-0CC47A13D110.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D8F8A24A-57BB-E611-82B6-FA163EE1F3FE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/DA5A8C92-58BB-E611-9661-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E03CEBF1-57BB-E611-BFC4-001E67E69E32.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E2032456-D8BB-E611-98B6-001E6779242E.root',
] )
readFiles.extend( [
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E2EDC4CA-51BB-E611-A622-782BCB20E959.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E2FC9494-CFBC-E611-AE9C-00304867FDFB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E40BBD77-5ABB-E611-89F6-0025907253B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E8DE1702-7BBB-E611-A982-FA163EBEC103.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EADBF61A-A7BB-E611-B3DB-5065F382C221.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EC5A4AC2-BCBB-E611-A28C-001E674820F0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EC7674EF-B7BB-E611-95FC-0025905A606A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EEAEC58B-03BC-E611-83F4-B083FED42FB0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F2689A6D-25BC-E611-B763-0CC47ABAC11C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F2B33A97-4DBB-E611-83CD-A0369F30FFD2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F4182A72-62BB-E611-B7AA-0025905A60B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F47230CE-46BB-E611-B3CA-0025905B8560.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F63829CB-BABB-E611-AEA5-002590D9D8AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F8663E21-61BB-E611-BE1E-FA163EB7FE16.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/FA9974E9-5ABB-E611-A277-02163E011591.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/FCD03ECE-54BB-E611-A054-002590D9D976.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/FE93D59C-6BBB-E611-A870-FA163E832397.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0049958C-41BB-E611-9FFD-24BE05C44BC1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/021F1221-9CBD-E611-B1C8-0CC47A4D75F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0A9D4A81-E3BB-E611-AF73-B083FED42FC4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0ABF0BC9-40BB-E611-B8A9-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0C04ACF9-D6BC-E611-900D-3417EBE64561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0C6EFA8D-12BC-E611-84DB-047D7BD6DEC4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/142169C1-4CBB-E611-94B6-0025901D08E8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/16A11D58-FABD-E611-A9A2-0019B9CABE16.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/22125190-07BE-E611-BC7C-0025905B85B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/227D0461-C4BC-E611-9876-0025905B8560.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/264B8DB8-0DBC-E611-AE25-24BE05C62711.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/2C23BF13-44BB-E611-BB17-24BE05C44BC1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/2E3653CF-CCBE-E611-B469-0CC47A4D76AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/30CF49E8-4DBB-E611-959E-90B11C2AA430.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/30E73B14-40BB-E611-ABAD-5065F3818291.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/3644C9A5-4ABB-E611-8834-0025905A612A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/3C3E54EF-C7BE-E611-9C19-1866DAEA6520.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/3CBAF338-B0BC-E611-A51A-001C23C0A63C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/42310A70-02BE-E611-A992-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/42498B58-F0BC-E611-B2BE-0CC47A13CBEA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/4256CB52-FDBD-E611-938D-782BCB539695.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/462DFAB6-50BB-E611-BF53-002590D9D8A4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/46F6CE79-E7BE-E611-AE4C-D4AE52AAF583.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/483DC42D-FBBD-E611-AFA9-02163E00C3B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/48670377-C0BE-E611-A5A8-FA163E6A92FB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/4AD91F7E-01BE-E611-BAB7-0025901F8740.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/52570313-0FBC-E611-A1B3-20CF307C98F1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/52FAD6C9-F8BD-E611-B9B1-0025904A91F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/5476EB9C-03BC-E611-BD70-7845C4FC3B48.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/5A0C37F4-37BC-E611-8385-02163E013DF6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/5EBF73F5-C7BE-E611-9F4D-002590E3A0FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/60A9F704-42BB-E611-B922-0CC47A13D416.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/6234FF81-E8BC-E611-9298-5065F382A241.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/62500CF2-D6BC-E611-BF36-6CC2173BB830.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/6269E13B-27BC-E611-9298-0CC47A78A3D8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/64620C4D-7BBD-E611-89D8-002590D9D8B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/66EEABF3-49BB-E611-A0CD-B083FED42A6E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/6ABE11F4-45BC-E611-8DEC-0242AC130003.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7018CEDA-3EBB-E611-AEA6-24BE05BDCEF1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/723CD3A0-66BB-E611-81EF-FA163E89941A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/724216E2-45BB-E611-82A0-0CC47A4C8E2A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7476C568-B8BE-E611-986C-24BE05C63721.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/74A358CD-E6BB-E611-8ABC-0025905A612E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7A69DDA2-6BBB-E611-ACD7-FA163E9B46B5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7E732764-CCBE-E611-A5F4-0025901AC0FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/82A199BB-51BB-E611-83DD-0025905A6090.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/84E70035-3DBB-E611-BDD5-24BE05C48801.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/86E7D7D9-3CBB-E611-99C4-24BE05C656A1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/8A416855-4BBB-E611-B3AE-0CC47AD98F72.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/90EF4B08-C1BE-E611-9781-001E674FB149.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/94250B23-1EBC-E611-B3F7-002590D9D896.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/9AE25501-FEBD-E611-AC08-0025904A91F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A215AB70-ECBB-E611-9643-0CC47A78A45A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A6638BCC-3BBB-E611-9BA0-24BE05C44BC1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A6FE78CC-E8BC-E611-83B6-001E67E71BE1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A8E416C8-54BB-E611-9E1F-001E67397DF5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/AAFDCF92-1EBC-E611-AA68-00259073E3DA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/AC116782-8ABD-E611-B894-001E674DA1AD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/B249E9C0-23BC-E611-A811-001E67A4061D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/B458B32A-58BB-E611-923C-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/B699C90F-C6BE-E611-91A1-02163E0176B7.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/BA00965D-44BB-E611-B0B8-90B11C27F8B2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/C20853C9-4CBB-E611-9206-0025905A60D6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/C607932E-F7BD-E611-9737-141877410522.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/D0ECFAFA-CABC-E611-861A-B083FED12B5C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/D49903B9-0DBC-E611-8CED-001E67E6F92C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/D898D6EE-52BB-E611-8FA5-0CC47A13CDB0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/DC35E165-24BD-E611-9422-00259073E34A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/E04BDCDF-50BB-E611-BE70-0025905A6104.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/E0F9A1D7-3ABB-E611-90F6-24BE05CEBD61.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/EE420CAF-42BB-E611-AC1A-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/EEB1A329-49BB-E611-BA64-0CC47A0AD3BC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F0F3A9FC-20BC-E611-BAE8-00238BCE45A0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F41F5D98-48BB-E611-A02A-00259048B754.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F646B19D-4DBB-E611-AC98-002590D9D8A4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F8EBBFBF-48BB-E611-A6CD-001E67A3FEAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/00BEFF95-8BBF-E611-BCC4-001E67A3FDF8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/040ACF1F-1BBE-E611-AE01-B083FED42ED0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/0430EC9F-30BD-E611-A35D-001E677927CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/04FE23CC-96BB-E611-833A-24BE05C6E561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/08CBBABB-1FBD-E611-BF0B-0025907DCA4A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/0C1D3A78-E6BE-E611-B2C9-0CC47AA98B8C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/22186CA1-1CBD-E611-8549-00266CF3DF00.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/22BC7763-79BC-E611-9078-0CC47A13CFC0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/26FEEC01-83BD-E611-A580-B083FED42488.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/2E5B21E8-8ABD-E611-BE0C-782BCB539226.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/2EE43C20-C8BC-E611-B7AA-0CC47A4C8E20.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/300D2854-30BF-E611-88B3-001E67397AF3.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/30E3FE1D-A5BF-E611-AEA3-0025905A609E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/3219F233-89BD-E611-B566-0CC47A7C360E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/329B50BF-9ABD-E611-BCF5-0025905A607E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/347FB199-2FBC-E611-BA30-24BE05C68671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/360DC072-DDBE-E611-A1EC-02163E011461.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/36D28836-0FBE-E611-8134-02163E01306F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/38438E1C-E7BE-E611-8A6B-842B2B1807B2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/3C5CA12B-0DBE-E611-81B3-B8CA3A70BAC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/3E1F3A57-DDBE-E611-8568-B8CA3A70A5E8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/48DA281F-95BD-E611-A3E8-0CC47A4D7626.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/52D7EA1D-96BD-E611-A0A1-0025905A48EC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/56AE7F11-55BE-E611-A2C1-001E674FBFC2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/5A9C30E8-64BC-E611-8C05-02163E017617.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/5C2F8031-08BE-E611-81FF-24BE05C488E1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/5CFCF20D-34BC-E611-A337-FA163E7D40E5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/5E80613A-11BE-E611-9C4A-0CC47A4D76B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/60D1CC5D-11BE-E611-93DB-842B2B61B189.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/629C97E0-66BC-E611-B2E2-001E675A5262.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/64C28EE1-95BD-E611-94B2-FA163E161622.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/6C48D8F7-5BBE-E611-A887-0CC47A4D762A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/6C888D57-16BE-E611-B45B-0025905B859E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/6E1C219B-FABC-E611-8512-5065F38152E1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/70EBA687-ECBE-E611-A55E-002590FD5A3A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/78A9CE65-77BD-E611-BFD3-24BE05CEACA1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/7CFA279F-80BD-E611-8B2D-02163E00C0E5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/864BB370-9DBB-E611-A9AB-B8CA3A70A410.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/88578F79-71BD-E611-8809-0CC47AD98D74.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/92C3A341-14BE-E611-B7CC-0CC47AD98CF0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/9ADE011D-21BE-E611-811A-003048CB7A8A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/9AECC385-10BE-E611-A60C-0CC47A78A3EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/9C1102AC-D9BE-E611-9226-FA163EFC5115.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/9EDFEED8-87BD-E611-9C78-001E674DA1AD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/AA68B747-6BBD-E611-A597-24BE05C6C7E1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/AE4E3765-95BB-E611-9FD1-5065F382C261.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/B030888F-BBBD-E611-8DC4-D067E5F914D3.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/B6051A30-1DBD-E611-9665-B499BAAC098C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/BC62DFD9-B2BE-E611-91B6-0CC47A78A426.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/C2563EBF-D0BC-E611-9EB4-001E673985D4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/C2FA0461-6EBD-E611-8CB7-A4BADB22B643.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/C6EB9299-89BE-E611-A29C-001E67DDC88A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/C8CF45B9-EFBD-E611-9D79-0CC47A4C8E14.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/CE8A7FB4-12BE-E611-B541-0025905B8586.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/D27B375A-1ABE-E611-9A88-0025905B859E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/D4F72FCA-41BF-E611-A3F4-0CC47A4C8E70.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/D655D521-9BBD-E611-8432-002590D9D8C0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/D69E04F7-3EBD-E611-919B-FA163E1ED279.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/D827AE80-ABBF-E611-AD4E-001E67579498.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/DA019C97-17BE-E611-A30D-B083FED42ED0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/DC455948-64BD-E611-99CD-B083FED424C4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/DC761094-ABBF-E611-A032-001E67DDC4AC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/E0D43F48-46BC-E611-BDB0-0CC47A78A340.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/E24DFD45-6ABC-E611-83F5-002590D9D8B2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/E42697AE-12BE-E611-839D-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/EC519E35-89BD-E611-9419-02163E015242.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/F082A6D2-8BBD-E611-9D10-0025905A6132.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/F21CF2FD-8CBD-E611-9E79-0CC47AA98D60.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/F6C1E4E0-12BE-E611-BCEF-02163E0165EF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/F6CA0AB9-D5BC-E611-879E-549F3525DEFC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/F80D0344-1BBE-E611-B358-0CC47AD98C5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/F85ADDCF-A6BB-E611-A362-0025905B85BA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/FC0D7084-74BD-E611-948C-001EC94B6929.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/FE6128F4-23BD-E611-A913-0090FAA57F14.root',
] )
|
16,154 | 35c1aacb601c8a57d39a11495044078d3908a198 | from django.shortcuts import render, redirect
from seller.models import Product, Category
from ShoppingApp.models import UserProfile
from .models import Cart
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.core.mail import send_mail
# Create your views here.
@login_required
def home(request):
    """Render the buyer landing page with every product and category.

    Previously fetched products via raw SQL ("select * from seller_product"),
    which hard-codes the physical table name and bypasses the ORM for no
    benefit; `Product.objects.all()` issues the equivalent query.
    """
    pObjs = Product.objects.all()
    catObjs = Category.objects.all()
    return render(request, 'WelcomeBuyer.html', {'data': pObjs, 'cat': catObjs})
@login_required
def cart(request, id):
    """Add product `id` to the logged-in user's cart.

    Responds with a JS alert + redirect back to /buyer/home/; the duplicate
    case is reported separately.
    """
    # Local import keeps the module-level import block untouched.
    from django.db import IntegrityError

    pObj = Product.objects.get(id=id)
    uObj = UserProfile.objects.get(user__username=request.user)
    url = "/buyer/home/"
    try:
        c = Cart(user_profile=uObj, product=pObj)
        c.save()
    except IntegrityError:
        # Was a bare `except:`, which also hid unrelated programming errors.
        # Only the duplicate-row case (unique (user, product) pair — assumed
        # from the "already added" message; confirm a unique constraint
        # exists on Cart) should map to this alert.
        return HttpResponse('<script>alert("Product is already added to your cart");\
        window.location="%s"</script>' %url)
    return HttpResponse('<script>alert("Product has been added to your cart");\
    window.location="%s"</script>' %url)
@login_required
def cartvalue(request):
    """Show the products currently in the logged-in user's cart."""
    user = UserProfile.objects.get(user__username=request.user)
    cart_rows = Cart.objects.filter(user_profile_id=user.id)
    # One query instead of one Product.objects.get() per cart row (N+1).
    items = Product.objects.filter(
        id__in=cart_rows.values_list('product_id', flat=True))
    return render(request, 'cartdetails.html', {'added_products': items})
@login_required  # added: every other cart view requires login; anonymous users have no cart
def cartcalculate(request):
    """Check out the cart.

    On POST: totals the order from the parallel form lists, decrements each
    product's stock, empties the user's cart, e-mails a confirmation and
    renders the checkout page. On any other method redirects back to the
    cart (previously fell through and returned None, a 500).
    """
    if request.method == 'POST':
        quantities = request.POST.getlist('productqty')
        product_ids = request.POST.getlist('pid')
        prices = request.POST.getlist('price')
        total = 0  # renamed from `sum`, which shadowed the builtin
        # zip the three parallel lists instead of indexing by position
        for qty, pid, price in zip(quantities, product_ids, prices):
            total = total + int(qty) * float(price)
            updateProduct = Product.objects.filter(id=pid)
            updateQty = updateProduct[0].qty - int(qty)
            updateProduct.update(qty=updateQty)
        cartObjs = Cart.objects.filter(user_profile__user_id=request.user)
        cartObjs.delete()
        message = 'Your order is processed of {}'.format(total)
        send_mail('Order details', message, 'sakshipatne206@gmail.com', ['patnesakshi206@gmail.com'])
        return render(request, 'checkout.html', {'data': total})
    return redirect('/buyer/cartvalue/')
def deletecart(request, id):
    """Remove product `id` from the current user's cart, then show the cart."""
    product = Product.objects.get(id=id)
    profile = UserProfile.objects.get(user__username=request.user)
    entry = Cart.objects.get(user_profile=profile, product=product)
    entry.delete()
    return redirect('/buyer/cartvalue/')
|
16,155 | af752b3c391a1d0af023c22f211a476fc780e91a | # Usually you would have to create list to store the numbers
# Generators are good for large data
def gencubes(n):
    """Lazily yield the cubes 0**3, 1**3, ..., (n-1)**3."""
    i = 0
    while i < n:
        yield i ** 3
        i += 1
# Demo: consume the generator lazily — cubes are produced one at a time.
for x in gencubes(10):
    print(x)
def genfibon(n):
    """Lazily yield the first n Fibonacci numbers, starting 1, 1."""
    prev, curr = 1, 1
    for _ in range(n):
        yield prev
        prev, curr = curr, prev + curr
# Demo: print the first 10 Fibonacci numbers from the generator.
for num in genfibon(10):
    print(num)
def fibon(n):
    """Return a list of the first n Fibonacci numbers, starting 1, 1.

    Eager counterpart of genfibon: builds the whole list up front.
    """
    out = []
    a, b = 1, 1
    while len(out) < n:
        out.append(a)
        a, b = b, a + b
    return out
# Demo: the eager list version returns everything at once.
print(fibon(10))
def simple_gen():
    """Yield the integers 0, 1, 2 in order."""
    yield 0
    yield 1
    yield 2
# Demo: drive a generator by hand with next(); each call resumes the
# generator at its last yield.
g = simple_gen()
print(next(g))
print(next(g))
# Strings are iterable directly...
s = 'hello'
for let in s:
    print(let)
# ...and iter() exposes the same iterator protocol explicitly.
s_iter = iter(s)
print(next(s_iter))
16,156 | b4acd1f20e22c6595661f873946cf5384b773419 | import mutable_attr
import unittest
class T(unittest.TestCase):
    """Smoke test for module-level attribute mutation on mutable_attr."""

    def test_foo(self):
        # The original test set the attribute but asserted nothing, so it
        # could never fail; verify the write is actually observable.
        mutable_attr.y = 3
        self.assertEqual(mutable_attr.y, 3)
|
16,157 | e3ffaff0b7d907b789a1b2583c36ebd76d3cc3f7 | def show_characters_v1(name):
for index in range(len(name)):
print(name[index], end=' ')
else:
print()
def show_characters_v2(name):
    """Print the characters of `name` in reverse order, space separated,
    then a newline.

    Rewritten from manual negative-index arithmetic
    (`range(-1, -(len(name)+1), -1)`) to `reversed()`, and the always-run
    `for ... else` newline is now an ordinary trailing print().
    """
    for ch in reversed(name):
        print(ch, end=' ')
    print()
def show_reverse_string(data):
    """Print `data` reversed."""
    print(''.join(reversed(data)))
def show_swapcase(data):
    """Print `data` with the case of every cased character flipped."""
    flipped = data.swapcase()
    print(flipped)
|
16,158 | 25cfb7b3b3db9c29a5634826424c7f372c2ff387 | # Generated by Django 2.0.5 on 2018-05-26 18:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: add the FlightNumber model and rename Airport.icoa -> icao."""
    dependencies = [
        ('core', '0009_auto_20180526_1703'),
    ]
    operations = [
        # New table `flight_number`: two airline letters plus a numeric part
        # stored as text, with a separate integer column for ordering and a
        # cascading FK to core.Airline.
        migrations.CreateModel(
            name='FlightNumber',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('airline_letters', models.CharField(max_length=2)),
                ('number', models.CharField(max_length=5)),
                ('number_ordering', models.IntegerField()),
                ('airline', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Airline')),
            ],
            options={
                'db_table': 'flight_number',
            },
        ),
        # Fix the mis-spelled ICAO-code field name on Airport.
        migrations.RenameField(
            model_name='airport',
            old_name='icoa',
            new_name='icao',
        ),
    ]
|
16,159 | 60bdf45624e27d27b344b3e5a637e27036fcda29 | from django.contrib import admin
from django.urls import path,include
import blog.views
import portfolio.views
from django.conf import settings
from django.conf.urls.static import static
# Project URL map. static() appends MEDIA file serving — this is Django's
# development-only helper; a real web server should serve media in production.
urlpatterns = [
    path('',blog.views.home,name='home'),  # site landing page (blog home)
    path('admin/', admin.site.urls),
    path('blog/',include('blog.urls')),
    path('accounts/', include('accounts.urls')),
    path('portfolio/',portfolio.views.portfolio,name="portfolio"),
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
16,160 | 0c21194637def469619988ab25ad681fa1d594ac | import serial
import crc16
import sys
# hex(intAddress)[2:].zfill(6).decode('hex')
# more intellegent
def convert(int_value):
    """Pack a non-negative integer into its big-endian byte string.

    Python 2 only: relies on str.decode('hex'), which does not exist on
    Python 3 (binascii.unhexlify would be the portable equivalent).
    """
    encoded = format(int_value, 'x')
    length = len(encoded)
    # zero-pad to an even number of hex digits: each output byte needs two
    encoded = encoded.zfill(length+length%2)
    return encoded.decode('hex')
def oneRXTX(intAddress, strCmd):
    """Send one framed command to device `intAddress` over the global serial
    port and print the request, the response, and a CRC verdict.

    Python 2 only (byte strings, print statement). Frame layout per this
    code: 0x00 + big-endian address bytes + command + CRC16 (Modbus seed)
    with its two bytes reversed ([::-1]) on the wire.
    """
    rq = '\x00'+convert(intAddress)+strCmd
    # append CRC16 over the frame; [::-1] swaps the CRC byte order
    rq = rq + convert(crc16.calcString(rq, crc16.INITIAL_MODBUS))[::-1]
    ser.write(rq)
    rs = ser.read(size=100)
    # recompute CRC over the response payload and compare with its last two
    # (byte-swapped) bytes
    if convert( crc16.calcString(rs[:-2], crc16.INITIAL_MODBUS) ) == rs[-2:][::-1]:
        crcCheck = "CRC OK"
    else:
        crcCheck = "CRC BAD!!!"
    print "request:\t", hexString(rq), "\nresponse:\t", hexString(rs), '\n',crcCheck
#if __name__ == '__main__':
# Pretty-print a byte string as space-separated hex pairs (Python 2:
# str.encode('hex')).
hexString = lambda byteString : " ".join(x.encode('hex') for x in byteString)
# Open the serial adapter: 9600 baud, 8N1, 300 ms read timeout, no flow
# control.
ser = serial.Serial(port='/dev/ttyUSB0',
                    baudrate=9600,
                    bytesize=8,
                    parity='N',
                    stopbits=1,
                    timeout=0.3,
                    xonxoff=0,
                    rtscts=0)
#sample rq&rs
# Hard-coded sample frame (command 0x11 0xE1 with CRC already appended).
getPowerCRC = "\x00\x08\x11\xE1\x26\xBF\xEF"
ser.write(getPowerCRC)
response = ser.read(size=100)
hexString(response)  # NOTE(review): result is discarded — likely meant to be printed
address = 528865
# Command byte comes from argv[1] when given, else defaults to 0x63.
if len(sys.argv)==2:
    cmd = sys.argv[1]
else:
    cmd = '\x63'
oneRXTX(address,cmd)
16,161 | fc0f3b4d6cb96a5a6a217b7cb13e28b8fcb85277 | #CODING QUESTIONS:5
# Read one line and echo each character only the first time it appears,
# preserving first-seen order, with no separator and no trailing newline.
text = str(input())
seen = set()
for ch in text:
    if ch in seen:
        continue
    seen.add(ch)
    print(ch, end="")
|
16,162 | 701275a903520147b79b559b7121b29824b96599 | #! /usr/bin/env python
import os
import sys
import xml.dom.minidom
import requests
def manifest_analysis(source):
#List of Android dangerous permissions
DangPer = ['android.permission.READ_CALENDAR','android.permission.WRITE_CALENDAR','android.permission.CAMERA','android.permission.READ_CONTACTS','android.permission.WRITE_CONTACTS','android.permission.GET_ACCOUNTS','android.permission.ACCESS_FINE_LOCATION','android.permission.ACCESS_COARSE_LOCATION','android.permission.RECORD_AUDIO','android.permission.READ_PHONE_STATE','android.permission.READ_PHONE_NUMBERS','android.permission.CALL_PHONE','android.permission.ANSWER_PHONE_CALLS','android.permission.READ_CALL_LOG','android.permission.WRITE_CALL_LOG','com.android.voicemail.permission.ADD_VOICEMAIL','android.permission.USE_SIP','android.permission.PROCESS_OUTGOING_CALLS','android.permission.ANSWER_PHONE_CALLS','android.permission.BODY_SENSORS','android.permission.SEND_SMS','android.permission.RECEIVE_SMS','android.permission.READ_SMS','android.permission.RECEIVE_WAP_PUSH','android.permission.RECEIVE_MMS','android.permission.READ_EXTERNAL_STORAGE','android.permission.WRITE_EXTERNAL_STORAGE']
keywords = ['key','Key','KEY','password']
Buffer = open(source,'r')
manifest = xml.dom.minidom.parse(Buffer)
application = manifest.getElementsByTagName('application')
#activities = manifest.getElementsByTagName('activity')
uses_permissions = manifest.getElementsByTagName('uses-permission')
permissions = manifest.getElementsByTagName('permission')
intent_filters = manifest.getElementsByTagName('intent-filter')
meta_datas = manifest.getElementsByTagName('meta-data')
services = manifest.getElementsByTagName('service')
providers = manifest.getElementsByTagName('provider')
receivers = manifest.getElementsByTagName('receiver')
uses_sdks = manifest.getElementsByTagName('uses-sdk')
application_permission = None #define the general permission of the app declared in <application>
targetSdkVersion = None
#Analysing Application's parameters"
for app in application:
if app.getAttribute("android:debuggable")=="true":
des="Allow the application to generate debugging messages."
imp ="allowing application debugging leads to application critical information leaking."
recom = "you must set android:debuggable parameter to false in AndroidManifest file."
state = 'ERROR'
print "\nparameter : Debugging\nvalue : True\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(des,imp,recom,state)
else:
des="The application does not generate debugging Messages."
state = 'Good'
print "\nparameter : Debugging\nvalue : False\ndescription %s\nstatus : %s\n"%(des,state)
if app.getAttribute("android:permission"):
application_permission = app.getAttribute("android:permission")
if app.getAttribute("android:allowBackup") == "true":
des="Allowing the application to create and restore a copy of its internal data."
imp = "generating the applications backup increase the possibility of user data leakge."
recom ="AllowBackup parameter must be set to false in AndroidManifest file."
state = 'ERROR'
print "\nparameter : backup\nvalue : True\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(des,imp,recom,state)
else:
des="The application does not create or restore a copy of its internal data."
state = 'Good'
print "\nparameter : backup\nvalue : False\ndescription %s\nstatus : %s\n"%(des,state)
#Analysing application's uses-permissions
for usesper in uses_permissions:
value = usesper.getAttribute("android:name")
if value == "android.permission.WRITE_EXTERNAL_STORAGE":
des=" This permission Allows the application to write,modify or delete the contents of the SD card"
imp = "Data stored in extrnal storage can be accessed by any application with read access or modified by any application with write access, this may violate data confidentiality and integrity."
recom = "it is highly recommended to use internal storage."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_EXTERNAL_STORAGE":
des="This permission Allows the application to read the contents of the SD card."
imp = "This permission allows the application to read other applications data stored in the SD card which violate data confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.RECEIVE_MMS":
des="This permission Allows the application to monitor incoming MMS messages."
imp = "MMS messages may contain user personal data. Using this permission may lead to violate data confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.RECEIVE_WAP_PUSH":
des="This permission Allows the application to receive WAP push messages."
imp = "This permission allows the applications to monitor users messages or delete them without his/her knowledge, this may be used to violate messages availibility and integrity."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_SMS":
des="This permission Allows the application to read SMS messages."
imp = "SMS may contain user personal data,this leads to violate data confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.RECEIVE_SMS":
des="This permission Allows the application to receive SMS messages."
imp = "SMS may contain user personal data,this leads to violate data confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.SEND_SMS":
des="This permission Allows the application to send SMS messages."
imp = "The permission may result in unexpected charges without user confirmation."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.BODY_SENSORS":
des="This permission Allows the application to access data from sensors that the user uses to measure what is happening inside his/her body, such as heart rate."
imp = "The permission allows access to users critical and personal health data, which violate user privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_PHONE_NUMBERS":
des="This permission Allows the application to access to the devices phone numbers."
imp = "The permission allows access to users critical and personal data, which violate user privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.PROCESS_OUTGOING_CALLS":
des="This permission Allows the application to see the number being dialed during an outgoing call with the option to redirect the call to a different number or abort the call altogether."
imp = "The permission may affect the user privacy and the calling service availability."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "com.android.voicemail.permission.ADD_VOICEMAIL":
des="This permission Allows the application to add voicemails into the system."
imp = "The permission may affects voicemail data integrity."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_CALL_LOG":
des="This permission Allows the application to read the users calls log."
imp = "The permission may be used to violate the calls log confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.WRITE_CALL_LOG":
des="This permission Allows the application to write and modify the users calls log."
imp = "The permission may be used to violate the calls log integrity."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\n description : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.ANSWER_PHONE_CALLS":
des="This permission Allows the application to answer an incoming phone call."
imp = "The permission may be used to violate the user privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\n description : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.CALL_PHONE":
des="This permission Allows the application to initiate a phone call without going through the Dialer user interface for the user to confirm the call."
imp = "The permission may result in unexpected charges without user confirmation."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_PHONE_STATE":
des="This permission Allows the application to access the phone state, including the phone number of the device, current cellular network information, the status of any ongoing calls, and a list of any PhoneAccounts registered on the device."
imp = "The permission may be used to violate the user privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.RECORD_AUDIO":
des="This permission Allows the application to record audio with the microphone at any time without the user confirmation."
imp = "The permission may be used to spy on the user and violate their privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.ACCESS_COARSE_LOCATION":
des="This permission Allows the application to access approximate location."
imp = "This permission Allows the application to spy on the user and determine their location, this violate users privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.ACCESS_FINE_LOCATION":
des="This permission Allows the application to access precise location."
imp = "This permission Allows the application to spy on users and determine their location, this violate users privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.GET_ACCOUNTS":
des="This permission Allows the application to get the list of accounts in the Accounts Service include accounts created by other applications installed on the same device."
imp = "The permission may violate users data confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.WRITE_CONTACTS":
des="This permission Allows the application to modify the data about user contacts."
imp = "The permission may be used to violate contacts data integrity."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_CONTACTS":
des="This permission Allows the application to read the user contacts data including the frequency with which the user have called, emailed, or communicated with them in other ways."
imp = "The permission allows access to users critical and personal data, which violate user privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.CAMERA":
des="This permission Allows the application to take pictures and videos with the device camera."
imp = "The permission allows the app to use the camera at any time without the user confirmation. This may be used to spy on users and violate their privacy."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.WRITE_CALENDAR":
des="This permission Allows the application to write or modify the users calendar data."
imp = " The permission may be used to violate Calendar integrity."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
elif value == "android.permission.READ_CALENDAR":
des="This permission Allows the application to read the users calendar data."
imp = "The permission may be used to violate Calendar confidentiality."
recom = "The application must not request this permission unless it is required for the application to function correctly."
state = 'Warning'
print "\nparameter : %s\nvalue : Dangerous permission\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,imp,recom,state)
else:
des=" The application asks for a SAFE PERMISSION, this kind of permission is granted automatically and do not violate the user privacy."
state = 'Good'
print "\nparameter : %s\nvalue : Safe permission\ndescription %s\nstatus : %s\n"%(usesper.getAttribute("android:name"),des,state)
#Analysing application's permissions"
for permission in permissions:
protectLevel = permission.getAttribute("android:protectionLevel")
if (not protectLevel) or (protectLevel == "normal"):
des="The system will automatically grant this permission to a requesting application at installation, without asking for the user explicit approval."
imp = "normal protection level permissions may lead to critical data and features sharing if not used carefuly."
recom = "Define and user custom permissions carefuly. if you want to share data between your own applications, it is recommanded to use Signature protection level permissions."
state = 'Warning'
print "\nparameter : %s\nvalue : normal Protection Level\n description : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(permission.getAttribute("android:name"),des,imp,recom,state)
elif protectLevel == "dangerous":
des="The system will not automatically grant this permission to a requesting application unless the user explicitly confirm it."
state = 'Good'
print "\nparameter : %s\nvalue : dangerous Protection Level\ndescription %s\nstatus : %s\n"%(permission.getAttribute("android:name"),des,state)
elif protectLevel == "signature":
des="The system will grant this permission only if a requesting application is signed with the same certificate as your application."
state = 'Good'
print "\nparameter : %s\nvalue : signature Protection Level\ndescription %s\nstatus : %s\n"%(permission.getAttribute("android:name"),des,state)
#Analysing application's uses-sdks"
for uses_sdk in uses_sdks:
minVersion = uses_sdk.getAttribute("android:minSdkVersion")
maxVersion = uses_sdk.getAttribute("android:maxSdkVersion")
targetSdkVersion = uses_sdk.getAttribute("android:targetSdkVersion")
flag = 0
if not(minVersion):
flag = 1
des="minSdkVersion is not declared which indicate that your application is compatible with all android versions."
imp = "This parameter impact your application disponibility.which means that your application will crush at runtime if not compatible with a given android version."
recom = "minSdkVersion parameter must be set to a value above 1."
state = 'ERROR'
print "\nparameter : minSdkVersion\nvalue : None\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(des,imp,recom,state)
elif minVersion == 1:
des="minSdkVersion is set to 1 which indicate that your application is compatible with all android versions."
imp = "This parameter impact your application disponibility.which means that your application will crush at runtime if not compatible with a given android version"
recom = "minSdkVersion parameter must be set to a value above 1."
state = 'ERROR'
print "\nparameter : minSdkVersion\nvalue : 1\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(des,imp,recom,state)
if maxVersion:
flag = 1
des="The application is only compatible with systems with API Level below or equal to %d"%(maxVersion)
imp = "This parameter impact your application disponibility.If the API Level used by the system is higher than the maxSdkVersion, the system will prevent the installation of the application furthermore it will result in your application being removed from users devices after a system update to a higher API Level."
recom = "Declaring maxSdkVersion attribute is not recommended and should be keeped void."
state = 'ERROR'
print "\nparameter : maxSdkVersion\nvalue : %s\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(maxVersion,des,imp,recom,state)
#Analysing application's services"
for service in services:
if service.getAttribute("android:permission"):
des="the service is protected with a specific permission, this way the service data are only shared with legitime applications."
state = 'Good'
print "\nparameter : %s\nvalue : Service\ndescription %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,state)
elif not(service.getAttribute("android:exported")):
ifilter = False
sp = False
for node in service.childNodes:
if node.nodeName == 'intent-filter':
ifilter = True
if service.getAttribute("android:permission") or application_permission:
sp = True
if ifilter and (not(sp)):
des="the service is exported but not protected by any specific permission."
imp = "Exporting services without any permission may lead to critical features sharing with other application."
recom ="It is recommanded to define a permission when exporting a service using android:permission parameter, this way you limit the acces to applications services."
state = 'Warning'
print "\nparameter : %s\nvalue : Service\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,imp,recom,state)
elif not(ifilter):
des="the service is not exported with external applications, which means that its data is internal to the application"
state = 'Good'
print "\nparameter : %s\nvalue : Service\ndescription %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,state)
elif ifilter and sp:
des="the service is exported but only with applications which have specific permission. this way service data are only shared with legitime applications"
state = 'Good'
print "\nparameter : %s\nvalue : Service\ndescription %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,state)
elif service.getAttribute("android:exported") == 'true':
if not(service.getAttribute("android:permission") or application_permission):
des="the service is exported but not protected by any specific permission."
imp = "Exporting services without any permission may lead to critical features sharing with other application."
recom ="It is recommanded to define a permission when exporting a service using android:permission parameter, this way you limit the acces to applications services."
state = 'Warning'
print "\nparameter : %s\nvalue : Service\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,imp,recom,state)
else:
des="the service is exported but only with applications which have specific permission. this way service data are only shared with legitime applications."
state = 'Good'
print "\nparameter : %s\nvalue : Service\ndescription %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,state)
else:
des="the service is not exported with external applications, which means that its data is internal to the application."
state = 'Good'
print "\nparameter : %s\nvalue : Service\ndescription %s\nstatus : %s\n"%(service.getAttribute("android:name"),des,state)
#Analysing application's receivers"
for receiver in receivers:
if receiver.getAttribute("android:permission"):
des="the Broadcast receiver is protected with a specific permission, this way the receiver data are only shared with legitime applications."
state = 'Good'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,state)
elif not(receiver.getAttribute("android:exported")):
ifilter = False
sp = False
for node in receiver.childNodes:
if node.nodeName == 'intent-filter':
ifilter = True
if receiver.getAttribute("android:permission") or application_permission:
sp = True
if ifilter and (not(sp)):
des="the Broadcast receiver is exported but not protected by any specific permission."
imp = "Exporting Broadcast receivers without any permission may allow malicious or unautorized applications to receive critical broadcast data."
recom ="It is recommanded to define a permission when exporting a Broadcast receivers using android:permission parameter, this way you limit the acces to applications Broadcast receivers."
state = 'Warning'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,imp,recom,state)
elif not(ifilter):
des="the Broadcast receiver is not exported with external applications, which means that its data is internal to the application"
state = 'Good'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,state)
elif ifilter and sp:
des="the Broadcast receiver is exported but only with applications which have specific permission. this way receiver data are only shared with legitime applications"
state = 'Good'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,state)
elif receiver.getAttribute("android:exported") == 'true':
if not(receiver.getAttribute("android:permission") or application_permission):
des="the Broadcast receiver is exported but not protected by any specific permission."
imp = "Exporting Broadcast receivers without any permission may allow malicious or unautorized applications to receive critical broadcast data."
recom ="It is recommanded to define a permission when exporting a Broadcast receivers using android:permission parameter, this way you limit the acces to applications Broadcast receivers."
state = 'Warning'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,imp,recom,state)
else:
des="the receiver is exported but only with applications which have specific permission. this way receiver data are only shared with legitime applications"
state = 'Good'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,state)
else:
des="the receiver is not exported with external applications, which means that its data is internal to the application"
state = 'Good'
print "\nparameter : %s\nvalue : Broadcast receiver\ndescription %s\nstatus : %s\n"%(receiver.getAttribute("android:name"),des,state)
#Analysing application's providers"
for provider in providers:
# before API level 17 content providers were exported by default
tmp_grants = provider.getElementsByTagName('grant-uri-permission')
if targetSdkVersion <= 17:
if provider.getAttribute("android:permission") or provider.getAttribute("android:readpermission") or provider.getAttribute("android:writepermission"):
des="the Content provider is protected with a specific permission, this way the provider data are only shared with legitime applications"
state = 'Good'
print "\nparameter : %s\nvalue : Content provider\ndescription %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,state)
elif provider.getAttribute("android:exported") == 'false':
des="the provider is not exported with external applications, which means that its data is internal to the application"
state = 'Good'
print "\nparameter : %s\nvalue : Content provider\ndescription %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,state)
else:
des="the content provider is shared with other applications without being protected by any specific permission."
imp ="Exporting Content providers without any permission, allow other applications to read the content providers data, which leads to data confidentiality violation."
recom = "It is recommanded to define a permission when exporting a content provider using android:permission, android:readpermission or android:writepermission parameter, this way you limit the acces to applications Content providers."
state = 'Warning'
print "\nparameter : %s\nvalue : Provider\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,imp,recom,state)
elif provider.getAttribute("android:grantUriPermissions") == 'true':
des="the Content provider is exported temporarily with other applications.this way the content provider data are only shared when truely needed."
state = 'Good'
print "\nparameter : %s\nvalue : Content provider\ndescription %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,state)
elif tmp_grants :
des="the Content provider only share a subsets of app data temporarily with other applications.this way only a subsets of the content provider data are shared when truely needed."
state = 'Good'
print "\nparameter : %s\nvalue : Content provider\ndescription %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,state)
elif provider.getAttribute("android:exported")=='true':
if not(provider.getAttribute("android:permission") or provider.getAttribute("android:readpermission") or provider.getAttribute("android:writepermission")):
des="the content provider is shared with other applications without being protected by any specific permission."
imp ="Exporting Content providers without any permission, allow other applications to read the content providers data, which leads to data confidentiality violation."
recom = "It is recommanded to define a permission when exporting a content provider using android:permission, android:readpermission or android:writepermission parameter, this way you limit the acces to applications Content providers."
state = 'Warning'
print "\nparameter : %s\nvalue : Provider\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,imp,recom,state)
else:
des="the provider is exported but only with applications which have specific permission. this way the content provider data are only shared with legitime applications"
state = 'Good'
print "\nparameter : %s\nvalue : Content provider\ndescription %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,state)
else:
des="the provider is not exported with external applications, which means that its data is internal to the application"
state = 'Good'
print "\nparameter : %s\nvalue : Content provider\ndescription %s\nstatus : %s\n"%(provider.getAttribute("android:name"),des,state)
#Analysing application's meta-data"
boolean=0
for meta_data in meta_datas:
if any(x in meta_data.getAttribute("android:name") for x in keywords):
boolean = 1
if meta_data.getAttribute("android:name").lower() == "com.google.android.geo.api_key":
apikey = meta_data.getAttribute("android:value")
response = requests.post(url="https://www.googleapis.com/geolocation/v1/geolocate?key=%s"%apikey,params={'Content-Type':'application/json'})
response_code = response.status_code
if response_code == 200:
des="you account Google geo services is accessible using your apikey found in AndroidManifest."
imp ="Hardcoded Apikeys may be used to abuse the developpers service account, by either consuming credit for paying services or making the serivce unavailable."
recom ="Apply SDK restriction on for your Google geo services account."
state = 'ERROR'
print "\nparameter : %s\nvalue : %s\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(meta_data.getAttribute("android:name"),meta_data.getAttribute("android:value"),des,imp,recom,state)
if response_code == 403:
des="Access to your Google geo services account is restricted"
state = 'Good'
print "\nparameter : Hardcoded ApiKeys\nvalue : None\ndescription %s\nstatus : %s\n"%(des,state)
else:
des="Hardcoded ApiKey found in AndroidManifest file."
imp ="Hardcoded Apikeys may be used to abuse the developpers service account, by either consuming credit for paying services or making the serivce unavailable."
recom ="It is recommended to store Apikeys at a remote endpoint and get them on runtime or save them in resource files or as building variables in gradle."
state = 'ERROR'
print "\nparameter : %s\nvalue : %s\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(meta_data.getAttribute("android:name"),meta_data.getAttribute("android:value"),des,imp,recom,state)
if (meta_data.getAttribute("android:name") == "android.webkit.WebView.EnableSafeBrowsing") and (meta_data.getAttribute("android:value") == "false"):
des="Android SafeBrowing is disabled for webview within your application"
imp ="When disabling SafeBrowsing for webview within your application your application is exposed to a security risk and could load URLs wich may contain malicious content such as Trojans."
recom ="It is recommended to enable SafeBrowsing for webview in your application. Furthermore you can customize your application response to URLs with known threats using Android SafeBrowsing API."
state = 'ERROR'
print "\nparameter : %s\nvalue : %s\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(meta_data.getAttribute("android:name"),meta_data.getAttribute("android:value"),des,imp,recom,state)
if boolean == 0:
des="No hardcoded Apikey found in resources files"
state = 'Good'
print "\nparameter : Hardcoded ApiKeys\nvalue : None\ndescription %s\nstatus : %s\n"%(des,state)
#Analysing Intent-filter for URL Schemes
boolean = 0
for intent in intent_filters:
mimeType = False
scheme = False
AppLinks = None
deepLink = None
datas = intent.getElementsByTagName("data")
if targetSdkVersion >=23:
if intent.getAttribute("android:autoVerify") == "true":
AppLinks = True
else:
AppLinks = False
else:
deepLink = True
for data in datas:
if data.getAttribute("android:scheme") and data.getAttribute("android:host"):
scheme = True
if data.getAttribute("android:mimeType"):
mimeType = True
if scheme and AppLinks:
des="App Links assure that the OS launch the URL directly by the application.misconfiguration could be tricky and leads to link hijacking and phishing make sure to implement it properly."
state = 'Good'
print "\nparameter : AppLinks\nvalue : Enabled\ndescription : %s\nstatus : %s\n"%(des,state)
boolean = 1
elif scheme and not AppLinks:
des="The application uses URL scheme to open URLs."
imp = "URL scheme are vulnerable to hijacking and phishing attacks"
recom = "you must consider implementing App Links, to associate the app component with your website and prventing other apps from opening the URL."
state = 'Warning'
print "\nparameter : AppLinks\nvalue : Disabled\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(des,imp,recom,state)
boolean = 1
elif scheme and deepLink:
des="The application uses URL scheme to open URLs."
imp = "URL scheme are vulnerable to hijacking and phishing attacks."
recom = "you must consider implementing App Links, to associate the app component with your website and prventing other apps from opening the URL. Availble for API Level 23 and above."
state = 'Warning'
print "\nparameter : DeepLinks\nvalue : Enabled\ndescription : %s\nimpact : %s\nrecommandation : %s\nstatus : %s\n"%(des,imp,recom,state)
boolean = 1
if boolean == 0:
des="No URL scheme are used within the application which decrease hijacking and phishing attacks possibility."
state = 'Good'
print "\nparameter : DeepLinks\nvalue : Disabled\ndescription : %s\nstatus : %s\n"%(des,state)
def main():
    """CLI entry point: run the manifest analysis on the path given as argv[1]."""
    source = sys.argv[1]
    manifest_analysis(source)

if __name__ == "__main__":
    main()
|
16,163 | 4dd8073d316831ff645a6549703c1497e316d8e2 | from django.shortcuts import render,redirect
from django.contrib.auth.models import User,auth
from django.contrib import messages
# Create your views here.
def login(request):
    """Authenticate posted credentials; start a session and go home on success."""
    if request.method != 'POST':
        # Plain GET: show the login form.
        return render(request, 'login.html')
    user = auth.authenticate(
        username=request.POST['username'],
        password=request.POST['password'],
    )
    if user is None:
        # Bad credentials: back to the login page.
        return redirect('login')
    auth.login(request, user)
    return redirect('/')
def register(request):
    """Create a new user account from the posted registration form.

    GET renders the blank form; POST validates password match and
    username/email uniqueness, creates the user, and redirects to login.
    Validation failures flash a message and return to the form.
    """
    if request.method != 'POST':
        return render(request, 'register.html')

    first_name = request.POST['first_name']
    last_name = request.POST['last_name']
    email = request.POST['email']
    username = request.POST['username']
    password1 = request.POST['password1']
    password2 = request.POST['password2']

    if password1 != password2:
        messages.info(request, "Password not matching")
        return redirect('register')
    if User.objects.filter(username=username).exists():
        messages.info(request, "Username already exists")
        return redirect('register')
    if User.objects.filter(email=email).exists():
        messages.info(request, "Email already exists")
        return redirect('register')

    # create_user() hashes the password and saves the row itself, so the
    # original's redundant user.save() is dropped.
    User.objects.create_user(
        first_name=first_name,
        last_name=last_name,
        email=email,
        username=username,
        password=password1,
    )
    messages.info(request, "User is created")
    return redirect('login')
def logout(request):
    """End the current session and return to the home page."""
    auth.logout(request)
    return redirect('/')
def profile(request):
    """Render the profile page template."""
    return render(request, 'profile.html')
|
16,164 | 05cc64f0bbfbcc96688feab238e0c3f22ae25998 | import sys
import importlib
importlib.reload(sys) # Reload does the tricsys.setdefaultencoding('UTF8')
sys.path.insert(0, '../../../src/')
from models import Updatelog
sys.path.insert(0, '../')
from database_session import get_nex_session as get_session
from config import CREATED_BY
__author__ = 'sweng66'
file = "data/update_log.txt-1"
def load_data():
    """Load rows from the tab-separated update-log dump into the database.

    Commits every 500 inserts and recycles the DB session every 200000
    rows to keep memory bounded; a final commit flushes the remainder.
    """
    nex_session = get_session()
    pending = 0       # rows inserted since the last commit
    since_reset = 0   # rows inserted since the session was last recycled
    # 'with' guarantees the file handle is closed even if an insert raises
    # (the original leaked it on error).
    with open(file) as f:
        for line in f:
            pieces = line.strip().split("\t")
            insert_update_log(nex_session, pieces)
            pending += 1
            since_reset += 1
            if pending == 500:
                nex_session.commit()
                pending = 0
            if since_reset == 200000:
                # Commit before recycling the session so up-to-499 buffered
                # inserts are not silently discarded on close (bug in the
                # original, which closed without committing).
                nex_session.commit()
                pending = 0
                nex_session.close()
                nex_session = get_session()
                since_reset = 0
    nex_session.commit()
def insert_update_log(nex_session, x):
    """Build one Updatelog row from a split dump line and add it to the session."""
    print("Load ", x[0], x[1], x[2], x[3], x[4], x[5], x[6], x[7])
    row = Updatelog(
        bud_id=int(x[0]),
        tab_name=x[1],
        col_name=x[2],
        primary_key=int(x[3]),
        # Old dumps use carriage returns where newlines are meant.
        old_value=x[4].replace("\r", "\n"),
        new_value=x[5].replace("\r", "\n"),
        date_created=str(x[6]),
        created_by=x[7],
    )
    nex_session.add(row)
if __name__ == '__main__':
    # Script entry point: run the full load.
    load_data()
|
16,165 | f4fbd5fcb3201d748645f61db511bdeb497ad0d3 | class C:
def method(self):
print(r'foobar' f'{self}') |
16,166 | 0d6e36f22098d573a50f34b6fedc70da1be1ecfc | # -*- coding: utf-8 -*-
# Projeto 9 - Processamento de imagens
from __future__ import print_function
import cv2 as cv
import numpy as np
def imprime_imagem(tipo, imagem):
    """Display `imagem` in a window titled `tipo`, wait for a key, then close."""
    cv.imshow(tipo, imagem)
    cv.waitKey(0)
    cv.destroyAllWindows()
def cria_nova_imagem(altura, largura):
    """Create a new all-white (255) 3-channel uint8 image of the given size."""
    return np.full((altura, largura, 3), 255, np.uint8)
def quantiza(imagem):
    """Threshold the image in place to pure black/white and return it.

    Pixels below 127 become 0, the rest become 255 — same result as the
    original per-pixel double loop, but vectorized (O(n) in C instead of
    a Python-level O(rows*cols) loop). Mutates and returns `imagem`.
    """
    escuro = imagem < 127
    imagem[escuro] = 0
    imagem[~escuro] = 255
    return imagem
def erosao(imagem):
    """Morphological erosion: a pixel stays black only when its whole
    4-neighbourhood is black (see precisa_erodir)."""
    # Output starts as an all-white 3-channel canvas of the same size.
    nova_imagem = cria_nova_imagem(imagem.shape[0], imagem.shape[1])
    # NOTE(review): the loops stop 2 pixels short of the right/bottom edges,
    # and precisa_erodir's i-1/j-1 wraps around at index 0 — border
    # behaviour looks unintended; confirm before relying on edge pixels.
    for i in range(imagem.shape[0] - 2):
        for j in range(imagem.shape[1] - 2):
            if not precisa_erodir(imagem, i, j):
                nova_imagem[i, j] = 0
    return nova_imagem
def precisa_erodir(imagem, i, j):
    """Return True when pixel (i, j) must be eroded, i.e. when it or any
    of its 4-neighbours is not black (0)."""
    vizinhanca = (
        imagem[i, j],
        imagem[i - 1, j],
        imagem[i, j - 1],
        imagem[i, j + 1],
        imagem[i + 1, j],
    )
    # Erode unless the pixel and all four neighbours are black.
    return not all(v == 0 for v in vizinhanca)
def dilatacao(imagem):
    """Morphological dilation: paint the 4-neighbours of every black pixel."""
    nova_imagem = cria_nova_imagem(imagem.shape[0], imagem.shape[1])
    # NOTE(review): only the four neighbours are painted black — the centre
    # pixel (i, j) itself is never set, which looks unintended; confirm.
    for i in range(imagem.shape[0] - 2):
        for j in range(imagem.shape[1] - 2):
            if precisa_dilatar(imagem, i, j):
                nova_imagem[i - 1, j] = 0
                nova_imagem[i, j - 1] = 0
                nova_imagem[i, j + 1] = 0
                nova_imagem[i + 1, j] = 0
    return nova_imagem
def precisa_dilatar(image, i, j):
    """Return True when pixel (i, j) of `image` is black and should be dilated.

    Bug fix: the original body read the module-level global ``imagem``
    instead of its ``image`` parameter, so the array actually passed in
    was ignored.
    """
    return bool(image[i, j] == 0)
def abertura(imagem):
    """Morphological opening: erosion then dilation, displaying each step."""
    imagem = erosao(imagem)
    imprime_imagem('Imagem erodida', imagem)
    imagem = dilatacao(imagem)
    imprime_imagem('Imagem dilatada', imagem)
    return imagem
# Script driver: load lena in grayscale, binarise it, then apply morphological
# opening, displaying each intermediate result.
imagem = cv.imread('../Imagens/lena.bmp', cv.IMREAD_GRAYSCALE)
imprime_imagem('Imagem original', imagem)
imagem = quantiza(imagem)
imprime_imagem('Imagem binarizada', imagem)
imagem = abertura(imagem)
|
16,167 | 7cc8dfbd0f3491bb6ad26ec32d6d24bf869ba9b9 | class Solution:
def numUniqueEmails(self, emails) -> int:
emails_modified = []
for email in emails:
at_index = email.index('@')
local_name = email[:at_index]
domain_name = email[at_index:]
# ignore everything after first plus sign in local name
plus_index = local_name.find('+')
if plus_index != -1:
local_name = local_name[:plus_index]
# remove '.'
local_name = ''.join([c for c in local_name if c != '.'])
email = local_name + domain_name
emails_modified.append(email)
unique_emails = []
count = 0
for email in emails_modified:
if email not in unique_emails:
count += 1
unique_emails.append(email)
return count
|
16,168 | ae4f8f72366c2bbf09ba0f1d622d638869803aa4 | #!/usr/bin/python
from pathlib import Path
from requests import get as req
from lxml import html
import re
import json
head = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36'}
root = Path(__file__).parent
scale_dump = root / 'scale_data.json'
pages_dump = root / 'scale_pages.json'
result_file = root / 'scale_data.txt'
scales_list = (root / 'scales.txt').read_text().splitlines()
scale_page_template = {
'title': '//span[@id="productTitle"]',
'feats': '//div[@id="feature-bullets"]',
'desc': '//div[normalize-space(translate(h2/text(), "PD", "pd"))="product description"]',
'dets': '//div[contains(@id, "etail") and starts-with(normalize-space(.//h2/text()), "Product")]',
'price': '//span[starts-with(@id, "priceblock_")]'
}
# Feature-extraction patterns, one per scraped property. All patterns are raw
# strings: in the original, the non-raw '\bbmi\b' / '\btape\b' literals turned
# \b into a backspace character, so those word-boundary patterns could never
# match real page text.
regexs = {
    'model': re.compile(r'model.+', re.I | re.M),
    'size': re.compile(r'[.\d]+[ x\W]{0,9}[.\d]+', re.A),
    'lcd': re.compile(r'[.\d]+.+lcd', re.I | re.M),
    'glass_thickness': re.compile(r'[.\d]+.+glass', re.I | re.M),
    'bmi': re.compile(r'\bbmi\b', re.I),
    'tracking': re.compile(r'memory', re.I),
    'tape': re.compile(r'\btape\b', re.I),
    'precision': re.compile(r'(increment.+)?(\d?\.\d+ ?lb)(.+increment)?', re.I | re.M),
    'handle': re.compile(r'handle', re.I),
    'price': re.compile(r'\$\d+\.\d+')
}
page_parts = {
'model': ['dets'],
'size': ['dets', 'desc', 'feats'],
'lcd': ['feats', 'desc'],
'glass_thickness': ['feats', 'desc'],
'bmi': ['title', 'feats', 'desc'],
'tracking': ['title', 'feats', 'desc'],
'tape': ['title', 'feats', 'desc'],
'precision': ['feats', 'desc'],
'handle': ['title', 'feats', 'desc'],
'price': ['price']
}
bool_keys = ['bmi', 'tracking', 'tape', 'handle']
scales = []
pages = {}
def scrapez(url, xdict):
    """Fetch `url` and fill `xdict` in place: each XPath expression is
    replaced by the stripped text content of its first match; the same
    dict is returned.

    A missing 'feats' section is tolerated (becomes ''); any other
    missing key prints the offending url/key and aborts the script.
    """
    doc = html.fromstring(req(url, headers=head).content)
    for key, xpath in xdict.items():
        try:
            xdict[key] = doc.xpath(xpath)[0].text_content().strip()
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # catching IndexError would be more precise here.
            if key == 'feats':
                xdict[key] = ''
                continue
            print('url = ' + url)
            print('key = ' + key)
            raise SystemExit
    return xdict
def parse_page(link, page_dict):
    """Extract scale properties from a scraped page and append the result
    dict to the module-level `scales` list.

    For each property, the page sections listed in `page_parts` are tried
    in order and the first regex hit wins. Boolean properties (bool_keys)
    become 'Yes'/'No'; the rest keep the matched text, or ''.
    """
    props = {}
    props['link'] = link
    for key, rx in regexs.items():
        for part in page_parts[key]:
            match = rx.search(page_dict[part])
            if match:
                props[key] = 'Yes' if key in bool_keys else match[0]
                break
        else:
            # for/else: no section matched this property.
            props[key] = 'No' if key in bool_keys else ''
    scales.append(props)
# Script driver: scrape every scale page, extract its properties, then dump
# the raw pages and parsed data as JSON plus a tab-separated summary report.
for i, link in enumerate(scales_list):
    print(f'Processing Scale {i+1}/{len(scales_list)}...')
    scale_page = scrapez(link, scale_page_template.copy())
    parse_page(link, scale_page)
    pages[link] = scale_page
scale_dump.write_text(json.dumps(scales, indent=2))
pages_dump.write_text(json.dumps(pages, indent=2))
# TSV report: header row of keys, then one row per scale.
key_list = list(scales[0])
results = ['\t'.join(scale[k] for k in key_list) for scale in scales]
results.insert(0, '\t'.join(key_list))
result_file.write_text('\n'.join(results))
16,169 | 8e44203a13dd7f5802e498c27328c54f3d6a833c | """@author: Bryan Silverthorn <bcs@cargo-cult.org>"""
import os.path
import csv
import numpy
import sklearn
import condor
import borg
import borg.experiments.simulate_runs
logger = borg.get_logger(__name__, default_level = "INFO")
def simulate_run(run, maker, all_data, train_mask, test_mask, instances, independent, mixture):
    """Simulate portfolio execution on a train/test split.

    Trains the solver produced by `maker` on the (optionally subsampled)
    training half and replays it against every test instance under a fake
    suite, returning one summary row for the results CSV.
    """
    train_data = all_data.masked(train_mask)
    test_data = all_data.masked(test_mask)
    if instances is not None:
        # Subsample the training set down to `instances` random tasks.
        ids = sorted(train_data.run_lists, key = lambda _: numpy.random.rand())[:instances]
        train_data = train_data.filter(*ids)
    if independent:
        train_data = train_data.collect_independent(mixture).only_nonempty()
    else:
        train_data = train_data.collect_systematic(mixture).only_nonempty()
    budget = test_data.common_budget
    #budget = test_data.common_budget / 2 # XXX
    suite = borg.fake.FakeSuite(test_data)
    if maker.subname == "preplanning-dir":
        model_kwargs = {"K": 64}
        if "set_alpha" in maker.variants:
            model_kwargs["alpha"] = 1e-2
    else:
        model_kwargs = {}
    solver = maker(suite, train_data, model_kwargs = model_kwargs)
    successes = []  # CPU seconds of each successful run
    for (i, instance_id) in enumerate(test_data.run_lists):
        logger.info("simulating run %i/%i on %s", i, len(test_data), instance_id)
        with suite.domain.task_from_path(instance_id) as instance:
            with borg.accounting() as accountant:
                answer = solver.start(instance).run_then_stop(budget)
            succeeded = suite.domain.is_final(instance, answer)
            logger.info(
                "%s %s on %s (%.2f CPU s)",
                maker.name,
                "succeeded" if succeeded else "failed",
                os.path.basename(instance),
                accountant.total.cpu_seconds,
                )
            if succeeded:
                successes.append(accountant.total.cpu_seconds)
    logger.info(
        "%s had %i successes over %i instances",
        maker.name,
        len(successes),
        len(test_data),
        )
    description = "{0} ({1})".format(mixture, "Sep." if independent else "Sys.")
    return (
        description,
        maker.name,
        instances,
        len(successes),
        numpy.mean(successes),
        numpy.median(successes),
        )
@borg.annotations(
    out_path = ("results CSV output path"),
    runs = ("path to JSON runs specification", "positional", None, borg.util.load_json),
    repeats = ("number of times to repeat each run", "option", None, int),
    workers = ("submit jobs?", "option", "w"),
    local = ("workers are local?", "flag"),
    )
def main(out_path, runs, repeats = 128, workers = 0, local = False):
    """Simulate portfolio and solver behavior.

    Fans simulate_run jobs out via condor over every (run, split,
    training-set size, maker) combination and streams the resulting
    rows into a CSV at `out_path`.
    """
    logger.info("simulating %i runs", len(runs))
    get_run_data = borg.util.memoize(borg.storage.RunData.from_bundle)
    def yield_jobs():
        # Generate one simulate_run job per parameter combination.
        for run in runs:
            all_data = get_run_data(run["bundle"])
            # NOTE(review): sklearn.cross_validation is the pre-0.18 module
            # (removed in modern scikit-learn); this targets old sklearn.
            validation = sklearn.cross_validation.ShuffleSplit(len(all_data), repeats, test_fraction = 0.2, indices = False)
            if run["portfolio_name"] == "-":
                makers = map(borg.experiments.simulate_runs.SolverMaker, all_data.solver_names)
            else:
                makers = [borg.experiments.simulate_runs.PortfolioMaker(run["portfolio_name"])]
            max_instances = len(all_data) * 0.8
            for (train_mask, test_mask) in validation:
                # 32 training-set sizes, evenly spaced from 10 to 80% of the data.
                for instances in map(int, map(round, numpy.r_[10.0:max_instances:32j])):
                    for maker in makers:
                        yield (
                            simulate_run,
                            [
                                run,
                                maker,
                                all_data,
                                train_mask,
                                test_mask,
                                instances,
                                run["independent"],
                                run["mixture"],
                                ],
                            )
    with borg.util.openz(out_path, "wb") as out_file:
        writer = csv.writer(out_file)
        writer.writerow(["description", "solver", "instances", "successes", "mean_time", "median_time"])
        for (_, row) in condor.do(yield_jobs(), workers, local):
            writer.writerow(row)
            out_file.flush()

if __name__ == "__main__":
    borg.script(main)
|
16,170 | 830a0d362f7bde79dec14e3f61a723ac3fdf7185 | from django.contrib import admin
from .models import PostUser
# Register your models here.
class PostUserAdmin(admin.ModelAdmin):
    """Admin configuration for PostUser: the post date is display-only."""
    readonly_fields = ('date_post', )

admin.site.register(PostUser, PostUserAdmin)
|
16,171 | 72f1610911e18acfa9b8fc49cd9c4473d24c069c | import sys, bz2, re, string
from collections import namedtuple
# # A language model scores sequences, and must account
# # for both beginning and end of each sequence. Example API usage:
# lm = LM(filename, n=6, verbose=False)
# sentence = "This is a test ."
# lm_state = lm.begin() # initial state is always <s>
# logprob = 0.0
# for t in sentence:
# (lm_state, logprob) = lm.score(lm_state, t)
# logprob += logprob
# logprob += lm.end(lm_state) # transition to </s>, can also use lm.score(lm_state, "</s>")[1]
ngram_stats = namedtuple("ngram_stats", "logprob, backoff")
class LM:
    """Character n-gram language model loaded from an ARPA-style table.

    Scores sequences with backoff, accounting for both beginning (<s>)
    and end (</s>) of each sequence.
    """
    def __init__(self, filename, n=6, verbose=False):
        """Read the bzip2-compressed model table from `filename`."""
        print("Reading language model from {}...".format(filename), file=sys.stderr)
        self.table = {}        # ngram tuple -> ngram_stats(logprob, backoff)
        self.n = n
        self.history = n-1     # maximum context length kept in a state
        self.verbose = verbose
        for line in bz2.open(filename, 'rt'):
            entry = line.strip().split("\t")
            # Data lines: logprob <TAB> ngram [<TAB> backoff]; skip headers.
            if len(entry) > 1 and entry[0] != "ngram":
                (logprob, ngram, backoff) = (float(entry[0]), tuple(entry[1].split()), float(entry[2] if len(entry)==3 else 0.0))
                self.table[ngram] = ngram_stats(logprob, backoff)
        print("Done.", file=sys.stderr)
    def begin(self):
        """Initial LM state: the start-of-sequence symbol."""
        return ("<s>",)
    def score(self, state, token):
        """Score `token` in context `state`; return (new_state, logprob).

        Backs off to shorter contexts, accumulating backoff weights;
        a missing unigram gets a large (-99) penalty.
        """
        ngram = state + (token,)
        score = 0.0
        while len(ngram)> 0:
            if ngram in self.table:
                return (ngram[-self.history:], score + self.table[ngram].logprob)
            else: #backoff
                score += self.table[ngram[:-1]].backoff if len(ngram) > 1 else 0.0
                ngram = ngram[1:]
        return ((), score - 99.0) # bad score for missing unigrams
    def end(self, state):
        """Log-probability of ending the sequence (transition to </s>)."""
        return self.score(state, "</s>")[1]
    def clean_seq(self, sequence):
        """Lowercase `sequence` and strip all punctuation and spaces."""
        return(sequence.translate(dict.fromkeys(map(ord, string.punctuation + ' '), None)).lower())
    def maybe_write(self, msg):
        """Print `msg` to stderr when verbose mode is on."""
        if self.verbose:
            print(msg, file=sys.stderr)
    def score_seq(self, sequence):
        """Total LM log-probability of the cleaned `sequence`, including </s>."""
        lm_state = self.begin()
        lm_logprob = 0.0
        for token in list(self.clean_seq(sequence)):
            self.maybe_write("state: {}".format(lm_state + (token,)))
            (lm_state, logprob) = self.score(lm_state, token)
            lm_logprob += logprob
            self.maybe_write("logprob={}".format(logprob))
        lm_logprob += self.end(lm_state)
        return lm_logprob
    def get_bitstring_spans(self, bitstring):
        """Return a dict mapping each 'o' position in `bitstring` to the end
        of its one-character span; '.' positions are ignored."""
        return { i.span()[0] : i.span()[1] for i in re.finditer('o', bitstring) }
    def score_bitstring(self, sequence, bitstring):
        """a bitstring is a string where 'o' represents an item to
        be scored and '.' represents an item to be ignored while
        scoring the sequence. the sequence string and bitstring
        must be of the same length and the sequence cannot contain
        punctuation or spaces"""
        spans = self.get_bitstring_spans(bitstring)
        # we use the tab character \t to represent the positions
        # to skip when scoring the sequence
        seq_by_bits = [ sequence[i] if i in spans else '\t' for i in range(len(sequence)) ]
        self.maybe_write("seq_by_bits: {}".format(seq_by_bits))
        lm_state = self.begin()
        lm_logprob = 0.0
        for token in list(seq_by_bits):
            if token == '\t': # should we skip this token?
                lm_state = ()  # reset the context across a gap
                continue
            self.maybe_write("state: {}".format(lm_state + (token,)))
            (lm_state, logprob) = self.score(lm_state, token)
            lm_logprob += logprob
            self.maybe_write("logprob={}".format(logprob))
        lm_logprob += self.end(lm_state)
        return lm_logprob
if __name__ == '__main__':
sequence = 'In a few cases, a multilingual artifact has been necessary to facilitate decipherment, the Rosetta Stone being the classic example. Statistical techniques provide another pathway to decipherment, as does the analysis of modern languages derived from ancient languages in which undeciphered texts are written. Archaeological and historical information is helpful in verifying hypothesized decipherments.'
lm = LM("data/6-gram-wiki-char.lm.bz2", n=6, verbose=False)
lm_logprob = lm.score_seq(sequence)
print("TOTAL LM LOGPROB for \"{}\": {}".format(sequence, lm_logprob), file=sys.stderr)
s1 = 'zkxxuqxzpuq'
s2 = 'thisisatest'
print("TOTAL LM LOGPROB for \"{}\": {}".format(s1, lm.score_seq(s1)), file=sys.stderr)
print("TOTAL LM LOGPROB for \"{}\": {}".format(s2, lm.score_seq(s2)), file=sys.stderr)
print(lm.get_bitstring_spans('..oo...ooo..'))
print(lm.score_bitstring('thisisatest', 'oo...oo.ooo'))
|
16,172 | 0bfdc3e63bf8bac60401ef5d3dd86eae22d90e4b | import os
import sys
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
mail_host = "" # SMTP服务器 如:smtp.126.com
mail_user = "" # 用户名
mail_pass = "" # 授权密码,非登录密码
sender = '' # 发件人邮箱(最好写全, 不然会失败)
receivers = [''] # 接收邮件,可设置为你的QQ邮箱或者其他邮箱
def sendEmail(title, content_lst):
    """Send `content_lst` (stringified) as the body of an email titled `title`.

    Uses the module-level SMTP-over-SSL settings. Errors are printed rather
    than raised (best-effort delivery, matching the original behaviour).
    """
    message = MIMEMultipart()
    message['From'] = "{}".format(sender)
    message['To'] = ",".join(receivers)
    message['Subject'] = title
    message.attach(MIMEText(str(content_lst)))  # plain-text body
    try:
        # 'with' guarantees the connection is QUIT/closed even on failure —
        # the original leaked the SMTP connection.
        with smtplib.SMTP_SSL(mail_host, 465) as smtpObj:  # SSL SMTP, port 465
            smtpObj.login(mail_user, mail_pass)
            smtpObj.sendmail(sender, receivers, message.as_string())
        print("mail has been send successfully.")
    except smtplib.SMTPException as e:
        print(e)
|
16,173 | 82ace6ca6942104b85e7eb450b05033fff2247e7 | import discord
from discord.ext import commands
class Rules(commands.Cog):
    """Cog exposing a single `rules` command that posts the server rules."""
    def __init__(self, bot):
        self.bot = bot
    @commands.command()
    async def rules(self, ctx):
        """Send the server rules as one embed, one field per rule."""
        embed=discord.Embed(
            title="Here are the rules of the server",
            colour=discord.Colour.blue()
        )
        embed.add_field(name="No NSFW", value="No NSFW in this server", inline=True)
        embed.add_field(name="No Gore", value="Please don't post any Gore", inline=True)
        embed.add_field(name="Please be respectful to other members and staff", value="Don't disrespect any other members or staff", inline=True)
        embed.add_field(name="Please report all bad behaviour", value="Please DM or tell a staff member If there is any bad behaviour you could be rewarded.", inline=True)
        embed.add_field(name="Please Suggest suggestions", value="If you have an opinion on something feel free to DM a admin or owner", inline=True)
        embed.add_field(name="No Doxxing", value="Doxxing means exposing personal information. Please don't Dox anyone in this server", inline=True)
        embed.add_field(name="No Spamming", value="Please don't spam!'", inline=True)
        embed.add_field(name="Please Follow Discord's TOS", value="Please follow Discord TOS or you will be punished", inline=True)
        embed.add_field(name="Don't advertize", value="Please don't advertize your server in DMS or the server unless you are allowed to", inline=True)
        embed.add_field(name="Please post according to the channel", value="Please post the correct content in the correct channel", inline=True)
        await ctx.send(embed=embed)
def setup(bot):
    # discord.py extension hook: register the cog when the extension loads.
    bot.add_cog(Rules(bot))
16,174 | 2f606f8ec6c537d20d4eedfd459308f64cd71580 | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
    def reverseBetween(self, head: ListNode, left: int, right: int) -> ListNode:
        """Reverse the values of nodes `left`..`right` (1-indexed).

        Copies the list's values out, reverses the target slice, and
        writes all values back into the existing nodes.
        """
        values = []
        node = head
        while node:
            values.append(node.val)
            node = node.next
        # Reverse the 0-indexed slice [left-1, right) — equivalent to the
        # original's piecewise negative-step slicing for both left == 1
        # and left > 1.
        values[left - 1:right] = reversed(values[left - 1:right])
        node = head
        for val in values:
            node.val = val
            node = node.next
        return head
16,175 | 9510be848f075c5e2a81cc3bdb752c6b93caf9d6 | import matplotlib.pyplot as plt
import cv2
import numpy as np
from Kmean_color import Kmeans
def Figure_colors(path,n_clusters):
    """Scatter-plot the pixels of the image at `path`, run the custom Kmeans
    on their colour values, and plot each resulting cluster in its own colour."""
    img = cv2.imread(path)
    img_size= cv2.cvtColor(img,cv2.COLOR_BGR2RGB)
    # One row per pixel, 3 colour channels each.
    # NOTE(review): X is built from the BGR `img` while the RGB `img_size`
    # is only used for its shape — possibly unintended; confirm.
    X = img.reshape((img_size.shape[0]*img_size.shape[1],3))
    plt.xlabel('x')
    plt.ylabel('y')
    plt.plot(X[:, 0], X[:, 1], 'bo', markersize=10)
    plt.plot()
    plt.show()
    ####################################################
    km = Kmeans(n_clusters=n_clusters)
    km.fit(X)
    centers = []
    plt.xlabel('x')  # x-axis label
    plt.ylabel('y')  # y-axis label
    #plt.zlabel('z')
    plt.title("Kmeans")  # plot title
    plt_colors = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'w']  # supported colours
    for i in range(km.n_clusters):
        centroids=km.centroids
        centers.append(centroids)
        #print(centers)
        data = X[km.labels == i]  # points assigned to cluster i
        plt.plot(data[:, 0], data[:, 1], plt_colors[i]+'o', markersize = 4, label = 'cluster_' + str(i))  # draw cluster i
        #plt.plot(centers[i][0], centers[i][1], plt_colors[i] + '*', markersize = 4, label = 'center_' + str(i))  # draw cluster centre
    plt.legend()  # show the legend
    plt.show()
|
16,176 | b7a9ffbdf7d95f776f8a1a8d9fd184d0ab72cb4f | import sys
def is_power_of_two(n):
    """Return True when n is a positive power of two.

    Bit trick: a power of two has exactly one bit set, so n & (n - 1)
    clears it to zero. Replaces the original O(1000) loop over 2**i and
    also removes its implicit 2**999 upper bound.
    """
    return n > 0 and n & (n - 1) == 0

if __name__ == "__main__":
    # Same CLI behaviour: read an integer, print "yes" or "no".
    if is_power_of_two(int(input())):
        print("yes")
    else:
        print("no")
|
16,177 | 39382c5793e50b438ed69cd74e72d561980c0644 | """Swap apirs of node in ll"""
class ListNode:
    """A single node of a singly linked list."""

    def __init__(self, val):
        self.val = val
        self.next = None


class LinkedList:
    """Minimal singly linked list with append, printing, and recursive
    pairwise node swapping."""

    def __init__(self):
        self.head = None

    def append(self, val):
        """Append a new node holding `val` at the tail."""
        node = ListNode(val)
        if self.head is None:
            self.head = node
            return
        tail = self.head
        while tail.next:
            tail = tail.next
        tail.next = node
        return

    def print_list(self):
        """Print all values space-separated on one line."""
        node = self.head
        while node:
            print(node.val, end=" ")
            node = node.next
        print()

    def swap_pairs(self, head):
        """Recursively swap adjacent node pairs; return the new head."""
        if head is None or head.next is None:
            return head  # zero or one node left: nothing to swap
        first, second = head, head.next
        rest = second.next
        # `second` leads this pair, `first` follows it, then the
        # recursively swapped remainder of the list.
        second.next = first
        first.next = self.swap_pairs(rest)
        return second
if __name__ == "__main__":
    # Demo: build 1->2->3->4, show it, swap pairs, and print the result.
    ll = LinkedList()
    ll.append(1)
    ll.append(2)
    ll.append(3)
    ll.append(4)
    ll.print_list()
    head = ll.swap_pairs(ll.head)
    while head:
        print(head.val, end=" ")
        head = head.next
    print()
16,178 | fc628756f7fcced98dc6cb863e34b165724c2d94 | class FactorialDigitSum():
"""
n! means n × (n − 1) × ... × 3 × 2 × 1
For example, 10! = 10 × 9 × ... × 3 × 2 × 1 = 3628800,
and the sum of the digits in the number 10! is
3 + 6 + 2 + 8 + 8 + 0 + 0 = 27.
Find the sum of the digits in the number 100!
"""
def factorial(self, input):
result = 1
for i in range(1, input + 1):
result *= i
return(result)
def sum_of_digits(self, input):
helper = str(input)
result = 0
for i in helper:
result += int(i)
return(result)
if __name__ == '__main__':
    # Project Euler 20: digit sum of 100!.
    task = FactorialDigitSum()
    factorial_100 = task.factorial(100)
    print(task.sum_of_digits(factorial_100))
|
16,179 | 56e9e35aeb8c5da4f852d2fd5f14bb0ffe5c5955 | #triangle
def triangle_row(n):
    """Return the numbers 1..n concatenated into one string (n=5 -> '12345')."""
    return ''.join(str(i) for i in range(1, n + 1))

if __name__ == "__main__":
    # Bug fix: the original did ''.join(str(a)) on the whole list, which
    # printed the list's repr ("[1, 2, 3]") instead of "123"; the loop
    # variable also shadowed the input N.
    print(triangle_row(int(input())))
16,180 | 19e2427aaf09a0c24ebcd623f755ebbda0be10bb | import puzzle
def checkSudoku(rows):
    """Return True when `rows` (9 lists of 9 ints) is a solved sudoku:
    every row, column, and 3x3 section is a permutation of 1..9."""
    def isValidSet(s):
        # A valid unit sorts to exactly [1, 2, ..., 9].
        return sorted(s) == list(range(1, len(s) + 1))
    for i in range(0, 9):
        row = rows[i]
        column = []
        section = []
        for j in range(0, 9):
            # Integer division (//): the original used Python 2 "/", which
            # yields float indices (a TypeError) under Python 3.
            rowMarker = ((i // 3) * 3) + j // 3
            columnMarker = (i * 3) % 9 + j % 3
            # Bug fix: the original appended row[j], so "column" was just the
            # row again and columns were never actually validated.
            column.append(rows[j][i])
            section.append(rows[rowMarker][columnMarker])
        if not isValidSet(column) or not isValidSet(row) or not isValidSet(section):
            return False
    return True
print checkSudoku(puzzle.solved)
|
16,181 | d99bda52b761d09eb6616e161f09617ea6ac338e | from django.contrib import admin
from .models import Function, Collaborator, Apartament, Resident
# Register your models here.
admin.site.register(Function)
admin.site.register(Collaborator)
admin.site.register(Apartament)
admin.site.register(Resident) |
16,182 | be2b95cf4d7ad5dfa872c4f17990475939837398 | import kidney_ndds
from kidney_digraph import *
EPS = 0.00001
class KidneyOptimException(Exception):
    """Raised when a computed kidney-exchange solution fails validation."""
    pass
def check_validity(opt_result, digraph, ndds, max_cycle, max_chain, min_chain = None):
    """Check that the solution is valid.

    This method checks that:
    - all used edges exist
    - no vertex or NDD is used twice (which also ensures that no edge is used twice)
    - cycle and chain caps (and the optional min-chain length) are respected
    - no chain contains a cycle (i.e. no repeated vertices)

    Raises KidneyOptimException on the first violation found.
    """
    # all used edges exist
    for chain in opt_result.chains:
        if chain.vtx_indices[0] not in [e.tgt.id for e in ndds[chain.ndd_index].edges]:
            raise KidneyOptimException("Edge from NDD {} to vertex {} is used but does not exist".format(
                chain.ndd_index, chain.vtx_indices[0]))
    for cycle in opt_result.cycles:
        for i in range(len(cycle)):
            # cycle[i-1] wraps to the last vertex at i == 0, closing the cycle
            if digraph.adj_mat[cycle[i-1].id][cycle[i].id] is None:
                raise KidneyOptimException("Edge from vertex {} to vertex {} is used but does not exist".format(
                    cycle[i-1].id, cycle[i].id))
    # no vertex or NDD is used twice
    ndd_used = [False] * len(ndds)
    vtx_used = [False] * len(digraph.vs)
    for chain in opt_result.chains:
        if ndd_used[chain.ndd_index]:
            raise KidneyOptimException("NDD {} used more than once".format(chain.ndd_index))
        ndd_used[chain.ndd_index] = True
        for vtx_index in chain.vtx_indices:
            if vtx_used[vtx_index]:
                raise KidneyOptimException("Vertex {} used more than once".format(vtx_index))
            vtx_used[vtx_index] = True
    for cycle in opt_result.cycles:
        for vtx in cycle:
            if vtx_used[vtx.id]:
                raise KidneyOptimException("Vertex {} used more than once".format(vtx.id))
            vtx_used[vtx.id] = True
    # cycle and chain caps are respected
    for chain in opt_result.chains:
        if len(chain.vtx_indices) > max_chain:
            raise KidneyOptimException("The chain cap is violated")
    for cycle in opt_result.cycles:
        if len(cycle) > max_cycle:
            raise KidneyOptimException("The cycle cap is violated")
    if not min_chain is None:
        for chain in opt_result.chains:
            if len(chain.vtx_indices) < min_chain:
                raise KidneyOptimException("The min-chain cap is violated")
    # chains do not contain loops (a repeated vertex shrinks the set)
    for chain in opt_result.chains:
        if len(set(chain.vtx_indices)) < len(chain.vtx_indices):
            raise KidneyOptimException("The chain contains loops:\n %s" % chain.display())
def get_dist_from_nearest_ndd(digraph, ndds):
    """ For each donor-patient pair V, this returns the length of the
    shortest path from an NDD to V, or 999999999 if no path from an NDD
    to V exists.
    """
    # Get a set of donor-patient pairs who are the target of an edge from an NDD
    ndd_targets = set()
    for ndd in ndds:
        for edge in ndd.edges:
            ndd_targets.add(edge.tgt)
    # Multi-source breadth-first search; 999999999 doubles as "unvisited".
    q = deque(ndd_targets)
    distances = [999999999] * len(digraph.vs)
    for v in ndd_targets:
        distances[v.id] = 1
    while q:
        v = q.popleft()
        for e in v.edges:
            w = e.tgt
            if distances[w.id] == 999999999:
                distances[w.id] = distances[v.id] + 1
                q.append(w)
    return distances
def find_vertex_chain_participation(digraph, ndds,max_chain):
    """ For each donor-patient pair V, add a property "can_be_in_chain_list",
    which is a list of booleans: can_be_in_chain_list[i] = True if v can be in a chain
    initiated by ndd i (True if v is within the chain cap of ndd i, False otherwise)
    """
    for v in digraph.vs:
        v.can_be_in_chain_list = [False for _ in ndds]
    for i_ndd,ndd in enumerate(ndds):
        # Get a set of donor-patient pairs who are the target of an edge from this NDD
        ndd_targets = set()
        for edge in ndd.edges:
            ndd_targets.add(edge.tgt)
        # Breadth-first search from this NDD's direct targets
        # (999999999 doubles as "unvisited").
        q = deque(ndd_targets)
        distances = [999999999] * len(digraph.vs)
        for v in ndd_targets:
            distances[v.id] = 1
        while q:
            v = q.popleft()
            for e in v.edges:
                w = e.tgt
                if distances[w.id] == 999999999:
                    distances[w.id] = distances[v.id] + 1
                    q.append(w)
        # Mark every vertex reachable within the chain cap.
        for v,dist in zip(digraph.vs,distances):
            if dist <= max_chain:
                v.can_be_in_chain_list[i_ndd] = True
def find_selected_path(v_id, next_vv):
    """Follow the successor map `next_vv` from `v_id` until it runs out,
    returning the visited vertex ids in order (starting with `v_id`)."""
    path = [v_id]
    while True:
        nxt = next_vv.get(path[-1])
        if nxt is None:
            return path
        path.append(nxt)
def find_selected_cycle(v_id, next_vv):
    """Follow `next_vv` from `v_id`; return the list of vertices visited up
    to (but not including) the first repeat, or None when the walk ends
    without revisiting anything (i.e. it was a chain, not a cycle)."""
    seen = [v_id]
    current = v_id
    while current in next_vv:
        current = next_vv[current]
        if current in seen:
            return seen
        seen.append(current)
    return None
def get_optimal_chains(digraph, ndds, edge_success_prob=1):
    """Extract the chains selected by the MIP solution (variables set ~1)
    and score them, discounting each edge's score by edge_success_prob
    raised to the edge's position along the chain."""
    # Chain edges: successor map over pair-to-pair edges whose variable is set
    chain_next_vv = {e.src.id: e.tgt.id
        for e in digraph.es
        for var in e.grb_vars
        if var.x > 0.1} # changed to Xn from x by Duncan
    optimal_chains = []
    for i, ndd in enumerate(ndds):
        for e in ndd.edges:
            if e.edge_var.x > 0.1:
                vtx_indices = find_selected_path(e.tgt.id, chain_next_vv)
                # Get score of edge from NDD
                score = e.score * edge_success_prob
                # Add scores of edges between vertices
                for j in range(len(vtx_indices) - 1):
                    score += digraph.adj_mat[vtx_indices[j]][vtx_indices[j + 1]].score * edge_success_prob ** (j + 2)
                optimal_chains.append(kidney_ndds.Chain(i, vtx_indices, score))
    return optimal_chains
# added by duncan
def get_optimal_chains_pctsp(digraph, ndds):
    """PC-TSP variant of get_optimal_chains: fixed edge success probability
    of 1 and a 0.5 threshold on the (binary) edge variables."""
    # Chain edges: successor map over selected pair-to-pair edges
    edge_success_prob = 1.0
    chain_next_vv = {e.src.id: e.tgt.id
        for e in digraph.es
        if e.edge_var.x > 0.5}
    optimal_chains = []
    for i, ndd in enumerate(ndds):
        for e in ndd.edges:
            if e.edge_var.x > 0.5:
                vtx_indices = find_selected_path(e.tgt.id, chain_next_vv)
                # Get score of edge from NDD
                score = e.score * edge_success_prob
                # Add scores of edges between vertices
                for j in range(len(vtx_indices) - 1):
                    score += digraph.adj_mat[vtx_indices[j]][vtx_indices[j + 1]].score * edge_success_prob ** (j + 2)
                optimal_chains.append(kidney_ndds.Chain(i, vtx_indices, score))
    return optimal_chains
def selected_edges_to_cycles(digraph, cycle_start_vv, cycle_next_vv):
    """Turn selected-edge successor maps into a list of cycles of Vertex
    objects, dropping chain fragments and duplicate rotations."""
    cycles = [find_selected_cycle(start_v, cycle_next_vv) for start_v in cycle_start_vv]
    # Remove "cycles" that are really part of a chain
    cycles = [c for c in cycles if c is not None]
    # Remove duplicated cycles: keep only the rotation that starts at the min id
    cycles = [c for c in cycles if c[0] == min(c)]
    # Use vertices instead of indices
    return [[digraph.vs[v_id] for v_id in c] for c in cycles]
# c is a list of kidney_digraph.Vertex objects (the first vertex is not
# repeated at the end); e is a kidney_digraph.Edge object.
def cycle_contains_edge(c, e):
    """Return True iff edge e's (src, tgt) appear consecutively in cycle c,
    treating c as circular."""
    if e.src not in c:
        return False
    successor = c[(c.index(e.src) + 1) % len(c)]
    return successor == e.tgt
# -------------------------------------------------------------------------------------------------
#
# Functions for Variable Uncertainty Budget
#
# -------------------------------------------------------------------------------------------------
from scipy.special import binom
from scipy.optimize import minimize
import math
def B_bound(num_E, gamma):
    """Upper bound (Bertsimas & Sim, "The Price of Robustness") on the
    probability that realized edge weights fall outside the uncertainty
    set, assuming symmetric interval uncertainty with realized weights
    symmetrically distributed about their nominal values."""
    eta = (gamma + num_E) / 2.0
    fl_eta = int(math.floor(eta))
    mu = float(eta - fl_eta)  # fractional part of eta
    tail = sum(binom(num_E, l) for l in range(fl_eta + 1, int(num_E) + 1))
    return ((1.0 - mu) * binom(num_E, fl_eta) + tail) * math.pow(2, -num_E)
def gamma_symmetric_edge_weights(x_norm,epsilon):
    '''
    Variable budget function for symmetric cost uncertainty (from Poss & Bergamo).
    input:
    - x_norm : number of edges in the solution
    - epsilon : protection level (realized edge weights will be outside of
      the U-set with probability at most epsilon)
    Returns the smallest budget gamma in [0, x_norm] with B_bound <= epsilon,
    falling back to x_norm if the solver fails.
    '''
    # the first constraint is that B_bound <= epsilon,
    # the second is that gamma >= 0
    # the third is that gamma <= x_norm
    constr = ({'type':'ineq',
        'fun':lambda g: epsilon - B_bound(x_norm,g)
        },
        {'type':'ineq',
        'fun':lambda g: g},
        {'type': 'ineq',
        'fun': lambda g: x_norm - g})
    func = lambda gamma: gamma # we just want to minimize gamma
    # method = Constrained Optimization BY Linear Approximation (COBYLA)
    res = minimize(func,0.01, constraints=constr,method='COBYLA')
    # if the minimization is successful, return the (rounded, non-negative)
    # result; otherwise fall back to the most conservative budget, x_norm
    if res.success:
        return max(round(res.fun,4),0)
    else:
        return x_norm
from scipy.special import betainc # betainc(a,b,x)
from scipy.stats import binom as binomial_dist # pmf(x,n,p)
from math import floor
def G_bound(n, m, p, k, gamma):
pk = math.pow(1 - p, k)
floor_gamma = int(floor(gamma))
if gamma >= m:
s1 = sum(binomial_dist.pmf(y, n, 1 - pk) for y in range(0, floor_gamma - m + 1))
s2 = sum(betainc(m - gamma + y, gamma - y + 1, 1 - p) * binomial_dist.pmf(y, n, 1 - pk) for y in
range(floor_gamma - m + 1, min(n, floor_gamma) + 1))
return s1 + s2
else:
return sum(betainc(m - gamma + y, gamma - y + 1, 1 - p) * binomial_dist.pmf(y, n, 1 - pk) for y in
range(0, min(n, floor_gamma) + 1))
def G_bound_2(n, m, p, k, gamma):
    """Alternative bound: binomial CDF at (gamma - y) weighted over y failures.

    Removed the unused local `floor_gamma` from the original.
    """
    pk = math.pow(1 - p, k)  # probability a length-k cycle survives
    return sum(binom_cdf(gamma - y, m, p) * binomial_dist.pmf(y, n, 1 - pk)
               for y in range(0, n + 1))
def binom_cdf(y, n, p):
    '''
    CDF of the binomial distribution, using the regularized incomplete beta function.
    '''
    # Guard the degenerate tails first, then defer to the beta identity
    # P(X <= y) = I_{1-p}(n - y, y + 1).
    if y >= n:
        return 1.0
    if y < 0:
        return 0.0
    return betainc(n - y, y + 1, 1 - p)
# @np.vectorize
def gamma_homogeneous_edge_failure(n, m, p, k, epsilon):
    '''
    Variable budget function for homogeneous edge failure probability p.

    input:
        - n : number of cycles in the matching
        - m : number of chains in the matching
        - k : cycle cap
        - p : edge failure probability
        - epsilon : protection level (realized cycle/chain weights fall
          outside the U-set with probability at most epsilon)
    '''
    # Constraints: 1 - G_bound_2 <= epsilon, gamma >= 0, gamma <= n + m.
    constraints = (
        {'type': 'ineq', 'fun': lambda g: epsilon - (1 - G_bound_2(n, m, p, k, g))},
        {'type': 'ineq', 'fun': lambda g: g},
        {'type': 'ineq', 'fun': lambda g: n + m - g},
    )
    # Minimize gamma itself via COBYLA, starting from a small positive value.
    result = minimize(lambda gamma: gamma, 0.01,
                      constraints=constraints, method='COBYLA')
    # On success return the (rounded, non-negative) minimizer; otherwise fall
    # back to the most conservative budget, n + m.
    if result.success:
        return max(round(result.fun, 4), 0)
    return n + m
|
16,183 | 8fdd11675915db6aaa6f21ed4e9de12907924ce8 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def maxPathSum(self, root: TreeNode) -> int:
        """Return the maximum path sum in the tree.

        A path may start and end at any node; it never revisits a node and
        only the path through a node may use both of its subtrees.
        """
        best = -float('inf')

        def gain(node):
            # Best downward path starting at `node`; updates `best` with the
            # best path that bends through `node`.
            nonlocal best
            left = gain(node.left) if node.left is not None else 0
            right = gain(node.right) if node.right is not None else 0
            best = max(best, node.val, node.val + left,
                       node.val + right, node.val + left + right)
            # A parent may extend through at most one child (or neither).
            return node.val + max(0, left, right)

        gain(root)
        return best
|
16,184 | 6feebb1d4344fdd70908dd8d5a564326c854a460 | import colorgram
import turtle as turtle_module
import random
'''extract color palette from image using colorgram'''
# colors = colorgram.extract('hirst.jpg', 10)
# rgb_colors = []
# for color in colors:
# r = color.rgb.r
# g = color.rgb.g
# b = color.rgb.b
# new_color = (r, g, b)
# rgb_colors.append(new_color)
turtle_module.colormode(255)
painter = turtle_module.Turtle()
painter.speed("fastest")
painter.penup()
painter.hideturtle()

# Palette pre-extracted from hirst.jpg with colorgram (see commented code
# above); near-(255,255,255) tuples were dropped as background shades.
colors = [(131, 166, 205), (222, 148, 106), (31, 42, 61), (199, 134, 147), (165, 59, 48), (140, 184, 162)]

# Walk to the bottom-left corner of the 10x10 grid before painting.
painter.setheading(225)
painter.forward(300)
painter.setheading(0)

number_of_dots = 100
for count in range(1, number_of_dots + 1):
    painter.dot(20, random.choice(colors))
    painter.forward(50)
    if count % 10 == 0:
        # Row finished: climb one row, return to the left edge, face right.
        painter.setheading(90)
        painter.forward(50)
        painter.setheading(180)
        painter.forward(500)
        painter.setheading(0)

screen = turtle_module.Screen()
screen.exitonclick()
16,185 | 4d6cb54465f2b0a31accfc0037bf0ca5805d765f | n=int(input("Enter the limit:"))
a=[]
print("Enter the elements")
# Read n integers (n entered above), one per line, into the list.
for i in range(n):
    b=int(input())
    a.append(b)
def mm():
    """Print the maximum and minimum of the module-level list `a`."""
    # The original hand-rolled scan shadowed the builtins `min` and `max`;
    # the builtins do the same job in one pass each and print identically.
    print("Maximum value is", max(a))
    print("Minimum value is", min(a))
mm()
16,186 | 39cc857a11ce44cd3bdfaeeea670fffe00052e67 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render,redirect
from django.contrib.auth.forms import UserCreationForm
from django.views.decorators.http import require_POST
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from .models import Todo
from .forms import TodoForm, CreateUserForm
def registerPage(request):
    """Render the signup page; create the account on a valid POST."""
    # Already-authenticated users go straight to the todo list.
    if request.user.is_authenticated:
        return redirect('index')
    form = CreateUserForm()
    if request.method == "POST":
        form = CreateUserForm(request.POST)
        if form.is_valid():
            form.save()
            user = form.cleaned_data.get('username')
            messages.success(request, 'Аккаунт с логином ' + user + ' успешно зарегестрирован')
            return redirect('loginPage')
    # GET, or POST with an invalid (bound) form.
    return render(request, 'loginForm/regPage.html', {'form': form})
def loginPage(request):
    """Render the login page; authenticate and log in on POST."""
    if request.user.is_authenticated:
        return redirect('index')
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login(request, user)
            return redirect('index')
        # BUG FIX: messages.info() requires the request as its first
        # argument; the original call raised a TypeError on bad credentials.
        messages.info(request, 'Username OR password invalid')
    return render(request, 'loginForm/loginPage.html')
def logoutUser(request):
    """Log the current user out and bounce back to the login page."""
    logout(request)
    return redirect('loginPage')
@login_required(login_url = 'loginPage')
def index(request):
    """Show every todo (ordered by id) together with the add-todo form."""
    form = TodoForm()
    todo_list = Todo.objects.order_by('id')
    return render(request, 'todolist/index.html', {'todo_list': todo_list, 'form': form})
@require_POST
def addTodo(request):
    """Create a Todo from the posted form; invalid posts are ignored."""
    form = TodoForm(request.POST)
    if form.is_valid():
        Todo(text=request.POST['text'], username=request.POST['username']).save()
    # BUG FIX: the original returned None (HTTP 500) when the form was
    # invalid; also removed a leftover debug print of request.POST.
    return redirect('index')
@login_required(login_url = 'loginPage')
def completeTodo(request, todo_id):
    """Mark the todo with primary key `todo_id` as complete."""
    item = Todo.objects.get(pk=todo_id)
    item.complete = True
    item.save()
    return redirect('index')
@login_required(login_url = 'loginPage')
def deleteCompleted(request):
    """Remove every todo already marked complete."""
    # `__exact` is the implicit default lookup, so plain `complete=True`
    # issues the identical query.
    Todo.objects.filter(complete=True).delete()
    return redirect('index')
@login_required(login_url = 'loginPage')
def deleteAll(request):
    """Wipe the whole todo table, then return to the list view."""
    Todo.objects.all().delete()
    return redirect('index')
16,187 | d6c4af0f5003f2cb002a55776b4154849dba254a | from decorators import do_twice
@do_twice
def say_whee():
    """Print "Whee!" (presumably repeated by the do_twice decorator —
    confirm in decorators.py)."""
    print("Whee!")
say_whee()
16,188 | 863d0dba5f0aa79b2270f7da35ba75e2bb7a0a2d | # Author: Eric Bezzam
# Date: Feb 1, 2016
"""Class for real-time STFT analysis and processing."""
from __future__ import division
import numpy as np
from .dft import DFT
class STFT(object):
    """
    A class for real-time STFT analysis and processing.

    Parameters
    -----------
    N : int
        number of samples per frame
    hop : int
        hop size
    analysis_window : numpy array
        window applied to block before analysis
    synthesis_window : numpy array
        window applied to the block before synthesis
    channels : int
        number of signals
    transform : str, optional
        which FFT package to use: 'numpy', 'pyfftw', or 'mkl'
    """
    def __init__(self, N, hop=None, analysis_window=None,
                 synthesis_window=None, channels=1, transform='numpy'):
        # initialize parameters
        self.N = N  # number of samples per frame
        self.D = channels  # number of channels
        if hop is not None:  # hop size
            self.hop = hop
        else:
            # default: 50% overlap
            self.hop = self.N / 2
        self.hop = int(np.floor(self.hop))
        # analysis window: default to Hann at exactly 50% overlap,
        # rectangular (None) otherwise
        if analysis_window is not None:
            self.analysis_window = analysis_window
        elif analysis_window is None and self.hop == self.N / 2:
            self.analysis_window = np.hanning(self.N)
        else:
            self.analysis_window = None
        # synthesis window
        if synthesis_window is not None:
            self.synthesis_window = synthesis_window
        elif synthesis_window is None and self.hop == self.N / 2:
            self.synthesis_window = None  # rectangular window
        else:
            self.synthesis_window = None
        # create DFT object
        self.transform = transform
        self.nfft = self.N  # differs from N when there is zero-padding
        self.nbin = self.nfft // 2 + 1
        self.dft = DFT(nfft=self.nfft, D=self.D,
                       analysis_window=self.analysis_window,
                       synthesis_window=self.synthesis_window,
                       transform=self.transform)
        self.fft_out_buffer = np.zeros(self.nbin, dtype=np.complex64)
        # initialize filter + zero padding --> use set_filter
        self.zf = 0; self.zb = 0
        self.H = None  # filter frequency spectrum
        # state variables
        self.num_frames = 0  # number of frames processed so far
        self.n_state = self.N - self.hop  # samples carried over between frames
        # allocate all the required buffers
        self._make_buffers()

    def _make_buffers(self):
        """Allocate input/state/output buffers plus views into the input buffer."""
        if self.D == 1:  # need this distinction for fftw
            # The input buffer, float32 for speed!
            self.fft_in_buffer = np.zeros(self.nfft, dtype=np.float32)
            # a number of useful views on the input buffer
            self.fft_in_state = self.fft_in_buffer[self.zf:self.zf + self.n_state]  # location of state
            self.fresh_samples = self.fft_in_buffer[self.zf + self.n_state:self.zf + self.n_state + self.hop]
            self.old_samples = self.fft_in_buffer[self.zf + self.hop:self.zf + self.hop + self.n_state]
            self.x_p = np.zeros(self.n_state, dtype=np.float32)  # state buffer
            self.y_p = np.zeros(self.nfft - self.hop, dtype=np.float32)  # prev reconstructed samples
            self.X = np.zeros(self.nbin, dtype=np.complex64)  # current frame in STFT domain
            self.out = np.zeros(self.hop, dtype=np.float32)
        else:
            # The input buffer, float32 for speed!
            self.fft_in_buffer = np.zeros((self.nfft, self.D), dtype=np.float32)
            # a number of useful views on the input buffer
            self.fft_in_state = self.fft_in_buffer[self.zf:self.zf + self.n_state, :]  # location of state
            self.fresh_samples = self.fft_in_buffer[self.zf + self.n_state:self.zf + self.n_state + self.hop, :]
            self.old_samples = self.fft_in_buffer[self.zf + self.hop:self.zf + self.hop + self.n_state, :]
            self.x_p = np.zeros((self.n_state, self.D), dtype=np.float32)  # state buffer
            self.y_p = np.zeros((self.nfft - self.hop, self.D), dtype=np.float32)  # prev reconstructed samples
            self.X = np.zeros((self.nbin, self.D), dtype=np.complex64)  # current frame in STFT domain
            self.out = np.zeros((self.hop, self.D), dtype=np.float32)

    def reset(self):
        """
        Reset state variables. Necessary after changing or setting the filter or zero padding.
        """
        self.num_frames = 0
        self.nbin = self.nfft // 2 + 1
        if self.D == 1:
            self.fft_in_buffer[:] = 0.
            self.X[:] = 0.
            self.y_p[:] = 0.
        else:
            self.fft_in_buffer[:, :] = 0.
            self.X[:, :] = 0.
            self.y_p[:, :] = 0.
        # rebuild the DFT object so it picks up any new nfft/windows
        self.dft = DFT(nfft=self.nfft, D=self.D,
                       analysis_window=self.analysis_window,
                       synthesis_window=self.synthesis_window,
                       transform=self.transform)

    def zero_pad_front(self, zf):
        """
        Set zero-padding at beginning of frame.
        """
        # NOTE(review): calling this (or zero_pad_back) more than once keeps
        # prepending zeros to the windows — confirm callers set padding once.
        self.zf = zf
        self.nfft = self.N + self.zb + self.zf
        if self.analysis_window is not None:
            self.analysis_window = np.concatenate((np.zeros(zf), self.analysis_window))
        if self.synthesis_window is not None:
            self.synthesis_window = np.concatenate((np.zeros(zf), self.synthesis_window))

    def zero_pad_back(self, zb):
        """
        Set zero-padding at end of frame.
        """
        self.zb = zb
        self.nfft = self.N + self.zb + self.zf
        if self.analysis_window is not None:
            self.analysis_window = np.concatenate((self.analysis_window, np.zeros(zb)))
        if self.synthesis_window is not None:
            self.synthesis_window = np.concatenate((self.synthesis_window, np.zeros(zb)))

    def set_filter(self, coeff, zb=None, zf=None, freq=False):
        """
        Set time-domain filter with appropriate zero-padding.
        Frequency spectrum of the filter is computed and set for the object.
        There is also a check for sufficient zero-padding.

        Parameters
        -----------
        coeff : numpy array
            Filter in time domain (or frequency domain when freq=True).
        zb : int
            Amount of zero-padding added to back/end of frame.
        zf : int
            Amount of zero-padding added to front/beginning of frame.
        freq : bool
            When True, `coeff` is already a frequency-domain spectrum of
            length nbin and is used as-is.
        """
        # apply zero-padding
        if zb is not None:
            self.zero_pad_back(zb)
        if zf is not None:
            self.zero_pad_front(zf)
        self.reset()
        if not freq:
            # compute filter magnitude and phase spectrum
            self.H = np.complex64(np.fft.rfft(coeff, self.nfft, axis=0))
            # check for sufficient zero-padding (linear convolution length)
            if self.nfft < (self.N + len(coeff) - 1):
                raise ValueError('Insufficient zero-padding for chosen number of samples per frame (L) and filter length (h). Require zero-padding such that new length is at least (L+h-1).')
        else:
            if len(coeff) != self.nbin:
                raise ValueError('Invalid length for frequency domain coefficients.')
            self.H = coeff
        # We need to reallocate buffers after changing zero padding
        self._make_buffers()

    def analysis(self, x_n):
        """
        Transform new samples to STFT domain for analysis.

        Parameters
        -----------
        x_n : numpy array
            [self.hop] new samples.

        Returns
        -----------
        self.X : numpy array
            Frequency spectrum of given frame.
        """
        # check for valid input - already done by self.dft
        # if x_n.shape[0]!=self.hop:
        #     raise ValueError('Invalid input dimensions.')
        # if self.D > 1 and x_n.shape[1]!=self.D:
        #     raise ValueError('Invalid input dimensions.')
        self.fresh_samples[:,] = x_n[:,]  # introduce new samples
        self.x_p[:,] = self.old_samples  # save next state
        # apply DFT to current frame
        self.X[:] = self.dft.analysis(self.fft_in_buffer)
        # shift backwards in the buffer the state
        self.fft_in_state[:,] = self.x_p[:,]
        # self.num_frames += 1

    def process(self):
        """
        Apply filtering in STFT domain (in place, when a filter is set).

        Returns
        -----------
        self.X : numpy array
            Frequency spectrum of given frame.
        """
        if self.H is not None:
            np.multiply(self.X, self.H, self.X)

    def synthesis(self, X=None):
        """
        Transform to time domain and reconstruct output with overlap-and-add.

        Returns
        -------
        numpy array
            Reconstructed array of samples of length <self.hop> (Optional)
        """
        if X is not None:
            self.X[:] = X
        # apply IDFT to current frame
        self.dft.synthesis(self.X)
        # reconstruct output
        L = self.y_p.shape[0]  # length of output state vector
        self.out[:,] = self.dft.x[0:self.hop,]  # fresh output samples
        # add state from previous frames when overlap is used
        if L > 0:
            m = np.minimum(self.hop, L)
            self.out[:m,] += self.y_p[:m,]
        # update state variables
        self.y_p[:-self.hop,] = self.y_p[self.hop:,]  # shift out left
        self.y_p[-self.hop:,] = 0.
        # NOTE(review): when L == 0 the slice [-L:] selects the whole frame,
        # which would not match y_p's shape — confirm hop < nfft always holds.
        self.y_p[:,] += self.dft.x[-L:,]
        return self.out

    def get_prev_samples(self):
        """
        Get reconstructed previous samples (the overlap-add state).
        """
        return self.y_p
|
16,189 | 9fc9aecb0d02f2422d4b1e8c0b97c7d53d7db583 | #coding=utf-8
#author=Garfield
def ai_judge(board):
    """Pick the AI move: always grab the centre square when it is free.

    Returns (1, 1) when board[1][1] == 0, otherwise None (no fallback
    strategy is implemented here).
    """
    # The centre is claimed with top priority.
    return (1, 1) if board[1][1] == 0 else None
#判断每次计算后当前是否存在胜负状况
def if_win(board):
    """Scan all 8 lines of the 3x3 board and report the winner.

    Returns 1 for a line of 1s, -1 for a line of -1s, 0 otherwise.  When
    several lines are complete, the LAST line checked decides the result
    (rows, then columns, then the two diagonals) — this matches the
    original scan order.

    BUG FIX: removed the leftover Python-2 debug statements
    (`print board`, `print "here"`), which also made the module invalid
    under Python 3.
    """
    lines = [
        # rows
        (board[0][0], board[0][1], board[0][2]),
        (board[1][0], board[1][1], board[1][2]),
        (board[2][0], board[2][1], board[2][2]),
        # columns
        (board[0][0], board[1][0], board[2][0]),
        (board[0][1], board[1][1], board[2][1]),
        (board[0][2], board[1][2], board[2][2]),
        # diagonals (top-left to bottom-right, then top-right to bottom-left)
        (board[0][0], board[1][1], board[2][2]),
        (board[0][2], board[1][1], board[2][0]),
    ]
    win = 0
    for a, b, c in lines:
        if a == b == c:
            if a == 1:
                win = 1
            elif a == -1:
                win = -1
            else:
                win = 0  # a completed line of empty squares resets the result
    return win
|
16,190 | 2a704dcaa1f07d3df88145da05e6b8072e1a652e | import os
import torch
import shutil
def create_path(path):
    """Create directory `path` (and any missing parents) if absent."""
    # exist_ok avoids the check-then-create race of the original
    # `if not os.path.exists(...)` guard.
    os.makedirs(path, exist_ok=True)
def save_checkpoint(state, is_best, model_path, name):
    """Persist `state` to <model_path>/<name>_checkpoint.pth.tar; when
    `is_best`, also copy it to <name>_best.pth.tar."""
    checkpoint_file = '%s/%s_checkpoint.pth.tar' % (model_path, name)
    torch.save(state, checkpoint_file)
    if is_best:
        shutil.copyfile(checkpoint_file,
                        '%s/%s_best.pth.tar' % (model_path, name))
def resume_model(model, checkpoint):
    """Restore `model` weights from `checkpoint` and return (epoch, best)."""
    params_dict = torch.load(checkpoint)
    model.load_state_dict(params_dict['state_dict'])
    epoch = params_dict['epoch']
    best = params_dict['best']
    # Report what was loaded (same message format as the training logs).
    print("Load model from {}: \n"
          "Epoch: {}\n"
          "Best: {:.3f}".format(checkpoint, epoch, best))
    return epoch, best
16,191 | b89acf8c903476c2c7d161e4b6124e893663d31c | import gzip, pickle
import os
from ast import literal_eval
import pandas as pd
import natsort
import csv
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.preprocessing import LabelEncoder
from sklearn.decomposition import PCA
from sklearn import model_selection, naive_bayes, svm
import docx
from docx import Document
import numpy as np
#Import packages
from gensim.models.doc2vec import Doc2Vec, TaggedDocument
from nltk.tokenize import word_tokenize
import json, sys
# req = input("Enter the requirement description\n")
# test = input("Enter the test description\n")
def read_in():
    """Read one line of JSON (a two-element array) from stdin."""
    lines = sys.stdin.readlines()
    # Since our input would only be having one line, parse our JSON data from that
    return json.loads(lines[0])

data = []
lines = read_in()
for item in lines:
    data.append(item)
    # print(item)
# print(data[0], data[1])
# First element: requirement description; second: test description.
req = data[0]
test = data[1]

# --- Doc2Vec embedding of the requirement text ---
tokenized_doc = []
doc = []
doc.append(req.lower())
for d in doc:
    tokenized_doc.append(word_tokenize(d.lower()))
tagged_data = [TaggedDocument(d, [i]) for i, d in enumerate(tokenized_doc)]
model = Doc2Vec(tagged_data, vector_size=5, window=2, min_count=1, workers=4, epochs = 100)
data = model.docvecs.vectors_docs
# opening the csv file in 'w+' mode
filereq = open('req_vecs.csv', 'w+', newline ='')
# writing the data into the file
with filereq:
    write = csv.writer(filereq)
    write.writerows(data)
df = pd.read_csv('req_vecs.csv', header = None)
col_size = df.shape[1]
df.columns = ['req'+str(x) for x in range(0,col_size)]
# NOTE(review): the result of applymap() is discarded, so this formatting
# has no effect — confirm whether `df = df.applymap(...)` was intended.
df.applymap('{:.6f}'.format)
df.to_csv("req_with_index.csv", header = True, index = True)

# --- Doc2Vec embedding of the test text (same pipeline, `t` suffix) ---
tokenized_doct = []
doct = []
doct.append(test.lower())
for dt in doct:
    tokenized_doct.append(word_tokenize(dt.lower()))
tagged_datat = [TaggedDocument(dt, [i]) for i, dt in enumerate(tokenized_doct)]
modelt = Doc2Vec(tagged_datat, vector_size=5, window=2, min_count=1, workers=4, epochs = 100)
datat = modelt.docvecs.vectors_docs
# opening the csv file in 'w+' mode
filetest = open('test_vecs.csv', 'w+', newline ='')
# writing the data into the file
with filetest:
    write = csv.writer(filetest)
    write.writerows(datat)
df2 = pd.read_csv('test_vecs.csv', header = None)
test_col_size = df2.shape[1]
df2.columns = ['test'+str(x) for x in range(0,test_col_size)]
df2.to_csv("test_with_index.csv", header = True, index = True)
'''
with open('req_vecs.csv','rt') as f1, open('test_vecs.csv','rt') as f2, open('merged_vecs.csv','w') as w:
    writer = csv.writer(w)
    for r1,r2 in zip(csv.reader(f1),csv.reader(f2)):
        writer.writerow(r1+r2)
    #writer.writerow(r1+r2)
'''
a = pd.read_csv("req_with_index.csv")
s = pd.read_csv("test_with_index.csv")
# NOTE(review): merge() without an explicit key joins on all shared columns
# (presumably the written index column) — confirm the intended join key.
reqntest = pd.merge(a, s)
reqntest.to_csv('merged_vecs.csv', index = None)
new_input = pd.read_csv("merged_vecs.csv")
new_input = new_input.drop(['Unnamed: 0'], axis=1)
new_input = MinMaxScaler().fit_transform(new_input)
# Pre-trained classifier produced offline and shipped alongside the script.
pickle_model = pickle.load(open("pickle_model.pkl", "rb"))
Ypredict = pickle_model.predict(new_input)
# print(Ypredict)
Ypredict = Ypredict.astype('int64')
if Ypredict == 0:
    print("There's strong link between the two.")
else:
    print("The requirement and test cases don't seem to have link.")
16,192 | 952220cf8d2db249d2bdf47f8edd0b20d6ad078a | import pygame
from pygame import mixer
screen = pygame.display.set_mode((800, 600))
pygame.init()
class Enemy:
    """Placeholder for the enemy roster; both methods are still stubs."""
    def covid(self):
        pass
    def alien(self):
        pass
class Player:
    """Base player holding the ship's screen coordinates."""
    def __init__(self):
        # Default spawn position inside the 800x600 window.
        self.x = 370
        self.y = 480
    def move(self, xChange):
        # NOTE(review): subtracts, so a positive xChange moves the ship left;
        # Game.start() bypasses this and adds to x directly — confirm intent.
        self.x -= xChange
        print(self.x)
class Spaceship(Player):
    """Player ship drawn with the spaceship.png sprite."""
    def show(self, x, y=480):
        """Blit the ship sprite at (x, y)."""
        # PERF FIX: cache the sprite — the original reloaded spaceship.png
        # from disk on every frame.
        if not hasattr(self, 'image'):
            self.image = pygame.image.load('spaceship.png')
        screen.blit(self.image, (x, y))
class Stats:
    """Placeholder for score keeping; both methods are still stubs."""
    def score(self):
        pass
    def kills(self):
        pass
class Game:
    """Owns the window chrome, music, the player ship and the main loop."""
    def __init__(self):
        # Initialize the pygame assets.
        # Background
        self.background = pygame.image.load('bk1.jpg')
        # NOTE(review): self.background is loaded but never drawn in start()
        # (the loop fills the screen black) — confirm whether it should be blitted.
        # Sound
        mixer.music.load("background.mp3")
        mixer.music.play(-1)  # -1 = loop the track forever
        # Caption and Icon
        pygame.display.set_caption("COVID INVADER")
        self.icon = pygame.image.load('ussf.png')
        pygame.display.set_icon(self.icon)
        self.playerX_change = 0
        self.Player1 = Spaceship()
    def start(self):
        """Run the event/render loop until the window is closed."""
        running = True
        x_move = 0  # -1, 0 or +1 depending on which arrow key is held
        while running:
            screen.fill((0, 0, 0))
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    running = False
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_LEFT:
                        x_move = -1
                        print("left")
                    if event.key == pygame.K_RIGHT:
                        x_move = 1
                        print("right")
                if event.type == pygame.KEYUP:
                    # Stop moving when either arrow key is released.
                    if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
                        x_move = 0
            self.Player1.x += x_move
            # Clamp the ship inside [0, 736] (presumably 800 minus the sprite
            # width — confirm against spaceship.png).
            if self.Player1.x >= 736:
                self.Player1.x = 736
            elif self.Player1.x <= 0:
                self.Player1.x = 0
            self.Player1.show(x=self.Player1.x)
            pygame.display.update()

if __name__ == "__main__":
    game = Game()
    game.start()
def vowel_filter(function):
    """Decorator: keep only the vowels from the list `function` returns."""
    from functools import wraps

    @wraps(function)  # IMPROVEMENT: preserve the wrapped function's metadata
    def wrapper():
        letters = function()
        return [l for l in letters if l.lower() in 'aoueiy']
    return wrapper
@vowel_filter
def get_letters():
    """Return the letters a-e (the decorator filters them down to vowels)."""
    return ["a", "b", "c", "d", "e"]
# Prints ['a', 'e'] — only the vowels survive the filter.
print(get_letters())
|
16,194 | 4482cc3459b799efd123387a894f03ac9a38541e | import torch
from torch.utils.data import Dataset
from torchvision import datasets as datasets
from Keys import Keys
class VOCDataset(Dataset):
def __init__(self, root, year, image_set, transform=None):
self.dataset = datasets.VOCDetection(root=root, year=year, image_set=image_set, download=True)
self.transform = transform
def __len__(self):
return len(self.dataset)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
item = self.dataset[idx]
image = item[0]
annotations = item[1][Keys.ANNOTATION][Keys.OBJECT]
print("before ")
print(annotations)
new_annotations = []
# there can be multiple annotations
for annotation in annotations:
bndbox = annotation["bndbox"]
x_min = float(bndbox[Keys.X_MIN])
x_max = float(bndbox[Keys.X_MAX])
y_min = float(bndbox[Keys.Y_MIN])
y_max = float(bndbox[Keys.Y_MAX])
annotation["bndbox"] = {Keys.CENTER_X: (x_min + x_max) / 2,
Keys.CENTER_Y: (y_min + y_max) / 2,
Keys.WIDTH: x_max - x_min,
Keys.HEIGHT: y_max - y_min}
new_annotations.append(annotation)
print("before ")
print(new_annotations)
sample = {Keys.IMAGE: image, Keys.ANNOTATION: new_annotations}
if self.transform:
sample = self.transform(sample)
return sample |
16,195 | 71d74d7e960bfa363fc42405301214766affe924 | import errori
class Persona:
    """Contact-book entry: name, surname, phone number and optional e-mail."""

    def __init__(self, nome, cognome, numeroTelefonico, email):
        # Name, surname and phone number are mandatory (checked in this order).
        if not nome:
            raise errori.ValoreObbligatorio
        if not cognome:
            raise errori.ValoreObbligatorio
        if not numeroTelefonico:
            raise errori.ValoreObbligatorio
        self.__nome = nome
        self.__cognome = cognome
        self.__numeroTelefonico = numeroTelefonico
        # E-mail is optional and gets a placeholder when missing.
        self.__email = email if email else "Non specificato"

    def get_Email(self):
        return self.__email

    def set_Email(self, email):
        self.__email = email

    def get_Nome(self):
        return self.__nome

    def set_Nome(self, nome):
        self.__nome = nome

    def get_Cognome(self):
        return self.__cognome

    def set_Cognome(self, cognome):
        self.__cognome = cognome

    def get_numeroTelefonico(self):
        return self.__numeroTelefonico

    def set_numeroTelefonico(self, numeroTelefonico):
        self.__numeroTelefonico = numeroTelefonico

    def __str__(self):
        return f"Nome: {self.__nome} ,Cognome: {self.__cognome} ,Numero di telefono: {self.__numeroTelefonico} ,Email: {self.__email} "
16,196 | b057986d9a1595c4e8f998e6550d591b4a81b5e0 | from spyne import Application, rpc, ServiceBase, Unicode, Iterable
from spyne.protocol.http import HttpRpc
from spyne.protocol.json import JsonDocument
from spyne.server.wsgi import WsgiApplication
import requests
import simplejson as json
import logging
import numpy
logging.basicConfig(level=logging.DEBUG)
class FilterCrimeReportService(ServiceBase):
    """Spyne RPC service that summarises SpotCrime incident reports."""

    @rpc(Unicode, Unicode, Unicode, _returns=Iterable(Unicode))
    def checkcrime(ctx, lat, lon, radius):
        """
        Fetch crimes around (lat, lon) within `radius` and yield one summary
        object: total crime count, the three most dangerous streets, a tally
        per crime type, and a tally per 3-hour time slot.
        """
        url = "https://api.spotcrime.com/crimes.json?lat=%s&lon=%s&radius=%s&key=." % (lat, lon, radius)
        get_response = requests.get(url)
        if get_response.status_code != 200:
            # BUG FIX: the original passed a dict to json.loads(), which
            # raises TypeError; yield the message object directly.  Checking
            # the status before parsing also avoids decoding non-JSON bodies.
            yield {'Message': 'Bad co-ordinates, Try Again'}
            return
        json_obj = json.loads(get_response.content)
        if len(json_obj['crimes']) == 0:
            yield {'Message': 'No Crimes reported at these area co-ordinates'}
            return

        crime_count = 0
        street_names = []    # unique street names seen so far
        street_crimes = []   # per-street counts, parallel to street_names
        crime_type = {}      # per-type tallies
        event_time = [0, 0, 0, 0, 0, 0, 0, 0]  # eight 3-hour buckets

        def tally_street(street_name):
            # First sighting appends the street; repeats bump its counter.
            if street_name not in street_names:
                street_names.append(street_name)
                street_crimes.append(1)
            else:
                street_crimes[street_names.index(street_name)] += 1

        for crime in json_obj['crimes']:
            crime_count += 1
            # Count the crime type.
            if crime['type'] not in crime_type:
                crime_type[crime['type']] = 1
            else:
                crime_type[crime['type']] += 1
            # Extract street names from the free-form address.
            if 'OF' in crime['address']:
                tally_street(crime['address'].split("OF ", 1)[1])
            if 'BLOCK BLOCK' in crime['address']:
                tally_street(crime['address'].split("BLOCK BLOCK ", 1)[1])
            if '&' in crime['address']:
                # An intersection counts once for each of its two streets.
                tally_street(crime['address'].split("& ", 1)[1])
                tally_street(crime['address'][:crime['address'].rfind(' &')])
            # Bucket the incident time into one of eight 3-hour slots.
            # NOTE(review): the boundary handling below reproduces the
            # original exactly (e.g. 12:00 AM lands in the 9pm-midnight
            # bucket) — confirm that is intended.
            if 'AM' in crime['date']:
                crime_hour = crime['date'].split(" ", 1)[1]
                crime_hour = crime_hour[:crime_hour.rfind(':')]
                crime_minute = crime['date'].split(":", 1)[1]
                crime_minute = crime_minute[:crime_minute.rfind(' AM')]
                c_hour = int(crime_hour)
                c_min = int(crime_minute)
                if c_hour < 4 or c_hour == 12:
                    if c_hour == 3 and c_min != 0:
                        event_time[1] += 1
                    else:
                        if c_hour == 12 and c_min == 0:
                            event_time[7] += 1
                        else:
                            event_time[0] += 1
                if 3 < c_hour < 7:
                    if c_hour == 6 and c_min != 0:
                        event_time[2] += 1
                    else:
                        event_time[1] += 1
                if 6 < c_hour < 10:
                    if c_hour == 9 and c_min != 0:
                        event_time[3] += 1
                    else:
                        event_time[2] += 1
                if 9 < c_hour < 12:
                    event_time[3] += 1
            else:
                crime_hour = crime['date'].split(" ", 1)[1]
                crime_hour = crime_hour[:crime_hour.rfind(':')]
                crime_minute = crime['date'].split(":", 1)[1]
                crime_minute = crime_minute[:crime_minute.rfind(' PM')]
                c_hour = int(crime_hour)
                c_min = int(crime_minute)
                if c_hour < 4 or c_hour == 12:
                    if c_hour == 3 and c_min != 0:
                        event_time[5] += 1
                    else:
                        event_time[4] += 1
                if 3 < c_hour < 7:
                    if c_hour == 6 and c_min != 0:
                        event_time[6] += 1
                    else:
                        event_time[5] += 1
                if 6 < c_hour < 10:
                    if c_hour == 9 and c_min != 0:
                        event_time[7] += 1
                    else:
                        event_time[6] += 1
                if 9 < c_hour < 12:
                    event_time[7] += 1

        # Top-3 most dangerous streets.
        # BUG FIX: numpy.argpartition(-arr, 3) raises when fewer than four
        # streets were seen; a plain sort handles any count (and drops the
        # numpy dependency here).
        ranked = sorted(range(len(street_names)),
                        key=lambda i: street_crimes[i], reverse=True)
        final_streets = [street_names[i] for i in ranked[:3]]
        # Per-slot tallies keyed by human-readable ranges.
        event_time_count = {"12:01am-3am": event_time[0], "3:01am-6am": event_time[1], "6:01am-9am": event_time[2],
                            "9:01am-12noon": event_time[3], "12:01pm-3pm": event_time[4], "3:01pm-6pm": event_time[5],
                            "6:01pm-9pm": event_time[6], "9:01pm - 12midnight": event_time[7]}
        yield json.loads(json.dumps({'total_crime': crime_count, 'the_most_dangerous_streets': final_streets, 'crime_type_count': crime_type, 'event_time_count': event_time_count}))
# Wire the service into a spyne Application: HTTP-RPC in, JSON out.
app = Application([FilterCrimeReportService], tns='com.sjsu.cmpe273.hw', in_protocol=HttpRpc(validator='soft'), out_protocol=JsonDocument())
if __name__ == '__main__':
    from wsgiref.simple_server import make_server
    wsgi_app = WsgiApplication(app)
    # Serve on all interfaces, port 8000, until interrupted.
    server = make_server('0.0.0.0', 8000, wsgi_app)
    server.serve_forever()
16,197 | d8bf71b31b37d15c6c4ee870d395a52f28bfab5e | iChange = 1
# NOTE(review): ad-hoc option constants (apparently for a Civilization IV
# WorldBuilder/Python tweak); the semantics are inferred only from the names —
# confirm against the script that consumes them.
bRemove = False
bPython = True
bHideInactive = True
# Unit activity states the change may apply to.
Activities = ["AWAKE", "HOLD", "SLEEP", "HEAL", "SENTRY", "INTERCEPT", "MISSION", "PATROL", "PLUNDER"]
iSetValue = 3
iWarValue = 0
iChangeType = 2
iPlotType = 2
iOwnerType = 0
bApplyAll = False
iSelectedPlayer = 0
iSelectedCiv = -1
iSelectedClass = -1
16,198 | e804a0af6b554c15fdd13dfc0579ad9bbe7d07a4 | """Submodule implementing baseline and optimal models."""
from .deep_enhancer import deep_enhancers
from .fixed_cnn import fixed_cnn
from .fixed_ffnn import fixed_ffnn
__all__ = [
"deep_enhancers",
"fixed_cnn",
"fixed_ffnn"
]
|
16,199 | ea932fb0e7a5956401f8514d00325147c0966a4c | # Digital Signal Processing by Paolo Prandoni and Martin Vetterli
# Coursera/EPFL
# Nabin Sharma
# Oct 09, 2013
import pylab
import scipy.io.wavfile
import time
def ks_loop(x, alpha=1, D=1):
    """Karplus-Strong synthesis, loop version.

    x : initial excitation (list or 1-d array of M samples)
    alpha : per-period attenuation factor
    D : output length as a multiple of len(x); clamped to at least 1
    Returns a numpy array of D*M samples.
    """
    # pylab.array() is just numpy's; import numpy directly so this pure
    # numeric helper also works without matplotlib installed.
    import numpy
    # If x is a 1-d array, convert to a list so appends are cheap.
    if not isinstance(x, list):
        x = x.tolist()
    # Output must be at least as long as the input, so D >= 1.
    D = max(D, 1)
    M = len(x)
    y = list(x)
    for i in range(M, D * M):
        # Feed back the attenuated sample from one period (M samples) ago.
        y.append(alpha * y[i - M])
    return numpy.array(y)
def ks(x, alpha=1, D=1):
    """Karplus-Strong synthesis, vectorised (matrix) version.

    The output is the concatenation of alpha**d * x for d = 0..D-1,
    identical to ks_loop but computed without a Python loop.
    """
    import numpy
    x = numpy.asarray(x)  # accept list or 1-d array alike
    # Output must be at least as long as the input, so D >= 1.
    D = max(D, 1)
    # Column vector of alpha^0 .. alpha^(D-1); broadcasting against x yields
    # a (D, M) matrix whose row d equals alpha^d * x.
    gains = numpy.power(float(alpha), numpy.arange(D)).reshape(-1, 1)
    # BUG FIX: the original read the matrix out with yMatrix.flatten(0);
    # modern NumPy only accepts order strings ('C', 'F', ...) there.
    # ravel() performs the same row-major read-out.
    return (gains * x).ravel()
def generate_cord():
    """
    Synthesize the opening chord of "A Hard Day's Night" with KS synthesis.

    TODO: Pass the parameters as input arguments.
    Parameters:
    - Fs : sampling frequency
    - F0 : frequencies of the notes forming the chord
    - gain : gains of the individual notes in the chord
    - duration : duration of the chord in seconds
    - alpha : attenuation in the KS algorithm
    Returns (Fs, duration, chord_samples).
    """
    Fs = 48000
    # D2, D3, F3, G3, F4, A4, C5, G5
    F0 = 440 * pylab.array(
        [2**-(31.0/12), 2**-(19.0/12), 2**-(16.0/12), 2**(-14.0/12),
         2**-(4.0/12), 1.0, 2**(3.0/12), 2**(10.0/12)])
    gain = [1.2, 3.0, 1.0, 2.2, 1.0, 1.0, 1.0, 3.5]
    duration = 4.0
    alpha = 0.9785
    # BUG FIX: force an integer sample count — Fs * duration is a float and
    # modern NumPy rejects float sizes in zeros()/rand().
    nbsample_chord = int(Fs * duration)
    # This is used to correct alpha later, so that all the notes
    # decay together (with the same decay rate).
    first_duration = pylab.ceil(nbsample_chord / pylab.round_(Fs / F0[0]))
    # Initialization.
    chord = pylab.zeros(nbsample_chord)
    for i, f in enumerate(F0):
        print("Working on %g / %g" % (i + 1, len(F0)))
        # Get M (samples per period) and the per-note duration parameter.
        current_M = pylab.round_(Fs / f)
        current_duration = pylab.ceil(nbsample_chord / current_M)
        # Correct current alpha so that all the notes decay together
        # (with the same decay rate).
        current_alpha = alpha ** (first_duration / current_duration)
        # Let Paul's high D on the bass ring a bit longer.
        if i == 1:
            current_alpha = current_alpha ** 0.8
        # Generate input and output of the KS algorithm.
        # BUG FIX: round_() returns a float; rand() needs an int count.
        x = pylab.rand(int(current_M))
        y = ks(x, current_alpha, int(current_duration))
        y = y[:nbsample_chord]
        # Construct the chord by adding the generated note (with the
        # appropriate gain).
        chord = chord + gain[i] * y
    return Fs, duration, chord
def main():
    """Benchmark both KS implementations, check they agree, plot the result,
    and render the chord to chord.wav."""
    x = pylab.randn(100)
    # BUG FIX: time.clock() was removed in Python 3.8;
    # time.perf_counter() is the documented replacement.
    t0 = time.perf_counter()
    y1 = ks_loop(x, 0.9, 10)
    t_loop = time.perf_counter() - t0
    t0 = time.perf_counter()
    y2 = ks(x, 0.9, 10)
    t_matrix = time.perf_counter() - t0
    print("Loop method took %g seconds." % t_loop)
    print("Matrix method took %g seconds." % t_matrix)
    # Make sure y1 and y2 are the same within very small numeric error.
    assert(pylab.sum(pylab.absolute(y1 - y2)) < 1e-10)
    # Plot x and y
    pylab.figure()
    pylab.subplot(211)
    pylab.stem(x)
    pylab.ylabel('x')
    pylab.subplot(212)
    pylab.stem(y2)
    pylab.ylabel('y')
    pylab.xlabel('samples')
    print("Generating the opening chord of Hard day's night by The Beatles ...")
    Fs, T, chord = generate_cord()
    pylab.figure()
    pylab.plot(pylab.arange(0.0, T, 1.0 / Fs), chord)
    pylab.xlabel('time (sec)')
    pylab.title('First Chord of Hard Days Night')
    print("Writing the chord to chord.wav ...")
    # Normalize to the int16 range before writing.
    C = max(pylab.absolute(chord))
    scipy.io.wavfile.write("chord.wav", Fs,
                           pylab.int16((2**15 - 1) * chord / C))
    print("Done.")
    pylab.show()
if __name__ == '__main__':
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.