index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
10,700 | f704aa792302a0aee73e305c6ea66605d206dbda | from socket import socket, AF_INET, SOCK_STREAM
def echo_client(client_sock, addr):
    """Echo lines received on *client_sock* back to the peer, line by line.

    Args:
        client_sock: a connected stream socket.
        addr: the peer address, used only for logging.

    The file wrappers and the socket are always closed on return, even if
    reading or writing fails (the original leaked the makefile wrappers, so
    the underlying fd was never actually released by ``close()``).
    """
    print('Got connection from', addr)
    print('Socket fd:', client_sock.fileno())
    print(type(client_sock))
    try:
        # Text-mode file wrappers for read/write.  latin-1 maps every byte
        # 1:1 to a code point, so arbitrary bytes round-trip unchanged.
        # Using `with` guarantees the wrappers' buffers are flushed and
        # their references on the socket released.
        with client_sock.makefile('r', encoding='latin-1') as client_in, \
             client_sock.makefile('w', encoding='latin-1') as client_out:
            # NOTE: open(client_sock.fileno(), ...) doesn't work under Windows
            # because the number returned by socket.fileno is not a valid file
            # descriptor there (see
            # http://docs.python.org/3/library/socket.html#socket.socket.fileno)
            # Echo lines back using file io.
            for line in client_in:
                client_out.write(line)
                client_out.flush()
    finally:
        client_sock.close()
def echo_server(address):
    """Accept TCP connections on *address* forever, echoing each client.

    Clients are served one at a time; echo_client() closes each connection
    when the peer stops sending.
    """
    with socket(AF_INET, SOCK_STREAM) as listener:
        listener.bind(address)
        listener.listen(1)
        while True:
            conn, peer = listener.accept()
            echo_client(conn, peer)
def main():
    """Entry point: run the echo server on localhost:8000 (blocks forever)."""
    host, port = 'localhost', 8000
    echo_server((host, port))
if __name__ == '__main__':
main() |
10,701 | f9c55bc797b945efc9921daf8c422ad531799893 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import ast
import pathlib
import re
import sys
import astor as astor
from rich.console import Console
# Shared Rich console; fixed width/color keep output stable in CI logs.
console = Console(color_system="standard", width=200)
# Matches a rendered call like `logger.info(f"...")` / `logging.warning(f"...")`
# at the start of a line — i.e. a logging call whose first argument is an
# f-string.  NOTE(review): name looks like a typo for LOGGING_MATCHER; renaming
# would also require updating its use site in LogFinder.
LOGGIN_MATCHER = re.compile(r'^log.?[a-z]*\.[a-z]*\(f.*["\']')
# Same check for `self.log.<level>(f"...")` calls made inside classes.
SELF_LOG_MATCHER = re.compile(r'^self\.log\.[a-z]*\(f.*["\']')
class LogFinder(astor.TreeWalk):
    """AST walker that reports logging calls using f-strings.

    For every Call node that looks like a logging call, the node is rendered
    back to source and checked against the f-string matchers; offenders are
    printed as ``<file>:<line> -> <call>`` and counted in ``error_count``.
    """

    # Whether the "[red]Error:[/] <file>" header has been printed yet.
    module_printed: bool = False
    # Path of the file being checked; assigned by the caller before walk().
    name: str = ""
    # Number of offending logging calls found in this file.
    error_count = 0

    def pre_Call(self):
        # NOTE(review): `and` binds tighter than `or`, so this matches either
        # (a) an attribute call on a bare name logger/logging/log, or
        # (b) ANY call whose attribute name is log/debug/warning/info/error/
        # critical, regardless of what object it is called on.  The regex
        # match below then filters to f-string arguments only.
        if isinstance(self.cur_node.func, ast.Attribute) and (
            isinstance(self.cur_node.func.value, ast.Name)
            and (
                self.cur_node.func.value.id == "logger"
                or self.cur_node.func.value.id == "logging"
                or self.cur_node.func.value.id == "log"
            )
            or (self.cur_node.func.attr in ["log", "debug", "warning", "info", "error", "critical"])
        ):
            # Re-render the call node to source text for the regex checks.
            line = astor.to_source(self.cur_node, add_line_information=True)
            if LOGGIN_MATCHER.match(line) or SELF_LOG_MATCHER.match(line):
                if not self.module_printed:
                    # Print the per-file header once, before the first hit.
                    self.module_printed = True
                    console.print(f"[red]Error:[/] {self.name}")
                console.print(f"{self.name}:{self.cur_node.lineno} -> {line}", end="")
                self.error_count += 1
def check_logging() -> int:
    """Check every file named on the command line for f-string logging.

    Returns 1 (for use as a process exit code) if any offending call was
    found, else 0.
    """
    errors = 0
    for candidate in sys.argv[1:]:
        path = pathlib.Path(candidate)
        tree = ast.parse(path.read_text("utf-8"), str(path))
        walker = LogFinder()
        walker.name = candidate
        walker.walk(node=tree)
        errors += walker.error_count
    if errors:
        console.print(
            "\n[yellow]Please convert all the logging instructions above "
            "to use '%-formatting' rather than f-strings."
        )
        console.print("Why?: https://docs.python.org/3/howto/logging.html#logging-variable-data\n")
        return 1
    return 0
if __name__ == "__main__":
sys.exit(check_logging())
|
10,702 | 9090e7ff7afc0c76593f160efac1f72b4422cae4 | #!/usr/bin/env python3
import datetime
import speedtest
from peewee import IntegrityError
from models import Result, Server
# One Speedtest session reused for all measurement runs.
st = speedtest.Speedtest()

# Run the measurement five times, re-picking the best server each time.
results = []
for i in range(5):
    st.get_best_server()
    # Single-threaded transfers.
    threads = 1
    st.download(threads=threads)
    st.upload(threads=threads)
    result = st.results.dict()
    # Stamp with local time.  NOTE(review): naive local datetime — confirm
    # whether the Result model expects UTC.
    result['timestamp'] = datetime.datetime.now()
    results.append(result)

# Persist each measurement, creating the Server row on first sighting.
for result in results:
    server_details = result['server']
    server_id = server_details['id']
    server_query = Server.select().where(Server.st_id == server_id)
    if len(server_query) == 0:
        server_params = {'st_id': server_id,
                         'name': server_details['name'],
                         'sponsor': server_details.get('sponsor'),
                         'url': server_details.get('url'),
                         # NOTE(review): 'url1' is filled from key 'url2' —
                         # looks like a field/key mismatch; confirm against
                         # the Server model definition.
                         'url1': server_details.get('url2'),
                         'cc': server_details.get('cc'),
                         'host': server_details.get('host'),
                         'lat': server_details.get('lat'),
                         'lon': server_details.get('lon')}
        server = Server.create(**server_params)
        # NOTE(review): peewee's Model.create() already persists the row, so
        # this save() presumably issues a redundant write — confirm.
        server.save()
    else:
        server = server_query[0]
    r_params = {'server': server,
                'timestamp': result['timestamp'],
                'download_spd': result.get('download'),
                'upload_spd': result.get('upload'),
                'bytes_sent': result.get('bytes_sent'),
                'bytes_rec': result.get('bytes_received'),
                'latency': result['ping'],
                'client': result['client'].get('ip')}
    r = Result(**r_params)
    r.save()
|
10,703 | db9a3b79b4054bd94eafdc4f36cb793cf1ebaa87 | #!/usr/bin/env python
import time
from optparse import OptionParser
# Command-line options.
# Fixes over the original: '--hist2' and '--newYlabel' had copy-pasted
# `dest=None` (optparse silently derived 'hist2'/'newYlabel' from the flag
# name — now explicit, same runtime behavior); '--hist2' help typo
# "form" -> "from"; '--hist1' help text was a copy-paste of hist2's
# description.
parser = OptionParser()
parser.add_option('--outname', metavar='F', type='string', action='store',
                  default='mujets',
                  dest='outname',
                  help='Output name for png and pdf files')
parser.add_option('--hist1', metavar='F', type='string', action='store',
                  default='ptRecoTop',
                  dest='hist1',
                  help='Histogram to plot (Histogram2 is subtracted from it if given)')
parser.add_option('--hist2', metavar='F', type='string', action='store',
                  default=None,
                  dest='hist2',
                  help='Histogram2 to be subtracted from Histogram1')
parser.add_option('--NQCD', metavar='F', type='float', action='store',
                  default=0.0,
                  dest='NQCD',
                  help='QCD Normalization')
parser.add_option('--ignoreData', metavar='F', action='store_true',
                  default=False,
                  dest='ignoreData',
                  help='Ignore plotting data')
parser.add_option('--drawLegend', metavar='F', action='store_true',
                  default=True,
                  dest='drawLegend',
                  help='Draw a legend')
parser.add_option('--rebin', metavar='R', type='int', action='store',
                  default=None,
                  dest='rebin',
                  help='Rebin histogram?')
parser.add_option('--newYlabel', metavar='F', type='string', action='store',
                  default=None,
                  dest='newYlabel',
                  help='Fixed y-label is needed if rebinning the histogram')
parser.add_option('--plotNom', metavar='F', action='store_true',
                  default=False,
                  dest='plotNom',
                  help='Only plot the Nominal')

(options, args) = parser.parse_args()
argv = []  # NOTE(review): presumably meant to keep ROOT from consuming sys.argv — confirm
from ROOT import gRandom, TH1, TH1D, cout, TFile, gSystem, TCanvas, TPad, gROOT, gStyle, THStack, TLegend, TLatex, TColor
# Global ROOT plot style: load the local style macro, then disable title /
# stats / fit boxes and switch titles+labels to fixed-pixel-size font 43.
gROOT.Macro("rootlogon.C")
gStyle.SetOptTitle(0);
gStyle.SetOptStat(0);
gStyle.SetOptFit(0);
gStyle.SetOptStat(000000)
gStyle.SetTitleFont(43)
#gStyle.SetTitleFontSize(0.05)
gStyle.SetTitleFont(43, "XYZ")
gStyle.SetTitleSize(30, "XYZ")
gStyle.SetTitleOffset(2.0, "X")
gStyle.SetTitleOffset(1.25, "Y")
gStyle.SetLabelFont(43, "XYZ")
gStyle.SetLabelSize(20, "XYZ")
# Performance numbers
# Performance numbers
lum = 19.7  # integrated luminosity, fb-1
SF_t = 1.0  # top-tag scale factor applied to MC
#SF_t = 0.94
# Cross sections (in fb) and the number of MC events
sigma_ttbar_NNLO = [  # fb, from http://arxiv.org/pdf/1303.6254.pdf
    245.8 * 1000.,  # nom
    237.4 * 1000.,  # scaledown
    252.0 * 1000.,  # scaleup
    239.4 * 1000.,  # pdfdown
    252.0 * 1000.,  # pdfup
]
sigma_T_t_NNLO = 56.4 * 1000.  #
sigma_Tbar_t_NNLO = 30.7 * 1000.  # All single-top approx NNLO cross sections from
sigma_T_s_NNLO = 3.79 * 1000.  # https://twiki.cern.ch/twiki/bin/viewauth/CMS/SingleTopSigma8TeV
sigma_Tbar_s_NNLO = 1.76 * 1000.  #
sigma_T_tW_NNLO = 11.1 * 1000.  #
sigma_Tbar_tW_NNLO = 11.1 * 1000.  #
sigma_WJets_NNLO = 36703.2 * 1000.  # from https://twiki.cern.ch/twiki/bin/viewauth/CMS/StandardModelCrossSectionsat8TeV
# MC event counts from B2G twiki here :
# https://twiki.cern.ch/twiki/bin/view/CMS/B2GTopLikeBSM53X#Backgrounds
Nmc_ttbar = 21675970
Nmc_T_t = 3758227
Nmc_Tbar_t = 1935072
Nmc_T_s = 259961
Nmc_Tbar_s = 139974
Nmc_T_tW = 497658
Nmc_Tbar_tW = 493460
Nmc_WJets = 57709905
Nmc_TT_Mtt_700_1000 = 3082812
Nmc_TT_Mtt_1000_Inf = 1249111
Nmc_ttbar_scaledown = 14998606
Nmc_ttbar_scaleup = 14998720
Nmc_TT_Mtt_700_1000_scaledown = 2170074
Nmc_TT_Mtt_700_1000_scaleup = 2243672
Nmc_TT_Mtt_1000_Inf_scaledown = 1308090
Nmc_TT_Mtt_1000_Inf_scaleup = 1241650
# QCD Normalization from MET fits
NQCD = options.NQCD
#
# NEW ttbar filter efficiencies
# These were determined "by eye" to make the generated mttbar spectrum smooth in the "makeMttGenPlots.py" script
#                     nom    scaledown scaleup
e_TT_Mtt_700_1000 = [0.074, 0.081, 0.074]
e_TT_Mtt_1000_Inf = [0.015, 0.016, 0.014]
e_TT_Mtt_0_700 = [1.0, 1.0, 1.0]  # No efficiency here, we applied the cut at gen level
# ttbar filter efficiencies
#                     nom    scaledown scaleup
#e_TT_Mtt_700_1000 = [0.074, 0.078, 0.069]
#e_TT_Mtt_1000_Inf = [0.014, 0.016, 0.013]
#e_TT_Mtt_0_700 = [1.0 , 1.0, 1.0 ] # No efficiency here, we applied the cut at gen level
#
# Sample labels and the systematic variations to process.
names = ['DATA', 'TTbar', 'TTbarOther', 'WJets', 'SingleTop', 'QCD_SingleMu']
plots = ['jec__down', 'jec__up', 'jer__down', 'jer__up', 'pdf__down', 'pdf__up', 'nom', 'scale__down', 'scale__up', 'toptag__down', 'toptag__up']
# Accumulators filled per systematic variation while looping over `plots`.
canvs = []
histsData = []
hists = []
hMeas_TT_Mtt_less_700 = []
hMeas_TT_Mtt_700_1000 = []
hMeas_TT_Mtt_1000_Inf = []
hMeas_TT_nonSemiLep_Mtt_less_700 = []
hMeas_TT_nonSemiLep_Mtt_700_1000 = []
hMeas_TT_nonSemiLep_Mtt_1000_Inf = []
hMeas_T_t = []
hMeas_Tbar_t = []
hMeas_T_s = []
hMeas_Tbar_s = []
hMeas_T_tW = []
hMeas_Tbar_tW = []
hMeas_WJets = []
hMeas_qcd = []
hMeas_TT_Mtt = []
hMeas_TT_nonSemiLep_Mtt = []
hMeas_SingleTop = []
# Open the output file; its name records which histogram(s) were used.
# NOTE(review): the `elif` is equivalent to a plain `else` here.
if options.hist2 is None:
    fout = TFile("normalized_" + options.outname + '_' + options.hist1 + ".root", "RECREATE")
elif options.hist2 is not None:
    fout = TFile("normalized_" + options.outname + '_' + options.hist2 + '_subtracted_from_' + options.hist1 + ".root", "RECREATE")
# ==============================================================================
# Example Unfolding
# ==============================================================================
if not options.ignoreData :
fdata = TFile("histfiles/SingleMu_iheartNY_V1_mu_Run2012_nom.root")
fQCD_SingleMu = TFile("histfiles/SingleMu_iheartNY_V1_mu_Run2012_qcd.root")
# single top
fT_t_nom = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fT_t_qcd = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fT_t_jecdown = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fT_t_jecup = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fT_t_jerdown = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fT_t_jerup = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fT_t_topdown = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fT_t_topup = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fT_t_btagdown = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fT_t_btagup = TFile("histfiles/T_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTbar_t_nom = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTbar_t_qcd = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTbar_t_jecdown = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTbar_t_jecup = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTbar_t_jerdown = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTbar_t_jerup = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTbar_t_topdown = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTbar_t_topup = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTbar_t_btagdown = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTbar_t_btagup = TFile("histfiles/Tbar_t-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fT_s_nom = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fT_s_qcd = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fT_s_jecdown = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fT_s_jecup = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fT_s_jerdown = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fT_s_jerup = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fT_s_topdown = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fT_s_topup = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fT_s_btagdown = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fT_s_btagup = TFile("histfiles/T_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTbar_s_nom = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTbar_s_qcd = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTbar_s_jecdown = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTbar_s_jecup = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTbar_s_jerdown = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTbar_s_jerup = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTbar_s_topdown = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTbar_s_topup = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTbar_s_btagdown = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTbar_s_btagup = TFile("histfiles/Tbar_s-channel_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fT_tW_nom = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fT_tW_qcd = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fT_tW_jecdown = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fT_tW_jecup = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fT_tW_jerdown = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fT_tW_jerup = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fT_tW_topdown = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fT_tW_topup = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fT_tW_btagdown = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fT_tW_btagup = TFile("histfiles/T_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTbar_tW_nom = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTbar_tW_qcd = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTbar_tW_jecdown = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTbar_tW_jecup = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTbar_tW_jerdown = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTbar_tW_jerup = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTbar_tW_topdown = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTbar_tW_topup = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTbar_tW_btagdown = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTbar_tW_btagup = TFile("histfiles/Tbar_tW-channel-DR_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
# W+jets
fWJets_nom = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_nom.root")
fWJets_qcd = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_qcd.root")
fWJets_jecdown = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_jecdn.root")
fWJets_jecup = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_jecup.root")
fWJets_jerdown = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_jerdn.root")
fWJets_jerup = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_jerup.root")
fWJets_topdown = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_toptagdn.root")
fWJets_topup = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_toptagup.root")
fWJets_btagdown = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_btagdn.root")
fWJets_btagup = TFile("histfiles/WJetsToLNu_TuneZ2Star_8TeV-madgraph-tarball_iheartNY_V1_mu_btagup.root")
# ttbar
fTT_Mtt_less_700_nom = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTT_Mtt_less_700_qcd = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTT_Mtt_less_700_jecdown = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTT_Mtt_less_700_jecup = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTT_Mtt_less_700_jerdown = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTT_Mtt_less_700_jerup = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTT_Mtt_less_700_pdfdown = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfdn.root")
fTT_Mtt_less_700_pdfup = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfup.root")
fTT_Mtt_less_700_scaledown = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaledown_nom.root")
fTT_Mtt_less_700_scaleup = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaleup_nom.root")
fTT_Mtt_less_700_topdown = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTT_Mtt_less_700_topup = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTT_Mtt_less_700_btagdown = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTT_Mtt_less_700_btagup = TFile("histfiles/TT_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTT_Mtt_700_1000_nom = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTT_Mtt_700_1000_qcd = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTT_Mtt_700_1000_jecdown = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTT_Mtt_700_1000_jecup = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTT_Mtt_700_1000_jerdown = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTT_Mtt_700_1000_jerup = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTT_Mtt_700_1000_pdfdown = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfdn.root")
fTT_Mtt_700_1000_pdfup = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfup.root")
fTT_Mtt_700_1000_scaledown = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaledown_nom.root")
fTT_Mtt_700_1000_scaleup = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaleup_nom.root")
fTT_Mtt_700_1000_topdown = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTT_Mtt_700_1000_topup = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTT_Mtt_700_1000_btagdown = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTT_Mtt_700_1000_btagup = TFile("histfiles/TT_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTT_Mtt_1000_Inf_nom = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTT_Mtt_1000_Inf_qcd = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTT_Mtt_1000_Inf_jecdown = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTT_Mtt_1000_Inf_jecup = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTT_Mtt_1000_Inf_jerdown = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTT_Mtt_1000_Inf_jerup = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTT_Mtt_1000_Inf_pdfdown = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfdn.root")
fTT_Mtt_1000_Inf_pdfup = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfup.root")
fTT_Mtt_1000_Inf_scaledown = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaledown_nom.root")
fTT_Mtt_1000_Inf_scaleup = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaleup_nom.root")
fTT_Mtt_1000_Inf_topdown = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTT_Mtt_1000_Inf_topup = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTT_Mtt_1000_Inf_btagdown = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTT_Mtt_1000_Inf_btagup = TFile("histfiles/TT_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
# non-semileptonic ttbar
fTT_nonSemiLep_Mtt_less_700_nom = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTT_nonSemiLep_Mtt_less_700_qcd = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTT_nonSemiLep_Mtt_less_700_jecdown = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTT_nonSemiLep_Mtt_less_700_jecup = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTT_nonSemiLep_Mtt_less_700_jerdown = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTT_nonSemiLep_Mtt_less_700_jerup = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTT_nonSemiLep_Mtt_less_700_pdfdown = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfdn.root")
fTT_nonSemiLep_Mtt_less_700_pdfup = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfup.root")
fTT_nonSemiLep_Mtt_less_700_scaledown = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaledown_nom.root")
fTT_nonSemiLep_Mtt_less_700_scaleup = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaleup_nom.root")
fTT_nonSemiLep_Mtt_less_700_topdown = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTT_nonSemiLep_Mtt_less_700_topup = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTT_nonSemiLep_Mtt_less_700_btagdown = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTT_nonSemiLep_Mtt_less_700_btagup = TFile("histfiles/TT_nonSemiLep_max700_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTT_nonSemiLep_Mtt_700_1000_nom = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTT_nonSemiLep_Mtt_700_1000_qcd = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTT_nonSemiLep_Mtt_700_1000_jecdown = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTT_nonSemiLep_Mtt_700_1000_jecup = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTT_nonSemiLep_Mtt_700_1000_jerdown = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTT_nonSemiLep_Mtt_700_1000_jerup = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTT_nonSemiLep_Mtt_700_1000_pdfdown = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfdn.root")
fTT_nonSemiLep_Mtt_700_1000_pdfup = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfup.root")
fTT_nonSemiLep_Mtt_700_1000_scaledown = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaledown_nom.root")
fTT_nonSemiLep_Mtt_700_1000_scaleup = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaleup_nom.root")
fTT_nonSemiLep_Mtt_700_1000_topdown = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTT_nonSemiLep_Mtt_700_1000_topup = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTT_nonSemiLep_Mtt_700_1000_btagdown = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTT_nonSemiLep_Mtt_700_1000_btagup = TFile("histfiles/TT_nonSemiLep_Mtt-700to1000_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
fTT_nonSemiLep_Mtt_1000_Inf_nom = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_nom.root")
fTT_nonSemiLep_Mtt_1000_Inf_qcd = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_qcd.root")
fTT_nonSemiLep_Mtt_1000_Inf_jecdown = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecdn.root")
fTT_nonSemiLep_Mtt_1000_Inf_jecup = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jecup.root")
fTT_nonSemiLep_Mtt_1000_Inf_jerdown = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerdn.root")
fTT_nonSemiLep_Mtt_1000_Inf_jerup = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_jerup.root")
fTT_nonSemiLep_Mtt_1000_Inf_pdfdown = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfdn.root")
fTT_nonSemiLep_Mtt_1000_Inf_pdfup = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_pdfup.root")
fTT_nonSemiLep_Mtt_1000_Inf_scaledown = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaledown_nom.root")
fTT_nonSemiLep_Mtt_1000_Inf_scaleup = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_scaleup_nom.root")
fTT_nonSemiLep_Mtt_1000_Inf_topdown = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagdn.root")
fTT_nonSemiLep_Mtt_1000_Inf_topup = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_toptagup.root")
fTT_nonSemiLep_Mtt_1000_Inf_btagdown = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagdn.root")
fTT_nonSemiLep_Mtt_1000_Inf_btagup = TFile("histfiles/TT_nonSemiLep_Mtt-1000toInf_CT10_TuneZ2star_8TeV-powheg-tauola_iheartNY_V1_mu_btagup.root")
print "==================================== Get Hists ====================================="
#hRecoMC.SetName("hRecoMC")
hRecoData = None
hMeas = None
hRecoQCD = None
if options.hist2 is None:
histname = options.hist1
elif options.hist2 is not None:
histname = options.hist2 + '_subtracted_from_' + options.hist1
if not options.ignoreData :
hRecoData= fdata.Get(options.hist1).Clone()
hRecoData.SetName(options.hist1 + "__DATA" )
# Getting histogram files and scaling only to plot one histogram
# (this branch runs only when no --hist2 subtraction is requested)
if options.hist2 is None:

    def _get_hist(infile, suffix):
        """Clone histogram options.hist1 out of `infile` and rename the clone
        to options.hist1 + suffix.

        The suffix uniquely tags the sample and, when present, the systematic
        variation and direction (e.g. '__T_t__jec__down'), so every clone has
        a distinct name in ROOT's namespace."""
        h = infile.Get(options.hist1).Clone()
        h.SetName(options.hist1 + suffix)
        return h

    # Data-driven QCD estimate (SingleMu sideband file)
    hMeas_QCD_SingleMu = _get_hist(fQCD_SingleMu, '__QCD')

    # Single top, t-channel
    hMeas_T_t_nom      = _get_hist(fT_t_nom,      '__T_t')
    hMeas_T_t_topdown  = _get_hist(fT_t_topdown,  '__T_t__toptag__down')
    hMeas_T_t_topup    = _get_hist(fT_t_topup,    '__T_t__toptag__up')
    hMeas_T_t_btagdown = _get_hist(fT_t_btagdown, '__T_t__btag__down')
    hMeas_T_t_btagup   = _get_hist(fT_t_btagup,   '__T_t__btag__up')
    hMeas_T_t_jecdown  = _get_hist(fT_t_jecdown,  '__T_t__jec__down')
    hMeas_T_t_jecup    = _get_hist(fT_t_jecup,    '__T_t__jec__up')
    hMeas_T_t_jerdown  = _get_hist(fT_t_jerdown,  '__T_t__jer__down')
    hMeas_T_t_jerup    = _get_hist(fT_t_jerup,    '__T_t__jer__up')
    hMeas_T_t_qcd      = _get_hist(fT_t_qcd,      '__T_t__qcd')

    # Single antitop, t-channel
    hMeas_Tbar_t_nom      = _get_hist(fTbar_t_nom,      '__Tbar_t')
    hMeas_Tbar_t_topdown  = _get_hist(fTbar_t_topdown,  '__Tbar_t__toptag__down')
    hMeas_Tbar_t_topup    = _get_hist(fTbar_t_topup,    '__Tbar_t__toptag__up')
    hMeas_Tbar_t_btagdown = _get_hist(fTbar_t_btagdown, '__Tbar_t__btag__down')
    hMeas_Tbar_t_btagup   = _get_hist(fTbar_t_btagup,   '__Tbar_t__btag__up')
    hMeas_Tbar_t_jecdown  = _get_hist(fTbar_t_jecdown,  '__Tbar_t__jec__down')
    hMeas_Tbar_t_jecup    = _get_hist(fTbar_t_jecup,    '__Tbar_t__jec__up')
    hMeas_Tbar_t_jerdown  = _get_hist(fTbar_t_jerdown,  '__Tbar_t__jer__down')
    hMeas_Tbar_t_jerup    = _get_hist(fTbar_t_jerup,    '__Tbar_t__jer__up')
    hMeas_Tbar_t_qcd      = _get_hist(fTbar_t_qcd,      '__Tbar_t__qcd')

    # Single top, s-channel
    hMeas_T_s_nom      = _get_hist(fT_s_nom,      '__T_s')
    hMeas_T_s_topdown  = _get_hist(fT_s_topdown,  '__T_s__toptag__down')
    hMeas_T_s_topup    = _get_hist(fT_s_topup,    '__T_s__toptag__up')
    hMeas_T_s_btagdown = _get_hist(fT_s_btagdown, '__T_s__btag__down')
    hMeas_T_s_btagup   = _get_hist(fT_s_btagup,   '__T_s__btag__up')
    hMeas_T_s_jecdown  = _get_hist(fT_s_jecdown,  '__T_s__jec__down')
    hMeas_T_s_jecup    = _get_hist(fT_s_jecup,    '__T_s__jec__up')
    hMeas_T_s_jerdown  = _get_hist(fT_s_jerdown,  '__T_s__jer__down')
    hMeas_T_s_jerup    = _get_hist(fT_s_jerup,    '__T_s__jer__up')
    hMeas_T_s_qcd      = _get_hist(fT_s_qcd,      '__T_s__qcd')

    # Single antitop, s-channel
    hMeas_Tbar_s_nom      = _get_hist(fTbar_s_nom,      '__Tbar_s')
    hMeas_Tbar_s_topdown  = _get_hist(fTbar_s_topdown,  '__Tbar_s__toptag__down')
    hMeas_Tbar_s_topup    = _get_hist(fTbar_s_topup,    '__Tbar_s__toptag__up')
    hMeas_Tbar_s_btagdown = _get_hist(fTbar_s_btagdown, '__Tbar_s__btag__down')
    hMeas_Tbar_s_btagup   = _get_hist(fTbar_s_btagup,   '__Tbar_s__btag__up')
    hMeas_Tbar_s_jecdown  = _get_hist(fTbar_s_jecdown,  '__Tbar_s__jec__down')
    hMeas_Tbar_s_jecup    = _get_hist(fTbar_s_jecup,    '__Tbar_s__jec__up')
    hMeas_Tbar_s_jerdown  = _get_hist(fTbar_s_jerdown,  '__Tbar_s__jer__down')
    hMeas_Tbar_s_jerup    = _get_hist(fTbar_s_jerup,    '__Tbar_s__jer__up')
    hMeas_Tbar_s_qcd      = _get_hist(fTbar_s_qcd,      '__Tbar_s__qcd')

    # Single top, tW channel
    hMeas_T_tW_nom      = _get_hist(fT_tW_nom,      '__T_tW')
    hMeas_T_tW_topdown  = _get_hist(fT_tW_topdown,  '__T_tW__toptag__down')
    hMeas_T_tW_topup    = _get_hist(fT_tW_topup,    '__T_tW__toptag__up')
    hMeas_T_tW_btagdown = _get_hist(fT_tW_btagdown, '__T_tW__btag__down')
    hMeas_T_tW_btagup   = _get_hist(fT_tW_btagup,   '__T_tW__btag__up')
    hMeas_T_tW_jecdown  = _get_hist(fT_tW_jecdown,  '__T_tW__jec__down')
    hMeas_T_tW_jecup    = _get_hist(fT_tW_jecup,    '__T_tW__jec__up')
    hMeas_T_tW_jerdown  = _get_hist(fT_tW_jerdown,  '__T_tW__jer__down')
    hMeas_T_tW_jerup    = _get_hist(fT_tW_jerup,    '__T_tW__jer__up')
    hMeas_T_tW_qcd      = _get_hist(fT_tW_qcd,      '__T_tW__qcd')

    # Single antitop, tW channel
    hMeas_Tbar_tW_nom      = _get_hist(fTbar_tW_nom,      '__Tbar_tW')
    hMeas_Tbar_tW_topdown  = _get_hist(fTbar_tW_topdown,  '__Tbar_tW__toptag__down')
    hMeas_Tbar_tW_topup    = _get_hist(fTbar_tW_topup,    '__Tbar_tW__toptag__up')
    hMeas_Tbar_tW_btagdown = _get_hist(fTbar_tW_btagdown, '__Tbar_tW__btag__down')
    hMeas_Tbar_tW_btagup   = _get_hist(fTbar_tW_btagup,   '__Tbar_tW__btag__up')
    hMeas_Tbar_tW_jecdown  = _get_hist(fTbar_tW_jecdown,  '__Tbar_tW__jec__down')
    hMeas_Tbar_tW_jecup    = _get_hist(fTbar_tW_jecup,    '__Tbar_tW__jec__up')
    hMeas_Tbar_tW_jerdown  = _get_hist(fTbar_tW_jerdown,  '__Tbar_tW__jer__down')
    hMeas_Tbar_tW_jerup    = _get_hist(fTbar_tW_jerup,    '__Tbar_tW__jer__up')
    hMeas_Tbar_tW_qcd      = _get_hist(fTbar_tW_qcd,      '__Tbar_tW__qcd')

    # W+jets
    hMeas_WJets_nom      = _get_hist(fWJets_nom,      '__WJets')
    hMeas_WJets_topdown  = _get_hist(fWJets_topdown,  '__WJets__toptag__down')
    hMeas_WJets_topup    = _get_hist(fWJets_topup,    '__WJets__toptag__up')
    hMeas_WJets_btagdown = _get_hist(fWJets_btagdown, '__WJets__btag__down')
    hMeas_WJets_btagup   = _get_hist(fWJets_btagup,   '__WJets__btag__up')
    hMeas_WJets_jecdown  = _get_hist(fWJets_jecdown,  '__WJets__jec__down')
    hMeas_WJets_jecup    = _get_hist(fWJets_jecup,    '__WJets__jec__up')
    hMeas_WJets_jerdown  = _get_hist(fWJets_jerdown,  '__WJets__jer__down')
    hMeas_WJets_jerup    = _get_hist(fWJets_jerup,    '__WJets__jer__up')
    hMeas_WJets_qcd      = _get_hist(fWJets_qcd,      '__WJets__qcd')

    # ttbar, Mtt < 700 (ttbar samples carry pdf and scale variations too)
    hMeas_TT_Mtt_less_700_nom       = _get_hist(fTT_Mtt_less_700_nom,       '__TTbar_Mtt_less_700')
    hMeas_TT_Mtt_less_700_topdown   = _get_hist(fTT_Mtt_less_700_topdown,   '__TTbar_Mtt_less_700__toptag__down')
    hMeas_TT_Mtt_less_700_topup     = _get_hist(fTT_Mtt_less_700_topup,     '__TTbar_Mtt_less_700__toptag__up')
    hMeas_TT_Mtt_less_700_btagdown  = _get_hist(fTT_Mtt_less_700_btagdown,  '__TTbar_Mtt_less_700__btag__down')
    hMeas_TT_Mtt_less_700_btagup    = _get_hist(fTT_Mtt_less_700_btagup,    '__TTbar_Mtt_less_700__btag__up')
    hMeas_TT_Mtt_less_700_jecdown   = _get_hist(fTT_Mtt_less_700_jecdown,   '__TTbar_Mtt_less_700__jec__down')
    hMeas_TT_Mtt_less_700_jecup     = _get_hist(fTT_Mtt_less_700_jecup,     '__TTbar_Mtt_less_700__jec__up')
    hMeas_TT_Mtt_less_700_jerdown   = _get_hist(fTT_Mtt_less_700_jerdown,   '__TTbar_Mtt_less_700__jer__down')
    hMeas_TT_Mtt_less_700_jerup     = _get_hist(fTT_Mtt_less_700_jerup,     '__TTbar_Mtt_less_700__jer__up')
    hMeas_TT_Mtt_less_700_qcd       = _get_hist(fTT_Mtt_less_700_qcd,       '__TTbar_Mtt_less_700__qcd')
    hMeas_TT_Mtt_less_700_pdfdown   = _get_hist(fTT_Mtt_less_700_pdfdown,   '__TTbar_Mtt_less_700__pdf__down')
    hMeas_TT_Mtt_less_700_pdfup     = _get_hist(fTT_Mtt_less_700_pdfup,     '__TTbar_Mtt_less_700__pdf__up')
    hMeas_TT_Mtt_less_700_scaledown = _get_hist(fTT_Mtt_less_700_scaledown, '__TTbar_Mtt_less_700__scale__down')
    hMeas_TT_Mtt_less_700_scaleup   = _get_hist(fTT_Mtt_less_700_scaleup,   '__TTbar_Mtt_less_700__scale__up')

    # ttbar, 700 <= Mtt < 1000
    hMeas_TT_Mtt_700_1000_nom       = _get_hist(fTT_Mtt_700_1000_nom,       '__TTbar_Mtt_700_1000')
    hMeas_TT_Mtt_700_1000_topdown   = _get_hist(fTT_Mtt_700_1000_topdown,   '__TTbar_Mtt_700_1000__toptag__down')
    hMeas_TT_Mtt_700_1000_topup     = _get_hist(fTT_Mtt_700_1000_topup,     '__TTbar_Mtt_700_1000__toptag__up')
    hMeas_TT_Mtt_700_1000_btagdown  = _get_hist(fTT_Mtt_700_1000_btagdown,  '__TTbar_Mtt_700_1000__btag__down')
    hMeas_TT_Mtt_700_1000_btagup    = _get_hist(fTT_Mtt_700_1000_btagup,    '__TTbar_Mtt_700_1000__btag__up')
    hMeas_TT_Mtt_700_1000_jecdown   = _get_hist(fTT_Mtt_700_1000_jecdown,   '__TTbar_Mtt_700_1000__jec__down')
    hMeas_TT_Mtt_700_1000_jecup     = _get_hist(fTT_Mtt_700_1000_jecup,     '__TTbar_Mtt_700_1000__jec__up')
    hMeas_TT_Mtt_700_1000_jerdown   = _get_hist(fTT_Mtt_700_1000_jerdown,   '__TTbar_Mtt_700_1000__jer__down')
    hMeas_TT_Mtt_700_1000_jerup     = _get_hist(fTT_Mtt_700_1000_jerup,     '__TTbar_Mtt_700_1000__jer__up')
    hMeas_TT_Mtt_700_1000_qcd       = _get_hist(fTT_Mtt_700_1000_qcd,       '__TTbar_Mtt_700_1000__qcd')
    hMeas_TT_Mtt_700_1000_pdfdown   = _get_hist(fTT_Mtt_700_1000_pdfdown,   '__TTbar_Mtt_700_1000__pdf__down')
    hMeas_TT_Mtt_700_1000_pdfup     = _get_hist(fTT_Mtt_700_1000_pdfup,     '__TTbar_Mtt_700_1000__pdf__up')
    hMeas_TT_Mtt_700_1000_scaledown = _get_hist(fTT_Mtt_700_1000_scaledown, '__TTbar_Mtt_700_1000__scale__down')
    hMeas_TT_Mtt_700_1000_scaleup   = _get_hist(fTT_Mtt_700_1000_scaleup,   '__TTbar_Mtt_700_1000__scale__up')

    # ttbar, Mtt >= 1000
    hMeas_TT_Mtt_1000_Inf_nom       = _get_hist(fTT_Mtt_1000_Inf_nom,       '__TTbar_Mtt_1000_Inf')
    hMeas_TT_Mtt_1000_Inf_topdown   = _get_hist(fTT_Mtt_1000_Inf_topdown,   '__TTbar_Mtt_1000_Inf__toptag__down')
    hMeas_TT_Mtt_1000_Inf_topup     = _get_hist(fTT_Mtt_1000_Inf_topup,     '__TTbar_Mtt_1000_Inf__toptag__up')
    hMeas_TT_Mtt_1000_Inf_btagdown  = _get_hist(fTT_Mtt_1000_Inf_btagdown,  '__TTbar_Mtt_1000_Inf__btag__down')
    hMeas_TT_Mtt_1000_Inf_btagup    = _get_hist(fTT_Mtt_1000_Inf_btagup,    '__TTbar_Mtt_1000_Inf__btag__up')
    hMeas_TT_Mtt_1000_Inf_jecdown   = _get_hist(fTT_Mtt_1000_Inf_jecdown,   '__TTbar_Mtt_1000_Inf__jec__down')
    hMeas_TT_Mtt_1000_Inf_jecup     = _get_hist(fTT_Mtt_1000_Inf_jecup,     '__TTbar_Mtt_1000_Inf__jec__up')
    hMeas_TT_Mtt_1000_Inf_jerdown   = _get_hist(fTT_Mtt_1000_Inf_jerdown,   '__TTbar_Mtt_1000_Inf__jer__down')
    hMeas_TT_Mtt_1000_Inf_jerup     = _get_hist(fTT_Mtt_1000_Inf_jerup,     '__TTbar_Mtt_1000_Inf__jer__up')
    hMeas_TT_Mtt_1000_Inf_qcd       = _get_hist(fTT_Mtt_1000_Inf_qcd,       '__TTbar_Mtt_1000_Inf__qcd')
    hMeas_TT_Mtt_1000_Inf_pdfdown   = _get_hist(fTT_Mtt_1000_Inf_pdfdown,   '__TTbar_Mtt_1000_Inf__pdf__down')
    hMeas_TT_Mtt_1000_Inf_pdfup     = _get_hist(fTT_Mtt_1000_Inf_pdfup,     '__TTbar_Mtt_1000_Inf__pdf__up')
    hMeas_TT_Mtt_1000_Inf_scaledown = _get_hist(fTT_Mtt_1000_Inf_scaledown, '__TTbar_Mtt_1000_Inf__scale__down')
    hMeas_TT_Mtt_1000_Inf_scaleup   = _get_hist(fTT_Mtt_1000_Inf_scaleup,   '__TTbar_Mtt_1000_Inf__scale__up')

    # Non-semileptonic ttbar, Mtt < 700
    hMeas_TT_nonSemiLep_Mtt_less_700_nom       = _get_hist(fTT_nonSemiLep_Mtt_less_700_nom,       '__TTbar_nonSemiLep_Mtt_less_700')
    hMeas_TT_nonSemiLep_Mtt_less_700_topdown   = _get_hist(fTT_nonSemiLep_Mtt_less_700_topdown,   '__TTbar_nonSemiLep_Mtt_less_700__toptag__down')
    hMeas_TT_nonSemiLep_Mtt_less_700_topup     = _get_hist(fTT_nonSemiLep_Mtt_less_700_topup,     '__TTbar_nonSemiLep_Mtt_less_700__toptag__up')
    hMeas_TT_nonSemiLep_Mtt_less_700_btagdown  = _get_hist(fTT_nonSemiLep_Mtt_less_700_btagdown,  '__TTbar_nonSemiLep_Mtt_less_700__btag__down')
    hMeas_TT_nonSemiLep_Mtt_less_700_btagup    = _get_hist(fTT_nonSemiLep_Mtt_less_700_btagup,    '__TTbar_nonSemiLep_Mtt_less_700__btag__up')
    hMeas_TT_nonSemiLep_Mtt_less_700_jecdown   = _get_hist(fTT_nonSemiLep_Mtt_less_700_jecdown,   '__TTbar_nonSemiLep_Mtt_less_700__jec__down')
    hMeas_TT_nonSemiLep_Mtt_less_700_jecup     = _get_hist(fTT_nonSemiLep_Mtt_less_700_jecup,     '__TTbar_nonSemiLep_Mtt_less_700__jec__up')
    hMeas_TT_nonSemiLep_Mtt_less_700_jerdown   = _get_hist(fTT_nonSemiLep_Mtt_less_700_jerdown,   '__TTbar_nonSemiLep_Mtt_less_700__jer__down')
    hMeas_TT_nonSemiLep_Mtt_less_700_jerup     = _get_hist(fTT_nonSemiLep_Mtt_less_700_jerup,     '__TTbar_nonSemiLep_Mtt_less_700__jer__up')
    hMeas_TT_nonSemiLep_Mtt_less_700_qcd       = _get_hist(fTT_nonSemiLep_Mtt_less_700_qcd,       '__TTbar_nonSemiLep_Mtt_less_700__qcd')
    hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown   = _get_hist(fTT_nonSemiLep_Mtt_less_700_pdfdown,   '__TTbar_nonSemiLep_Mtt_less_700__pdf__down')
    hMeas_TT_nonSemiLep_Mtt_less_700_pdfup     = _get_hist(fTT_nonSemiLep_Mtt_less_700_pdfup,     '__TTbar_nonSemiLep_Mtt_less_700__pdf__up')
    hMeas_TT_nonSemiLep_Mtt_less_700_scaledown = _get_hist(fTT_nonSemiLep_Mtt_less_700_scaledown, '__TTbar_nonSemiLep_Mtt_less_700__scale__down')
    hMeas_TT_nonSemiLep_Mtt_less_700_scaleup   = _get_hist(fTT_nonSemiLep_Mtt_less_700_scaleup,   '__TTbar_nonSemiLep_Mtt_less_700__scale__up')

    # Non-semileptonic ttbar, 700 <= Mtt < 1000
    hMeas_TT_nonSemiLep_Mtt_700_1000_nom       = _get_hist(fTT_nonSemiLep_Mtt_700_1000_nom,       '__TTbar_nonSemiLep_Mtt_700_1000')
    hMeas_TT_nonSemiLep_Mtt_700_1000_topdown   = _get_hist(fTT_nonSemiLep_Mtt_700_1000_topdown,   '__TTbar_nonSemiLep_Mtt_700_1000__toptag__down')
    hMeas_TT_nonSemiLep_Mtt_700_1000_topup     = _get_hist(fTT_nonSemiLep_Mtt_700_1000_topup,     '__TTbar_nonSemiLep_Mtt_700_1000__toptag__up')
    hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown  = _get_hist(fTT_nonSemiLep_Mtt_700_1000_btagdown,  '__TTbar_nonSemiLep_Mtt_700_1000__btag__down')
    hMeas_TT_nonSemiLep_Mtt_700_1000_btagup    = _get_hist(fTT_nonSemiLep_Mtt_700_1000_btagup,    '__TTbar_nonSemiLep_Mtt_700_1000__btag__up')
    hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown   = _get_hist(fTT_nonSemiLep_Mtt_700_1000_jecdown,   '__TTbar_nonSemiLep_Mtt_700_1000__jec__down')
    hMeas_TT_nonSemiLep_Mtt_700_1000_jecup     = _get_hist(fTT_nonSemiLep_Mtt_700_1000_jecup,     '__TTbar_nonSemiLep_Mtt_700_1000__jec__up')
    hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown   = _get_hist(fTT_nonSemiLep_Mtt_700_1000_jerdown,   '__TTbar_nonSemiLep_Mtt_700_1000__jer__down')
    hMeas_TT_nonSemiLep_Mtt_700_1000_jerup     = _get_hist(fTT_nonSemiLep_Mtt_700_1000_jerup,     '__TTbar_nonSemiLep_Mtt_700_1000__jer__up')
    hMeas_TT_nonSemiLep_Mtt_700_1000_qcd       = _get_hist(fTT_nonSemiLep_Mtt_700_1000_qcd,       '__TTbar_nonSemiLep_Mtt_700_1000__qcd')
    hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown   = _get_hist(fTT_nonSemiLep_Mtt_700_1000_pdfdown,   '__TTbar_nonSemiLep_Mtt_700_1000__pdf__down')
    hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup     = _get_hist(fTT_nonSemiLep_Mtt_700_1000_pdfup,     '__TTbar_nonSemiLep_Mtt_700_1000__pdf__up')
    hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown = _get_hist(fTT_nonSemiLep_Mtt_700_1000_scaledown, '__TTbar_nonSemiLep_Mtt_700_1000__scale__down')
    hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup   = _get_hist(fTT_nonSemiLep_Mtt_700_1000_scaleup,   '__TTbar_nonSemiLep_Mtt_700_1000__scale__up')

    # Non-semileptonic ttbar, Mtt >= 1000
    # NOTE(review): the nominal histogram here used to be named
    # '__TTbar_nonSemiLep_Mtt_1000' (missing '_Inf'), inconsistent with its
    # own systematic variants below and with every other sample's naming;
    # fixed to '__TTbar_nonSemiLep_Mtt_1000_Inf'.
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom       = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_nom,       '__TTbar_nonSemiLep_Mtt_1000_Inf')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown   = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_topdown,   '__TTbar_nonSemiLep_Mtt_1000_Inf__toptag__down')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup     = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_topup,     '__TTbar_nonSemiLep_Mtt_1000_Inf__toptag__up')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown  = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_btagdown,  '__TTbar_nonSemiLep_Mtt_1000_Inf__btag__down')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup    = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_btagup,    '__TTbar_nonSemiLep_Mtt_1000_Inf__btag__up')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown   = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_jecdown,   '__TTbar_nonSemiLep_Mtt_1000_Inf__jec__down')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup     = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_jecup,     '__TTbar_nonSemiLep_Mtt_1000_Inf__jec__up')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown   = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_jerdown,   '__TTbar_nonSemiLep_Mtt_1000_Inf__jer__down')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup     = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_jerup,     '__TTbar_nonSemiLep_Mtt_1000_Inf__jer__up')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd       = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_qcd,       '__TTbar_nonSemiLep_Mtt_1000_Inf__qcd')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown   = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_pdfdown,   '__TTbar_nonSemiLep_Mtt_1000_Inf__pdf__down')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup     = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_pdfup,     '__TTbar_nonSemiLep_Mtt_1000_Inf__pdf__up')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_scaledown, '__TTbar_nonSemiLep_Mtt_1000_Inf__scale__down')
    hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup   = _get_hist(fTT_nonSemiLep_Mtt_1000_Inf_scaleup,   '__TTbar_nonSemiLep_Mtt_1000_Inf__scale__up')
# ---- Normalize MC histograms to the data luminosity ----
# Every histogram is scaled by cross_section * lum / N_generated, i.e. the
# expected event yield per generated MC event.  All systematic variants of a
# sample share the same normalization as the nominal, except the pdf and
# scale variants of the ttbar samples (see notes below).
# Single top / antitop, t-channel
hMeas_T_t_nom .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_topdown .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_topup .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_btagdown.Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_btagup .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jecdown .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jecup .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jerdown .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jerup .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_qcd .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_Tbar_t_nom .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_topdown .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_topup .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_btagdown.Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_btagup .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jecdown .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jecup .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jerdown .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jerup .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_qcd .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
# Single top / antitop, s-channel
hMeas_T_s_nom .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_topdown .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_topup .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_btagdown.Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_btagup .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jecdown .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jecup .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jerdown .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jerup .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_qcd .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_Tbar_s_nom .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_topdown .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_topup .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_btagdown.Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_btagup .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jecdown .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jecup .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jerdown .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jerup .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_qcd .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
# Single top / antitop, tW channel
hMeas_T_tW_nom .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_topdown .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_topup .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_btagdown.Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_btagup .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jecdown .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jecup .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jerdown .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jerup .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_qcd .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_Tbar_tW_nom .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_topdown .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_topup .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_btagdown.Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_btagup .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jecdown .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jecup .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jerdown .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jerup .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_qcd .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
# W+jets
hMeas_WJets_nom .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_topdown .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_topup .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_btagdown.Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_btagup .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jecdown .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jecup .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jerdown .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jerup .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_qcd .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
# ttbar, split into Mtt bins.  As used below, sigma_ttbar_NNLO indexes the
# variants: [0] nominal, [1]/[2] scale down/up, [3]/[4] pdf down/up
# (NOTE(review): confirm the index meaning where sigma_ttbar_NNLO is defined).
# The e_TT_Mtt_* factors are per-Mtt-bin factors (presumably the fraction /
# efficiency of events falling in each Mtt bin -- TODO confirm); index [0] is
# nominal, [1]/[2] correspond to the scale-down/up samples, which also have
# their own dedicated generated-event counts (Nmc_*_scaledown / _scaleup).
hMeas_TT_Mtt_less_700_nom .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_topdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_topup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_btagdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_btagup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jecdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jecup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jerdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jerup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_qcd .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_pdfdown .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_pdfup .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_scaledown.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_0_700[1] * lum / float(Nmc_ttbar_scaledown))
hMeas_TT_Mtt_less_700_scaleup .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_0_700[2] * lum / float(Nmc_ttbar_scaleup))
hMeas_TT_Mtt_700_1000_nom .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_topdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_topup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_btagdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_btagup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jecdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jecup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jerdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jerup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_qcd .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_pdfdown .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_pdfup .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_scaledown.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_700_1000[1] * lum / float(Nmc_TT_Mtt_700_1000_scaledown))
hMeas_TT_Mtt_700_1000_scaleup .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_700_1000[2] * lum / float(Nmc_TT_Mtt_700_1000_scaleup))
hMeas_TT_Mtt_1000_Inf_nom .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_topdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_topup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_btagdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_btagup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jecdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jecup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jerdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jerup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_qcd .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_pdfdown .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_pdfup .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_scaledown.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_1000_Inf[1] * lum / float(Nmc_TT_Mtt_1000_Inf_scaledown) )
hMeas_TT_Mtt_1000_Inf_scaleup .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_1000_Inf[2] * lum / float(Nmc_TT_Mtt_1000_Inf_scaleup) )
hMeas_TT_nonSemiLep_Mtt_less_700_nom .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_topdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_topup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_btagup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jecup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jerup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_qcd .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_pdfup .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_0_700[1] * lum / float(Nmc_ttbar_scaledown))
hMeas_TT_nonSemiLep_Mtt_less_700_scaleup .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_0_700[2] * lum / float(Nmc_ttbar_scaleup))
hMeas_TT_nonSemiLep_Mtt_700_1000_nom .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_topup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_btagup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jecup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jerup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_700_1000[1] * lum / float(Nmc_TT_Mtt_700_1000_scaledown))
hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_700_1000[2] * lum / float(Nmc_TT_Mtt_700_1000_scaleup))
hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_1000_Inf[1] * lum / float(Nmc_TT_Mtt_1000_Inf_scaledown) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_1000_Inf[2] * lum / float(Nmc_TT_Mtt_1000_Inf_scaleup) )
#Get the histograms from the input files and scale them, in order to plot histogram2 subtracted from histogram1
elif options.hist2 is not None:
    # Two-histogram mode: build the data distribution as (hist1 - hist2).
    if not options.ignoreData :
        # Clone both data histograms so we never modify the TFile-owned originals.
        hRecoData1= fdata.Get(options.hist1).Clone()
        hRecoData1.SetName(histname + "__DATA1" )
        hRecoData2= fdata.Get(options.hist2).Clone()
        hRecoData2.SetName(histname + "__DATA2" )
        # Bin-by-bin subtraction: Add with weight -1.0 gives hist1 - hist2.
        hRecoData = hRecoData1.Clone()
        hRecoData.Add( hRecoData2 , -1.0 )
        hRecoData.SetName(histname + "__DATA" )
hMeas_QCD_SingleMu_1 = fQCD_SingleMu.Get(options.hist1).Clone()
hMeas_QCD_SingleMu_1.SetName(options.hist1 + "__QCD__1")
hMeas_T_t_nom_1 = fT_t_nom.Get(options.hist1).Clone()
hMeas_T_t_nom_1 .SetName( options.hist1 + '__T_t__1')
hMeas_T_t_topdown_1 = fT_t_topdown.Get(options.hist1).Clone()
hMeas_T_t_topdown_1 .SetName( options.hist1 + '__T_t__toptag__down__1')
hMeas_T_t_topup_1 = fT_t_topup.Get(options.hist1).Clone()
hMeas_T_t_topup_1 .SetName( options.hist1 + '__T_t__toptag__up__1')
hMeas_T_t_btagdown_1 = fT_t_btagdown.Get(options.hist1).Clone()
hMeas_T_t_btagdown_1 .SetName( options.hist1 + '__T_t__btag__down__1')
hMeas_T_t_btagup_1 = fT_t_btagup.Get(options.hist1).Clone()
hMeas_T_t_btagup_1 .SetName( options.hist1 + '__T_t__btag__up__1')
hMeas_T_t_jecdown_1 = fT_t_jecdown.Get(options.hist1).Clone()
hMeas_T_t_jecdown_1 .SetName( options.hist1 + '__T_t__jec__down__1' )
hMeas_T_t_jecup_1 = fT_t_jecup.Get(options.hist1).Clone()
hMeas_T_t_jecup_1 .SetName( options.hist1 + '__T_t__jec__up__1' )
hMeas_T_t_jerdown_1 = fT_t_jerdown.Get(options.hist1).Clone()
hMeas_T_t_jerdown_1 .SetName( options.hist1 + '__T_t__jer__down__1' )
hMeas_T_t_jerup_1 = fT_t_jerup.Get(options.hist1).Clone()
hMeas_T_t_jerup_1 .SetName( options.hist1 + '__T_t__jer__up__1' )
hMeas_T_t_qcd_1 = fT_t_qcd.Get(options.hist1).Clone()
hMeas_T_t_qcd_1 .SetName( options.hist1 + '__T_t__qcd__1' )
hMeas_Tbar_t_nom_1 = fTbar_t_nom.Get(options.hist1).Clone()
hMeas_Tbar_t_nom_1 .SetName( options.hist1 + '__Tbar_t__1')
hMeas_Tbar_t_topdown_1 = fTbar_t_topdown.Get(options.hist1).Clone()
hMeas_Tbar_t_topdown_1 .SetName( options.hist1 + '__Tbar_t__toptag__down__1')
hMeas_Tbar_t_topup_1 = fTbar_t_topup.Get(options.hist1).Clone()
hMeas_Tbar_t_topup_1 .SetName( options.hist1 + '__Tbar_t__toptag__up__1')
hMeas_Tbar_t_btagdown_1 = fTbar_t_btagdown.Get(options.hist1).Clone()
hMeas_Tbar_t_btagdown_1 .SetName( options.hist1 + '__Tbar_t__btag__down__1')
hMeas_Tbar_t_btagup_1 = fTbar_t_btagup.Get(options.hist1).Clone()
hMeas_Tbar_t_btagup_1 .SetName( options.hist1 + '__Tbar_t__btag__up__1')
hMeas_Tbar_t_jecdown_1 = fTbar_t_jecdown.Get(options.hist1).Clone()
hMeas_Tbar_t_jecdown_1 .SetName( options.hist1 + '__Tbar_t__jec__down__1' )
hMeas_Tbar_t_jecup_1 = fTbar_t_jecup.Get(options.hist1).Clone()
hMeas_Tbar_t_jecup_1 .SetName( options.hist1 + '__Tbar_t__jec__up__1' )
hMeas_Tbar_t_jerdown_1 = fTbar_t_jerdown.Get(options.hist1).Clone()
hMeas_Tbar_t_jerdown_1 .SetName( options.hist1 + '__Tbar_t__jer__down__1' )
hMeas_Tbar_t_jerup_1 = fTbar_t_jerup.Get(options.hist1).Clone()
hMeas_Tbar_t_jerup_1 .SetName( options.hist1 + '__Tbar_t__jer__up__1' )
hMeas_Tbar_t_qcd_1 = fTbar_t_qcd.Get(options.hist1).Clone()
hMeas_Tbar_t_qcd_1 .SetName( options.hist1 + '__Tbar_t__qcd__1' )
hMeas_T_s_nom_1 = fT_s_nom.Get(options.hist1).Clone()
hMeas_T_s_nom_1 .SetName( options.hist1 + '__T_s__1')
hMeas_T_s_topdown_1 = fT_s_topdown.Get(options.hist1).Clone()
hMeas_T_s_topdown_1 .SetName( options.hist1 + '__T_s__toptag__down__1')
hMeas_T_s_topup_1 = fT_s_topup.Get(options.hist1).Clone()
hMeas_T_s_topup_1 .SetName( options.hist1 + '__T_s__toptag__up__1')
hMeas_T_s_btagdown_1 = fT_s_btagdown.Get(options.hist1).Clone()
hMeas_T_s_btagdown_1 .SetName( options.hist1 + '__T_s__btag__down__1')
hMeas_T_s_btagup_1 = fT_s_btagup.Get(options.hist1).Clone()
hMeas_T_s_btagup_1 .SetName( options.hist1 + '__T_s__btag__up__1')
hMeas_T_s_jecdown_1 = fT_s_jecdown.Get(options.hist1).Clone()
hMeas_T_s_jecdown_1 .SetName( options.hist1 + '__T_s__jec__down__1' )
hMeas_T_s_jecup_1 = fT_s_jecup.Get(options.hist1).Clone()
hMeas_T_s_jecup_1 .SetName( options.hist1 + '__T_s__jec__up__1' )
hMeas_T_s_jerdown_1 = fT_s_jerdown.Get(options.hist1).Clone()
hMeas_T_s_jerdown_1 .SetName( options.hist1 + '__T_s__jer__down__1' )
hMeas_T_s_jerup_1 = fT_s_jerup.Get(options.hist1).Clone()
hMeas_T_s_jerup_1 .SetName( options.hist1 + '__T_s__jer__up__1' )
hMeas_T_s_qcd_1 = fT_s_qcd.Get(options.hist1).Clone()
hMeas_T_s_qcd_1 .SetName( options.hist1 + '__T_s__qcd__1' )
hMeas_Tbar_s_nom_1 = fTbar_s_nom.Get(options.hist1).Clone()
hMeas_Tbar_s_nom_1 .SetName( options.hist1 + '__Tbar_s__1')
hMeas_Tbar_s_topdown_1 = fTbar_s_topdown.Get(options.hist1).Clone()
hMeas_Tbar_s_topdown_1 .SetName( options.hist1 + '__Tbar_s__toptag__down__1')
hMeas_Tbar_s_topup_1 = fTbar_s_topup.Get(options.hist1).Clone()
hMeas_Tbar_s_topup_1 .SetName( options.hist1 + '__Tbar_s__toptag__up__1')
hMeas_Tbar_s_btagdown_1 = fTbar_s_btagdown.Get(options.hist1).Clone()
hMeas_Tbar_s_btagdown_1 .SetName( options.hist1 + '__Tbar_s__btag__down__1')
hMeas_Tbar_s_btagup_1 = fTbar_s_btagup.Get(options.hist1).Clone()
hMeas_Tbar_s_btagup_1 .SetName( options.hist1 + '__Tbar_s__btag__up__1')
hMeas_Tbar_s_jecdown_1 = fTbar_s_jecdown.Get(options.hist1).Clone()
hMeas_Tbar_s_jecdown_1 .SetName( options.hist1 + '__Tbar_s__jec__down__1' )
hMeas_Tbar_s_jecup_1 = fTbar_s_jecup.Get(options.hist1).Clone()
hMeas_Tbar_s_jecup_1 .SetName( options.hist1 + '__Tbar_s__jec__up__1' )
hMeas_Tbar_s_jerdown_1 = fTbar_s_jerdown.Get(options.hist1).Clone()
hMeas_Tbar_s_jerdown_1 .SetName( options.hist1 + '__Tbar_s__jer__down__1' )
hMeas_Tbar_s_jerup_1 = fTbar_s_jerup.Get(options.hist1).Clone()
hMeas_Tbar_s_jerup_1 .SetName( options.hist1 + '__Tbar_s__jer__up__1' )
hMeas_Tbar_s_qcd_1 = fTbar_s_qcd.Get(options.hist1).Clone()
hMeas_Tbar_s_qcd_1 .SetName( options.hist1 + '__Tbar_s__qcd__1' )
hMeas_T_tW_nom_1 = fT_tW_nom.Get(options.hist1).Clone()
hMeas_T_tW_nom_1 .SetName( options.hist1 + '__T_tW__1')
hMeas_T_tW_topdown_1 = fT_tW_topdown.Get(options.hist1).Clone()
hMeas_T_tW_topdown_1 .SetName( options.hist1 + '__T_tW__toptag__down__1')
hMeas_T_tW_topup_1 = fT_tW_topup.Get(options.hist1).Clone()
hMeas_T_tW_topup_1 .SetName( options.hist1 + '__T_tW__toptag__up__1')
hMeas_T_tW_btagdown_1 = fT_tW_btagdown.Get(options.hist1).Clone()
hMeas_T_tW_btagdown_1 .SetName( options.hist1 + '__T_tW__btag__down__1')
hMeas_T_tW_btagup_1 = fT_tW_btagup.Get(options.hist1).Clone()
hMeas_T_tW_btagup_1 .SetName( options.hist1 + '__T_tW__btag__up__1')
hMeas_T_tW_jecdown_1 = fT_tW_jecdown.Get(options.hist1).Clone()
hMeas_T_tW_jecdown_1 .SetName( options.hist1 + '__T_tW__jec__down__1' )
hMeas_T_tW_jecup_1 = fT_tW_jecup.Get(options.hist1).Clone()
hMeas_T_tW_jecup_1 .SetName( options.hist1 + '__T_tW__jec__up__1' )
hMeas_T_tW_jerdown_1 = fT_tW_jerdown.Get(options.hist1).Clone()
hMeas_T_tW_jerdown_1 .SetName( options.hist1 + '__T_tW__jer__down__1' )
hMeas_T_tW_jerup_1 = fT_tW_jerup.Get(options.hist1).Clone()
hMeas_T_tW_jerup_1 .SetName( options.hist1 + '__T_tW__jer__up__1' )
hMeas_T_tW_qcd_1 = fT_tW_qcd.Get(options.hist1).Clone()
hMeas_T_tW_qcd_1 .SetName( options.hist1 + '__T_tW__qcd__1' )
hMeas_Tbar_tW_nom_1 = fTbar_tW_nom.Get(options.hist1).Clone()
hMeas_Tbar_tW_nom_1 .SetName( options.hist1 + '__Tbar_tW__1')
hMeas_Tbar_tW_topdown_1 = fTbar_tW_topdown.Get(options.hist1).Clone()
hMeas_Tbar_tW_topdown_1 .SetName( options.hist1 + '__Tbar_tW__toptag__down__1')
hMeas_Tbar_tW_topup_1 = fTbar_tW_topup.Get(options.hist1).Clone()
hMeas_Tbar_tW_topup_1 .SetName( options.hist1 + '__Tbar_tW__toptag__up__1')
hMeas_Tbar_tW_btagdown_1 = fTbar_tW_btagdown.Get(options.hist1).Clone()
hMeas_Tbar_tW_btagdown_1 .SetName( options.hist1 + '__Tbar_tW__btag__down__1')
hMeas_Tbar_tW_btagup_1 = fTbar_tW_btagup.Get(options.hist1).Clone()
hMeas_Tbar_tW_btagup_1 .SetName( options.hist1 + '__Tbar_tW__btag__up__1')
hMeas_Tbar_tW_jecdown_1 = fTbar_tW_jecdown.Get(options.hist1).Clone()
hMeas_Tbar_tW_jecdown_1 .SetName( options.hist1 + '__Tbar_tW__jec__down__1' )
hMeas_Tbar_tW_jecup_1 = fTbar_tW_jecup.Get(options.hist1).Clone()
hMeas_Tbar_tW_jecup_1 .SetName( options.hist1 + '__Tbar_tW__jec__up__1' )
hMeas_Tbar_tW_jerdown_1 = fTbar_tW_jerdown.Get(options.hist1).Clone()
hMeas_Tbar_tW_jerdown_1 .SetName( options.hist1 + '__Tbar_tW__jer__down__1' )
hMeas_Tbar_tW_jerup_1 = fTbar_tW_jerup.Get(options.hist1).Clone()
hMeas_Tbar_tW_jerup_1 .SetName( options.hist1 + '__Tbar_tW__jer__up__1' )
hMeas_Tbar_tW_qcd_1 = fTbar_tW_qcd.Get(options.hist1).Clone()
hMeas_Tbar_tW_qcd_1 .SetName( options.hist1 + '__Tbar_tW__qcd__1' )
hMeas_WJets_nom_1 = fWJets_nom.Get(options.hist1).Clone()
hMeas_WJets_nom_1 .SetName( options.hist1 + '__WJets__1')
hMeas_WJets_topdown_1 = fWJets_topdown.Get(options.hist1).Clone()
hMeas_WJets_topdown_1 .SetName( options.hist1 + '__WJets__toptag__down__1')
hMeas_WJets_topup_1 = fWJets_topup.Get(options.hist1).Clone()
hMeas_WJets_topup_1 .SetName( options.hist1 + '__WJets__toptag__up__1')
hMeas_WJets_btagdown_1 = fWJets_btagdown.Get(options.hist1).Clone()
hMeas_WJets_btagdown_1 .SetName( options.hist1 + '__WJets__btag__down__1')
hMeas_WJets_btagup_1 = fWJets_btagup.Get(options.hist1).Clone()
hMeas_WJets_btagup_1 .SetName( options.hist1 + '__WJets__btag__up__1')
hMeas_WJets_jecdown_1 = fWJets_jecdown.Get(options.hist1).Clone()
hMeas_WJets_jecdown_1 .SetName( options.hist1 + '__WJets__jec__down__1' )
hMeas_WJets_jecup_1 = fWJets_jecup.Get(options.hist1).Clone()
hMeas_WJets_jecup_1 .SetName( options.hist1 + '__WJets__jec__up__1' )
hMeas_WJets_jerdown_1 = fWJets_jerdown.Get(options.hist1).Clone()
hMeas_WJets_jerdown_1 .SetName( options.hist1 + '__WJets__jer__down__1' )
hMeas_WJets_jerup_1 = fWJets_jerup.Get(options.hist1).Clone()
hMeas_WJets_jerup_1 .SetName( options.hist1 + '__WJets__jer__up__1' )
hMeas_WJets_qcd_1 = fWJets_qcd.Get(options.hist1).Clone()
hMeas_WJets_qcd_1 .SetName( options.hist1 + '__WJets__qcd__1' )
hMeas_TT_Mtt_less_700_nom_1 = fTT_Mtt_less_700_nom.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_nom_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__1' )
hMeas_TT_Mtt_less_700_topdown_1 = fTT_Mtt_less_700_topdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_topdown_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__toptag__down__1')
hMeas_TT_Mtt_less_700_topup_1 = fTT_Mtt_less_700_topup.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_topup_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__toptag__up__1')
hMeas_TT_Mtt_less_700_btagdown_1 = fTT_Mtt_less_700_btagdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_btagdown_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__btag__down__1')
hMeas_TT_Mtt_less_700_btagup_1 = fTT_Mtt_less_700_btagup.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_btagup_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__btag__up__1')
hMeas_TT_Mtt_less_700_jecdown_1 = fTT_Mtt_less_700_jecdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_jecdown_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__jec__down__1')
hMeas_TT_Mtt_less_700_jecup_1 = fTT_Mtt_less_700_jecup.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_jecup_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__jec__up__1')
hMeas_TT_Mtt_less_700_jerdown_1 = fTT_Mtt_less_700_jerdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_jerdown_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__jer__down__1')
hMeas_TT_Mtt_less_700_jerup_1 = fTT_Mtt_less_700_jerup.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_jerup_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__jer__up__1')
hMeas_TT_Mtt_less_700_qcd_1 = fTT_Mtt_less_700_qcd.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_qcd_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__qcd__1')
hMeas_TT_Mtt_less_700_pdfdown_1 = fTT_Mtt_less_700_pdfdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_pdfdown_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__pdf__down__1')
hMeas_TT_Mtt_less_700_pdfup_1 = fTT_Mtt_less_700_pdfup.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_pdfup_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__pdf__up__1')
hMeas_TT_Mtt_less_700_scaledown_1 = fTT_Mtt_less_700_scaledown.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_scaledown_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__scale__down__1')
hMeas_TT_Mtt_less_700_scaleup_1 = fTT_Mtt_less_700_scaleup.Get(options.hist1).Clone()
hMeas_TT_Mtt_less_700_scaleup_1 .SetName( options.hist1 + '__TTbar_Mtt_less_700__scale__up__1')
hMeas_TT_Mtt_700_1000_nom_1 = fTT_Mtt_700_1000_nom.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_nom_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__1' )
hMeas_TT_Mtt_700_1000_topdown_1 = fTT_Mtt_700_1000_topdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_topdown_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__toptag__down__1')
hMeas_TT_Mtt_700_1000_topup_1 = fTT_Mtt_700_1000_topup.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_topup_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__toptag__up__1')
hMeas_TT_Mtt_700_1000_btagdown_1 = fTT_Mtt_700_1000_btagdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_btagdown_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__btag__down__1')
hMeas_TT_Mtt_700_1000_btagup_1 = fTT_Mtt_700_1000_btagup.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_btagup_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__btag__up__1')
hMeas_TT_Mtt_700_1000_jecdown_1 = fTT_Mtt_700_1000_jecdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_jecdown_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__jec__down__1')
hMeas_TT_Mtt_700_1000_jecup_1 = fTT_Mtt_700_1000_jecup.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_jecup_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__jec__up__1')
hMeas_TT_Mtt_700_1000_jerdown_1 = fTT_Mtt_700_1000_jerdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_jerdown_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__jer__down__1')
hMeas_TT_Mtt_700_1000_jerup_1 = fTT_Mtt_700_1000_jerup.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_jerup_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__jer__up__1')
hMeas_TT_Mtt_700_1000_qcd_1 = fTT_Mtt_700_1000_qcd.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_qcd_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__qcd__1')
hMeas_TT_Mtt_700_1000_pdfdown_1 = fTT_Mtt_700_1000_pdfdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_pdfdown_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__pdf__down__1')
hMeas_TT_Mtt_700_1000_pdfup_1 = fTT_Mtt_700_1000_pdfup.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_pdfup_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__pdf__up__1')
hMeas_TT_Mtt_700_1000_scaledown_1 = fTT_Mtt_700_1000_scaledown.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_scaledown_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__scale__down__1')
hMeas_TT_Mtt_700_1000_scaleup_1 = fTT_Mtt_700_1000_scaleup.Get(options.hist1).Clone()
hMeas_TT_Mtt_700_1000_scaleup_1 .SetName( options.hist1 + '__TTbar_Mtt_700_1000__scale__up__1')
hMeas_TT_Mtt_1000_Inf_nom_1 = fTT_Mtt_1000_Inf_nom.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_nom_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__1' )
hMeas_TT_Mtt_1000_Inf_topdown_1 = fTT_Mtt_1000_Inf_topdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_topdown_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__toptag__down__1')
hMeas_TT_Mtt_1000_Inf_topup_1 = fTT_Mtt_1000_Inf_topup.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_topup_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__toptag__up__1')
hMeas_TT_Mtt_1000_Inf_btagdown_1 = fTT_Mtt_1000_Inf_btagdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_btagdown_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__btag__down__1')
hMeas_TT_Mtt_1000_Inf_btagup_1 = fTT_Mtt_1000_Inf_btagup.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_btagup_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__btag__up__1')
hMeas_TT_Mtt_1000_Inf_jecdown_1 = fTT_Mtt_1000_Inf_jecdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_jecdown_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__jec__down__1')
hMeas_TT_Mtt_1000_Inf_jecup_1 = fTT_Mtt_1000_Inf_jecup.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_jecup_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__jec__up__1')
hMeas_TT_Mtt_1000_Inf_jerdown_1 = fTT_Mtt_1000_Inf_jerdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_jerdown_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__jer__down__1')
hMeas_TT_Mtt_1000_Inf_jerup_1 = fTT_Mtt_1000_Inf_jerup.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_jerup_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__jer__up__1')
hMeas_TT_Mtt_1000_Inf_qcd_1 = fTT_Mtt_1000_Inf_qcd.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_qcd_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__qcd__1')
hMeas_TT_Mtt_1000_Inf_pdfdown_1 = fTT_Mtt_1000_Inf_pdfdown.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_pdfdown_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__pdf__down__1')
hMeas_TT_Mtt_1000_Inf_pdfup_1 = fTT_Mtt_1000_Inf_pdfup.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_pdfup_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__pdf__up__1')
hMeas_TT_Mtt_1000_Inf_scaledown_1 = fTT_Mtt_1000_Inf_scaledown.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_scaledown_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__scale__down__1')
hMeas_TT_Mtt_1000_Inf_scaleup_1 = fTT_Mtt_1000_Inf_scaleup.Get(options.hist1).Clone()
hMeas_TT_Mtt_1000_Inf_scaleup_1 .SetName( options.hist1 + '__TTbar_Mtt_1000_Inf__scale__up__1')
hMeas_TT_nonSemiLep_Mtt_less_700_nom_1 = fTT_nonSemiLep_Mtt_less_700_nom.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_nom_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__1' )
hMeas_TT_nonSemiLep_Mtt_less_700_topdown_1 = fTT_nonSemiLep_Mtt_less_700_topdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_topdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__toptag__down__1')
hMeas_TT_nonSemiLep_Mtt_less_700_topup_1 = fTT_nonSemiLep_Mtt_less_700_topup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_topup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__toptag__up__1')
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_1 = fTT_nonSemiLep_Mtt_less_700_btagdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__btag__down__1')
hMeas_TT_nonSemiLep_Mtt_less_700_btagup_1 = fTT_nonSemiLep_Mtt_less_700_btagup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_btagup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__btag__up__1')
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_1 = fTT_nonSemiLep_Mtt_less_700_jecdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__jec__down__1')
hMeas_TT_nonSemiLep_Mtt_less_700_jecup_1 = fTT_nonSemiLep_Mtt_less_700_jecup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jecup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__jec__up__1')
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_1 = fTT_nonSemiLep_Mtt_less_700_jerdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__jer__down__1')
hMeas_TT_nonSemiLep_Mtt_less_700_jerup_1 = fTT_nonSemiLep_Mtt_less_700_jerup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jerup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__jer__up__1')
hMeas_TT_nonSemiLep_Mtt_less_700_qcd_1 = fTT_nonSemiLep_Mtt_less_700_qcd.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_qcd_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__qcd__1')
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_1 = fTT_nonSemiLep_Mtt_less_700_pdfdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__pdf__down__1')
hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_1 = fTT_nonSemiLep_Mtt_less_700_pdfup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__pdf__up__1')
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_1 = fTT_nonSemiLep_Mtt_less_700_scaledown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__scale__down__1')
hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_1 = fTT_nonSemiLep_Mtt_less_700_scaleup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_less_700__scale__up__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_nom_1 = fTT_nonSemiLep_Mtt_700_1000_nom.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_nom_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__1' )
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_1 = fTT_nonSemiLep_Mtt_700_1000_topdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__toptag__down__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_topup_1 = fTT_nonSemiLep_Mtt_700_1000_topup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_topup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__toptag__up__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_1 = fTT_nonSemiLep_Mtt_700_1000_btagdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__btag__down__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_1 = fTT_nonSemiLep_Mtt_700_1000_btagup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__btag__up__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_1 = fTT_nonSemiLep_Mtt_700_1000_jecdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__jec__down__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_1 = fTT_nonSemiLep_Mtt_700_1000_jecup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__jec__up__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_1 = fTT_nonSemiLep_Mtt_700_1000_jerdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__jer__down__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_1 = fTT_nonSemiLep_Mtt_700_1000_jerup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__jer__up__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_1 = fTT_nonSemiLep_Mtt_700_1000_qcd.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__qcd__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_1 = fTT_nonSemiLep_Mtt_700_1000_pdfdown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__pdf__down__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_1 = fTT_nonSemiLep_Mtt_700_1000_pdfup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__pdf__up__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_1 = fTT_nonSemiLep_Mtt_700_1000_scaledown.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__scale__down__1')
hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_1 = fTT_nonSemiLep_Mtt_700_1000_scaleup.Get(options.hist1).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_1 .SetName( options.hist1 + '__TTbar_nonSemiLep_Mtt_700_1000__scale__up__1')
# hist1 clones: TTbar non-semileptonic, Mtt > 1000 GeV sample.
#
# The original code repeated the same clone-and-rename pair once per
# systematic variation; drive it from a table instead so the naming
# convention lives in exactly one place and copy-paste drift is impossible.
# The clones are published as module-level globals under the same names as
# before (hMeas_TT_nonSemiLep_Mtt_1000_Inf_<syst>_1), so all downstream
# code is unaffected.
#
# Table entries are (variation suffix on the file/histogram variable,
# tag fragment inserted in the histogram name).  '' marks the nominal
# sample, whose name carries no systematic tag.
_tt_nonsemilep_1000_inf_tags = [
    ('nom',       ''),
    ('topdown',   '__toptag__down'),
    ('topup',     '__toptag__up'),
    ('btagdown',  '__btag__down'),
    ('btagup',    '__btag__up'),
    ('jecdown',   '__jec__down'),
    ('jecup',     '__jec__up'),
    ('jerdown',   '__jer__down'),
    ('jerup',     '__jer__up'),
    ('qcd',       '__qcd'),          # one-sided: no __up/__down part
    ('pdfdown',   '__pdf__down'),
    ('pdfup',     '__pdf__up'),
    ('scaledown', '__scale__down'),
    ('scaleup',   '__scale__up'),
]
for _syst, _tag in _tt_nonsemilep_1000_inf_tags:
    _h = globals()['fTT_nonSemiLep_Mtt_1000_Inf_' + _syst].Get(options.hist1).Clone()
    _h.SetName(options.hist1 + '__TTbar_nonSemiLep_Mtt_1000_Inf' + _tag + '__1')
    globals()['hMeas_TT_nonSemiLep_Mtt_1000_Inf_' + _syst + '_1'] = _h
# Normalize every hist1 histogram to the expected event yield:
#     cross section x luminosity / number of generated MC events
# (for ttbar, additionally times the per-mass-bin filter efficiency).
#
# The original code spelled this out as ~150 individual .Scale(...) calls;
# each factor differed only by sample and variation, so drive it from
# tables instead.  Every histogram is scaled exactly once with exactly the
# same factor as before.

# The ten variations that share the sample's nominal normalization.
_std_systs = ('nom', 'topdown', 'topup', 'btagdown', 'btagup',
              'jecdown', 'jecup', 'jerdown', 'jerup', 'qcd')

# Single-top and W+jets: one cross section and one MC event count per
# sample; all variations use the same weight.
for _sample, _xsec, _nmc in (
        ('T_t',     sigma_T_t_NNLO,     Nmc_T_t),
        ('Tbar_t',  sigma_Tbar_t_NNLO,  Nmc_Tbar_t),
        ('T_s',     sigma_T_s_NNLO,     Nmc_T_s),
        ('Tbar_s',  sigma_Tbar_s_NNLO,  Nmc_Tbar_s),
        ('T_tW',    sigma_T_tW_NNLO,    Nmc_T_tW),
        ('Tbar_tW', sigma_Tbar_tW_NNLO, Nmc_Tbar_tW),
        ('WJets',   sigma_WJets_NNLO,   Nmc_WJets),
):
    _w = _xsec * lum / float(_nmc)
    for _syst in _std_systs:
        globals()['hMeas_%s_%s_1' % (_sample, _syst)].Scale(_w)

# ttbar mass bins (semileptonic and non-semileptonic share the same
# normalization inputs per bin).  sigma_ttbar_NNLO and the per-bin
# efficiency arrays are indexed [0]=nominal, [1]=scale down, [2]=scale up,
# [3]=pdf down, [4]=pdf up (pdf variations exist only for the cross
# section, hence _e[0]).  The scale variations come from dedicated MC
# samples with their own event counts.
for _prefix, _e, _nmc, _nmc_sdn, _nmc_sup in (
        ('TT_Mtt_less_700',            e_TT_Mtt_0_700,    Nmc_ttbar,           Nmc_ttbar_scaledown,           Nmc_ttbar_scaleup),
        ('TT_Mtt_700_1000',            e_TT_Mtt_700_1000, Nmc_TT_Mtt_700_1000, Nmc_TT_Mtt_700_1000_scaledown, Nmc_TT_Mtt_700_1000_scaleup),
        ('TT_Mtt_1000_Inf',            e_TT_Mtt_1000_Inf, Nmc_TT_Mtt_1000_Inf, Nmc_TT_Mtt_1000_Inf_scaledown, Nmc_TT_Mtt_1000_Inf_scaleup),
        ('TT_nonSemiLep_Mtt_less_700', e_TT_Mtt_0_700,    Nmc_ttbar,           Nmc_ttbar_scaledown,           Nmc_ttbar_scaleup),
        ('TT_nonSemiLep_Mtt_700_1000', e_TT_Mtt_700_1000, Nmc_TT_Mtt_700_1000, Nmc_TT_Mtt_700_1000_scaledown, Nmc_TT_Mtt_700_1000_scaleup),
        ('TT_nonSemiLep_Mtt_1000_Inf', e_TT_Mtt_1000_Inf, Nmc_TT_Mtt_1000_Inf, Nmc_TT_Mtt_1000_Inf_scaledown, Nmc_TT_Mtt_1000_Inf_scaleup),
):
    _w_nom = sigma_ttbar_NNLO[0] * _e[0] * lum / float(_nmc)
    for _syst in _std_systs:
        globals()['hMeas_%s_%s_1' % (_prefix, _syst)].Scale(_w_nom)
    globals()['hMeas_%s_pdfdown_1'   % _prefix].Scale(sigma_ttbar_NNLO[3] * _e[0] * lum / float(_nmc))
    globals()['hMeas_%s_pdfup_1'     % _prefix].Scale(sigma_ttbar_NNLO[4] * _e[0] * lum / float(_nmc))
    globals()['hMeas_%s_scaledown_1' % _prefix].Scale(sigma_ttbar_NNLO[1] * _e[1] * lum / float(_nmc_sdn))
    globals()['hMeas_%s_scaleup_1'   % _prefix].Scale(sigma_ttbar_NNLO[2] * _e[2] * lum / float(_nmc_sup))
# Second observable (hist2): clone the histograms out of each sample file,
# starting with the data-driven QCD estimate.
_h_qcd_2 = fQCD_SingleMu.Get(options.hist2).Clone()
_h_qcd_2.SetName(options.hist2 + "__QCD__2")
hMeas_QCD_SingleMu_2 = _h_qcd_2
# hist2 clones: single-top (t-, s-, tW-channel, top and antitop) and
# W+jets samples, each with its ten systematic variations.
#
# The original code repeated the same clone-and-rename pair 70 times;
# drive it from tables instead.  The clones are published as module-level
# globals under the same names as before (hMeas_<sample>_<syst>_2), with
# histogram names following the '<hist>__<sample>[__<syst>__<up|down>]__2'
# convention, so all downstream code is unaffected.
_bkg_hist2_tags = [
    ('nom',      ''),                 # nominal: no systematic tag
    ('topdown',  '__toptag__down'),
    ('topup',    '__toptag__up'),
    ('btagdown', '__btag__down'),
    ('btagup',   '__btag__up'),
    ('jecdown',  '__jec__down'),
    ('jecup',    '__jec__up'),
    ('jerdown',  '__jer__down'),
    ('jerup',    '__jer__up'),
    ('qcd',      '__qcd'),            # one-sided: no __up/__down part
]
for _sample in ('T_t', 'Tbar_t', 'T_s', 'Tbar_s', 'T_tW', 'Tbar_tW', 'WJets'):
    for _syst, _tag in _bkg_hist2_tags:
        _h = globals()['f%s_%s' % (_sample, _syst)].Get(options.hist2).Clone()
        _h.SetName(options.hist2 + '__' + _sample + _tag + '__2')
        globals()['hMeas_%s_%s_2' % (_sample, _syst)] = _h
# hist2 clones: semileptonic TTbar, Mtt < 700 and 700-1000 GeV mass bins,
# each with its fourteen systematic variations (the ttbar samples add
# pdf and scale variations on top of the ten standard ones).
#
# The original code repeated the same clone-and-rename pair 28 times;
# drive it from tables instead.  The clones are published as module-level
# globals under the same names as before (hMeas_<prefix>_<syst>_2), so
# all downstream code is unaffected.  Note the histogram NAME uses
# 'TTbar_...' while the VARIABLE uses 'TT_...'.
_tt_hist2_tags = [
    ('nom',       ''),                # nominal: no systematic tag
    ('topdown',   '__toptag__down'),
    ('topup',     '__toptag__up'),
    ('btagdown',  '__btag__down'),
    ('btagup',    '__btag__up'),
    ('jecdown',   '__jec__down'),
    ('jecup',     '__jec__up'),
    ('jerdown',   '__jer__down'),
    ('jerup',     '__jer__up'),
    ('qcd',       '__qcd'),           # one-sided: no __up/__down part
    ('pdfdown',   '__pdf__down'),
    ('pdfup',     '__pdf__up'),
    ('scaledown', '__scale__down'),
    ('scaleup',   '__scale__up'),
]
for _var_prefix, _name_prefix in (('TT_Mtt_less_700', 'TTbar_Mtt_less_700'),
                                  ('TT_Mtt_700_1000', 'TTbar_Mtt_700_1000')):
    for _syst, _tag in _tt_hist2_tags:
        _h = globals()['f%s_%s' % (_var_prefix, _syst)].Get(options.hist2).Clone()
        _h.SetName(options.hist2 + '__' + _name_prefix + _tag + '__2')
        globals()['hMeas_%s_%s_2' % (_var_prefix, _syst)] = _h
hMeas_TT_Mtt_1000_Inf_nom_2 = fTT_Mtt_1000_Inf_nom.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_nom_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__2' )
hMeas_TT_Mtt_1000_Inf_topdown_2 = fTT_Mtt_1000_Inf_topdown.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_topdown_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__toptag__down__2')
hMeas_TT_Mtt_1000_Inf_topup_2 = fTT_Mtt_1000_Inf_topup.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_topup_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__toptag__up__2')
hMeas_TT_Mtt_1000_Inf_btagdown_2 = fTT_Mtt_1000_Inf_btagdown.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_btagdown_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__btag__down__2')
hMeas_TT_Mtt_1000_Inf_btagup_2 = fTT_Mtt_1000_Inf_btagup.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_btagup_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__btag__up__2')
hMeas_TT_Mtt_1000_Inf_jecdown_2 = fTT_Mtt_1000_Inf_jecdown.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_jecdown_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__jec__down__2')
hMeas_TT_Mtt_1000_Inf_jecup_2 = fTT_Mtt_1000_Inf_jecup.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_jecup_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__jec__up__2')
hMeas_TT_Mtt_1000_Inf_jerdown_2 = fTT_Mtt_1000_Inf_jerdown.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_jerdown_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__jer__down__2')
hMeas_TT_Mtt_1000_Inf_jerup_2 = fTT_Mtt_1000_Inf_jerup.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_jerup_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__jer__up__2')
hMeas_TT_Mtt_1000_Inf_qcd_2 = fTT_Mtt_1000_Inf_qcd.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_qcd_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__qcd__2')
hMeas_TT_Mtt_1000_Inf_pdfdown_2 = fTT_Mtt_1000_Inf_pdfdown.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_pdfdown_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__pdf__down__2')
hMeas_TT_Mtt_1000_Inf_pdfup_2 = fTT_Mtt_1000_Inf_pdfup.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_pdfup_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__pdf__up__2')
hMeas_TT_Mtt_1000_Inf_scaledown_2 = fTT_Mtt_1000_Inf_scaledown.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_scaledown_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__scale__down__2')
hMeas_TT_Mtt_1000_Inf_scaleup_2 = fTT_Mtt_1000_Inf_scaleup.Get(options.hist2).Clone()
hMeas_TT_Mtt_1000_Inf_scaleup_2 .SetName( options.hist2 + '__TTbar_Mtt_1000_Inf__scale__up__2')
hMeas_TT_nonSemiLep_Mtt_less_700_nom_2 = fTT_nonSemiLep_Mtt_less_700_nom.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_nom_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__2' )
hMeas_TT_nonSemiLep_Mtt_less_700_topdown_2 = fTT_nonSemiLep_Mtt_less_700_topdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_topdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__toptag__down__2')
hMeas_TT_nonSemiLep_Mtt_less_700_topup_2 = fTT_nonSemiLep_Mtt_less_700_topup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_topup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__toptag__up__2')
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_2 = fTT_nonSemiLep_Mtt_less_700_btagdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__btag__down__2')
hMeas_TT_nonSemiLep_Mtt_less_700_btagup_2 = fTT_nonSemiLep_Mtt_less_700_btagup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_btagup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__btag__up__2')
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_2 = fTT_nonSemiLep_Mtt_less_700_jecdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__jec__down__2')
hMeas_TT_nonSemiLep_Mtt_less_700_jecup_2 = fTT_nonSemiLep_Mtt_less_700_jecup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jecup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__jec__up__2')
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_2 = fTT_nonSemiLep_Mtt_less_700_jerdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__jer__down__2')
hMeas_TT_nonSemiLep_Mtt_less_700_jerup_2 = fTT_nonSemiLep_Mtt_less_700_jerup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_jerup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__jer__up__2')
hMeas_TT_nonSemiLep_Mtt_less_700_qcd_2 = fTT_nonSemiLep_Mtt_less_700_qcd.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_qcd_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__qcd__2')
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_2 = fTT_nonSemiLep_Mtt_less_700_pdfdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__pdf__down__2')
hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_2 = fTT_nonSemiLep_Mtt_less_700_pdfup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__pdf__up__2')
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_2 = fTT_nonSemiLep_Mtt_less_700_scaledown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__scale__down__2')
hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_2 = fTT_nonSemiLep_Mtt_less_700_scaleup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_less_700__scale__up__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_nom_2 = fTT_nonSemiLep_Mtt_700_1000_nom.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_nom_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__2' )
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_2 = fTT_nonSemiLep_Mtt_700_1000_topdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__toptag__down__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_topup_2 = fTT_nonSemiLep_Mtt_700_1000_topup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_topup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__toptag__up__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_2 = fTT_nonSemiLep_Mtt_700_1000_btagdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__btag__down__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_2 = fTT_nonSemiLep_Mtt_700_1000_btagup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__btag__up__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_2 = fTT_nonSemiLep_Mtt_700_1000_jecdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__jec__down__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_2 = fTT_nonSemiLep_Mtt_700_1000_jecup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__jec__up__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_2 = fTT_nonSemiLep_Mtt_700_1000_jerdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__jer__down__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_2 = fTT_nonSemiLep_Mtt_700_1000_jerup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__jer__up__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_2 = fTT_nonSemiLep_Mtt_700_1000_qcd.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__qcd__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_2 = fTT_nonSemiLep_Mtt_700_1000_pdfdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__pdf__down__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_2 = fTT_nonSemiLep_Mtt_700_1000_pdfup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__pdf__up__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_2 = fTT_nonSemiLep_Mtt_700_1000_scaledown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__scale__down__2')
hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_2 = fTT_nonSemiLep_Mtt_700_1000_scaleup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_700_1000__scale__up__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom_2 = fTT_nonSemiLep_Mtt_1000_Inf_nom.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__2' )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown_2 = fTT_nonSemiLep_Mtt_1000_Inf_topdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__toptag__down__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup_2 = fTT_nonSemiLep_Mtt_1000_Inf_topup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__toptag__up__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown_2 = fTT_nonSemiLep_Mtt_1000_Inf_btagdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__btag__down__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup_2 = fTT_nonSemiLep_Mtt_1000_Inf_btagup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__btag__up__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown_2 = fTT_nonSemiLep_Mtt_1000_Inf_jecdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__jec__down__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup_2 = fTT_nonSemiLep_Mtt_1000_Inf_jecup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__jec__up__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown_2 = fTT_nonSemiLep_Mtt_1000_Inf_jerdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__jer__down__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup_2 = fTT_nonSemiLep_Mtt_1000_Inf_jerup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__jer__up__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd_2 = fTT_nonSemiLep_Mtt_1000_Inf_qcd.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__qcd__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown_2 = fTT_nonSemiLep_Mtt_1000_Inf_pdfdown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__pdf__down__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup_2 = fTT_nonSemiLep_Mtt_1000_Inf_pdfup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__pdf__up__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown_2 = fTT_nonSemiLep_Mtt_1000_Inf_scaledown.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__scale__down__2')
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup_2 = fTT_nonSemiLep_Mtt_1000_Inf_scaleup.Get(options.hist2).Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup_2 .SetName( options.hist2 + '__TTbar_nonSemiLep_Mtt_1000_Inf__scale__up__2')
# ---------------------------------------------------------------------------
# Normalize the hist2 clones to expected event yields: sigma * lumi / N_gen.
# For ttbar, sigma_ttbar_NNLO is indexed per variation as used below:
#   [0] nominal, [1] scale-down, [2] scale-up, [3] pdf-down, [4] pdf-up;
# e_TT_Mtt_* are per-Mtt-slice factors ([0] nominal, [1] scale-down,
# [2] scale-up), and the scale variations divide by their own generated-event
# counts (Nmc_*_scaledown / Nmc_*_scaleup). Single top and W+jets have no
# pdf/scale entries here (only nominal + detector systematics + qcd).
# ---------------------------------------------------------------------------
# --- single top, t-channel ---
hMeas_T_t_nom_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_topdown_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_topup_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_btagdown_2.Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_btagup_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jecdown_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jecup_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jerdown_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_jerup_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_T_t_qcd_2 .Scale( sigma_T_t_NNLO * lum / float(Nmc_T_t) )
hMeas_Tbar_t_nom_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_topdown_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_topup_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_btagdown_2.Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_btagup_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jecdown_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jecup_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jerdown_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_jerup_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
hMeas_Tbar_t_qcd_2 .Scale( sigma_Tbar_t_NNLO * lum / float(Nmc_Tbar_t) )
# --- single top, s-channel ---
hMeas_T_s_nom_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_topdown_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_topup_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_btagdown_2.Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_btagup_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jecdown_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jecup_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jerdown_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_jerup_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_T_s_qcd_2 .Scale( sigma_T_s_NNLO * lum / float(Nmc_T_s) )
hMeas_Tbar_s_nom_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_topdown_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_topup_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_btagdown_2.Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_btagup_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jecdown_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jecup_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jerdown_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_jerup_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
hMeas_Tbar_s_qcd_2 .Scale( sigma_Tbar_s_NNLO * lum / float(Nmc_Tbar_s) )
# --- single top, tW-channel ---
hMeas_T_tW_nom_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_topdown_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_topup_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_btagdown_2.Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_btagup_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jecdown_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jecup_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jerdown_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_jerup_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_T_tW_qcd_2 .Scale( sigma_T_tW_NNLO * lum / float(Nmc_T_tW) )
hMeas_Tbar_tW_nom_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_topdown_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_topup_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_btagdown_2.Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_btagup_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jecdown_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jecup_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jerdown_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_jerup_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
hMeas_Tbar_tW_qcd_2 .Scale( sigma_Tbar_tW_NNLO * lum / float(Nmc_Tbar_tW) )
# --- W+jets ---
hMeas_WJets_nom_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_topdown_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_topup_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_btagdown_2.Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_btagup_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jecdown_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jecup_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jerdown_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_jerup_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
hMeas_WJets_qcd_2 .Scale( sigma_WJets_NNLO * lum / float(Nmc_WJets) )
# --- semileptonic ttbar, per Mtt slice ---
hMeas_TT_Mtt_less_700_nom_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_topdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_topup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_btagdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_btagup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jecdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jecup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jerdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_jerup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_qcd_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_pdfdown_2 .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_pdfup_2 .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_Mtt_less_700_scaledown_2.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_0_700[1] * lum / float(Nmc_ttbar_scaledown))
hMeas_TT_Mtt_less_700_scaleup_2 .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_0_700[2] * lum / float(Nmc_ttbar_scaleup))
hMeas_TT_Mtt_700_1000_nom_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_topdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_topup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_btagdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_btagup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jecdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jecup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jerdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_jerup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_qcd_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_pdfdown_2 .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_pdfup_2 .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_Mtt_700_1000_scaledown_2.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_700_1000[1] * lum / float(Nmc_TT_Mtt_700_1000_scaledown))
hMeas_TT_Mtt_700_1000_scaleup_2 .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_700_1000[2] * lum / float(Nmc_TT_Mtt_700_1000_scaleup))
hMeas_TT_Mtt_1000_Inf_nom_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_topdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_topup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_btagdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_btagup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jecdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jecup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jerdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_jerup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_qcd_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_pdfdown_2 .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_pdfup_2 .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_Mtt_1000_Inf_scaledown_2.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_1000_Inf[1] * lum / float(Nmc_TT_Mtt_1000_Inf_scaledown) )
hMeas_TT_Mtt_1000_Inf_scaleup_2 .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_1000_Inf[2] * lum / float(Nmc_TT_Mtt_1000_Inf_scaleup) )
# --- non-semileptonic ttbar, per Mtt slice (same factors as semileptonic) ---
hMeas_TT_nonSemiLep_Mtt_less_700_nom_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_topdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_topup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_btagup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jecup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_jerup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_qcd_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_2 .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_2 .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_0_700[0] * lum / float(Nmc_ttbar))
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_2.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_0_700[1] * lum / float(Nmc_ttbar_scaledown))
hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_2 .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_0_700[2] * lum / float(Nmc_ttbar_scaleup))
hMeas_TT_nonSemiLep_Mtt_700_1000_nom_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_topup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_2 .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_2 .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_700_1000[0] * lum / float(Nmc_TT_Mtt_700_1000))
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_2.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_700_1000[1] * lum / float(Nmc_TT_Mtt_700_1000_scaledown))
hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_2 .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_700_1000[2] * lum / float(Nmc_TT_Mtt_700_1000_scaleup))
hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd_2 .Scale( sigma_ttbar_NNLO[0] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown_2 .Scale( sigma_ttbar_NNLO[3] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup_2 .Scale( sigma_ttbar_NNLO[4] * e_TT_Mtt_1000_Inf[0] * lum / float(Nmc_TT_Mtt_1000_Inf) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown_2.Scale( sigma_ttbar_NNLO[1] * e_TT_Mtt_1000_Inf[1] * lum / float(Nmc_TT_Mtt_1000_Inf_scaledown) )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup_2 .Scale( sigma_ttbar_NNLO[2] * e_TT_Mtt_1000_Inf[2] * lum / float(Nmc_TT_Mtt_1000_Inf_scaleup) )
# --- Subtract the region-2 (hist "_2") templates from the region-1 (hist "_1") templates ---
# For each systematic variation the two selections are combined by hist1 - hist2.
# The list order is fixed and must match the index unpacking below:
#   [0]=jecdown [1]=jecup [2]=jerdown [3]=jerup [4]=pdfdown [5]=pdfup [6]=nom
#   [7]=scaledown [8]=scaleup [9]=topdown [10]=topup [11]=btagdown [12]=btagup
# NOTE: Add(..., -1.0) mutates the "_1" histogram in place; the appended list
# elements and the renamed variables below are the same TH1 objects.
hMeas_TT_Mtt_less_700_1 = [ hMeas_TT_Mtt_less_700_jecdown_1 , hMeas_TT_Mtt_less_700_jecup_1 ,
                            hMeas_TT_Mtt_less_700_jerdown_1 , hMeas_TT_Mtt_less_700_jerup_1 ,
                            hMeas_TT_Mtt_less_700_pdfdown_1 , hMeas_TT_Mtt_less_700_pdfup_1 , hMeas_TT_Mtt_less_700_nom_1 ,
                            hMeas_TT_Mtt_less_700_scaledown_1 , hMeas_TT_Mtt_less_700_scaleup_1 ,
                            hMeas_TT_Mtt_less_700_topdown_1 , hMeas_TT_Mtt_less_700_topup_1 ,
                            hMeas_TT_Mtt_less_700_btagdown_1 , hMeas_TT_Mtt_less_700_btagup_1 ]
hMeas_TT_Mtt_less_700_2 = [ hMeas_TT_Mtt_less_700_jecdown_2 , hMeas_TT_Mtt_less_700_jecup_2 ,
                            hMeas_TT_Mtt_less_700_jerdown_2 , hMeas_TT_Mtt_less_700_jerup_2 ,
                            hMeas_TT_Mtt_less_700_pdfdown_2 , hMeas_TT_Mtt_less_700_pdfup_2 , hMeas_TT_Mtt_less_700_nom_2 ,
                            hMeas_TT_Mtt_less_700_scaledown_2 , hMeas_TT_Mtt_less_700_scaleup_2 ,
                            hMeas_TT_Mtt_less_700_topdown_2 , hMeas_TT_Mtt_less_700_topup_2 ,
                            hMeas_TT_Mtt_less_700_btagdown_2 , hMeas_TT_Mtt_less_700_btagup_2 ]
for isubtract in range(len(hMeas_TT_Mtt_less_700_1)):
    hMeas_TT_Mtt_less_700.append(hMeas_TT_Mtt_less_700_1[isubtract])
    hMeas_TT_Mtt_less_700[isubtract].Add( hMeas_TT_Mtt_less_700_2[isubtract], -1.0 )
# Re-bind convenience names to the subtracted histograms (same index order as above).
hMeas_TT_Mtt_less_700_jecdown , hMeas_TT_Mtt_less_700_jecup = hMeas_TT_Mtt_less_700[0] , hMeas_TT_Mtt_less_700[1]
hMeas_TT_Mtt_less_700_jerdown , hMeas_TT_Mtt_less_700_jerup = hMeas_TT_Mtt_less_700[2] , hMeas_TT_Mtt_less_700[3]
hMeas_TT_Mtt_less_700_pdfdown , hMeas_TT_Mtt_less_700_pdfup = hMeas_TT_Mtt_less_700[4] , hMeas_TT_Mtt_less_700[5]
hMeas_TT_Mtt_less_700_nom = hMeas_TT_Mtt_less_700[6]
hMeas_TT_Mtt_less_700_scaledown , hMeas_TT_Mtt_less_700_scaleup = hMeas_TT_Mtt_less_700[7] , hMeas_TT_Mtt_less_700[8]
hMeas_TT_Mtt_less_700_topdown , hMeas_TT_Mtt_less_700_topup = hMeas_TT_Mtt_less_700[9] , hMeas_TT_Mtt_less_700[10]
hMeas_TT_Mtt_less_700_btagdown , hMeas_TT_Mtt_less_700_btagup = hMeas_TT_Mtt_less_700[11] , hMeas_TT_Mtt_less_700[12]
# Same subtraction for the 700 < Mtt < 1000 GeV ttbar slice.
hMeas_TT_Mtt_700_1000_1 = [ hMeas_TT_Mtt_700_1000_jecdown_1 , hMeas_TT_Mtt_700_1000_jecup_1 ,
                            hMeas_TT_Mtt_700_1000_jerdown_1 , hMeas_TT_Mtt_700_1000_jerup_1 ,
                            hMeas_TT_Mtt_700_1000_pdfdown_1 , hMeas_TT_Mtt_700_1000_pdfup_1 , hMeas_TT_Mtt_700_1000_nom_1 ,
                            hMeas_TT_Mtt_700_1000_scaledown_1 , hMeas_TT_Mtt_700_1000_scaleup_1 ,
                            hMeas_TT_Mtt_700_1000_topdown_1 , hMeas_TT_Mtt_700_1000_topup_1 ,
                            hMeas_TT_Mtt_700_1000_btagdown_1 , hMeas_TT_Mtt_700_1000_btagup_1 ]
hMeas_TT_Mtt_700_1000_2 = [ hMeas_TT_Mtt_700_1000_jecdown_2 , hMeas_TT_Mtt_700_1000_jecup_2 ,
                            hMeas_TT_Mtt_700_1000_jerdown_2 , hMeas_TT_Mtt_700_1000_jerup_2 ,
                            hMeas_TT_Mtt_700_1000_pdfdown_2 , hMeas_TT_Mtt_700_1000_pdfup_2 , hMeas_TT_Mtt_700_1000_nom_2 ,
                            hMeas_TT_Mtt_700_1000_scaledown_2 , hMeas_TT_Mtt_700_1000_scaleup_2 ,
                            hMeas_TT_Mtt_700_1000_topdown_2 , hMeas_TT_Mtt_700_1000_topup_2 ,
                            hMeas_TT_Mtt_700_1000_btagdown_2 , hMeas_TT_Mtt_700_1000_btagup_2 ]
for isubtract in range(len(hMeas_TT_Mtt_700_1000_1)):
    hMeas_TT_Mtt_700_1000.append(hMeas_TT_Mtt_700_1000_1[isubtract])
    hMeas_TT_Mtt_700_1000[isubtract].Add( hMeas_TT_Mtt_700_1000_2[isubtract], -1.0 )
hMeas_TT_Mtt_700_1000_jecdown , hMeas_TT_Mtt_700_1000_jecup = hMeas_TT_Mtt_700_1000[0] , hMeas_TT_Mtt_700_1000[1]
hMeas_TT_Mtt_700_1000_jerdown , hMeas_TT_Mtt_700_1000_jerup = hMeas_TT_Mtt_700_1000[2] , hMeas_TT_Mtt_700_1000[3]
hMeas_TT_Mtt_700_1000_pdfdown , hMeas_TT_Mtt_700_1000_pdfup = hMeas_TT_Mtt_700_1000[4] , hMeas_TT_Mtt_700_1000[5]
hMeas_TT_Mtt_700_1000_nom = hMeas_TT_Mtt_700_1000[6]
hMeas_TT_Mtt_700_1000_scaledown , hMeas_TT_Mtt_700_1000_scaleup = hMeas_TT_Mtt_700_1000[7] , hMeas_TT_Mtt_700_1000[8]
hMeas_TT_Mtt_700_1000_topdown , hMeas_TT_Mtt_700_1000_topup = hMeas_TT_Mtt_700_1000[9] , hMeas_TT_Mtt_700_1000[10]
hMeas_TT_Mtt_700_1000_btagdown , hMeas_TT_Mtt_700_1000_btagup = hMeas_TT_Mtt_700_1000[11] , hMeas_TT_Mtt_700_1000[12]
# Same subtraction for the Mtt > 1000 GeV ttbar slice.
hMeas_TT_Mtt_1000_Inf_1 = [ hMeas_TT_Mtt_1000_Inf_jecdown_1 , hMeas_TT_Mtt_1000_Inf_jecup_1 ,
                            hMeas_TT_Mtt_1000_Inf_jerdown_1 , hMeas_TT_Mtt_1000_Inf_jerup_1 ,
                            hMeas_TT_Mtt_1000_Inf_pdfdown_1 , hMeas_TT_Mtt_1000_Inf_pdfup_1 , hMeas_TT_Mtt_1000_Inf_nom_1 ,
                            hMeas_TT_Mtt_1000_Inf_scaledown_1 , hMeas_TT_Mtt_1000_Inf_scaleup_1 ,
                            hMeas_TT_Mtt_1000_Inf_topdown_1 , hMeas_TT_Mtt_1000_Inf_topup_1 ,
                            hMeas_TT_Mtt_1000_Inf_btagdown_1 , hMeas_TT_Mtt_1000_Inf_btagup_1 ]
hMeas_TT_Mtt_1000_Inf_2 = [ hMeas_TT_Mtt_1000_Inf_jecdown_2 , hMeas_TT_Mtt_1000_Inf_jecup_2 ,
                            hMeas_TT_Mtt_1000_Inf_jerdown_2 , hMeas_TT_Mtt_1000_Inf_jerup_2 ,
                            hMeas_TT_Mtt_1000_Inf_pdfdown_2 , hMeas_TT_Mtt_1000_Inf_pdfup_2 , hMeas_TT_Mtt_1000_Inf_nom_2 ,
                            hMeas_TT_Mtt_1000_Inf_scaledown_2 , hMeas_TT_Mtt_1000_Inf_scaleup_2 ,
                            hMeas_TT_Mtt_1000_Inf_topdown_2 , hMeas_TT_Mtt_1000_Inf_topup_2 ,
                            hMeas_TT_Mtt_1000_Inf_btagdown_2 , hMeas_TT_Mtt_1000_Inf_btagup_2 ]
for isubtract in range(len(hMeas_TT_Mtt_1000_Inf_1)):
    hMeas_TT_Mtt_1000_Inf.append(hMeas_TT_Mtt_1000_Inf_1[isubtract])
    hMeas_TT_Mtt_1000_Inf[isubtract].Add( hMeas_TT_Mtt_1000_Inf_2[isubtract], -1.0 )
hMeas_TT_Mtt_1000_Inf_jecdown , hMeas_TT_Mtt_1000_Inf_jecup = hMeas_TT_Mtt_1000_Inf[0] , hMeas_TT_Mtt_1000_Inf[1]
hMeas_TT_Mtt_1000_Inf_jerdown , hMeas_TT_Mtt_1000_Inf_jerup = hMeas_TT_Mtt_1000_Inf[2] , hMeas_TT_Mtt_1000_Inf[3]
hMeas_TT_Mtt_1000_Inf_pdfdown , hMeas_TT_Mtt_1000_Inf_pdfup = hMeas_TT_Mtt_1000_Inf[4] , hMeas_TT_Mtt_1000_Inf[5]
hMeas_TT_Mtt_1000_Inf_nom = hMeas_TT_Mtt_1000_Inf[6]
hMeas_TT_Mtt_1000_Inf_scaledown , hMeas_TT_Mtt_1000_Inf_scaleup = hMeas_TT_Mtt_1000_Inf[7] , hMeas_TT_Mtt_1000_Inf[8]
hMeas_TT_Mtt_1000_Inf_topdown , hMeas_TT_Mtt_1000_Inf_topup = hMeas_TT_Mtt_1000_Inf[9] , hMeas_TT_Mtt_1000_Inf[10]
hMeas_TT_Mtt_1000_Inf_btagdown , hMeas_TT_Mtt_1000_Inf_btagup = hMeas_TT_Mtt_1000_Inf[11] , hMeas_TT_Mtt_1000_Inf[12]
# Same subtraction for the non-semileptonic ttbar slices (three Mtt bins).
hMeas_TT_nonSemiLep_Mtt_less_700_1 = [ hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_1 , hMeas_TT_nonSemiLep_Mtt_less_700_jecup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_1 , hMeas_TT_nonSemiLep_Mtt_less_700_jerup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_1 , hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_1 , hMeas_TT_nonSemiLep_Mtt_less_700_nom_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_1 , hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_topdown_1 , hMeas_TT_nonSemiLep_Mtt_less_700_topup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_1 , hMeas_TT_nonSemiLep_Mtt_less_700_btagup_1 ]
hMeas_TT_nonSemiLep_Mtt_less_700_2 = [ hMeas_TT_nonSemiLep_Mtt_less_700_jecdown_2 , hMeas_TT_nonSemiLep_Mtt_less_700_jecup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_jerdown_2 , hMeas_TT_nonSemiLep_Mtt_less_700_jerup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown_2 , hMeas_TT_nonSemiLep_Mtt_less_700_pdfup_2 , hMeas_TT_nonSemiLep_Mtt_less_700_nom_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_scaledown_2 , hMeas_TT_nonSemiLep_Mtt_less_700_scaleup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_topdown_2 , hMeas_TT_nonSemiLep_Mtt_less_700_topup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_less_700_btagdown_2 , hMeas_TT_nonSemiLep_Mtt_less_700_btagup_2 ]
for isubtract in range(len(hMeas_TT_nonSemiLep_Mtt_less_700_1)):
    hMeas_TT_nonSemiLep_Mtt_less_700.append(hMeas_TT_nonSemiLep_Mtt_less_700_1[isubtract])
    hMeas_TT_nonSemiLep_Mtt_less_700[isubtract].Add( hMeas_TT_nonSemiLep_Mtt_less_700_2[isubtract], -1.0 )
hMeas_TT_nonSemiLep_Mtt_less_700_jecdown , hMeas_TT_nonSemiLep_Mtt_less_700_jecup = hMeas_TT_nonSemiLep_Mtt_less_700[0] , hMeas_TT_nonSemiLep_Mtt_less_700[1]
hMeas_TT_nonSemiLep_Mtt_less_700_jerdown , hMeas_TT_nonSemiLep_Mtt_less_700_jerup = hMeas_TT_nonSemiLep_Mtt_less_700[2] , hMeas_TT_nonSemiLep_Mtt_less_700[3]
hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown , hMeas_TT_nonSemiLep_Mtt_less_700_pdfup = hMeas_TT_nonSemiLep_Mtt_less_700[4] , hMeas_TT_nonSemiLep_Mtt_less_700[5]
hMeas_TT_nonSemiLep_Mtt_less_700_nom = hMeas_TT_nonSemiLep_Mtt_less_700[6]
hMeas_TT_nonSemiLep_Mtt_less_700_scaledown , hMeas_TT_nonSemiLep_Mtt_less_700_scaleup = hMeas_TT_nonSemiLep_Mtt_less_700[7] , hMeas_TT_nonSemiLep_Mtt_less_700[8]
hMeas_TT_nonSemiLep_Mtt_less_700_topdown , hMeas_TT_nonSemiLep_Mtt_less_700_topup = hMeas_TT_nonSemiLep_Mtt_less_700[9] , hMeas_TT_nonSemiLep_Mtt_less_700[10]
hMeas_TT_nonSemiLep_Mtt_less_700_btagdown , hMeas_TT_nonSemiLep_Mtt_less_700_btagup = hMeas_TT_nonSemiLep_Mtt_less_700[11] , hMeas_TT_nonSemiLep_Mtt_less_700[12]
hMeas_TT_nonSemiLep_Mtt_700_1000_1 = [ hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_nom_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_topup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_1 , hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_1 ]
hMeas_TT_nonSemiLep_Mtt_700_1000_2 = [ hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_jecup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_jerup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_nom_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_topdown_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_topup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown_2 , hMeas_TT_nonSemiLep_Mtt_700_1000_btagup_2 ]
for isubtract in range(len(hMeas_TT_nonSemiLep_Mtt_700_1000_1)):
    hMeas_TT_nonSemiLep_Mtt_700_1000.append(hMeas_TT_nonSemiLep_Mtt_700_1000_1[isubtract])
    hMeas_TT_nonSemiLep_Mtt_700_1000[isubtract].Add( hMeas_TT_nonSemiLep_Mtt_700_1000_2[isubtract], -1.0 )
hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown , hMeas_TT_nonSemiLep_Mtt_700_1000_jecup = hMeas_TT_nonSemiLep_Mtt_700_1000[0] , hMeas_TT_nonSemiLep_Mtt_700_1000[1]
hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown , hMeas_TT_nonSemiLep_Mtt_700_1000_jerup = hMeas_TT_nonSemiLep_Mtt_700_1000[2] , hMeas_TT_nonSemiLep_Mtt_700_1000[3]
hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown , hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup = hMeas_TT_nonSemiLep_Mtt_700_1000[4] , hMeas_TT_nonSemiLep_Mtt_700_1000[5]
hMeas_TT_nonSemiLep_Mtt_700_1000_nom = hMeas_TT_nonSemiLep_Mtt_700_1000[6]
hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown , hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup = hMeas_TT_nonSemiLep_Mtt_700_1000[7] , hMeas_TT_nonSemiLep_Mtt_700_1000[8]
hMeas_TT_nonSemiLep_Mtt_700_1000_topdown , hMeas_TT_nonSemiLep_Mtt_700_1000_topup = hMeas_TT_nonSemiLep_Mtt_700_1000[9] , hMeas_TT_nonSemiLep_Mtt_700_1000[10]
hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown , hMeas_TT_nonSemiLep_Mtt_700_1000_btagup = hMeas_TT_nonSemiLep_Mtt_700_1000[11] , hMeas_TT_nonSemiLep_Mtt_700_1000[12]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_1 = [ hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup_1 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown_1 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup_1 ]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_2 = [ hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup_2 ,
                                       hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown_2 , hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup_2 ]
for isubtract in range(len(hMeas_TT_nonSemiLep_Mtt_1000_Inf_1)):
    hMeas_TT_nonSemiLep_Mtt_1000_Inf.append(hMeas_TT_nonSemiLep_Mtt_1000_Inf_1[isubtract])
    hMeas_TT_nonSemiLep_Mtt_1000_Inf[isubtract].Add( hMeas_TT_nonSemiLep_Mtt_1000_Inf_2[isubtract], -1.0 )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown , hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup = hMeas_TT_nonSemiLep_Mtt_1000_Inf[0] , hMeas_TT_nonSemiLep_Mtt_1000_Inf[1]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown , hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup = hMeas_TT_nonSemiLep_Mtt_1000_Inf[2] , hMeas_TT_nonSemiLep_Mtt_1000_Inf[3]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown , hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup = hMeas_TT_nonSemiLep_Mtt_1000_Inf[4] , hMeas_TT_nonSemiLep_Mtt_1000_Inf[5]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom = hMeas_TT_nonSemiLep_Mtt_1000_Inf[6]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown , hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup = hMeas_TT_nonSemiLep_Mtt_1000_Inf[7] , hMeas_TT_nonSemiLep_Mtt_1000_Inf[8]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown , hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup = hMeas_TT_nonSemiLep_Mtt_1000_Inf[9] , hMeas_TT_nonSemiLep_Mtt_1000_Inf[10]
hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown , hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup = hMeas_TT_nonSemiLep_Mtt_1000_Inf[11] , hMeas_TT_nonSemiLep_Mtt_1000_Inf[12]
# --- Region-2 subtraction for the single-top t-channel (T_t) sample ---
# List order must match the index unpacking below:
#   [0]=jecdown [1]=jecup [2]=jerdown [3]=jerup [4]=nom
#   [5]=topdown [6]=topup [7]=btagdown [8]=btagup
# BUG FIX: the original lists repeated hMeas_T_t_btagdown_1/btagup_1 at
# positions 5 and 6 instead of the top-tag variations (compare hMeas_Tbar_t_1
# below).  That mislabeled hMeas_T_t_topdown/topup as the btag histograms AND
# subtracted the region-2 btag histograms twice (indices 5,6 and 7,8 were the
# same objects).  Positions 5,6 now hold the top-tag histograms.
hMeas_T_t_1 = [ hMeas_T_t_jecdown_1 , hMeas_T_t_jecup_1 , hMeas_T_t_jerdown_1 , hMeas_T_t_jerup_1 , hMeas_T_t_nom_1 , hMeas_T_t_topdown_1 , hMeas_T_t_topup_1 , hMeas_T_t_btagdown_1 , hMeas_T_t_btagup_1 ]
hMeas_T_t_2 = [ hMeas_T_t_jecdown_2 , hMeas_T_t_jecup_2 , hMeas_T_t_jerdown_2 , hMeas_T_t_jerup_2 , hMeas_T_t_nom_2 , hMeas_T_t_topdown_2 , hMeas_T_t_topup_2 , hMeas_T_t_btagdown_2 , hMeas_T_t_btagup_2 ]
# Subtract hist2 from hist1 in place (the appended elements are the _1 objects).
for isubtract in range(len(hMeas_T_t_1)):
    hMeas_T_t.append(hMeas_T_t_1[isubtract])
    hMeas_T_t[isubtract].Add( hMeas_T_t_2[isubtract], -1.0 )
# Re-bind convenience names to the subtracted histograms.
hMeas_T_t_jecdown , hMeas_T_t_jecup = hMeas_T_t[0] , hMeas_T_t[1]
hMeas_T_t_jerdown , hMeas_T_t_jerup = hMeas_T_t[2] , hMeas_T_t[3]
hMeas_T_t_nom = hMeas_T_t[4]
hMeas_T_t_topdown , hMeas_T_t_topup = hMeas_T_t[5] , hMeas_T_t[6]
hMeas_T_t_btagdown , hMeas_T_t_btagup = hMeas_T_t[7] , hMeas_T_t[8]
# --- Region-2 subtraction for the remaining single-top samples ---
# Same pattern as above for Tbar t-channel, T/Tbar s-channel, and T tW:
# list order [0]=jecdown [1]=jecup [2]=jerdown [3]=jerup [4]=nom
#            [5]=topdown [6]=topup [7]=btagdown [8]=btagup,
# then hist1.Add(hist2, -1.0) in place, then re-bind convenience names.
hMeas_Tbar_t_1 = [ hMeas_Tbar_t_jecdown_1 , hMeas_Tbar_t_jecup_1 , hMeas_Tbar_t_jerdown_1 , hMeas_Tbar_t_jerup_1 , hMeas_Tbar_t_nom_1 , hMeas_Tbar_t_topdown_1 , hMeas_Tbar_t_topup_1 , hMeas_Tbar_t_btagdown_1 , hMeas_Tbar_t_btagup_1 ]
hMeas_Tbar_t_2 = [ hMeas_Tbar_t_jecdown_2 , hMeas_Tbar_t_jecup_2 , hMeas_Tbar_t_jerdown_2 , hMeas_Tbar_t_jerup_2 , hMeas_Tbar_t_nom_2 , hMeas_Tbar_t_topdown_2 , hMeas_Tbar_t_topup_2 , hMeas_Tbar_t_btagdown_2 , hMeas_Tbar_t_btagup_2 ]
for isubtract in range(len(hMeas_Tbar_t_1)):
    hMeas_Tbar_t.append(hMeas_Tbar_t_1[isubtract])
    hMeas_Tbar_t[isubtract].Add( hMeas_Tbar_t_2[isubtract], -1.0 )
hMeas_Tbar_t_jecdown , hMeas_Tbar_t_jecup = hMeas_Tbar_t[0] , hMeas_Tbar_t[1]
hMeas_Tbar_t_jerdown , hMeas_Tbar_t_jerup = hMeas_Tbar_t[2] , hMeas_Tbar_t[3]
hMeas_Tbar_t_nom = hMeas_Tbar_t[4]
hMeas_Tbar_t_topdown , hMeas_Tbar_t_topup = hMeas_Tbar_t[5] , hMeas_Tbar_t[6]
hMeas_Tbar_t_btagdown , hMeas_Tbar_t_btagup = hMeas_Tbar_t[7] , hMeas_Tbar_t[8]
hMeas_T_s_1 = [ hMeas_T_s_jecdown_1 , hMeas_T_s_jecup_1 , hMeas_T_s_jerdown_1 , hMeas_T_s_jerup_1 , hMeas_T_s_nom_1 , hMeas_T_s_topdown_1 , hMeas_T_s_topup_1 , hMeas_T_s_btagdown_1, hMeas_T_s_btagup_1 ]
hMeas_T_s_2 = [ hMeas_T_s_jecdown_2 , hMeas_T_s_jecup_2 , hMeas_T_s_jerdown_2 , hMeas_T_s_jerup_2 , hMeas_T_s_nom_2 , hMeas_T_s_topdown_2 , hMeas_T_s_topup_2 , hMeas_T_s_btagdown_2, hMeas_T_s_btagup_2 ]
for isubtract in range(len(hMeas_T_s_1)):
    hMeas_T_s.append(hMeas_T_s_1[isubtract])
    hMeas_T_s[isubtract].Add( hMeas_T_s_2[isubtract], -1.0 )
hMeas_T_s_jecdown , hMeas_T_s_jecup = hMeas_T_s[0] , hMeas_T_s[1]
hMeas_T_s_jerdown , hMeas_T_s_jerup = hMeas_T_s[2] , hMeas_T_s[3]
hMeas_T_s_nom = hMeas_T_s[4]
hMeas_T_s_topdown , hMeas_T_s_topup = hMeas_T_s[5] , hMeas_T_s[6]
hMeas_T_s_btagdown , hMeas_T_s_btagup = hMeas_T_s[7] , hMeas_T_s[8]
hMeas_Tbar_s_1 = [ hMeas_Tbar_s_jecdown_1 , hMeas_Tbar_s_jecup_1 , hMeas_Tbar_s_jerdown_1 , hMeas_Tbar_s_jerup_1 , hMeas_Tbar_s_nom_1 , hMeas_Tbar_s_topdown_1 , hMeas_Tbar_s_topup_1 , hMeas_Tbar_s_btagdown_1 , hMeas_Tbar_s_btagup_1 ]
hMeas_Tbar_s_2 = [ hMeas_Tbar_s_jecdown_2 , hMeas_Tbar_s_jecup_2 , hMeas_Tbar_s_jerdown_2 , hMeas_Tbar_s_jerup_2 , hMeas_Tbar_s_nom_2 , hMeas_Tbar_s_topdown_2 , hMeas_Tbar_s_topup_2 , hMeas_Tbar_s_btagdown_2 , hMeas_Tbar_s_btagup_2 ]
for isubtract in range(len(hMeas_Tbar_s_1)):
    hMeas_Tbar_s.append(hMeas_Tbar_s_1[isubtract])
    hMeas_Tbar_s[isubtract].Add( hMeas_Tbar_s_2[isubtract], -1.0 )
hMeas_Tbar_s_jecdown , hMeas_Tbar_s_jecup = hMeas_Tbar_s[0] , hMeas_Tbar_s[1]
hMeas_Tbar_s_jerdown , hMeas_Tbar_s_jerup = hMeas_Tbar_s[2] , hMeas_Tbar_s[3]
hMeas_Tbar_s_nom = hMeas_Tbar_s[4]
hMeas_Tbar_s_topdown , hMeas_Tbar_s_topup = hMeas_Tbar_s[5] , hMeas_Tbar_s[6]
hMeas_Tbar_s_btagdown , hMeas_Tbar_s_btagup = hMeas_Tbar_s[7] , hMeas_Tbar_s[8]
hMeas_T_tW_1 = [ hMeas_T_tW_jecdown_1 , hMeas_T_tW_jecup_1 , hMeas_T_tW_jerdown_1 , hMeas_T_tW_jerup_1 , hMeas_T_tW_nom_1 , hMeas_T_tW_topdown_1 , hMeas_T_tW_topup_1 , hMeas_T_tW_btagdown_1 , hMeas_T_tW_btagup_1 ]
hMeas_T_tW_2 = [ hMeas_T_tW_jecdown_2 , hMeas_T_tW_jecup_2 , hMeas_T_tW_jerdown_2 , hMeas_T_tW_jerup_2 , hMeas_T_tW_nom_2 , hMeas_T_tW_topdown_2 , hMeas_T_tW_topup_2 , hMeas_T_tW_btagdown_2 , hMeas_T_tW_btagup_2 ]
for isubtract in range(len(hMeas_T_tW_1)):
    hMeas_T_tW.append(hMeas_T_tW_1[isubtract])
    hMeas_T_tW[isubtract].Add( hMeas_T_tW_2[isubtract], -1.0 )
hMeas_T_tW_jecdown , hMeas_T_tW_jecup = hMeas_T_tW[0] , hMeas_T_tW[1]
hMeas_T_tW_jerdown , hMeas_T_tW_jerup = hMeas_T_tW[2] , hMeas_T_tW[3]
hMeas_T_tW_nom = hMeas_T_tW[4]
hMeas_T_tW_topdown , hMeas_T_tW_topup = hMeas_T_tW[5] , hMeas_T_tW[6]
hMeas_T_tW_btagdown , hMeas_T_tW_btagup = hMeas_T_tW[7] , hMeas_T_tW[8]
# --- Region-2 subtraction for the single-top Tbar tW sample ---
# Same list order as the other single-top samples:
#   [0]=jecdown [1]=jecup [2]=jerdown [3]=jerup [4]=nom
#   [5]=topdown [6]=topup [7]=btagdown [8]=btagup
# BUG FIX: the original region-2 list ended with hMeas_Tbar_tW_btagdown_1 /
# hMeas_Tbar_tW_btagup_1 (the *_1 histograms), so the btag variations were
# subtracted from themselves and zeroed out.  They now correctly use the *_2
# histograms, matching every other sample in this section.
hMeas_Tbar_tW_1 = [ hMeas_Tbar_tW_jecdown_1 , hMeas_Tbar_tW_jecup_1 , hMeas_Tbar_tW_jerdown_1 , hMeas_Tbar_tW_jerup_1 , hMeas_Tbar_tW_nom_1 , hMeas_Tbar_tW_topdown_1, hMeas_Tbar_tW_topup_1 , hMeas_Tbar_tW_btagdown_1, hMeas_Tbar_tW_btagup_1 ]
hMeas_Tbar_tW_2 = [ hMeas_Tbar_tW_jecdown_2 , hMeas_Tbar_tW_jecup_2 , hMeas_Tbar_tW_jerdown_2 , hMeas_Tbar_tW_jerup_2 , hMeas_Tbar_tW_nom_2 , hMeas_Tbar_tW_topdown_2, hMeas_Tbar_tW_topup_2 , hMeas_Tbar_tW_btagdown_2, hMeas_Tbar_tW_btagup_2 ]
# Subtract hist2 from hist1 in place (the appended elements are the _1 objects).
for isubtract in range(len(hMeas_Tbar_tW_1)):
    hMeas_Tbar_tW.append(hMeas_Tbar_tW_1[isubtract])
    hMeas_Tbar_tW[isubtract].Add( hMeas_Tbar_tW_2[isubtract], -1.0 )
# Re-bind convenience names to the subtracted histograms.
hMeas_Tbar_tW_jecdown , hMeas_Tbar_tW_jecup = hMeas_Tbar_tW[0] , hMeas_Tbar_tW[1]
hMeas_Tbar_tW_jerdown , hMeas_Tbar_tW_jerup = hMeas_Tbar_tW[2] , hMeas_Tbar_tW[3]
hMeas_Tbar_tW_nom = hMeas_Tbar_tW[4]
hMeas_Tbar_tW_topdown , hMeas_Tbar_tW_topup = hMeas_Tbar_tW[5] , hMeas_Tbar_tW[6]
hMeas_Tbar_tW_btagdown , hMeas_Tbar_tW_btagup = hMeas_Tbar_tW[7] , hMeas_Tbar_tW[8]
# --- Region-2 subtraction for the W+jets sample (same pattern as single top) ---
hMeas_WJets_1 = [ hMeas_WJets_jecdown_1 , hMeas_WJets_jecup_1 , hMeas_WJets_jerdown_1 , hMeas_WJets_jerup_1 , hMeas_WJets_nom_1 , hMeas_WJets_topdown_1, hMeas_WJets_topup_1 , hMeas_WJets_btagdown_1, hMeas_WJets_btagup_1 ]
hMeas_WJets_2 = [ hMeas_WJets_jecdown_2 , hMeas_WJets_jecup_2 , hMeas_WJets_jerdown_2 , hMeas_WJets_jerup_2 , hMeas_WJets_nom_2 , hMeas_WJets_topdown_2, hMeas_WJets_topup_2 , hMeas_WJets_btagdown_2, hMeas_WJets_btagup_2 ]
for isubtract in range(len(hMeas_WJets_1)):
    hMeas_WJets.append(hMeas_WJets_1[isubtract])
    hMeas_WJets[isubtract].Add( hMeas_WJets_2[isubtract], -1.0 )
hMeas_WJets_jecdown , hMeas_WJets_jecup = hMeas_WJets[0] , hMeas_WJets[1]
hMeas_WJets_jerdown , hMeas_WJets_jerup = hMeas_WJets[2] , hMeas_WJets[3]
hMeas_WJets_nom = hMeas_WJets[4]
hMeas_WJets_topdown , hMeas_WJets_topup = hMeas_WJets[5] , hMeas_WJets[6]
hMeas_WJets_btagdown , hMeas_WJets_btagup = hMeas_WJets[7] , hMeas_WJets[8]
# --- Region-2 subtraction for the QCD-sideband ("_qcd") templates ---
# These are cloned first (unlike the blocks above) so the original _qcd_1
# histograms are left untouched.
hMeas_T_t_qcd = hMeas_T_t_qcd_1.Clone()
hMeas_T_t_qcd.Add( hMeas_T_t_qcd_2 , -1.0 )
hMeas_Tbar_t_qcd = hMeas_Tbar_t_qcd_1.Clone()
hMeas_Tbar_t_qcd.Add( hMeas_Tbar_t_qcd_2 , -1.0 )
hMeas_T_s_qcd = hMeas_T_s_qcd_1.Clone()
hMeas_T_s_qcd.Add( hMeas_T_s_qcd_2 , -1.0 )
hMeas_Tbar_s_qcd = hMeas_Tbar_s_qcd_1.Clone()
hMeas_Tbar_s_qcd.Add( hMeas_Tbar_s_qcd_2 , -1.0 )
hMeas_T_tW_qcd = hMeas_T_tW_qcd_1.Clone()
hMeas_T_tW_qcd.Add( hMeas_T_tW_qcd_2 , -1.0 )
hMeas_Tbar_tW_qcd = hMeas_Tbar_tW_qcd_1.Clone()
hMeas_Tbar_tW_qcd.Add( hMeas_Tbar_tW_qcd_2 , -1.0 )
hMeas_WJets_qcd = hMeas_WJets_qcd_1.Clone()
hMeas_WJets_qcd.Add( hMeas_WJets_qcd_2 , -1.0 )
hMeas_TT_Mtt_less_700_qcd = hMeas_TT_Mtt_less_700_qcd_1.Clone()
hMeas_TT_Mtt_less_700_qcd.Add( hMeas_TT_Mtt_less_700_qcd_2 , -1.0 )
hMeas_TT_Mtt_700_1000_qcd = hMeas_TT_Mtt_700_1000_qcd_1.Clone()
hMeas_TT_Mtt_700_1000_qcd.Add( hMeas_TT_Mtt_700_1000_qcd_2 , -1.0 )
hMeas_TT_Mtt_1000_Inf_qcd = hMeas_TT_Mtt_1000_Inf_qcd_1.Clone()
hMeas_TT_Mtt_1000_Inf_qcd.Add( hMeas_TT_Mtt_1000_Inf_qcd_2 , -1.0 )
hMeas_TT_nonSemiLep_Mtt_less_700_qcd = hMeas_TT_nonSemiLep_Mtt_less_700_qcd_1.Clone()
hMeas_TT_nonSemiLep_Mtt_less_700_qcd.Add( hMeas_TT_nonSemiLep_Mtt_less_700_qcd_2 , -1.0 )
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd = hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_1.Clone()
hMeas_TT_nonSemiLep_Mtt_700_1000_qcd.Add( hMeas_TT_nonSemiLep_Mtt_700_1000_qcd_2 , -1.0 )
hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd = hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd_1.Clone()
hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd.Add( hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd_2 , -1.0 )
# Data-driven QCD template from the SingleMu sideband; named for the output file.
hMeas_QCD_SingleMu = hMeas_QCD_SingleMu_1.Clone()
hMeas_QCD_SingleMu.Add( hMeas_QCD_SingleMu_2 , -1.0 )
hMeas_QCD_SingleMu.SetName(histname + "__QCD")
######Correcting the QCD - plotting - writing in a root file
qcdcanvs = []
######### Correct the QCD estimate with the known MC backgrounds in the noniso region. ########
iiqcd = 0
qcdstack = THStack("qcdstack", "qcdstack")
# Keep an uncorrected copy for (currently disabled) data/MC plotting below.
hMeas_QCD_SingleMu_ToPlot = hMeas_QCD_SingleMu.Clone()
# One fill color per MC background in the loop order below
# (13 entries: 6 single top, W+jets, 3 ttbar Mtt bins, 3 non-semileptonic ttbar Mtt bins).
qcdcolors = [TColor.kMagenta, TColor.kMagenta,
             TColor.kMagenta, TColor.kMagenta,
             TColor.kMagenta, TColor.kMagenta,
             TColor.kGreen-3, TColor.kRed+1, TColor.kRed+1, TColor.kRed+1,
             TColor.kRed-7, TColor.kRed-7, TColor.kRed-7
             ]
hMeas_QCD_SingleMu_ToPlot.SetName("hmeas_QCD_SingleMu_ToPlot")
# Scale to desired normalization
# Options are :
#  1. From MC
#  2. From fit
#
# For now, we don't have the fit, so we do from MC
# Subtract every known MC background from the data-driven QCD template,
# stacking the subtracted components for optional plotting.
for iqcdHist in [ hMeas_T_t_qcd, hMeas_Tbar_t_qcd,
                  hMeas_T_s_qcd, hMeas_Tbar_s_qcd,
                  hMeas_T_tW_qcd, hMeas_Tbar_tW_qcd,
                  hMeas_WJets_qcd,
                  hMeas_TT_Mtt_less_700_qcd, hMeas_TT_Mtt_700_1000_qcd,
                  hMeas_TT_Mtt_1000_Inf_qcd,
                  hMeas_TT_nonSemiLep_Mtt_less_700_qcd, hMeas_TT_nonSemiLep_Mtt_700_1000_qcd,
                  hMeas_TT_nonSemiLep_Mtt_1000_Inf_qcd] :
    iqcdHist.SetFillColor(qcdcolors[iiqcd])
    hMeas_QCD_SingleMu.Add( iqcdHist, -1.0 )
    qcdstack.Add( iqcdHist )
    iiqcd += 1
#qcdcanv = TCanvas( "qcddatamc", "qcddatamc")
#hMeas_QCD_SingleMu_ToPlot.Draw("e")
#qcdstack.Draw("same hist")
#hMeas_QCD_SingleMu_ToPlot.Draw("e same")
#hMeas_QCD_SingleMu_ToPlot.Draw("e same axis")
# scale the QCD
# Normalize the corrected shape to the externally determined yield NQCD;
# if the subtraction left nothing positive, zero the template instead.
if hMeas_QCD_SingleMu.GetSum() > 0.0 :
    hMeas_QCD_SingleMu.Scale( NQCD / hMeas_QCD_SingleMu.GetSum() )
else :
    hMeas_QCD_SingleMu.Scale( 0.0 )
######### Combine ttbar samples #############
# Debug plots of the three Mtt slices for nominal / scale-up / scale-down.
# Intentionally disabled: the original guard was the magic expression
# `if 1==0 :`; replaced with the idiomatic `if False :` (identical behavior).
# Flip to True to regenerate q2woes.pdf/png.
if False :
    ttbar_canv = TCanvas( "ttbar", "ttbar", 2000, 600 )
    ttbar_canv.Divide(3,1)
    ttbar_canv.cd(1)
    ttbar_nom_stack = THStack("ttbar_nom", "ttbar_nom")
    hMeas_TT_Mtt_less_700_nom .SetLineColor( 2 )
    hMeas_TT_Mtt_700_1000_nom .SetLineColor( 3 )
    hMeas_TT_Mtt_1000_Inf_nom .SetLineColor( 4 )
    ttbar_nom_stack.Add( hMeas_TT_Mtt_less_700_nom )
    ttbar_nom_stack.Add( hMeas_TT_Mtt_700_1000_nom )
    ttbar_nom_stack.Add( hMeas_TT_Mtt_1000_Inf_nom )
    ttbar_nom_stack.Draw("nostack hist")
    ttbar_nom_stack.SetMaximum(500.)
    ttbar_canv.cd(2)
    ttbar_scaleup_stack = THStack("ttbar_scaleup", "ttbar_scaleup")
    hMeas_TT_Mtt_less_700_scaleup .SetLineColor( 2 )
    hMeas_TT_Mtt_700_1000_scaleup .SetLineColor( 3 )
    hMeas_TT_Mtt_1000_Inf_scaleup .SetLineColor( 4 )
    ttbar_scaleup_stack.Add( hMeas_TT_Mtt_less_700_scaleup )
    ttbar_scaleup_stack.Add( hMeas_TT_Mtt_700_1000_scaleup )
    ttbar_scaleup_stack.Add( hMeas_TT_Mtt_1000_Inf_scaleup )
    ttbar_scaleup_stack.Draw("nostack hist")
    ttbar_scaleup_stack.SetMaximum(500.)
    ttbar_canv.cd(3)
    ttbar_scaledown_stack = THStack("ttbar_scaledown", "ttbar_scaledown")
    hMeas_TT_Mtt_less_700_scaledown .SetLineColor( 2 )
    hMeas_TT_Mtt_700_1000_scaledown .SetLineColor( 3 )
    hMeas_TT_Mtt_1000_Inf_scaledown .SetLineColor( 4 )
    ttbar_scaledown_stack.Add( hMeas_TT_Mtt_less_700_scaledown )
    ttbar_scaledown_stack.Add( hMeas_TT_Mtt_700_1000_scaledown )
    ttbar_scaledown_stack.Add( hMeas_TT_Mtt_1000_Inf_scaledown )
    ttbar_scaledown_stack.Draw("nostack hist")
    ttbar_scaledown_stack.SetMaximum(500.)
    ttbar_canv.Print("q2woes.pdf", "pdf")
    ttbar_canv.Print("q2woes.png", "png")
# --- Merge the three Mtt slices into one semileptonic ttbar template per
# systematic variation, named for the theta/limit-setting output file
# (histname__process__systematic__direction).
hMeas_TTbar_nom = hMeas_TT_Mtt_less_700_nom.Clone()
hMeas_TTbar_nom.SetName(histname + '__TTbar' )
for hist in [hMeas_TT_Mtt_700_1000_nom, hMeas_TT_Mtt_1000_Inf_nom] :
    hMeas_TTbar_nom.Add( hist )
hMeas_TTbar_jecdown = hMeas_TT_Mtt_less_700_jecdown.Clone()
hMeas_TTbar_jecdown.SetName(histname + '__TTbar__jec__down' )
for hist in [hMeas_TT_Mtt_700_1000_jecdown, hMeas_TT_Mtt_1000_Inf_jecdown] :
    hMeas_TTbar_jecdown.Add( hist )
hMeas_TTbar_jecup = hMeas_TT_Mtt_less_700_jecup.Clone()
hMeas_TTbar_jecup.SetName(histname + '__TTbar__jec__up' )
for hist in [hMeas_TT_Mtt_700_1000_jecup, hMeas_TT_Mtt_1000_Inf_jecup] :
    hMeas_TTbar_jecup.Add( hist )
hMeas_TTbar_jerdown = hMeas_TT_Mtt_less_700_jerdown.Clone()
hMeas_TTbar_jerdown.SetName(histname + '__TTbar__jer__down' )
for hist in [hMeas_TT_Mtt_700_1000_jerdown, hMeas_TT_Mtt_1000_Inf_jerdown] :
    hMeas_TTbar_jerdown.Add( hist )
hMeas_TTbar_jerup = hMeas_TT_Mtt_less_700_jerup.Clone()
hMeas_TTbar_jerup.SetName(histname + '__TTbar__jer__up' )
for hist in [hMeas_TT_Mtt_700_1000_jerup, hMeas_TT_Mtt_1000_Inf_jerup] :
    hMeas_TTbar_jerup.Add( hist )
hMeas_TTbar_pdfdown = hMeas_TT_Mtt_less_700_pdfdown.Clone()
hMeas_TTbar_pdfdown.SetName(histname + '__TTbar__pdf__down' )
for hist in [hMeas_TT_Mtt_700_1000_pdfdown, hMeas_TT_Mtt_1000_Inf_pdfdown] :
    hMeas_TTbar_pdfdown.Add( hist )
hMeas_TTbar_pdfup = hMeas_TT_Mtt_less_700_pdfup.Clone()
hMeas_TTbar_pdfup.SetName(histname + '__TTbar__pdf__up' )
for hist in [hMeas_TT_Mtt_700_1000_pdfup, hMeas_TT_Mtt_1000_Inf_pdfup] :
    hMeas_TTbar_pdfup.Add( hist )
hMeas_TTbar_scaledown = hMeas_TT_Mtt_less_700_scaledown.Clone()
hMeas_TTbar_scaledown.SetName(histname + '__TTbar__scale__down' )
for hist in [hMeas_TT_Mtt_700_1000_scaledown, hMeas_TT_Mtt_1000_Inf_scaledown] :
    hMeas_TTbar_scaledown.Add( hist )
hMeas_TTbar_scaleup = hMeas_TT_Mtt_less_700_scaleup.Clone()
hMeas_TTbar_scaleup.SetName(histname + '__TTbar__scale__up' )
for hist in [hMeas_TT_Mtt_700_1000_scaleup, hMeas_TT_Mtt_1000_Inf_scaleup] :
    hMeas_TTbar_scaleup.Add( hist )
hMeas_TTbar_topdown = hMeas_TT_Mtt_less_700_topdown.Clone()
hMeas_TTbar_topdown.SetName(histname + '__TTbar__toptag__down' )
for hist in [hMeas_TT_Mtt_700_1000_topdown, hMeas_TT_Mtt_1000_Inf_topdown] :
    hMeas_TTbar_topdown.Add( hist )
hMeas_TTbar_topup = hMeas_TT_Mtt_less_700_topup.Clone()
hMeas_TTbar_topup.SetName(histname + '__TTbar__toptag__up' )
for hist in [hMeas_TT_Mtt_700_1000_topup, hMeas_TT_Mtt_1000_Inf_topup] :
    hMeas_TTbar_topup.Add( hist )
hMeas_TTbar_btagdown = hMeas_TT_Mtt_less_700_btagdown.Clone()
hMeas_TTbar_btagdown.SetName(histname + '__TTbar__btag__down' )
for hist in [hMeas_TT_Mtt_700_1000_btagdown, hMeas_TT_Mtt_1000_Inf_btagdown] :
    hMeas_TTbar_btagdown.Add( hist )
hMeas_TTbar_btagup = hMeas_TT_Mtt_less_700_btagup.Clone()
hMeas_TTbar_btagup.SetName(histname + '__TTbar__btag__up' )
for hist in [hMeas_TT_Mtt_700_1000_btagup, hMeas_TT_Mtt_1000_Inf_btagup] :
    hMeas_TTbar_btagup.Add( hist )
######### Combine non-semileptonic ttbar samples #############
# Same Mtt-slice merge as above, for the non-semileptonic ttbar background.
hMeas_TTbar_nonSemiLep_nom = hMeas_TT_nonSemiLep_Mtt_less_700_nom.Clone()
hMeas_TTbar_nonSemiLep_nom.SetName(histname + '__TTbar_nonSemiLep' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_nom, hMeas_TT_nonSemiLep_Mtt_1000_Inf_nom] :
    hMeas_TTbar_nonSemiLep_nom.Add( hist )
hMeas_TTbar_nonSemiLep_jecdown = hMeas_TT_nonSemiLep_Mtt_less_700_jecdown.Clone()
hMeas_TTbar_nonSemiLep_jecdown.SetName(histname + '__TTbar_nonSemiLep__jec__down' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_jecdown, hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecdown] :
    hMeas_TTbar_nonSemiLep_jecdown.Add( hist )
hMeas_TTbar_nonSemiLep_jecup = hMeas_TT_nonSemiLep_Mtt_less_700_jecup.Clone()
hMeas_TTbar_nonSemiLep_jecup.SetName(histname + '__TTbar_nonSemiLep__jec__up' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_jecup, hMeas_TT_nonSemiLep_Mtt_1000_Inf_jecup] :
    hMeas_TTbar_nonSemiLep_jecup.Add( hist )
hMeas_TTbar_nonSemiLep_jerdown = hMeas_TT_nonSemiLep_Mtt_less_700_jerdown.Clone()
hMeas_TTbar_nonSemiLep_jerdown.SetName(histname + '__TTbar_nonSemiLep__jer__down' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_jerdown, hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerdown] :
    hMeas_TTbar_nonSemiLep_jerdown.Add( hist )
hMeas_TTbar_nonSemiLep_jerup = hMeas_TT_nonSemiLep_Mtt_less_700_jerup.Clone()
hMeas_TTbar_nonSemiLep_jerup.SetName(histname + '__TTbar_nonSemiLep__jer__up' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_jerup, hMeas_TT_nonSemiLep_Mtt_1000_Inf_jerup] :
    hMeas_TTbar_nonSemiLep_jerup.Add( hist )
hMeas_TTbar_nonSemiLep_pdfdown = hMeas_TT_nonSemiLep_Mtt_less_700_pdfdown.Clone()
hMeas_TTbar_nonSemiLep_pdfdown.SetName(histname + '__TTbar_nonSemiLep__pdf__down' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_pdfdown, hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfdown] :
    hMeas_TTbar_nonSemiLep_pdfdown.Add( hist )
hMeas_TTbar_nonSemiLep_pdfup = hMeas_TT_nonSemiLep_Mtt_less_700_pdfup.Clone()
hMeas_TTbar_nonSemiLep_pdfup.SetName(histname + '__TTbar_nonSemiLep__pdf__up' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_pdfup, hMeas_TT_nonSemiLep_Mtt_1000_Inf_pdfup] :
    hMeas_TTbar_nonSemiLep_pdfup.Add( hist )
hMeas_TTbar_nonSemiLep_scaledown = hMeas_TT_nonSemiLep_Mtt_less_700_scaledown.Clone()
hMeas_TTbar_nonSemiLep_scaledown.SetName(histname + '__TTbar_nonSemiLep__scale__down' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_scaledown, hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaledown] :
    hMeas_TTbar_nonSemiLep_scaledown.Add( hist )
hMeas_TTbar_nonSemiLep_scaleup = hMeas_TT_nonSemiLep_Mtt_less_700_scaleup.Clone()
hMeas_TTbar_nonSemiLep_scaleup.SetName(histname + '__TTbar_nonSemiLep__scale__up' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_scaleup, hMeas_TT_nonSemiLep_Mtt_1000_Inf_scaleup] :
    hMeas_TTbar_nonSemiLep_scaleup.Add( hist )
hMeas_TTbar_nonSemiLep_topdown = hMeas_TT_nonSemiLep_Mtt_less_700_topdown.Clone()
hMeas_TTbar_nonSemiLep_topdown.SetName(histname + '__TTbar_nonSemiLep__toptag__down' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_topdown, hMeas_TT_nonSemiLep_Mtt_1000_Inf_topdown] :
    hMeas_TTbar_nonSemiLep_topdown.Add( hist )
hMeas_TTbar_nonSemiLep_topup = hMeas_TT_nonSemiLep_Mtt_less_700_topup.Clone()
hMeas_TTbar_nonSemiLep_topup.SetName(histname + '__TTbar_nonSemiLep__toptag__up' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_topup, hMeas_TT_nonSemiLep_Mtt_1000_Inf_topup] :
    hMeas_TTbar_nonSemiLep_topup.Add( hist )
hMeas_TTbar_nonSemiLep_btagdown = hMeas_TT_nonSemiLep_Mtt_less_700_btagdown.Clone()
hMeas_TTbar_nonSemiLep_btagdown.SetName(histname + '__TTbar_nonSemiLep__btag__down' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_btagdown, hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagdown] :
    hMeas_TTbar_nonSemiLep_btagdown.Add( hist )
hMeas_TTbar_nonSemiLep_btagup = hMeas_TT_nonSemiLep_Mtt_less_700_btagup.Clone()
hMeas_TTbar_nonSemiLep_btagup.SetName(histname + '__TTbar_nonSemiLep__btag__up' )
for hist in [hMeas_TT_nonSemiLep_Mtt_700_1000_btagup, hMeas_TT_nonSemiLep_Mtt_1000_Inf_btagup] :
    hMeas_TTbar_nonSemiLep_btagup.Add( hist )
######### Combine Single Top samples #############
# Merge all six single-top channels (t, s, tW for top and antitop) into one
# template per systematic variation.  Note: only jec/jer/toptag/btag are
# combined here -- presumably pdf/scale variations are not produced for the
# single-top samples (confirm upstream).
hMeas_SingleTop_nom = hMeas_T_t_nom.Clone()
hMeas_SingleTop_nom.SetName(histname + '__SingleTop' )
for hist in [hMeas_Tbar_t_nom, hMeas_T_s_nom, hMeas_Tbar_s_nom, hMeas_T_tW_nom, hMeas_Tbar_tW_nom] :
    hMeas_SingleTop_nom.Add( hist )
hMeas_SingleTop_jecdown = hMeas_T_t_jecdown.Clone()
hMeas_SingleTop_jecdown.SetName(histname + '__SingleTop__jec__down' )
for hist in [hMeas_Tbar_t_jecdown, hMeas_T_s_jecdown, hMeas_Tbar_s_jecdown, hMeas_T_tW_jecdown, hMeas_Tbar_tW_jecdown] :
    hMeas_SingleTop_jecdown.Add( hist )
hMeas_SingleTop_jecup = hMeas_T_t_jecup.Clone()
hMeas_SingleTop_jecup.SetName(histname + '__SingleTop__jec__up' )
for hist in [hMeas_Tbar_t_jecup, hMeas_T_s_jecup, hMeas_Tbar_s_jecup, hMeas_T_tW_jecup, hMeas_Tbar_tW_jecup] :
    hMeas_SingleTop_jecup.Add( hist )
hMeas_SingleTop_jerdown = hMeas_T_t_jerdown.Clone()
hMeas_SingleTop_jerdown.SetName(histname + '__SingleTop__jer__down' )
for hist in [hMeas_Tbar_t_jerdown, hMeas_T_s_jerdown, hMeas_Tbar_s_jerdown, hMeas_T_tW_jerdown, hMeas_Tbar_tW_jerdown] :
    hMeas_SingleTop_jerdown.Add( hist )
hMeas_SingleTop_jerup = hMeas_T_t_jerup.Clone()
hMeas_SingleTop_jerup.SetName(histname + '__SingleTop__jer__up' )
for hist in [hMeas_Tbar_t_jerup, hMeas_T_s_jerup, hMeas_Tbar_s_jerup, hMeas_T_tW_jerup, hMeas_Tbar_tW_jerup] :
    hMeas_SingleTop_jerup.Add( hist )
hMeas_SingleTop_topdown = hMeas_T_t_topdown.Clone()
hMeas_SingleTop_topdown.SetName(histname + '__SingleTop__toptag__down' )
for hist in [hMeas_Tbar_t_topdown, hMeas_T_s_topdown, hMeas_Tbar_s_topdown, hMeas_T_tW_topdown, hMeas_Tbar_tW_topdown] :
    hMeas_SingleTop_topdown.Add( hist )
hMeas_SingleTop_topup = hMeas_T_t_topup.Clone()
hMeas_SingleTop_topup.SetName(histname + '__SingleTop__toptag__up' )
for hist in [hMeas_Tbar_t_topup, hMeas_T_s_topup, hMeas_Tbar_s_topup, hMeas_T_tW_topup, hMeas_Tbar_tW_topup] :
    hMeas_SingleTop_topup.Add( hist )
hMeas_SingleTop_btagdown = hMeas_T_t_btagdown.Clone()
hMeas_SingleTop_btagdown.SetName(histname + '__SingleTop__btag__down' )
for hist in [hMeas_Tbar_t_btagdown, hMeas_T_s_btagdown, hMeas_Tbar_s_btagdown, hMeas_T_tW_btagdown, hMeas_Tbar_tW_btagdown] :
    hMeas_SingleTop_btagdown.Add( hist )
hMeas_SingleTop_btagup = hMeas_T_t_btagup.Clone()
hMeas_SingleTop_btagup.SetName(histname + '__SingleTop__btag__up' )
for hist in [hMeas_Tbar_t_btagup, hMeas_T_s_btagup, hMeas_Tbar_s_btagup, hMeas_T_tW_btagup, hMeas_Tbar_tW_btagup] :
    hMeas_SingleTop_btagup.Add( hist )
# Rename the (already subtracted) W+jets templates for the output file.
hMeas_WJets_nom     .SetName( histname + '__WJets')
hMeas_WJets_jecdown .SetName( histname + '__WJets__jec__down' )
hMeas_WJets_jecup   .SetName( histname + '__WJets__jec__up' )
hMeas_WJets_jerdown .SetName( histname + '__WJets__jer__down' )
hMeas_WJets_jerup .SetName( histname + '__WJets__jer__up' )
hMeas_WJets_topdown .SetName( histname + '__WJets__toptag__down' )
hMeas_WJets_topup .SetName( histname + '__WJets__toptag__up' )
hMeas_WJets_btagdown .SetName( histname + '__WJets__btag__down' )
hMeas_WJets_btagup .SetName( histname + '__WJets__btag__up' )
hists = []
########## Make some easy-access lists ##########
plots = [ 'jec__down' , 'jec__up' , 'jer__down' , 'jer__up' , 'toptag__down' , 'toptag__up' , 'btag__down' , 'btag__up' , 'pdf__down' , 'pdf__up' , 'scale__down' , 'scale__up', 'nom' ]
hMeas_TTbar = [ hMeas_TTbar_jecdown , hMeas_TTbar_jecup ,
hMeas_TTbar_jerdown , hMeas_TTbar_jerup ,
hMeas_TTbar_topdown , hMeas_TTbar_topup ,
hMeas_TTbar_btagdown , hMeas_TTbar_btagup ,
hMeas_TTbar_pdfdown , hMeas_TTbar_pdfup ,
hMeas_TTbar_scaledown , hMeas_TTbar_scaleup ,
hMeas_TTbar_nom ]
hMeas_TTbar_nonSemiLep = [ hMeas_TTbar_nonSemiLep_jecdown , hMeas_TTbar_nonSemiLep_jecup ,
hMeas_TTbar_nonSemiLep_jerdown , hMeas_TTbar_nonSemiLep_jerup ,
hMeas_TTbar_nonSemiLep_topdown , hMeas_TTbar_nonSemiLep_topup ,
hMeas_TTbar_nonSemiLep_btagdown , hMeas_TTbar_nonSemiLep_btagup ,
hMeas_TTbar_nonSemiLep_pdfdown , hMeas_TTbar_nonSemiLep_pdfup ,
hMeas_TTbar_nonSemiLep_scaledown , hMeas_TTbar_nonSemiLep_scaleup ,
hMeas_TTbar_nonSemiLep_nom ]
hMeas_SingleTop = [ hMeas_SingleTop_jecdown , hMeas_SingleTop_jecup ,
hMeas_SingleTop_jerdown , hMeas_SingleTop_jerup ,
hMeas_SingleTop_topdown , hMeas_SingleTop_topup ,
hMeas_SingleTop_btagdown , hMeas_SingleTop_btagup,
hMeas_SingleTop_nom , hMeas_SingleTop_nom ,
hMeas_SingleTop_nom , hMeas_SingleTop_nom ,
hMeas_SingleTop_nom ]
hMeas_WJets = [ hMeas_WJets_jecdown , hMeas_WJets_jecup ,
hMeas_WJets_jerdown , hMeas_WJets_jerup ,
hMeas_WJets_topdown , hMeas_WJets_topup ,
hMeas_WJets_btagdown , hMeas_WJets_btagup ,
hMeas_WJets_nom , hMeas_WJets_nom ,
hMeas_WJets_nom , hMeas_WJets_nom ,
hMeas_WJets_nom ]
hMeas_QCD = [ hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu]
stacks = []
for thehist in hMeas_TTbar :
thehist.SetFillColor( TColor.kRed+1 )
for thehist in hMeas_TTbar_nonSemiLep :
thehist.SetFillColor( TColor.kRed-7 )
for thehist in hMeas_WJets :
thehist.SetFillColor( TColor.kGreen-3 )
for thehist in hMeas_SingleTop :
thehist.SetFillColor( TColor.kMagenta )
for thehist in hMeas_QCD :
thehist.SetFillColor( TColor.kYellow )
if options.rebin != None and options.rebin != 1:
hMeas_TTbar_jecdown.Rebin( options.rebin )
hMeas_TTbar_jecup.Rebin( options.rebin )
hMeas_TTbar_jerdown.Rebin( options.rebin )
hMeas_TTbar_jerup.Rebin( options.rebin )
hMeas_TTbar_topdown.Rebin( options.rebin )
hMeas_TTbar_topup.Rebin( options.rebin )
hMeas_TTbar_btagdown.Rebin( options.rebin )
hMeas_TTbar_btagup.Rebin( options.rebin )
hMeas_TTbar_pdfdown.Rebin( options.rebin )
hMeas_TTbar_pdfup.Rebin( options.rebin )
hMeas_TTbar_scaledown.Rebin( options.rebin )
hMeas_TTbar_scaleup.Rebin( options.rebin )
hMeas_TTbar_nom.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_jecdown.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_jecup.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_jerdown.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_jerup.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_topdown.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_topup.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_btagdown.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_btagup.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_pdfdown.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_pdfup.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_scaledown.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_scaleup.Rebin( options.rebin )
hMeas_TTbar_nonSemiLep_nom.Rebin( options.rebin )
hMeas_SingleTop_jecdown.Rebin( options.rebin )
hMeas_SingleTop_jecup.Rebin( options.rebin )
hMeas_SingleTop_jerdown.Rebin( options.rebin )
hMeas_SingleTop_jerup.Rebin( options.rebin )
hMeas_SingleTop_topdown.Rebin( options.rebin )
hMeas_SingleTop_topup.Rebin( options.rebin )
hMeas_SingleTop_btagdown.Rebin( options.rebin )
hMeas_SingleTop_btagup.Rebin( options.rebin )
hMeas_SingleTop_nom.Rebin( options.rebin )
hMeas_WJets_jecdown.Rebin( options.rebin )
hMeas_WJets_jecup.Rebin( options.rebin )
hMeas_WJets_jerdown.Rebin( options.rebin )
hMeas_WJets_jerup.Rebin( options.rebin )
hMeas_WJets_topdown.Rebin( options.rebin )
hMeas_WJets_topup.Rebin( options.rebin )
hMeas_WJets_btagdown.Rebin( options.rebin )
hMeas_WJets_btagup.Rebin( options.rebin )
hMeas_WJets_nom.Rebin( options.rebin )
hMeas_QCD_SingleMu.Rebin ( options.rebin )
hRecoData.Rebin( options.rebin )
if options.newYlabel is not 'None':
hRecoData.GetYaxis().SetTitle(options.newYlabel)
legs = []
summedhists = []
eventcounts = []
# plotting options
hRecoData.SetLineWidth(1)
hRecoData.SetMarkerStyle(8)
if 'csv1LepJet' in options.hist1 or 'csv2LepJet' in options.hist1 :
hRecoData.SetAxisRange(0,1.05,"X")
if 'hadtop_mass3' in options.hist1 or 'hadtop_mass4' in options.hist1 :
hRecoData.SetAxisRange(0,250,"X")
if 'hadtop_pt3' in options.hist1 or 'leptop_pt3' in options.hist1 :
hRecoData.SetAxisRange(150,700,"X")
if 'hadtop_pt4' in options.hist1 or 'leptop_pt4' in options.hist1 :
hRecoData.SetAxisRange(350,900,"X")
if 'hadtop_pt6' in options.hist1 or 'hadtop_pt7' in options.hist1 or 'leptop_pt6' in options.hist1 or 'leptop_pt7' in options.hist1 :
hRecoData.SetAxisRange(350,1200,"X")
if 'hadtop_y' in options.hist1 :
hRecoData.SetAxisRange(-3,3,"X")
if 'ht2' in options.hist1 or 'htLep2' in options.hist1:
hRecoData.SetAxisRange(0,800,"X")
if 'ht3' in options.hist1 or 'htLep3' in options.hist1 :
hRecoData.SetAxisRange(0,1400,"X")
if 'ht4' in options.hist1 or 'ht6' in options.hist1 or 'ht7' in options.hist1 :
hRecoData.SetAxisRange(0,2500,"X")
if 'htLep4' in options.hist1 or 'htLep6' in options.hist1 or 'htLep7' in options.hist1 :
hRecoData.SetAxisRange(0,2500,"X")
if 'pt1LepJet2' in options.hist1 :
hRecoData.SetAxisRange(0,250,"X")
if 'ptLep0' in options.hist1 or 'ptLep2' in options.hist1 :
hRecoData.SetAxisRange(0,200,"X")
if 'ptMET0' in options.hist1 or 'ptMET2' in options.hist1 :
hRecoData.SetAxisRange(0,200,"X")
for m in range(0,len(hMeas_TTbar)):
if options.plotNom == True and plots[m] != 'nom' :
continue
if 'csv' in options.hist1 :
leg = TLegend(0.59,0.56,0.84,0.9)
else :
leg = TLegend(0.67,0.56,0.92,0.9)
leg.SetBorderSize(0)
leg.SetFillStyle(0)
leg.SetTextFont(42)
leg.SetTextSize(0.05)
leg.AddEntry( hRecoData, 'Data', 'pel')
leg.AddEntry( hMeas_TTbar[m], 't#bar{t} Signal', 'f')
leg.AddEntry( hMeas_TTbar_nonSemiLep[m], 't#bar{t} Other', 'f')
leg.AddEntry( hMeas_SingleTop[m], 'Single Top', 'f')
leg.AddEntry( hMeas_WJets[m], 'W #rightarrow #mu#nu', 'f')
leg.AddEntry( hMeas_QCD[m], 'QCD' , 'f')
# Make a stack plot of the MC to compare to data
hMC_stack = THStack("hMC_stack_" + str(m),
hMeas_TTbar[m].GetTitle() + ';' +
hMeas_TTbar[m].GetXaxis().GetTitle() + ';' +
hMeas_TTbar[m].GetYaxis().GetTitle()
)
hMC_stack.Add( hMeas_QCD[m] )
hMC_stack.Add( hMeas_WJets[m] )
hMC_stack.Add( hMeas_SingleTop[m] )
hMC_stack.Add( hMeas_TTbar_nonSemiLep[m] )
hMC_stack.Add( hMeas_TTbar[m] )
summedhist = hMeas_TTbar[m].Clone()
summedhist.SetName( 'summed_' + plots[m] )
summedhist.Add( hMeas_TTbar_nonSemiLep[m] )
summedhist.Add( hMeas_WJets[m] )
summedhist.Add( hMeas_SingleTop[m] )
summedhist.Add( hMeas_QCD_SingleMu )
summedhist.Sumw2()
ratiohist = hRecoData.Clone()
ratiohist.SetName( 'ratio_' + plots[m] )
ratiohist.Sumw2()
ratiohist.Divide( summedhist )
summedhists.append( [ratiohist,summedhist] )
# automatically set y-range
max = summedhist.GetMaximum();
if not options.ignoreData and (hRecoData.GetMaximum() + hRecoData.GetBinError(hRecoData.GetMaximumBin())) > max :
max = (hRecoData.GetMaximum() + hRecoData.GetBinError(hRecoData.GetMaximumBin()))
if "eta" in options.hist1 or "_y" in options.hist1 :
max = max*1.5
hRecoData.SetAxisRange(0,max*1.05,"Y");
c = TCanvas("datamc" + plots[m] , "datamc" + plots[m],200,10,900,800)
p1 = TPad("datamcp1" + plots[m] , "datamc" + plots[m],0.0,0.3,1.0,0.97)
p1.SetTopMargin(0.05)
p1.SetBottomMargin(0.05)
p1.SetNumber(1)
p2 = TPad("datamcp2" + plots[m] , "datamc" + plots[m],0.0,0.00,1.0,0.3)
p2.SetNumber(2)
p2.SetTopMargin(0.05)
#p2.SetBottomMargin(0.50)
p2.SetBottomMargin(0.40)
c.cd()
p1.Draw()
p1.cd()
if not options.ignoreData :
hRecoData.UseCurrentStyle()
hRecoData.GetXaxis().SetTitle('')
hRecoData.GetXaxis().SetLabelSize(24);
hRecoData.GetYaxis().SetLabelSize(24);
hRecoData.Draw('lep')
hMC_stack.Draw("hist same")
hRecoData.Draw('lep same')
hRecoData.Draw('lep same axis')
else :
hMC_stack.UseCurrentStyle()
hMC_stack.Draw("hist")
hMC_stack.GetXaxis().SetTitle('')
if options.drawLegend :
leg.Draw()
l = TLatex()
l.SetTextSize(0.05)
l.SetTextFont(42)
l.SetNDC()
l.SetTextColor(1)
if 'csv' in options.hist1 :
l.DrawLatex(0.40,0.81,"#intLdt = 19.7 fb^{-1}")
l.DrawLatex(0.40,0.72,"#sqrt{s} = 8 TeV")
else :
l.DrawLatex(0.48,0.81,"#intLdt = 19.7 fb^{-1}")
l.DrawLatex(0.48,0.72,"#sqrt{s} = 8 TeV")
eventcounts.append( [plots[m], hMeas_TTbar[m].GetSum(), hMeas_TTbar_nonSemiLep[m].GetSum(), hMeas_WJets[m].GetSum(), hMeas_SingleTop[m].GetSum(), hMeas_QCD_SingleMu.GetSum(), hRecoData.GetSum() ] )
c.cd()
p2.Draw()
p2.cd()
p2.SetGridy()
ratiohist.UseCurrentStyle()
ratiohist.Draw('lep')
ratiohist.SetMaximum(2.0)
ratiohist.SetMinimum(0.0)
ratiohist.GetYaxis().SetNdivisions(2,4,0,False)
ratiohist.GetYaxis().SetTitle( 'Data/MC' )
ratiohist.GetXaxis().SetTitle( hMeas_TTbar[m].GetXaxis().GetTitle() )
#ratiohist.GetXaxis().SetTitleOffset( 3.0 )
ratiohist.GetXaxis().SetTitleOffset( 4.0 )
ratiohist.GetXaxis().SetLabelSize(24);
ratiohist.GetYaxis().SetLabelSize(24);
canvs.append( [c, p1, p2] )
legs.append(leg)
if options.hist2 is None:
if not options.ignoreData :
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist1 + '.png' )
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist1 + '.pdf' )
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist1 + '.eps' )
else :
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist1 + '_nodata.png' )
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist1 + '_nodata.pdf' )
elif options.hist2 is not None:
if not options.ignoreData :
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist2 + '_subtracted_from_' + options.hist1 + '.png' )
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist2 + '_subtracted_from_' + options.hist1 + '.pdf' )
else :
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist2 + '_subtracted_from_' + options.hist1 + '_nodata.png' )
c.Print( 'normalized_' + plots[m] + '_' + options.outname + '_' + options.hist2 + '_subtracted_from_' + options.hist1 + '_nodata.pdf' )
# Print event counts
if options.hist2 is None :
print '------------ Cut Flow Stage ' + options.hist1 + ' -----------------'
else :
print '------------ Cut Flow Stage ' + options.hist1 + ' minus Stage ' + options.hist2 + ' -----------------'
print '{0:21s} '.format( 'Variation' ),
for name in ['TTbar', 'TTbar_nonSemiLep', 'WJets', 'S.T.', 'QCD', 'Data'] :
print '{0:8s} '.format(name),
print ''
for count in eventcounts :
print '{0:20s} '.format( count[0] ),
for val in count[1:] :
print '{0:8.1f} '.format( val ),
print ''
# write the histogram in a rootfile
hMeas_TTbar = [ hMeas_TTbar_jecdown , hMeas_TTbar_jecup ,
hMeas_TTbar_jerdown , hMeas_TTbar_jerup ,
hMeas_TTbar_topdown , hMeas_TTbar_topup ,
hMeas_TTbar_btagdown , hMeas_TTbar_btagup ,
hMeas_TTbar_pdfdown , hMeas_TTbar_pdfup ,
hMeas_TTbar_scaledown , hMeas_TTbar_scaleup,
hMeas_TTbar_nom ]
hMeas_TTbar_nonSemiLep = [ hMeas_TTbar_nonSemiLep_jecdown , hMeas_TTbar_nonSemiLep_jecup ,
hMeas_TTbar_nonSemiLep_jerdown , hMeas_TTbar_nonSemiLep_jerup ,
hMeas_TTbar_nonSemiLep_topdown , hMeas_TTbar_nonSemiLep_topup ,
hMeas_TTbar_nonSemiLep_btagdown , hMeas_TTbar_nonSemiLep_btagup ,
hMeas_TTbar_nonSemiLep_pdfdown , hMeas_TTbar_nonSemiLep_pdfup ,
hMeas_TTbar_nonSemiLep_scaledown , hMeas_TTbar_nonSemiLep_scaleup,
hMeas_TTbar_nonSemiLep_nom ]
hMeas_SingleTop = [ hMeas_SingleTop_jecdown , hMeas_SingleTop_jecup ,
hMeas_SingleTop_jerdown , hMeas_SingleTop_jerup ,
hMeas_SingleTop_topdown , hMeas_SingleTop_topup ,
hMeas_SingleTop_btagdown , hMeas_SingleTop_btagup ,
hMeas_SingleTop_nom , hMeas_SingleTop_nom ,
hMeas_SingleTop_nom , hMeas_SingleTop_nom,
hMeas_SingleTop_nom ]
hMeas_WJets = [ hMeas_WJets_jecdown , hMeas_WJets_jecup ,
hMeas_WJets_jerdown , hMeas_WJets_jerup ,
hMeas_WJets_topdown , hMeas_WJets_topup ,
hMeas_WJets_btagdown , hMeas_WJets_btagup ,
hMeas_WJets_nom , hMeas_WJets_nom ,
hMeas_WJets_nom , hMeas_WJets_nom,
hMeas_WJets_nom ]
hMeas_QCD = [ hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu ,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu,
hMeas_QCD_SingleMu , hMeas_QCD_SingleMu,
hMeas_QCD_SingleMu]
histsAll = [hRecoData , hMeas_QCD_SingleMu ,
hMeas_TTbar_jecdown , hMeas_TTbar_jecup ,
hMeas_TTbar_jerdown , hMeas_TTbar_jerup ,
hMeas_TTbar_topdown , hMeas_TTbar_topup ,
hMeas_TTbar_btagdown , hMeas_TTbar_btagup ,
hMeas_TTbar_pdfdown , hMeas_TTbar_pdfup ,
hMeas_TTbar_scaledown , hMeas_TTbar_scaleup ,
hMeas_TTbar_nom ,
hMeas_TTbar_nonSemiLep_jecdown , hMeas_TTbar_nonSemiLep_jecup ,
hMeas_TTbar_nonSemiLep_jerdown , hMeas_TTbar_nonSemiLep_jerup ,
hMeas_TTbar_nonSemiLep_topdown , hMeas_TTbar_nonSemiLep_topup ,
hMeas_TTbar_nonSemiLep_btagdown , hMeas_TTbar_nonSemiLep_btagup ,
hMeas_TTbar_nonSemiLep_pdfdown , hMeas_TTbar_nonSemiLep_pdfup ,
hMeas_TTbar_nonSemiLep_scaledown , hMeas_TTbar_nonSemiLep_scaleup ,
hMeas_TTbar_nonSemiLep_nom ,
hMeas_SingleTop_jecdown , hMeas_SingleTop_jecup ,
hMeas_SingleTop_jerdown , hMeas_SingleTop_jerup ,
hMeas_SingleTop_topdown , hMeas_SingleTop_topup ,
hMeas_SingleTop_btagdown , hMeas_SingleTop_btagup,
hMeas_SingleTop_nom ,
hMeas_WJets_jecdown , hMeas_WJets_jecup ,
hMeas_WJets_jerdown , hMeas_WJets_jerup ,
hMeas_WJets_topdown , hMeas_WJets_topup ,
hMeas_WJets_btagdown , hMeas_WJets_btagup ,
hMeas_WJets_nom
]
fout.cd()
for ihist in xrange(len(histsAll)) :
hist = histsAll[ihist]
if hist is not None :
hist.Write()
fout.Close()
|
10,704 | 9e7b1abb92a7bf2d0f20409a6bcb831116745294 | #!/usr/bin/env python
import os
import argparse
import logging
import sys
def main(args, loglevel):
    """Validate the input file named in *args* and log progress.

    Args:
        args: Parsed argparse namespace; ``args.file_input`` is the path to check.
        loglevel: Logging level constant (e.g. ``logging.DEBUG``).

    Exits the process with status 1 when the file does not exist.
    """
    logging.basicConfig(format="%(levelname)s: %(message)s", level=loglevel)
    f = args.file_input
    if not os.path.exists(f):
        # Lazy %-style arguments: the message is only formatted if the
        # record is actually emitted (and a bad value cannot crash logging).
        logging.error("File %s does not exist", f)
        sys.exit(1)
    logging.info("Processing: %s", f)
# Script entry point: parse the command line, choose a log level, run main().
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description = "A very useful script.",
                                     epilog = "Detailed description of the script. Params can also be specified in a file that is passed as command line argument, like this: '%(prog)s @params.conf'.",
                                     # '@file' on the command line pulls additional arguments from that file.
                                     fromfile_prefix_chars = '@')
    parser.add_argument("file_input", help = "pass ARG to the program", metavar = "file")
    parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
    args = parser.parse_args()
    # -v switches from INFO to DEBUG verbosity.
    if args.verbose:
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO
    main(args, loglevel)
|
10,705 | 214cb65d5b4e7be6397954a5fcffd0c632e251bc | import unittest
import pandas
from oop_pyaaas.dataset import Dataset
import tempfile
import pathlib
from oop_pyaaas.service import AaaSService
class DatasetTest(unittest.TestCase):
    """Tests for building Dataset objects from pandas frames and CSV files."""

    def setUp(self):
        """Create matching CSV/dict fixtures and write the CSV to a temp dir."""
        # The CSV rows must mirror self.test_data_dict row-for-row so that
        # test_dataset_from_csv_and_pandas_are_equal compares identical data.
        # (Fixed: the third age was '6' while the dict holds 36.)
        self.test_data_csv = """age, gender, zipcode\n34, male,81667\n35, female,81668\n36, male,81669\n 37, female,81670\n38, male,81671\n39, female,81672\n40, male,81673\n41, female,81674\n42, male,81675\n43, female,81676\n44, male,81677"""
        self.test_data_dict = {'age': {0: 34, 1: 35, 2: 36, 3: 37, 4: 38,
                                       5: 39, 6: 40, 7: 41, 8: 42, 9: 43,
                                       10: 44},
                               # Leading spaces are intentional: they match how
                               # the CSV's ", male"/", female" cells are read.
                               'gender': {0: ' male', 1: ' female', 2: ' male',
                                          3: ' female', 4: ' male', 5: ' female',
                                          6: ' male', 7: ' female', 8: ' male',
                                          9: ' female', 10: ' male'},
                               'zipcode': {0: 81667, 1: 81668, 2: 81669,
                                           3: 81670, 4: 81671, 5: 81672,
                                           6: 81673, 7: 81674, 8: 81675,
                                           9: 81676, 10: 81677}}
        self.test_df = pandas.DataFrame(self.test_data_dict)
        # Per-column sensitivity classification expected by the Dataset API.
        self.test_attributes = {"age": "IDENTIFYING",
                                "gender": "INSENSITIVE",
                                "zipcode": "INSENSITIVE"}
        self.tempdir = tempfile.TemporaryDirectory()
        self.test_csv_path = pathlib.Path(self.tempdir.name).joinpath("testcsv.csv")
        with self.test_csv_path.open("w") as file:
            file.write(self.test_data_csv)

    def tearDown(self):
        # Remove the temporary CSV directory created in setUp.
        self.tempdir.cleanup()

    def test_from_pandas(self):
        """from_pandas builds a Dataset from a DataFrame."""
        dataset = Dataset.from_pandas(self.test_df, self.test_attributes)
        self.assertIsInstance(dataset, Dataset)

    def test_from_csv(self):
        """from_csv builds a Dataset from a delimited file."""
        dataset = Dataset.from_csv(self.test_csv_path, ",", self.test_attributes)
        self.assertIsInstance(dataset, Dataset)

    def test_dataset_from_csv_and_pandas_are_equal(self):
        """Both constructors must produce equal Datasets from the same data."""
        pandas_dataset = Dataset.from_pandas(self.test_df, self.test_attributes)
        csv_dataset = Dataset.from_csv(self.test_csv_path, ",", self.test_attributes)
        self.assertEqual(pandas_dataset, csv_dataset)

    def test_wrong_attribute_field_raises_exception(self):
        """An attribute name that is not a data column raises ValueError."""
        error_test_attributes = {"not_a_field_int_the_set": "IDENTIFYING",
                                 "gender": "INSENSITIVE",
                                 "zipcode": "INSENSITIVE"}
        with self.assertRaises(ValueError):
            Dataset.from_pandas(self.test_df, error_test_attributes)

    def test_re_indentification_risk_analysation(self):
        """Smoke-test the re-identification risk call against a local service.

        NOTE(review): requires an AaaS service on localhost:8080 and asserts
        nothing about the result -- consider marking it an integration test.
        """
        dataset = Dataset.from_pandas(self.test_df, self.test_attributes,
                                      AaaSService("http://localhost:8080"))
        result = dataset.re_identification_risk()
        print(result.text)
10,706 | ff2b18b7e4afa7b3939189700c91896f6061f527 | import numpy as np
import cv2
def minmax_filter(image, ksize, mode):
    """Apply a square minimum/maximum filter to a 2-D grayscale image.

    Args:
        image: 2-D uint8 array (grayscale image).
        ksize: Side length of the square neighborhood (odd, e.g. 3).
        mode: 0 selects the neighborhood minimum, any other value the maximum
            (index order matches cv2.minMaxLoc's (minVal, maxVal, ...) tuple).

    Returns:
        uint8 array of the same height/width; the ksize//2 pixel border,
        where the window would fall outside the image, stays 0.
    """
    rows, cols = image.shape[:2]
    dst = np.zeros((rows, cols), np.uint8)
    center = ksize // 2
    for i in range(center, rows - center):
        for j in range(center, cols - center):
            window = image[i - center:i + center + 1, j - center:j + center + 1]
            # ndarray.min()/max() replace the original per-pixel
            # cv2.minMaxLoc call: identical values, no OpenCV dependency
            # inside the hot loop.
            dst[i, j] = window.min() if mode == 0 else window.max()
    return dst
# Load the test image as single-channel grayscale; cv2.imread returns None on failure.
image = cv2.imread("images/aircraft.jpg", cv2.IMREAD_GRAYSCALE)
if image is None:
    raise Exception("영상파일 읽기 오류")
minfilter_img = minmax_filter(image, 3, 0)  # 3x3 minimum filter (erosion-like)
maxfilter_img = minmax_filter(image, 3, 1)  # 3x3 maximum filter (dilation-like)
# Show original and both filtered results; wait for a key press before exiting.
cv2.imshow("image", image)
cv2.imshow("minfilter_img", minfilter_img)
cv2.imshow("maxfilter_img", maxfilter_img)
cv2.waitKey(0)
10,707 | 6de70bc95d452f00fe9e24cb9ecec3ebb62d3ab8 | '''
Faça uma função que informe a quantidade de dígitos de um determinado número inteiro informado.
'''
def qtddigitos(n):
    """Return the number of digits of the integer *n*, sign excluded.

    abs() keeps a leading '-' from being counted as a digit
    (the original returned 4 for -123).
    """
    return len(str(abs(n)))
# Read an integer from the user and report how many digits it has.
n = int(input("Informe um número inteiro: "))
quantidade = qtddigitos(n)
# %i inserts the digit count into the (Portuguese) report message.
print("O numero informado possui %i digito(s) " %quantidade)
|
10,708 | 5e70b86a388719b55d457a4525c342e1f0178648 | from pluggs.blogs import load_custom_jinja2_env
from pluggs.blogs.routes.blogs_home_controller import bloghome_controller
def load_plugin(app):
    """Register the blog blueprint on *app* and set up its Jinja2 environment.

    Args:
        app: The (Flask-style) application instance to extend --
            it must expose ``root_path`` and ``register_blueprint``.
    """
    # Debug trace of where the plugin is being mounted.
    print('**** Load Plugin Section ****')
    print('App Root Path:', app.root_path)
    # All blog routes are served under /dashboard/blog.
    app.register_blueprint(bloghome_controller, url_prefix="/dashboard/blog")
    # Give the blueprint its own template environment, keyed by its name.
    load_custom_jinja2_env(app, bloghome_controller.name)
|
10,709 | ec01a2a471c6ddbc000f93d20a6e8b14e9852a4e | """Parsers used in selected tests API."""
from evergreen import EvergreenApi
from starlette.requests import Request
from selectedtests.datasource.mongo_wrapper import MongoWrapper
def get_db(request: Request) -> MongoWrapper:
    """Look up the application-wide database handle.

    :param request: Incoming request whose app state carries the database.
    :return: The configured :class:`MongoWrapper` instance.
    """
    app_state = request.app.state
    return app_state.db
def get_evg(request: Request) -> EvergreenApi:
    """Look up the application-wide Evergreen API client.

    :param request: Incoming request whose app state carries the client.
    :return: The configured :class:`EvergreenApi` instance.
    """
    app_state = request.app.state
    return app_state.evg_api
|
10,710 | 0c784c04317546bc923542bbee8ddac3241f6b45 | /*
A KBase module: kb_SPAdes
A wrapper for the SPAdes assembler with hybrid features supported.
http://bioinf.spbau.ru/spades
Always runs in careful mode.
Runs 3 threads / CPU.
Maximum memory use is set to available memory - 1G.
Autodetection is used for the PHRED quality offset and k-mer sizes.
A coverage cutoff is not specified.
*/
module kb_SPAdes {
    /* A boolean. 0 = false, anything else = true. */
    typedef int bool;

    /* The workspace object name of a PairedEndLibrary file, whether of the
        KBaseAssembly or KBaseFile type.
    */
    typedef string paired_end_lib;

    /* Input parameters for running SPAdes.
        workspace_name - the name of the workspace from which to take input
            and store output.
        output_contigset_name - the name of the output contigset
        read_libraries - a list of Illumina PairedEndLibrary files in FASTQ or BAM format.
        dna_source - (optional) the source of the DNA used for sequencing 'single_cell': DNA
            amplified from a single cell via MDA anything else: Standard
            DNA sample from multiple cells. Default value is None.
        min_contig_length - (optional) integer to filter out contigs with length < min_contig_length
            from the SPAdes output. Default value is 0 implying no filter.
        kmer_sizes - (optional) K-mer sizes, Default values: 33, 55, 77, 99, 127
            (all values must be odd, less than 128 and listed in ascending order)
            In the absence of these values, K values are automatically selected.
        skip_error_correction - (optional) Assembly only (No error correction).
            By default this is disabled.
    */
    typedef structure {
        string workspace_name;
        string output_contigset_name;
        list<paired_end_lib> read_libraries;
        string dna_source;
        int min_contig_length;
        list<int> kmer_sizes;
        bool skip_error_correction;
    } SPAdesParams;

    /* An X/Y/Z style KBase object reference
    */
    typedef string obj_ref;

    /* parameter groups--define attributes for specifying inputs with YAML data set file (advanced)
        The following attributes are available:
        - orientation ("fr", "rf", "ff")
        - type ("paired-end", "mate-pairs", "hq-mate-pairs", "single", "pacbio", "nanopore", "sanger", "trusted-contigs", "untrusted-contigs")
        - interlaced reads (comma-separated list of files with interlaced reads)
        - left reads (comma-separated list of files with left reads)
        - right reads (comma-separated list of files with right reads)
        - single reads (comma-separated list of files with single reads or unpaired reads from paired library)
        - merged reads (comma-separated list of files with merged reads)
    */
    typedef structure {
        obj_ref lib_ref;
        string orientation;
        string lib_type;
    } ReadsParams;

    /* A long-read (or additional-contig) input library: the reads object
        reference plus its type (e.g. "pacbio", "nanopore", "sanger",
        "trusted-contigs", "untrusted-contigs" -- see the list above).
    */
    typedef structure {
        obj_ref long_reads_ref;
        string long_reads_type;
    } LongReadsParams;

    /*------To run HybridSPAdes you need at least one library of the following types:------
        1) Illumina paired-end/high-quality mate-pairs/unpaired reads
        2) IonTorrent paired-end/high-quality mate-pairs/unpaired reads
        3) PacBio CCS reads
        Version 3.15.3 of SPAdes supports paired-end reads, mate-pairs and unpaired reads.
        SPAdes can take as input several paired-end and mate-pair libraries simultaneously.

        workspace_name - the name of the workspace from which to take input
            and store output.
        output_contigset_name - the name of the output contigset
        read_libraries - a list of Illumina or IonTorrent paired-end/high-quality mate-pairs/unpaired reads
        long_reads_libraries - a list of PacBio, Oxford Nanopore Sanger reads and/or additional contigs
        dna_source - the source of the DNA used for sequencing 'single_cell': DNA
            amplified from a single cell via MDA anything else: Standard
            DNA sample from multiple cells. Default value is None.
        pipeline_options - a list of string specifying how the SPAdes pipeline should be run
        kmer_sizes - (optional) K-mer sizes, Default values: 21, 33, 55, 77, 99, 127
            (all values must be odd, less than 128 and listed in ascending order)
            In the absence of these values, K values are automatically selected.
        min_contig_length - integer to filter out contigs with length < min_contig_length
            from the HybridSPAdes output. Default value is 0 implying no filter.
        create_report - whether to build a report object for the run
            (presumably a KBaseReport; undocumented upstream -- confirm).

        @optional dna_source
        @optional pipeline_options
        @optional kmer_sizes
        @optional min_contig_length
    */
    typedef structure {
        string workspace_name;
        string output_contigset_name;
        list<ReadsParams> reads_libraries;
        list<LongReadsParams> long_reads_libraries;
        string dna_source;
        list<string> pipeline_options;
        list<int> kmer_sizes;
        int min_contig_length;
        bool create_report;
    } HybridSPAdesParams;

    /* Output parameters for SPAdes run.
        report_name - the name of the KBaseReport.Report workspace object.
        report_ref - the workspace reference of the report.
    */
    typedef structure {
        string report_name;
        string report_ref;
    } SPAdesOutput;

    /* Run SPAdes on paired end libraries */
    funcdef run_SPAdes(SPAdesParams params) returns(SPAdesOutput output)
        authentication required;

    /* Run HybridSPAdes on paired end libraries with PacBio CLR and Oxford Nanopore reads*/
    funcdef run_HybridSPAdes(HybridSPAdesParams params) returns(SPAdesOutput output)
        authentication required;

    /* Run SPAdes on paired end libraries for metagenomes */
    funcdef run_metaSPAdes(SPAdesParams params) returns(SPAdesOutput output)
        authentication required;
};
10,711 | e1a268fde8a6042582ca64260fad36056506d854 |
import pytorch_lightning as pl
import torch
class BaseModel(pl.LightningModule):
    """Shared Lightning wrapper for seq2seq summarization models.

    Subclasses are expected to set ``self.model`` to the underlying
    HuggingFace-style model and override :meth:`forward`; every *_step here
    assumes ``self(...)`` returns the loss for a
    (src_ids, decoder_ids, mask, label_ids) batch.

    NOTE(review): ``validation_epoch_end``/``test_epoch_end`` are the
    pre-2.0 Lightning hook names -- confirm the pinned pytorch_lightning
    version before upgrading.
    """

    def __init__(self, args):
        super().__init__()
        self.args = args
        self.learning_rate = args.learning_rate

    def forward(self):
        # Intentionally a stub: concrete subclasses implement the real forward.
        pass

    def _step_loss(self, batch):
        """Unpack a batch and return the model loss for it."""
        src_ids, decoder_ids, mask, label_ids = batch
        return self(input_ids=src_ids, attention_mask=mask,
                    decoder_input_ids=decoder_ids, labels=label_ids)

    def training_step(self, batch, batch_idx):
        loss = self._step_loss(batch)
        self.log('train_loss', loss, on_step=True, on_epoch=True, prog_bar=True)
        return loss

    def validation_step(self, batch, batch_idx):
        loss = self._step_loss(batch)
        # sync_dist aggregates the metric across processes in DDP runs.
        self.log('validation_loss', loss, on_step=True, on_epoch=True, sync_dist=True)
        return loss

    def validation_epoch_end(self, outputs):
        avg_loss = torch.stack([x for x in outputs]).mean()
        self.log('val_loss_each_epoch', avg_loss, on_epoch=True, prog_bar=True)

    def test_step(self, batch, batch_idx):
        return self._step_loss(batch)

    def test_epoch_end(self, outputs):
        avg_loss = torch.stack([x for x in outputs]).mean()
        self.log('test_loss', avg_loss, on_epoch=True, prog_bar=True)

    def _collect_img_params(self, encoder):
        """Gather the image-fusion parameters of *encoder* for the configured
        cross_attn_type (returns [] for unknown types, matching the original).
        """
        t = self.args.cross_attn_type
        # NOTE: types 0 and 1 deliberately share the same layer set.
        if t in (0, 1):
            groups = [encoder._linear_1.parameters(),
                      encoder._linear_2.parameters()]
        elif t == 2:
            groups = [encoder._linear_1.parameters()]
        elif t == 3:
            groups = [encoder._linear_1.parameters(),
                      encoder._linear_2.parameters(),
                      encoder._linear_3.parameters()]
        elif t == 4:
            groups = [encoder._linear_1.parameters(),
                      encoder._linear_2.parameters(),
                      encoder._linear_3.parameters(),
                      encoder._linear_4.parameters(),
                      encoder._multi_head_attn.parameters()]
        elif t == 5:
            groups = [encoder._linear_1.parameters(),
                      encoder._linear_2.parameters(),
                      encoder._linear_3.parameters(),
                      encoder._multi_head_attn.parameters()]
        else:
            groups = []
        if self.args.use_forget_gate:
            groups.append(encoder.fg.parameters())
        # Flatten the per-module iterators into one list of tensors.
        return [param for params in groups for param in params]

    def _non_img_params(self, img_related_para):
        """Return the model parameters NOT present in *img_related_para*.

        NOTE(review): membership is tested by VALUE (shape + torch.equal),
        mirroring the original code; identity (``p is q``) is probably the
        intent and would be cheaper -- confirm before changing.
        """
        backbone = []
        for p in self.model.parameters():
            is_img = any(p.shape == q.shape and torch.equal(p, q)
                         for q in img_related_para)
            if not is_img:
                backbone.append(p)
        return backbone

    def configure_optimizers(self):
        """Build an Adam optimizer (optionally with a separate learning rate
        for the image-fusion layers) plus a StepLR scheduler.
        """
        # The bart/t5 variants only differ in where the encoder lives; the
        # parameter-group logic (previously duplicated) is shared below.
        encoder = None
        if self.args.img_lr_factor != 1:
            if self.args.model == 'multi_modal_bart':
                encoder = self.model.model.encoder
            elif self.args.model == 'multi_modal_t5':
                encoder = self.model.encoder
        if encoder is not None:
            img_related_para = self._collect_img_params(encoder)
            base_para = self._non_img_params(img_related_para)
            optimizer = torch.optim.Adam([
                {'params': base_para},
                {'params': img_related_para,
                 'lr': self.learning_rate * self.args.img_lr_factor},
            ], lr=self.learning_rate)
            if self.args.model == 'multi_modal_t5':
                # Was printed 7 times in the original; once is enough.
                print('LEARNING RATE SET SUCCESSFUL')
        else:
            optimizer = torch.optim.Adam(self.model.parameters(),
                                         lr=self.learning_rate)
        scheduler = torch.optim.lr_scheduler.StepLR(
            optimizer,
            step_size=self.args.scheduler_lambda1,
            gamma=self.args.scheduler_lambda2)
        return [optimizer], [scheduler]
10,712 | e75b6266fbd45c97110af2098d22c34358a6c46d | #Problem1 test for the survival model
# A cohort of 573 patients followed over 5 years.
import SurvivalModel as SurvivalCls

MORTALITY_PROB = 0.1    # annual probability of mortality
TIME_STEPS = 100        # simulation length
SIM_POP_SIZE = 573     # population size of the simulated cohort
ALPHA = 0.05        # significance level
NUM_SIM_COHORTS = 1000  # number of cohorts for the multi-cohort simulation

# create a cohort of patients
myCohort = SurvivalCls.Cohort(id=571, pop_size=SIM_POP_SIZE, mortality_prob=MORTALITY_PROB)
# simulate the cohort
cohortOutcome = myCohort.simulate(TIME_STEPS)
print("The five year survival percentage if the annual mortality probability is",MORTALITY_PROB,":", myCohort.get_5year_survival())

# create multiple cohorts
multiCohort = SurvivalCls.MultiCohort(
    ids=range(NUM_SIM_COHORTS),   # [0, 1, 2 ..., NUM_SIM_COHORTS-1]
    pop_sizes=[SIM_POP_SIZE] * NUM_SIM_COHORTS,  # [REAL_POP_SIZE, REAL_POP_SIZE, ..., REAL_POP_SIZE]
    mortality_probs=[MORTALITY_PROB]*NUM_SIM_COHORTS  # [p, p, ....]
)
# simulate all cohorts
multiCohort.simulate(TIME_STEPS)
print("Multicohort 1",multiCohort.get_cohort_5yearSurvivalPct(1))

# Problem 2: Likelihood Assumption: if the probability of 5-year survival is q,
# "the number of participants that survived beyond 5 years in a cohort of N
# participants" follows a binomial distribution Bin(N, q).
print("Problem2: \n"
      "the number of participants that survived beyond 5 years in a cohort of 𝑁 participants follows the binomial distibution: Bin(N,q)")

# Problem 3: Likelihood Calculation: probability of observing k=400 survivors
# out of n=573 participants when the per-person survival probability is p=0.5.
from scipy.stats import binom
k,n,p=400,573,0.5
# Bug fix: use the public binom.pmf; the underscored binom._pmf is a SciPy
# internal that skips argument validation and is not a stable API.
print("Problem3: \n"
      "the likelihood that a clinical study reports 400 of 573 participants survived at the end of the 5-year study \n"
      "period if 50% of the patients in our simulated cohort survived beyond 5 years",binom.pmf(k, n, p))
10,713 | aa023bf53cffaa1c39e691b2904273edda092b17 | import tensorflow as tf
# TF1 graph-mode demo: build a constant graph computing d = a*c + b = 5*10 + 13.
c = tf.constant(10.0, name="c", dtype=tf.float32)
a = tf.constant(5.0, name="a", dtype=tf.float32)
b = tf.constant(13.0, name="b", dtype=tf.float32)
d = tf.Variable(tf.add(tf.multiply(a, c), b))
# Variables must be explicitly initialised before use in TF1 graph mode.
init = tf.global_variables_initializer()
with tf.Session() as session:
    # No summaries were registered, so merge_all() returns None here; the
    # FileWriter still dumps the graph so it can be inspected in TensorBoard.
    merged = tf.summary.merge_all()
    writer = tf.summary.FileWriter("logs", session.graph)
    session.run(init)
    print(session.run(d))
# Second graph: multiply two values fed in at run time via placeholders.
x = tf.placeholder(tf.float32, name="x")
y = tf.placeholder(tf.float32, name="y")
z = tf.multiply(x, y, name="z")
with tf.Session() as session:
    merged = tf.summary.merge_all()
    writer = tf.summary.FileWriter("logs", session.graph)
    # feed_dict supplies the placeholder values; prints 2.1 * 3.0.
    print(session.run(z, feed_dict={x: 2.1, y: 3.0}))
# (deeplearning) C:\Users\gokul>tensorboard --logdir C:\Users\gokul\PycharmProjects\GpuTry\tensorflow\logs
|
10,714 | 44fbe170559028e05cc1369f19e9fe1e8bd9dbdc | # Tracy Otieno
# homework_1.2.py
# May 4, 2017
# Prints a tic tac toe board
def draw():
    """Print a five-row tic-tac-toe board to stdout.

    Even rows carry the vertical bars, odd rows the horizontal rules.
    """
    rows = []
    for row_index in range(5):
        if row_index % 2 == 0:
            rows.append("| " * 4)
        else:
            rows.append(" --- " * 3)
    # join() puts a newline between rows; the trailing "\n" matches the
    # per-row newline the original accumulated into its board string.
    print("\n".join(rows) + "\n")


draw()
|
10,715 | 70dbfcb29fcab1cd7c0291966f7d3bdfc57b2681 | from __future__ import print_function
import numpy as np
import tensorflow as tf
import argparse
import os
import sys
from PIL import Image
from skimage.io import imread, imshow
from skimage.transform import resize
import matplotlib.pyplot as plt
def get_image_size(data):
    """Return (height, width) of the first image in <dataset_dir>/<data>/images.

    Only the first file is inspected — assumes every image in one sample
    directory shares the same size (TODO confirm).
    """
    image_path = os.path.join(FLAGS.dataset_dir, data, 'images')
    image = os.listdir(image_path)
    img = Image.open(os.path.join(image_path, image[0]))
    return img.height, img.width
def main(_):
    """Merge per-nucleus binary masks into one ground-truth mask per sample.

    For every sample directory, takes the element-wise maximum over all mask
    images in its `masks` folder and writes the result to
    <ground_truth_dir>/<sample>/gt_mask/<sample>.png.
    """
    filelist = sorted(os.listdir(FLAGS.dataset_dir))
    for data in filelist:
        height, width = get_image_size(data)
        mask_path = os.path.join(FLAGS.dataset_dir, data, 'masks')
        mask_images = sorted(os.listdir(mask_path))
        # Accumulator for the union of all individual masks.
        mask = np.zeros((height, width, 1), dtype=np.bool)
        for mask_file in mask_images:
            _mask = imread(os.path.join(mask_path, mask_file))
            _mask = np.expand_dims(_mask, axis=-1)
            # Union via element-wise maximum.
            mask = np.maximum(mask, _mask)
        gt_path = os.path.join(FLAGS.ground_truth_dir, data, 'gt_mask')
        if not os.path.exists(gt_path):
            os.makedirs(gt_path)
        # imshow(np.squeeze(mask))
        # plt.show()
        mask = np.squeeze(mask)
        img = Image.fromarray(mask)
        img.save(os.path.join(gt_path, data + '.png'))
        # img.show(title=X)
if __name__ == '__main__':
    # Command-line flags; unknown args are forwarded to tf.app.run untouched.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--dataset_dir',
        default='/home/ace19/dl-data/nucleus_detection/stage1_train',
        type=str,
        help="Data directory")
    parser.add_argument(
        '--ground_truth_dir',
        default='/home/ace19/dl-data/nucleus_detection/stage1_train',
        type=str,
        help="ground_truth data directory")
    FLAGS, unparsed = parser.parse_known_args()
    # tf.app.run parses remaining flags and invokes main(argv).
    tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
10,716 | 476a3e466c154f86f8b82f38909f18e86d72daed | from numpy import linspace, array, matrix, sort, diag, linalg as LA
from misc import tridiag_toeplitz, get_diags
from jacobi import jacobi_rotalg
import matplotlib.pyplot as plt
N = 100                                # number of interior grid points
r_0, r_max = 0.0, 10.0                 # radial domain endpoints
r = linspace(r_0, r_max, N + 2)[1:-1]  # interior points (endpoints dropped)
h = (r_max - r_0)/(N + 1)              # grid spacing
indices = linspace(1.0, 1.0*N, N)      # x-axis values (eigenvalue index)
# Discretized operator: second-difference tridiagonal Toeplitz matrix scaled
# by 1/h^2 plus r^2 on the diagonal (looks like a harmonic-oscillator-type
# potential — TODO confirm against the assignment text).
H = matrix(tridiag_toeplitz(N, array([-1.0, 2.0, -1.0]))/h**2 + diag(r**2))
# Diagonalize with the custom Jacobi rotation algorithm; D holds eigenvalues.
D, U = jacobi_rotalg(H, err=10**(-5), max_iter=N**3)
eigs = get_diags(D)
plt.plot(indices, eigs)
plt.show()
|
10,717 | 32b5236b15a9a4726f27ee4eb83f193b87314647 | # Copyright (c) 2019 Gunakar Pvt Ltd
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted (subject to the limitations in the disclaimer
# below) provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Gunakar Pvt Ltd/Plezmo nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
# * This software must only be used with Plezmo elements manufactured by
# Gunakar Pvt Ltd.
# * Any software provided in binary or object form under this license must not be
# reverse engineered, decompiled, modified and/or disassembled.
# NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
# THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Tests functionality of display element.
# Test needs 1 display element
import time
import traceback
from plezmo import *
from plezmo.utils.logger import Logger
from plezmo.elements.element_types import PlezmoElementType
from plezmo.elements.plezmo_display import *
from plezmo.elements.plezmo_element import *
import utils
logger = Logger()
def globalExceptionHandler(e):
    """Catch-all handler registered with the Plezmo API; only logs the error."""
    logger.info("###### Got exception {}".format(e))
# Init bluetooth communication
def init(display_name):
    """Connect to the named Plezmo display element over Bluetooth.

    Returns True on success; on any failure disconnects whatever was already
    connected and returns False.
    """
    # Register global exception handler
    registerExceptionHandler(globalExceptionHandler)
    # Elements to connect
    elementList = [{"name" : display_name, "type": PlezmoElementType.DISPLAY}]
    connectedElements = []
    try:
        # connect to elements one by one
        for e in elementList:
            plezmoApi.connect(e["name"], e["type"])
            # keep track of connected elements
            connectedElements.append(e["name"])
        return True
    except Exception as e:
        # Disconnect and stop program if connection to element fails
        logger.error("Failed to connect to element, ex {}".format(e))
        #traceback.print_exc()
        # Disconnect already connected elements
        for e in connectedElements:
            plezmoApi.disconnect(e["name"])
        return False
# Main logic of the program
def main(display_name):
    """Exercise the Plezmo display: image, text, font/colour and background.

    Always disconnects the element and closes the API in the finally block.
    """
    # Init bluetooth communication
    success = init(display_name)
    if success != True:
        # Bluetooth communication cannot be enabled, quit.
        plezmoApi.close()
        logger.error("Could not connect to all the required elements")
        return
    # Register event handlers and call methods of display sensor
    try:
        # show INBOX image on display
        logger.info("Showing INBOX image")
        Display.showImage(display_name, DisplayImage.INBOX)
        time.sleep(5)
        # clear display
        logger.info("Clearing display")
        Display.clearDisplay(display_name)
        time.sleep(2)
        # Show text on display
        logger.info("Showing text on display line 2 and alignment center")
        Display.showText(display_name, DisplayLine.TWO, TextAlignment.CENTER, "Hola!")
        time.sleep(5)
        # clear display
        logger.info("Clearing display")
        Display.clearDisplay(display_name)
        time.sleep(2)
        # set font size, text color
        Display.setFontSize(display_name, FontSize.MEDIUM)
        Display.setTextColor(display_name, DisplayBackground.RED)
        logger.info("Set font size to MEDIUM, text color to RED")
        Display.showText(display_name, DisplayLine.TWO, TextAlignment.CENTER, "RED!")
        time.sleep(5)
        # Set display background color
        logger.info("Painting background color to BLUE")
        Display.paintBackgroundColor(display_name, DisplayBackground.BLUE)
        time.sleep(5)
    except Exception as e:
        logger.error("Failed to run display commands {}, ex {}".format(display_name, e))
        #traceback.print_exc()
    finally:
        # Program completed, disconnect elements and quit
        plezmoApi.disconnect(display_name)
        time.sleep(1)
        plezmoApi.close()
# Program starts here
if __name__ == "__main__":
    # The element name is taken from the command line (see utils).
    display_name = utils.extract_element_name()
    if display_name == None:
        logger.error("Display element name is mandatory, e.g. # python display_example.py Display")
    else:
        main(display_name)
        quit()
10,718 | dd4734b214e2fd67cef7ff262c539f8b6f9f6b3c | import serial
import time
import psutil
## Simple CPU usage monitor.
# Open the Arduino's serial port; the short timeout keeps reads non-blocking.
ser = serial.Serial(port='/dev/ttyACM0', baudrate=345600, timeout=.1)
# Give the board time to reset after the port opens.
time.sleep(3);
i=0
while 1:
    # Current CPU utilisation in percent.
    x = psutil.cpu_percent()
    # Send an "<R,G,B>" colour triplet depending on load: green / yellow / red.
    # NOTE(review): loads in the 30-50% band send nothing, so the LED keeps
    # its previous colour — confirm whether that gap is intentional.
    if x<30:
        ser.write(bytes('<0,255,0>','utf8'))
    if x>50 and x<70:
        ser.write(bytes('<255,255,0>','utf8'))
    if x>70:
        ser.write(bytes('<255,0,0>','utf8'))
    time.sleep(0.1)
# NOTE(review): unreachable — the loop above never exits; `i` is also unused.
ser.close()
10,719 | 50fd3d86d1e2499901fe9f5e031c1814fdb68890 | import networkx as nx
import Queue
import pandas as pd
import os
from pandas import Series
from sklearn.cross_validation import train_test_split
def init(queue,G,alpha):
    """Seed graph G with known fraud labels and enqueue the labelled nodes.

    Splits graph_pre.csv into train/test with `alpha` as the train fraction,
    attaches the training labels (> 0) as a `belief` node attribute, and puts
    each seeded node on `queue` for propagation.
    NOTE(review): Python 2 code (`dict.has_key`, `sklearn.cross_validation`).
    """
    df = pd.read_csv('graph_pre.csv')
    no_train, no_test, label_train, label_test = train_test_split(df.no, df.label, test_size=1 - alpha)
    fraud_dict = Series(label_train, index=no_train).to_dict()
    for node in G.nodes:
        if fraud_dict.has_key(node) and fraud_dict[node]>0:
            G.add_nodes_from([node], belief=fraud_dict[node])
            queue.put(node)
    return [no_train, no_test, label_train, label_test]
def broadcast(filename):
    """Propagate fraud belief through a call graph read from an edge list.

    Each dequeued node passes belief/alpha to its successors; nodes seen for
    the first time are enqueued, already-seen nodes accumulate belief.
    Finally evaluates against the held-out test labels via printBelief.
    NOTE(review): Python 2 code (`Queue` module, `Series.iteritems`).
    """
    G = nx.DiGraph() # or DiGraph, MultiGraph, MultiDiGraph, etc
    queue = Queue.Queue()
    with open(filename,'r') as f:
        for line in f:
            # One "src dst" edge per line.
            item = line.split()
            G.add_edge(int(item[0]),int(item[1]))
    # pr = nx.pagerank(G, alpha=0.85)
    [no_train, no_test, label_train, label_test]= init(queue,G,0.8)
    # Damping divisor: each hop halves the transmitted belief.
    alpha = 2.0
    while not queue.empty():
        now = queue.get()
        now_b = G.nodes[now]['belief']
        for nbr in G.successors(now):
            if 'belief' not in G.nodes[nbr]:
                G.add_nodes_from([nbr], belief=now_b*1.0/alpha)
                queue.put(nbr)
            else:
                G.add_nodes_from([nbr], belief= G.nodes[nbr]['belief'] + now_b * 1.0 / alpha)
    fraud_dict = {}
    for (index,value) in label_test.iteritems():
        fraud_dict[no_test[index]] = value
    printBelief(G,fraud_dict,"res_test.csv")
def printBelief(G,test_dict,output="res_test.csv"):
    """Write per-node beliefs for the test nodes to CSV and print an error.

    The printed 'acc' value is the mean relative deviation |belief-label|/label
    over test nodes that received a belief (0 is used for unreached nodes).
    """
    node_array = [0]*len(test_dict)
    belief = [0]*len(test_dict)
    acc = 0
    i = 0
    for (node,label) in test_dict.items():
        node_array[i] = (node)
        if 'belief' in G.nodes[node]:
            v = G.nodes[node]['belief']
            belief[i] = (v)
            acc = acc + abs(v - label)/label
        else:
            # Node never reached by propagation: belief defaults to 0.
            belief[i] = (0)
        i = i+1
    print('acc'+str(acc*1.0/len(test_dict)))
    save = pd.DataFrame({'no': node_array, 'label':test_dict.values(),'belief': belief})
    save[['no','label','belief']].to_csv(output)
if __name__ == "__main__":
    # Edge-list input: one "src dst" integer pair per line.
    broadcast('../zgw_data/all_callrec_graph_num')
|
10,720 | 3578b21330290c6e2774e3a7114ad55dc48f80c2 | """:mod:`soran.datetime` --- datetime
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Use this module instead :mod:`datetime` to confirm all time related values are
in UTC timezone. because :mod:`datetime` return a naive datetime which
dosen't contain a timezone by default.
.. sourcecode:: python
>>> from soran.datetime import datetime, now
>>> datetime(2015, 1, 1, 1)
datetime.datetime(2015, 1, 1, 1, 0,
tzinfo=<iso8601.iso8601.Utc object at 0x10d44eef0>)
>>> now()
datetime.datetime(2015, 5, 31, 11, 54, 44, 930324,
tzinfo=<iso8601.iso8601.Utc object at 0x10d44eef0>)
"""
from datetime import date, datetime as py_datetime
from functools import partial, singledispatch
from annotation.typed import typechecked
from iso8601 import parse_date
from iso8601.iso8601 import UTC
#: Replacement of :class:`~datetime.datetime` that always attaches UTC.
datetime = partial(py_datetime, tzinfo=UTC)
@typechecked
def now() -> py_datetime:
    """Return the current moment as a timezone-aware datetime in UTC.

    :return: aware "now" with ``tzinfo`` set to UTC.
    """
    return py_datetime.now(tz=UTC)
@singledispatch
def parse(t):
    """Convert datetimes/dates to ISO strings and strings to datetimes.

    Fallback overload: values of unregistered types are returned unchanged;
    the real conversions live in the ``parse.register`` overloads below.

    :param t: value to convert.
    :return: the converted value.
    """
    return t
@parse.register(date)
@parse.register(py_datetime)
def _(t) -> str:
    """Serialize an aware datetime to an ISO-8601 string in UTC.

    Raises ValueError for naive values.
    NOTE(review): plain ``date`` objects have no ``tzinfo``/``astimezone``,
    so the ``date`` registration would raise AttributeError — confirm intent.
    """
    if t.tzinfo is None:
        raise ValueError("Can't parse naive datetime.")
    return t.astimezone(tz=UTC).isoformat()
@parse.register(str)
def _(t) -> date:
    """Parse an ISO-8601 string via iso8601.parse_date.

    NOTE(review): the annotation says ``date`` but parse_date returns a
    datetime; also confirm the ``datetime=UTC`` keyword against the installed
    iso8601 version (its documented parameter is ``default_timezone``).
    """
    return parse_date(t, datetime=UTC)
|
10,721 | fe3b3b59cd693c0999ed21a4ce14526ae5bf4820 | from random import randint
numeros = []


def somaPar(numeros):
    """Print the sum of the even values found in *numeros*."""
    total_pares = sum(valor for valor in numeros if valor % 2 == 0)
    print(total_pares)
def sorteia():
    """Append five random integers in [1, 10] to the global `numeros` and show it."""
    for _ in range(5):
        numeros.append(randint(1, 10))
    print(numeros)


sorteia()
somaPar(numeros)
10,722 | 186ea35f99f0310079d23fd7e174233f58cf9f26 | import sys
import os
from add_posts import AddPosts
class UpdatePosts():
    """Regenerate blog post HTML pages from plain-text sources.

    Scans the configured folders for .txt files, extracts title/date/body,
    renders them into the blog template and hands each generated page to
    AddPosts.
    """

    def __init__(self, filename=None):
        # `filename` is accepted for backward compatibility but unused.
        self.template = 'blog_temp.html'
        self.folders = ["text reviews/"]

    def find_posts(self):
        """Walk every configured folder and process each .txt post source."""
        for folder in self.folders:
            for filename in os.listdir(folder):
                if filename.endswith(".txt"):
                    self.extract_post_data(folder + filename)

    def extract_post_data(self, filepath):
        """Read one post source: line 0 = title, line 2 = date, lines 4+ = body."""
        # Bug fix: the original never closed this handle; `with` releases it.
        with open(filepath, "r") as read:
            data = read.readlines()
        title = data[0]
        date = data[2]
        paragraphs = data[4:]
        post_data = (title, date, paragraphs)
        self.write_posts(filepath, post_data)

    def write_posts(self, filepath, post_data):
        """Render one post into HTML, write it and register it with AddPosts."""
        title, date, paragraphs = post_data
        # Derive the bare page name from the source path.
        filename = filepath.split("/")[-1]
        filename = filename.split(".")[0]
        with open(self.template, 'r') as read:
            data = read.read()
        data = data.replace("REPLACE IMAGE REF", filename)
        data = data.replace("REPLACE TWITTER IMAGE", filename)
        data = data.replace("FIND AND REPLACE PATH", filename)
        data = data.replace("FIND AND REPLACE TITLE", title)
        data = data.replace("FIND AND REPLACE TWITTER TITLE", title)
        data = data.replace("FIND AND REPLACE DATE", date)
        t = []
        if len(paragraphs) > 0:
            for p in paragraphs:
                print(p)
                if p != '':
                    # Simple line-prefix markup: image*<ref>, h2:/h3:/h4: headings,
                    # everything else becomes a paragraph.
                    if p.startswith("image*"):
                        image_ref = p.split("*")[1]
                        t.append("""<img src="%s" alt="Image" class="img-fluid">""" % image_ref)
                    elif p.startswith("h2:"):
                        title_text = p.split("h2:")[1]
                        t.append("""<h2>%s</h2>""" % title_text)
                    elif p.startswith("h3:"):
                        title_text = p.split("h3:")[1]
                        t.append("""<h3>%s</h3>""" % title_text)
                    elif p.startswith("h4:"):
                        title_text = p.split("h4:")[1]
                        t.append("""<h4>%s</h4>""" % title_text)
                    else:
                        t.append("<p>%s</p>" % p)
        data = data.replace("FIND AND REPLACE BODY", "\n".join(t))
        self.currFile = filename
        if not self.currFile.endswith(".html"):
            self.currFile += ".html"
        # Bug fix: the original never closed (or flushed) the output file
        # before handing it to AddPosts, which could then read a partial file.
        with open(self.currFile, 'w') as write:
            write.write(data)
        AddPosts(self.currFile)
def main():
    """Entry point: rebuild every post page found in the source folders."""
    update_posts = UpdatePosts()
    update_posts.find_posts()

if __name__=='__main__':
    sys.exit(main())
|
10,723 | 1354f91b27216c96d4163cae1fa836728e39483a | import re
def show_me(name):
    """Validate a name: only ASCII letters and well-formed hyphens allowed.

    Returns False for: any space, doubled hyphens, a hyphen followed by a
    lowercase letter, a lowercase letter directly followed by an uppercase
    one, a leading or trailing hyphen, or any character outside [a-zA-Z-].
    Returns True otherwise.
    """
    if ' ' in name:
        return False
    elif re.search('--', name) is not None:
        return False
    elif re.search('-[a-z]', name) is not None:
        return False
    elif re.search('[a-z][A-Z]', name) is not None:
        return False
    elif re.search('\A-', name) is not None:
        return False
    elif re.search('-$', name) is not None:
        return False
    # Bug fix: the original called re.search with a one-element list and no
    # subject string, raising TypeError whenever this branch was reached.
    # The intent is to reject any character outside letters and hyphen.
    elif re.search(r'[^a-zA-Z\-]', name) is not None:
        return False
    return True


print (show_me("Francis"))
print (show_me("Jean-Eluard"))
print (show_me("Le Mec"))
print (show_me("Bernard-Henry-Levy"))
print (show_me("Meme Gertrude"))
print (show_me("A-a-a-a----a-a"))
print (show_me("Z-------------"))
print (show_me("Jean-luc"))
print (show_me("Jean--Luc"))
print (show_me("JeanLucPicard"))
print (show_me("-Jean-Luc"))
print (show_me("Jean-Luc-Picard-"))
10,724 | 9709ca436eb39afd1a69023a90a116bff6063542 |
#---------------------------PROBLEM STATEMENT 2-----------------------------------
import re
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
def canditate_phrase(line):
    """Split *line* into candidate keyword phrases.

    A phrase is a maximal run of consecutive tokens that are not English
    stopwords; stopwords act as phrase delimiters.
    """
    phrases = []
    words = nltk.word_tokenize(line)  # split the text into words
    # Hoisted out of the loop: stopwords.words() rebuilds the list on every
    # call, and set membership is O(1) versus O(n) on the list.
    stopword_set = set(stopwords.words('english'))
    phrase = ''
    for word in words:
        if word not in stopword_set:
            phrase += word + ' '
        else:
            if phrase != '':
                phrases.append(phrase.strip())
            phrase = ''
    # Bug fix: the original silently dropped the final phrase whenever the
    # token stream did not end with a stopword.
    if phrase != '':
        phrases.append(phrase.strip())
    return phrases
# # response
f = open('textlist2.txt','r')
file=f.read()
# removing punctuation
text=re.sub(r'[^\w\s]','',str(file))
sentence= nltk.sent_tokenize(text.lower()) #split the text in sentences
f1=open('keywords.txt','r')
keywords=f1.read()
keywords=keywords.split(",")
# Print every keyword that appears verbatim among the candidate phrases.
# NOTE(review): canditate_phrase() is recomputed for every keyword even though
# its input never changes — it could be hoisted out of the loop; neither file
# handle is closed.
for k in keywords:
    score=canditate_phrase(str(sentence))
    for i in score:
        if i==k:
            print(k)
10,725 | 5b03691ea0a85b38aff77be824b9960553de1888 | from server import SimpleServer
from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
import time
# Listen on all interfaces, port 8000; SimpleServer handles each connection.
server = SimpleWebSocketServer('0.0.0.0', 8000, SimpleServer)
start_time = time.time()  # NOTE(review): recorded but never used
while True:
    # Handle pending socket events once per iteration, forever.
    server.serveonce()
|
10,726 | a674c893d8e3d16b35e60b54a01a017d3d0100ce | """
Lab 2 for Programming for Beginners
My First Flowchart
Julia Garant
Jan 23 2020
"""
favNumber = input("What's your favourite number?")
print(favNumber + " is a great number!")
|
10,727 | e29a8ed9ea23a5f835ab2a79a9e1254cbbfd07c6 | import torch
import torch.nn as nn
import torch.nn.functional as F
#
class MyMnistNet(nn.Module):
    """Four-layer MLP: 1 -> 30 -> 20 -> 5 -> 1 with ReLU between layers.

    NOTE: despite the name, the 1-feature input does not match MNIST images —
    presumably a scalar-regression experiment; confirm against the caller.
    """

    def __init__(self):
        super().__init__()
        self.linear1 = nn.Linear(1, 30)
        # Bug fix: the original assigned self.relu four times; only the last
        # instance survived. ReLU is stateless, so one shared module suffices.
        self.relu = nn.ReLU()
        self.linear2 = nn.Linear(30, 20)
        self.linear3 = nn.Linear(20, 5)
        self.linear4 = nn.Linear(5, 1)

    def forward(self, input):
        """Map a (batch, 1) tensor to a (batch, 1) tensor.

        ReLU follows each hidden layer; the output layer is left linear.
        """
        y = self.relu(self.linear1(input))
        y = self.relu(self.linear2(y))
        y = self.relu(self.linear3(y))
        return self.linear4(y)
10,728 | fbed223f876b21eabe85555b344daa59a713f525 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/11/6 19:10
# @File : leetCode_521.py
'''
If a and b have different lengths, return the longer length.
If the lengths are equal (identical strings), return -1.
Note: in Python, a bool-and-value logical expression evaluates to the
second operand when the first is truthy, not to a boolean.
'''
class Solution(object):
    def findLUSlength(self, a, b):
        """
        Return the length of the longest uncommon subsequence of a and b.

        If the strings differ, the longer string itself is such a
        subsequence, so the answer is max(len(a), len(b)); identical
        strings have none, so return -1.

        :type a: str
        :type b: str
        :rtype: int
        """
        # Bug fix: the original returned `(max(...) and a != b) or -1`,
        # which evaluates to the boolean `a != b` (True) instead of the
        # length, because `and` yields its second operand when the first
        # is truthy.
        return max(len(a), len(b)) if a != b else -1
# Quick manual check plus a demonstration that `and` returns its second
# operand when the first is truthy (so operand order matters).
s = Solution()
print(s.findLUSlength("aba", "cbc"))
print( True and 3)
print( 3 and True)
10,729 | 990afe861b44a7b974e04f5e26c6764256fe80c3 | class Solution(object):
def reconstructQueue(self, people):
"""
:type people: List[List[int]]
:rtype: List[List[int]]
"""
if not people: return []
people = sorted(people, key=lambda (h, k): (-h, k))
print people
res = []
for p in people:
res.insert(p[1], p)
return res
|
10,730 | 80325839d4dafad53376d3cb2674fa7deaae74e8 | import abc
"""
定义抽象类,实际类应在simulation_filters, simulation_algos, charge_algos分别定义
"""
class DataFilter(metaclass=abc.ABCMeta):
    """Abstract base class for data filters."""

    # Fix: abc.abstractclassmethod is deprecated (since Python 3.3) and these
    # methods take `self`, so plain @abc.abstractmethod is the correct form.
    @abc.abstractmethod
    def __init__(self):
        pass

    @abc.abstractmethod
    def set_filter(self, filter):
        """Install the filter specification to apply."""
        pass

    @abc.abstractmethod
    def set_records(self, records):
        """Provide the raw records to be filtered."""
        pass

    @abc.abstractmethod
    def _process_records(self):
        """Apply the filter to the stored records (internal)."""
        pass

    @abc.abstractmethod
    def get_records(self):
        """Return the filtered records."""
        pass
class ChargeAlgo(metaclass=abc.ABCMeta):
    """Abstract charging algorithm.

    Implementations must provide data initialisation, incremental updates and
    result retrieval; expected call pattern:
    update -> get_result -> update -> get_result -> ...
    """

    # Fix: abc.abstractclassmethod is deprecated and these are instance
    # methods, so plain @abc.abstractmethod is the correct decorator.
    @abc.abstractmethod
    def __init__(self):
        # Declares instance attributes only; no meaningful initial values here.
        pass

    @abc.abstractmethod
    def update(self, data_dict):
        """Feed one batch of data into the algorithm."""
        pass

    @abc.abstractmethod
    def get_result(self):
        """Return the current result after the latest update."""
        pass
class SimulationAlgo(metaclass=abc.ABCMeta):
    """Abstract simulation algorithm: consumes a data set, produces a result."""

    # Fix: abc.abstractclassmethod is deprecated and these are instance
    # methods, so plain @abc.abstractmethod is the correct decorator.
    @abc.abstractmethod
    def set_data_set(self, data_set):
        """Provide the full data set the simulation works over."""
        pass

    @abc.abstractmethod
    def set_data(self, data):
        """Provide a single datum / current state for the simulation."""
        pass

    @abc.abstractmethod
    def get_result(self):
        """Return the simulation result."""
        pass
class SimulationSandBox(metaclass=abc.ABCMeta):
    """Abstract sandbox that steps charging/simulation algorithms over data."""

    # Fix: abc.abstractclassmethod is deprecated and these are instance
    # methods, so plain @abc.abstractmethod is the correct decorator.
    @abc.abstractmethod
    def set_init_data(self, data_dict):
        """Seed the sandbox with its initial data."""
        pass

    @abc.abstractmethod
    def add_charge_algo(self, algo_obj):
        """Register a ChargeAlgo instance to run inside the sandbox."""
        pass

    @abc.abstractmethod
    def add_simu_algo(self, algo_obj):
        """Register a SimulationAlgo instance to run inside the sandbox."""
        pass

    @abc.abstractmethod
    def one_step(self):
        """Advance the simulation by a single step."""
        pass

    @abc.abstractmethod
    def get_simu_data(self):
        """Return the simulation data accumulated so far."""
        pass
10,731 | 0e4193812b6996f0104ae879b6dd9f1c1cb8a001 | N,A,B = map(int,input().split())
# N, A, B (count, multiplier, operation budget) are read just above this chunk.
a = list(map(int,input().split()))
a.sort()
mo = 10**9 + 7  # output modulus
loop = 0        # number of full "multiply every element once" rounds
# A == 1: multiplying changes nothing; print the sorted values as-is.
if A == 1:
    for i in range(N):
        print(a[i] % mo)
    exit()
a_max = a[-1]
# Greedily multiply the current minimum by A. Once even the minimum would
# reach the original maximum, the remaining operations spread evenly:
# B // N full rounds for everyone, B % N left over.
while B > 0:
    if a[0] * A >= a_max:
        loop = B//N
        B %= N
        break
    a[0] *= A
    B -= 1
    a.sort()
# Spend the leftover (< N) operations on the smallest elements in order.
i = 0
while B > 0:
    a[i] *= A
    B -= 1
    i += 1
# Apply the evenly-distributed rounds with modular exponentiation.
for i in range(N):
    a[i] *= pow(A, loop, mo)
a.sort()
for i in range(N):
    a[i] %= mo
    print(a[i])
10,732 | 610a9051c154dabd92259b129435c78601d54e55 | import pytest
import responses
import json
from python.prohibition_web_svc.config import Config
from datetime import datetime
import python.prohibition_web_svc.middleware.keycloak_middleware as middleware
from python.prohibition_web_svc.models import db, UserRole, User
from python.prohibition_web_svc.app import create_app
import logging
import json
@pytest.fixture
def application():
    """Flask application under test."""
    return create_app()
@pytest.fixture
def as_guest(application):
    """Yield an HTTP test client with TESTING enabled."""
    application.config['TESTING'] = True
    with application.test_client() as client:
        yield client
@pytest.fixture
def database(application):
    """Create all tables for a test, then drop them again afterwards."""
    with application.app_context():
        db.init_app(application)
        db.create_all()
        yield db
        db.drop_all()
        db.session.commit()
@pytest.fixture
def roles(database):
    """Seed three users: john (officer, unapproved), larry (officer,
    approved) and mo (administrator + officer, both approved)."""
    today = datetime.strptime("2021-07-21", "%Y-%m-%d")
    users = [
        User(username="john@idir",
             user_guid="john@idir",
             agency='RCMP Terrace',
             badge_number='0508',
             first_name='John',
             last_name='Smith'),
        User(username="larry@idir",
             user_guid="larry@idir",
             agency='RCMP Terrace',
             badge_number='0555',
             first_name='Larry',
             last_name='Smith'),
        User(username="mo@idir",
             user_guid="mo@idir",
             agency='RCMP Terrace',
             badge_number='8088',
             first_name='Mo',
             last_name='Smith')
    ]
    db.session.bulk_save_objects(users)
    user_role = [
        UserRole(user_guid='john@idir', role_name='officer', submitted_dt=today),
        UserRole(user_guid='larry@idir', role_name='officer', submitted_dt=today, approved_dt=today),
        UserRole(user_guid='mo@idir', role_name='administrator', submitted_dt=today, approved_dt=today),
        UserRole(user_guid='mo@idir', role_name='officer', submitted_dt=today, approved_dt=today)
    ]
    db.session.bulk_save_objects(user_role)
    db.session.commit()
@responses.activate
def test_administrator_can_get_all_users(as_guest, monkeypatch, roles):
    """An approved administrator (mo) can list all users and an audit event is posted."""
    monkeypatch.setattr(Config, 'ADMIN_USERNAME', 'administrator@idir')
    # Bypass real Keycloak: accept any certificate and decode to the admin user.
    monkeypatch.setattr(middleware, "get_keycloak_certificates", _mock_keycloak_certificates)
    monkeypatch.setattr(middleware, "decode_keycloak_access_token", _get_administrative_user)
    resp = as_guest.get(Config.URL_PREFIX + "/api/v1/admin/users",
                        follow_redirects=True,
                        content_type="application/json",
                        headers=_get_keycloak_auth_header(_get_keycloak_access_token()))
    logging.debug("dump query response: " + json.dumps(resp.json))
    assert resp.status_code == 200
    assert len(resp.json) == 4
    assert resp.json[0]['user_guid'] == 'john@idir'
    # The outbound audit call must carry the acting admin's identity.
    assert responses.calls[0].request.body.decode() == json.dumps({
        'event': {
            'event': 'admin get users',
            'user_guid': 'mo@idir',
            'username': 'mo@idir'
        },
        'source': 'be78d6'
    })
@responses.activate
def test_non_administrators_cannot_get_all_users(as_guest, monkeypatch, roles):
    """An authenticated non-admin (larry) gets 401 and a permission-denied event."""
    monkeypatch.setattr(Config, 'ADMIN_USERNAME', 'administrator@idir')
    # Bypass real Keycloak: accept any certificate and decode to a plain officer.
    monkeypatch.setattr(middleware, "get_keycloak_certificates", _mock_keycloak_certificates)
    monkeypatch.setattr(middleware, "decode_keycloak_access_token", _get_authorized_user)
    resp = as_guest.get(Config.URL_PREFIX + "/api/v1/admin/users",
                        follow_redirects=True,
                        content_type="application/json",
                        headers=_get_keycloak_auth_header(_get_keycloak_access_token()))
    logging.debug(json.dumps(resp.json))
    assert resp.status_code == 401
    assert responses.calls[0].request.body.decode() == json.dumps({
        'event': {
            'event': 'permission denied',
            'user_guid': 'larry@idir',
            'username': 'larry@idir'
        },
        'source': 'be78d6'
    })
@responses.activate
def test_unauthenticated_user_cannot_get_all_users(as_guest, monkeypatch, roles):
    """A request without an Authorization header gets 401 and an 'unauthenticated' event."""
    resp = as_guest.get(Config.URL_PREFIX + "/api/v1/admin/users",
                        follow_redirects=True,
                        content_type="application/json")
    logging.debug(json.dumps(resp.json))
    assert resp.status_code == 401
    assert responses.calls[0].request.body.decode() == json.dumps({
        'event': {
            'event': 'unauthenticated',
        },
        'source': 'be78d6'
    })
def _get_keycloak_access_token() -> str:
return 'some-secret-access-token'
def _get_keycloak_auth_header(access_token) -> dict:
return dict({
'Authorization': 'Bearer {}'.format(access_token)
})
def _mock_keycloak_certificates(**kwargs) -> tuple:
logging.warning("inside _mock_keycloak_certificates()")
return True, kwargs
def _get_authorized_user(**kwargs) -> tuple:
logging.warning("inside _get_authorized_user()")
kwargs['decoded_access_token'] = {'preferred_username': 'larry@idir'}
return True, kwargs
def _get_administrative_user(**kwargs) -> tuple:
kwargs['decoded_access_token'] = {'preferred_username': 'mo@idir'}
return True, kwargs
|
10,733 | 03e36df7f62ba6681ef26d1a5418541d7188346c | from django.contrib import admin
from geotrek.common.mixins.actions import MergeActionMixin
from geotrek.sensitivity.models import Rule, SportPractice, Species
class RuleAdmin(MergeActionMixin, admin.ModelAdmin):
    """Admin for Rule objects; supports merging duplicates on `name`."""
    merge_field = "name"
    list_display = ('name', 'code', )
    search_fields = ('name', 'code', )
class SportPracticeAdmin(MergeActionMixin, admin.ModelAdmin):
    """Admin for SportPractice objects; merge duplicates on `name`."""
    merge_field = "name"
class SpeciesAdmin(MergeActionMixin, admin.ModelAdmin):
    """Admin for Species objects; merge duplicates on `name`."""
    merge_field = "name"

    def get_queryset(self, request):
        # Restrict the changelist to rows whose category is Species.SPECIES.
        return super().get_queryset(request).filter(category=Species.SPECIES)
# Hook the sensitivity models into the Django admin site.
admin.site.register(Rule, RuleAdmin)
admin.site.register(SportPractice, SportPracticeAdmin)
admin.site.register(Species, SpeciesAdmin)
|
10,734 | c0a03a67c5460974f3fa61d15493c26adadcea4f | from excile_framework.templator import render
class Index:
    """Front-page controller: renders index.html with date/python/btc context."""

    def __call__(self, request):
        return '200 OK', render('index.html', date=request.get('date', None),
                                python_ver=request.get('python_ver', None),
                                btc_to_usd=request.get('btc_to_usd', None))
class About:
    """About-page controller; passes the request's date into the template."""

    def __call__(self, request):
        return '200 OK', render('about.html', date=request.get('date', None))
class Bitcoin:
    """Static controller for the Bitcoin page."""

    def __call__(self, request):
        """Return a 200 status with a plain 'Bitcoin' body; *request* is unused."""
        status, body = '200 OK', 'Bitcoin'
        return status, body
class Etherium:
    """Static controller for the Etherium page."""

    def __call__(self, request):
        """Return a 200 status with a plain 'Etherium' body; *request* is unused."""
        response = ('200 OK', 'Etherium')
        return response
class Hello:
    """Greeting-page controller; pulls `username` out of the request dict."""

    def __call__(self, request):
        return '200 OK', render('hello.html', username=request.get('username', None))
class NotFound404:
    """Fallback controller for unknown routes."""

    def __call__(self, request):
        """Return the 404 status/body pair regardless of *request*."""
        response = ('404 WHAT', '404 PAGE Not Found')
        return response
|
10,735 | f1c350b87761355ad44be4703c07596361f2bf2b | from kaa.filetype import filetypedef
class FileTypeInfo(filetypedef.FileTypeInfo):
    """File-type descriptor for Markdown sources."""

    # File extensions handled by this type.
    FILE_EXT = {'.md'}

    @classmethod
    def get_modetype(cls):
        """Return the editor mode class, imported lazily inside the method
        (presumably to defer the module load — confirm)."""
        from kaa.filetype.markdown.markdownmode import MarkdownMode
        return MarkdownMode
|
10,736 | e8cdf9212f8207dc4513acb5958508f61ed29a88 | """
K-means.
@author Aaron Zampaglione <azampagl@my.fit.edu>
@course CSE 5800 Advanced Topics in CS: Learning/Mining and the Internet, Fall 2011
@project Proj 03, CLUSTERING
@copyright Copyright (c) 2011 Aaron Zampaglione
@license MIT
"""
# Dirty hack for Python < 2.5
import sys
sys.path.append('../../')
from cluster.core import Cluster
import random
# My favorite number.
#random.seed(23)
class KMeans(Cluster):
    """
    K-means clustering: repeatedly assign each document to the centroid with
    the highest cosine similarity and recompute centroids until they settle.
    """

    def execute(self, docs, clusters=None):
        """
        Main execution.

        If clusters are provided, random seeds will not
        be generated.

        Key arguments:
        docs -- the documents to cluster.
        clusters -- initial clusters [optional]
        """
        if clusters == None:
            # Find k random docs to start as our centroids.
            # Fix: materialize as a list so shuffle also works on Python 3,
            # where range objects are not shuffleable.
            indices = list(range(len(docs)))
            random.shuffle(indices)
            # Initialize clusters.
            i = 0
            for index in indices[:self.k]:
                cluster = self.clusters[i]
                docs[index].cluster = cluster
                cluster.docs.append(docs[index])
                cluster.centroid = self.centroid(self.clusters[i].docs)
                i += 1
        else:
            self.clusters = clusters

        change = True
        while change:
            # Check if any of the centroids changed.
            change = False
            for doc in docs:
                # Remove this doc from its original cluster.
                if doc.cluster != None:
                    doc.cluster.docs.remove(doc)
                    doc.cluster = None

                max_cluster = None
                max_cos = float("-inf")
                # Find the closest cluster for this document.
                for cluster in self.clusters:
                    cos = self.cosine(doc.tfidf, cluster.centroid)
                    if cos > max_cos:
                        max_cluster = cluster
                        max_cos = cos

                old_centroid = max_cluster.centroid
                # Re-assign this doc to the new cluster and refresh the centroid.
                doc.cluster = max_cluster
                max_cluster.docs.append(doc)
                max_cluster.centroid = self.centroid(max_cluster.docs)

                # Bug fix: the original tolerance was `1 ** -15`, which
                # evaluates to 1.0, so another pass was only triggered for
                # centroid moves larger than 1; `10 ** -15` was intended.
                for index in old_centroid:
                    if old_centroid[index] - max_cluster.centroid[index] > (10 ** -15):
                        change = True
10,737 | f0e6800c7f98e191c28523f6d64263aeee647054 | import pygame
import random
import time
pygame.init()
color_list = {"red": (255, 0, 0), "blue": (0, 0, 255), "green": (0, 255, 0), "yellow": (0, 0, 0)}
class Cell:
    """A draggable coloured circle; `color` is a key into color_list."""

    def __init__(self, color, x, y):
        self.color = color      # colour name (key into color_list)
        self.x = x              # circle centre x
        self.y = y              # circle centre y
        self.cell = 0           # pygame Rect after the first show(); 0 before
        self.clicked = False    # True while the cell is the selected one

    def show(self):
        """Draw the cell on the screen; 'yellow' cells are outlines only (width 2)."""
        if self.color == "yellow":
            self.cell = pygame.draw.circle(screen, self.get_color(), (self.x, self.y), 20, 2)
            return
        self.cell = pygame.draw.circle(screen, self.get_color(), (self.x, self.y), 20)

    def get_color(self):
        """Resolve the colour name to its RGB tuple."""
        return color_list[self.color]

    def drag(self):
        """While selected, follow the mouse — except when the pointer is in the
        top-left region (x <= 320 and y <= 310), where movement is blocked."""
        if self.clicked:
            mouse = pygame.mouse.get_pos()
            if mouse[0] > 320 or mouse[1] > 310:
                self.x = pygame.mouse.get_pos()[0]
                self.y = pygame.mouse.get_pos()[1]
screen_width = 1920
screen_height = 1080
screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption("Super Bacteria")
cell_list = []
for i in range(1, 11):
cell = Cell("red", random.randint(320, 1870), random.randint(310, 1030))
cell_list.append(cell)
cell = Cell("blue", random.randint(320, 1870), random.randint(310, 1030))
cell_list.append(cell)
cell = Cell("green", random.randint(320, 1870), random.randint(310, 1030))
cell_list.append(cell)
color_count = {"red": 10, "blue": 10, "green": 10, "yellow": 0}
font = pygame.font.Font(None, 50)
running = True
while running:
pygame.time.Clock().tick(60)
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
elif event.type == pygame.MOUSEBUTTONDOWN:
mouse = pygame.mouse.get_pos()
clicked_cell = 0
if add_yellow.collidepoint(mouse):
cell = Cell("yellow", random.randint(300, 1870), random.randint(100, 1030))
cell_list.append(cell)
color_count["yellow"] += 1
elif random_remove.collidepoint(mouse):
remove_list = []
for cell in cell_list:
if cell.color != "yellow":
remove_list.append(cell)
random.shuffle(remove_list)
for i in range(0, 15):
if len(remove_list) >= i+1:
del cell_list[cell_list.index(remove_list[i])]
color_count[remove_list[i].color] -= 1
elif make_double.collidepoint(mouse):
copy_cell_list = []
for cell in cell_list:
copy_cell_list.append(cell)
for cell in copy_cell_list:
new_cell = Cell(cell.color, random.randint(320, 1870), random.randint(310, 1030))
cell_list.append(new_cell)
color_count[new_cell.color] += 1
else:
for cell in cell_list:
if cell.cell.collidepoint(mouse):
if clicked_cell != 0:
clicked_cell.clicked = False
cell.clicked = True
clicked_cell = cell
if event.button == 3:
if clicked_cell != 0:
del cell_list[cell_list.index(clicked_cell)]
color_count[clicked_cell.color] -= 1
clicked_cell = 0
elif event.type == pygame.MOUSEBUTTONUP:
if clicked_cell != 0:
clicked_cell.clicked = False
screen.fill((255, 212, 0))
pygame.draw.rect(screen, (128, 128, 128), [0, 0, 300, 290])
pygame.draw.circle(screen, (255, 0, 0), (30, 30), 20)
pygame.draw.circle(screen, (0, 0, 255), (30, 80), 20)
pygame.draw.circle(screen, (0, 255, 0), (170, 30), 20)
pygame.draw.circle(screen, (255, 212, 0), (170, 80), 20)
screen.blit(font.render(": {}".format(color_count.get("red")), True, (255, 255, 255)), (60, 15))
screen.blit(font.render(": {}".format(color_count.get("blue")), True, (255, 255, 255)), (60, 65))
screen.blit(font.render(": {}".format(color_count.get("green")), True, (255, 255, 255)), (200, 15))
screen.blit(font.render(": {}".format(color_count.get("yellow")), True, (255, 255, 255)), (200, 65))
add_yellow = pygame.draw.rect(screen, (0, 0, 0), [10, 110, 280, 50])
screen.blit(font.render("ADD YELLOW", True, (255, 255, 255)), (33, 120))
random_remove = pygame.draw.rect(screen, (0, 0, 0), [10, 170, 280, 50])
screen.blit(font.render("RD REMOVE", True, (255, 255, 255)), (42, 180))
make_double = pygame.draw.rect(screen, (0, 0, 0), [10, 230, 280, 50])
screen.blit(font.render("DOUBLE", True, (255, 255, 255)), (75, 240))
for cell in cell_list:
cell.show()
cell.drag()
pygame.display.update()
pygame.quit() |
10,738 | a42c6bacdd823c7a5c095efe2e7d161380e0ac5c |
from color_generators import ColorGenerators
class CustomEffects():
    """LED-strip effects driven through a pixel-strand controller object."""
    def __init__(self, pixel_strand):
        # Strand controller exposing numPixels/setPixelColor/show/clear/delay.
        self.pixels = pixel_strand
    def flash_effect(self, spacing=1, cycle=1):
        # Flash every ``spacing``-th pixel on and off, ``cycle`` times.
        cg = ColorGenerators()
        for times in range(cycle):
            #color = cg.randColor()
            # Fixed green hue for every flash cycle.
            color = cg.genRGBfromHSL(cg.HUES['GREEN'])
            print(times, color)
            for i in range(self.pixels.numPixels()):
                if(i % spacing == 0):
                    self.pixels.setPixelColor(i, self.pixels.Color(
                        color[0],
                        color[1],
                        color[2]
                    ))
            # On phase, then off phase, 100 ms each (units per strand API --
            # TODO confirm delay() takes milliseconds).
            self.pixels.show()
            self.pixels.delay(100)
            self.pixels.clear()
            self.pixels.show()
            self.pixels.delay(100)
    def lightning_effect(self, effect_time=0, cycle=0):
        # Six accelerating flashes of a random color on every other pixel.
        # NOTE(review): effect_time and cycle are accepted but never used --
        # confirm whether they were meant to drive the flash count/duration.
        cg = ColorGenerators()
        delay = 100
        delay_deg = .15
        color = cg.randColor()
        for j in range(6):
            for i in range(self.pixels.numPixels()):
                if(i % 2 == 0):
                    self.pixels.setPixelColor(i, self.pixels.Color(
                        color[0],
                        color[1],
                        color[2]
                    ))
            # Shrink the delay by 15% of the base per flash, floored at 0,
            # so the strobe speeds up like a lightning flicker.
            tempD = delay - (j * delay_deg * delay)
            if(tempD < 0):
                tempD = 0
            self.pixels.show()
            self.pixels.delay(tempD)
            self.pixels.clear()
            self.pixels.show()
            self.pixels.delay(tempD)
10,739 | 58c4e5e19166ee3f37a01fb41e877c00f0674859 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\alca0\Documents\Python_Scripts\dicom_ui\dicom_ui_design.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.setWindowModality(QtCore.Qt.NonModal)
Dialog.resize(1113, 868)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
Dialog.setAcceptDrops(False)
Dialog.setAutoFillBackground(False)
self.load_button = QtWidgets.QPushButton(Dialog)
self.load_button.setGeometry(QtCore.QRect(20, 10, 111, 41))
font = QtGui.QFont()
font.setFamily("Calibri")
font.setPointSize(12)
self.load_button.setFont(font)
self.load_button.setObjectName("load_button")
self.img_slider = QtWidgets.QSlider(Dialog)
self.img_slider.setEnabled(False)
self.img_slider.setGeometry(QtCore.QRect(20, 800, 771, 20))
self.img_slider.setMinimum(0)
self.img_slider.setMaximum(10)
self.img_slider.setPageStep(2)
self.img_slider.setProperty("value", 0)
self.img_slider.setOrientation(QtCore.Qt.Horizontal)
self.img_slider.setObjectName("img_slider")
self.verticalLayoutWidget = QtWidgets.QWidget(Dialog)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(20, 70, 831, 711))
self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
self.layout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
self.layout.setSizeConstraint(QtWidgets.QLayout.SetMaximumSize)
self.layout.setContentsMargins(0, 0, 0, 0)
self.layout.setSpacing(0)
self.layout.setObjectName("layout")
self.slice = QtWidgets.QLabel(Dialog)
self.slice.setGeometry(QtCore.QRect(800, 780, 55, 41))
font = QtGui.QFont()
font.setPointSize(12)
self.slice.setFont(font)
self.slice.setText("")
self.slice.setObjectName("slice")
self.warning_label = QtWidgets.QLabel(Dialog)
self.warning_label.setGeometry(QtCore.QRect(900, 810, 161, 41))
self.warning_label.setText("")
self.warning_label.setObjectName("warning_label")
self.progressBar = QtWidgets.QProgressBar(Dialog)
self.progressBar.setEnabled(True)
self.progressBar.setGeometry(QtCore.QRect(20, 830, 831, 23))
self.progressBar.setProperty("value", 0)
self.progressBar.setObjectName("progressBar")
self.gamma_slider = QtWidgets.QSlider(Dialog)
self.gamma_slider.setEnabled(False)
self.gamma_slider.setGeometry(QtCore.QRect(880, 90, 22, 681))
self.gamma_slider.setMinimum(1)
self.gamma_slider.setMaximum(100)
self.gamma_slider.setProperty("value", 10)
self.gamma_slider.setOrientation(QtCore.Qt.Vertical)
self.gamma_slider.setObjectName("gamma_slider")
self.gamma_label = QtWidgets.QLabel(Dialog)
self.gamma_label.setGeometry(QtCore.QRect(870, 50, 51, 31))
self.gamma_label.setObjectName("gamma_label")
self.gain_slider = QtWidgets.QSlider(Dialog)
self.gain_slider.setEnabled(False)
self.gain_slider.setGeometry(QtCore.QRect(930, 90, 22, 681))
self.gain_slider.setMinimum(1)
self.gain_slider.setMaximum(100)
self.gain_slider.setProperty("value", 10)
self.gain_slider.setOrientation(QtCore.Qt.Vertical)
self.gain_slider.setObjectName("gain_slider")
self.gain_label = QtWidgets.QLabel(Dialog)
self.gain_label.setGeometry(QtCore.QRect(920, 50, 31, 31))
self.gain_label.setObjectName("gain_label")
self.filter_slider = QtWidgets.QSlider(Dialog)
self.filter_slider.setEnabled(False)
self.filter_slider.setGeometry(QtCore.QRect(980, 90, 22, 681))
self.filter_slider.setMinimum(1)
self.filter_slider.setMaximum(10)
self.filter_slider.setPageStep(3)
self.filter_slider.setProperty("value", 1)
self.filter_slider.setOrientation(QtCore.Qt.Vertical)
self.filter_slider.setObjectName("filter_slider")
self.filter_label = QtWidgets.QLabel(Dialog)
self.filter_label.setGeometry(QtCore.QRect(980, 50, 31, 31))
self.filter_label.setObjectName("filter_label")
self.unsharpen_slider = QtWidgets.QSlider(Dialog)
self.unsharpen_slider.setEnabled(False)
self.unsharpen_slider.setGeometry(QtCore.QRect(1030, 90, 22, 681))
self.unsharpen_slider.setMinimum(0)
self.unsharpen_slider.setMaximum(20)
self.unsharpen_slider.setPageStep(4)
self.unsharpen_slider.setProperty("value", 0)
self.unsharpen_slider.setSliderPosition(0)
self.unsharpen_slider.setOrientation(QtCore.Qt.Vertical)
self.unsharpen_slider.setObjectName("unsharpen_slider")
self.unsharp_label = QtWidgets.QLabel(Dialog)
self.unsharp_label.setGeometry(QtCore.QRect(1020, 50, 51, 31))
self.unsharp_label.setObjectName("unsharp_label")
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dicom3D_viewer"))
self.load_button.setText(_translate("Dialog", "Load"))
self.gamma_label.setText(_translate("Dialog", "Gamma"))
self.gain_label.setText(_translate("Dialog", "Gain"))
self.filter_label.setText(_translate("Dialog", "Filter"))
self.unsharp_label.setText(_translate("Dialog", "Unsharp"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Dialog = QtWidgets.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
|
10,740 | dd5c76f720fb71b715e4ec6ae4faf3a4e88c9c26 | """Easy to use dialogs. Modified to avoid talking to the window server.
Only Message() is supported, and prints to stdout. The other routines will
throw a NotImplementedError exception.
Message(msg) -- display a message and an OK button.
AskString(prompt, default) -- ask for a string, display OK and Cancel buttons.
AskPassword(prompt, default) -- like AskString(), but shows text as bullets.
AskYesNoCancel(question, default) -- display a question and Yes, No and Cancel buttons.
GetArgv(optionlist, commandlist) -- fill a sys.argv-like list using a dialog
AskFileForOpen(...) -- Ask the user for an existing file
AskFileForSave(...) -- Ask the user for an output file
AskFolder(...) -- Ask the user to select a folder
bar = Progress(label, maxvalue) -- Display a progress bar
bar.set(value) -- Set value
bar.inc( *amount ) -- increment value by amount (default=1)
bar.label( *newlabel ) -- get or set text label.
More documentation in each function.
This module uses DLOG resources 260 and on.
Based upon STDWIN dialogs with the same names and functions.
"""
import os
import sys
__all__ = ['Message', 'AskString', 'AskPassword', 'AskYesNoCancel',
'GetArgv', 'AskFileForOpen', 'AskFileForSave', 'AskFolder',
'ProgressBar']
def cr2lf(text):
    """Return *text* with carriage returns ('\\r') converted to newlines ('\\n')."""
    # Bug fix: the original used string.join(string.split(...)) but the
    # ``string`` module is never imported in this file, so any call raised
    # NameError.  str.replace is the equivalent (and modern) operation.
    if '\r' in text:
        text = text.replace('\r', '\n')
    return text
def lf2cr(text):
    """
    Return *text* with newlines ('\\n') converted to carriage returns ('\\r'),
    truncated to 253 characters plus a '\\311' ellipsis character if longer.
    """
    # Bug fix: the original used string.join(string.split(...)) but the
    # ``string`` module is never imported in this file, so any call raised
    # NameError.  str.replace is the equivalent operation.
    if '\n' in text:
        text = text.replace('\n', '\r')
    if len(text) > 253:
        # '\311' is the classic Mac ellipsis character used by these dialogs.
        text = text[:253] + '\311'
    return text
def Message(msg, id=260, ok=None):
    """
    Print the MESSAGE string to stderr instead of showing a dialog.

    The ``id`` and ``ok`` parameters are accepted for API compatibility
    with the original EasyDialogs.Message and are ignored here.
    """
    sys.stderr.write('%s\n' % msg)
def AskString(prompt, default = "", id=261, ok=None, cancel=None):
"""Display a PROMPT string and a text entry field with a DEFAULT string.
Return the contents of the text entry field when the user clicks the
OK button or presses Return.
Return None when the user clicks the Cancel button.
If omitted, DEFAULT is empty.
The PROMPT and DEFAULT strings, as well as the return value,
can be at most 255 characters long.
"""
raise NotImplementedError("AskString")
def AskPassword(prompt, default='', id=264, ok=None, cancel=None):
"""Display a PROMPT string and a text entry field with a DEFAULT string.
The string is displayed as bullets only.
Return the contents of the text entry field when the user clicks the
OK button or presses Return.
Return None when the user clicks the Cancel button.
If omitted, DEFAULT is empty.
The PROMPT and DEFAULT strings, as well as the return value,
can be at most 255 characters long.
"""
raise NotImplementedError("AskPassword")
def AskYesNoCancel(question, default = 0, yes=None, no=None, cancel=None, id=262):
"""Display a QUESTION string which can be answered with Yes or No.
Return 1 when the user clicks the Yes button.
Return 0 when the user clicks the No button.
Return -1 when the user clicks the Cancel button.
When the user presses Return, the DEFAULT value is returned.
If omitted, this is 0 (No).
The QUESTION string can be at most 255 characters.
"""
raise NotImplementedError("AskYesNoCancel")
class ProgressBar:
def __init__(self, title="Working...", maxval=0, label="", id=263):
raise NotImplementedError("ProgressBar")
ARGV_ID=265
ARGV_ITEM_OK=1
ARGV_ITEM_CANCEL=2
ARGV_OPTION_GROUP=3
ARGV_OPTION_EXPLAIN=4
ARGV_OPTION_VALUE=5
ARGV_OPTION_ADD=6
ARGV_COMMAND_GROUP=7
ARGV_COMMAND_EXPLAIN=8
ARGV_COMMAND_ADD=9
ARGV_ADD_OLDFILE=10
ARGV_ADD_NEWFILE=11
ARGV_ADD_FOLDER=12
ARGV_CMDLINE_GROUP=13
ARGV_CMDLINE_DATA=14
def GetArgv(optionlist=None, commandlist=None, addoldfile=1, addnewfile=1, addfolder=1, id=ARGV_ID):
raise NotImplementedError("GetArgv")
def SetDefaultEventProc(proc):
raise NotImplementedError("SetDefaultEventProc")
def AskFileForOpen(
message=None,
typeList=None,
# From here on the order is not documented
version=None,
defaultLocation=None,
dialogOptionFlags=None,
location=None,
clientName=None,
windowTitle=None,
actionButtonLabel=None,
cancelButtonLabel=None,
preferenceKey=None,
popupExtension=None,
eventProc=None,
previewProc=None,
filterProc=None,
wanted=None,
multiple=None):
"""Display a dialog asking the user for a file to open.
wanted is the return type wanted: FSSpec, FSRef, unicode or string (default)
the other arguments can be looked up in Apple's Navigation Services documentation"""
raise NotImplementedError("AskFileForOpen")
def AskFileForSave(
message=None,
savedFileName=None,
# From here on the order is not documented
version=None,
defaultLocation=None,
dialogOptionFlags=None,
location=None,
clientName=None,
windowTitle=None,
actionButtonLabel=None,
cancelButtonLabel=None,
preferenceKey=None,
popupExtension=None,
eventProc=None,
fileType=None,
fileCreator=None,
wanted=None,
multiple=None):
"""Display a dialog asking the user for a filename to save to.
wanted is the return type wanted: FSSpec, FSRef, unicode or string (default)
the other arguments can be looked up in Apple's Navigation Services documentation"""
raise NotImplementedError("AskFileForSave")
def AskFolder(
message=None,
# From here on the order is not documented
version=None,
defaultLocation=None,
dialogOptionFlags=None,
location=None,
clientName=None,
windowTitle=None,
actionButtonLabel=None,
cancelButtonLabel=None,
preferenceKey=None,
popupExtension=None,
eventProc=None,
filterProc=None,
wanted=None,
multiple=None):
"""Display a dialog asking the user for select a folder.
wanted is the return type wanted: FSSpec, FSRef, unicode or string (default)
the other arguments can be looked up in Apple's Navigation Services documentation"""
raise NotImplementedError("AskFolder")
def test():
import time
Message("Testing EasyDialogs.")
if __name__ == '__main__':
try:
test()
except KeyboardInterrupt:
Message("Operation Canceled.")
|
10,741 | bf0a11384e1e4ed350c6406744e89de43af98183 | import grid as g
import input as i
import sound as s
import pygame, os, math
class Timer:
    """Thin wrapper around pygame's millisecond clock for timing spans."""
    def __init__(self):
        # Millisecond timestamps captured by start()/finish().
        self.timeStart = 0
        self.timeFinish = 0
    def start(self):
        """
        Stores the current time in milliseconds since pygame.init().
        """
        self.timeStart = pygame.time.get_ticks()
    def finish(self, secs=False):
        """
        Returns time elapsed since pygame.init() or since Timer.start()
        was called.

        Parameters
        ----------
        secs: bool, optional, default=False
            if True, return value is in seconds, else in milliseconds.
        """
        # Bug fix: the original wrote ``pygame.time.get_ticks()()`` -- the
        # second call pair invoked the returned int and raised TypeError.
        self.timeFinish = pygame.time.get_ticks()
        elapsedTime = self.timeFinish - self.timeStart
        if secs:
            return elapsedTime / 1000
        else:
            return elapsedTime
    def wait(self, time, secs=True):
        """
        Pause the program.

        Parameters
        ----------
        time: int / float
            length of time to wait
        secs: bool, optional, default=True
            if True, time is assumed to be in seconds else, milliseconds
        """
        if secs:
            pygame.time.wait(time * 1000)
        else:
            pygame.time.wait(time)
class Camera():
def __init__(self,
focus,
area):
self.focus = focus
self.area = area
def zoom(self, amount):
"""
Positive amount values - zoom in
Negative amount values - zoom out
"""
pass
def move(self, cell):
self.focus = cell
def rotate(self, amount):
pass
class Character():
    """A game entity occupying a single grid cell, rendered from a sprite."""
    def __init__(self,
                 starting_position,
                 sprite):
        # Grid cell this character currently occupies.
        self.position = starting_position
        # Image/surface used to draw the character.
        self.sprite = sprite
    def move(self, cell):
        # Placeholder -- movement is not implemented yet.
        pass
class AnimatedCharacter(Character):
    """A Character whose sprite is a sprite sheet of animation frames."""
    def __init__(self,
                 starting_position,
                 sprite,
                 spritesheet_size):
        # Bug fix: the original called Character.__init__(starting_position,
        # sprite), which passed the position as ``self`` and raised at
        # runtime; unbound __init__ calls must pass self explicitly.
        Character.__init__(self, starting_position, sprite)
        # Dimensions of the sprite sheet; presumably (columns, rows) of
        # frames -- TODO confirm against the animation code.
        self.spritesheet_size = spritesheet_size
class GreasyEngine():
def __init__(self,
rows, columns,
imageWidth, imageHeight,
backgroundColour=(0,0,0),
windowPosition=(0,0),
centred=False,
title="Greasy Window",
fullscreen=False,
resizable=False,
icon=None):
self.gameGrid = g.Grid(rows, columns)
self.input = i.InputHandler()
self.sound = s.SoundHandler()
self.timer = Timer()
self.imageWidth = imageWidth
self.imageHeight = imageHeight
self.columns = columns
self.rows = rows
if centred:
os.environ['SDL_VIDEO_CENTERED'] = '1'
else:
os.environ['SDL_VIDEO_WINDOW_POS'] = str(windowPosition[0]) + "," + str(windowPosition[1])
pygame.init()
self.screen = pygame.display.set_mode((imageWidth * rows,
imageHeight * columns))
if icon != None:
self.setIcon(icon)
pygame.display.set_caption(title)
self.base = None
self.backgroundColour = backgroundColour
self.fill()
##### OBJECTS #####
def moveObject(self, start, target, replacement=0, gameGrid=None):
"""
Moves the contents of one cell of the grid to another cell, replacing
the contents of the original cell with a value.
Parameters
----------
start: tuple
(x, y) coordinates of the cell containing the object
target: tuple
(x, y) coordinates of the cell to which to move the object
replacement: any, optional, default=0
the object with which to fill the cell referenced by start
gameGrid: Greasy Grid, optional, default=self.gameGrid
Greasy Grid
"""
if not gameGrid:
gameGrid = self.gameGrid
# set gameGrid[target] to gameGrid[start]
currentItem = gameGrid.getItem(start[0], start[1])
gameGrid.setItem(replacement, start[0], start[1])
gameGrid.setItem(currentItem, target[0], target[1])
def newObject(self, filename, alpha=False, colourkey=None, resize=True):
"""
Returns a new pygame surface.
Parameters
----------
filename: string
image filename
alpha: bool, optional, default=False
include alpha channel
colourkey: tuple, optional, default=None
(R,G,B) colourkey
resize: bool, optional, default=True
resize the surface to the size of the cells
"""
if alpha:
# TODO: implement working colourkey mode
image = pygame.image.load(filename).convert_alpha()
if colourkey != None:
image.set_colorkey(colourkey)
else:
image = pygame.image.load(filename).convert()
if resize:
size = image.get_size()
if size[0] != self.imageWidth or size[1] != self.imageHeight:
if size[0] != self.imageWidth:
newWidth = self.imageWidth
else:
newWidth = size[0]
if size[1] != self.imageHeight:
newHeight = self.imageHeight
else:
newHeight = size[1]
image = self.resizeObject(image, newWidth, newHeight)
return image
def addObject(self, item, row, column, gameGrid=None):
"""
Adds a pygame surface to the grid
Parameters
----------
item: any
item to add to Grid
row: int
row
column: int
column
gameGrid: Greasy Grid
Greasy Grid
"""
if not gameGrid:
gameGrid = self.gameGrid
if row > self.rows-1 or row < 0 or column > self.columns-1 or column < 0:
print "addObject could not add %s: \
Location out of bounds" % str(item)
return None
gameGrid.setItem(item, row, column)
def getObject(self, row, column, gameGrid=None):
"""
Returns an object from the grid
"""
if not gameGrid:
gameGrid = self.gameGrid
return gameGrid.getItem(row, column)
def fillEmptyCells(self, item, gameGrid=None, emptyValue=0):
"""
Fills all the empty cells of a grid
"""
if not gameGrid:
gameGrid = self.gameGrid
for r, c in gameGrid:
currentCell = gameGrid.getItem(r, c)
if currentCell == emptyValue:
self.addObject(item, r, c, gameGrid=gameGrid)
def emptyCell (self, row, column, gameGrid=None, emptyValue=0):
"""
Replaces the specified cell with the emptyValue
"""
if not gameGrid:
gameGrid = self.gameGrid
self.addObject(emptyValue, row, column, gameGrid=gameGrid)
def emptyGrid(self, gameGrid=None, emptyValue=0):
"""
Replace all cells in the grid with the emptyValue
"""
if not gameGrid:
gameGrid = self.gameGrid
for r, c in gameGrid:
self.emptyCell(r, c, gameGrid=gameGrid, emptyValue=emptyValue)
def limitValue(self, value, lowerLimit, upperLimit):
"""
Limits the value of a variable to the range defined by lowerLimit
and upperLimit.
"""
if value > upperLimit:
return upperLimit
elif value < lowerLimit:
return lowerLimit
else:
return value
def testEmptyCell(self, row, column, gameGrid=None, emptyValue=0):
"""
Tests if a cell contains the empty value
"""
if not gameGrid:
gameGrid = self.gameGrid
row = self.limitValue(row, 0, self.rows-1)
column = self.limitValue(column, 0, self.columns-1)
if gameGrid.getItem(row, column) == emptyValue:
return True
else:
return False
##### TRANSFORM #####
def flipObject(self, object, vertical, horizontal):
"""
Flips a pygame surface vertically, horizontally, or both.
"""
try:
# is object a grid reference?
row = object[0]
column = object[1]
except TypeError:
flipped = pygame.transform.flip(object, vertical, horizontal)
return flipped
flipped = pygame.transform.flip(self.getObject(row, column), vertical,
horizontal)
self.addObject(flipped, row, column)
return flipped
def resizeObject(self, object, width, height):
"""
Scales a pygame surface
"""
try:
row = object[0]
column = object[1]
except TypeError:
scaled = pygame.transform.scale(object, (width, height))
return scaled
scaled = pygame.transform.scale(self.getObject(row, column),
(width, height))
self.addObject(scaled, row, column)
return scaled
def rotateObject(self, object, angle):
"""
Rotates a pygame surface
"""
try:
row = object[0]
column = object[1]
except TypeError:
rotated = pygame.transform.rotate(object, angle)
return rotated
rotated = pygame.transform.rotate(self.getObject(row, column), angle)
self.addObject(rotated, row, column)
return rotated
def getPixelColour(self, item, pixel):
"""
Returns the RGBA colour value at the given pixel.
Parameters
item: pygame surface
pixel: tuple
"""
return item.get_at(pixel)
##### DISPLAY #####
def setIcon(self, icon, alpha=False):
"""
Sets the window icon
"""
try:
pygame.display.set_icon(icon)
except TypeError:
icon = self.newObject(icon, alpha)
pygame.display.set_icon(icon)
def setBackground(self, background, dest=None, empty=0):
"""
Creates a tiled background from one pygame surface
"""
if not dest:
dest = self.screen
self.base = pygame.display.set_mode(dest.get_size())
backgroundGrid = g.Grid(self.rows, self.columns)
self.fillEmptyCells(background, gameGrid=backgroundGrid)
# blit tiled background image on background surface
for r, c in backgroundGrid:
x = r * self.imageWidth
y = c * self.imageHeight
currentItem = backgroundGrid.getItem(r, c)
if currentItem != empty:
self.base.blit(currentItem, (x, y))
self.base = self.base.copy()
def fill(self, screen=None, colour=None):
"""
Fills the screen with a single colour
"""
if not screen:
screen = self.screen
if not colour:
colour = self.backgroundColour
screen.fill(colour)
def text(self, string,
location,
font, fontSize,
antialias=False,
colour=(0,0,0),
newlinePad=5,
screen=None):
"""
Creates a pygame surface containing text and displays it.
"""
if not screen:
screen = self.screen
x = location[0]
y = location[1]
font = pygame.font.Font(font, fontSize)
lines = string.split("\n")
counter = 0
height = 0
for line in lines:
fontSurface = font.render(line, antialias, colour).convert()
if counter == 0:
screen.blit(fontSurface, location)
else:
newY = y * counter + newlinePad + height
screen.blit(fontSurface, (x, newY))
height = font.size(line)[1] + height + newlinePad
counter += 1
def updateDisplay(self, gameGrid=None,
dest=None,
background=None,
empty=0,
text=None,
text_location=(0,0),
font=None,
fontColour=(0,0,0),
fontAntialias=False,
fontSize=20):
"""
Displays all surfaces on the screen. It also optionally displays a
tiled background surface. Assumes that all non-zero cells of
self.gameGrid contain a pygame Surface
"""
if not gameGrid:
gameGrid = self.gameGrid
if not dest:
dest = self.screen
self.fill()
if not self.base:
"""Blits the sprites to the screen surface"""
if not background:
for r, c in gameGrid:
x = r * self.imageWidth
y = c * self.imageHeight
currentItem = gameGrid.getItem(r, c)
if currentItem != empty:
dest.blit(currentItem, (x, y))
if text:
self.text(text, text_location, font, fontSize,
antialias=fontAntialias, colour=fontColour)
pygame.display.update()
return None
else:
self.setBackground(background)
# Blit the sprites to the background surface, then blit background
# surface to destination surface
baseCopy = self.base.copy()
# blit sprites to the background surface
for r, c in gameGrid:
x = r * self.imageWidth
y = c * self.imageHeight
currentItem = gameGrid.getItem(r, c)
if currentItem !=empty:
self.base.blit(currentItem, (x, y))
# blit background surface to destination surface
dest.blit(self.base, (0,0))
self.base = baseCopy.copy()
if text:
self.text(text, text_location, font, fontSize,
antialias=fontAntialias, colour=fontColour)
pygame.display.update()
def showMessage(self, text, location, font, fontSize, colour=(255,255,255),
input=False, secs=None):
"""
Displays a text message. It either waits a specified number of seconds
or waits for user input.
"""
self.fill()
self.text(text, location, font, fontSize, colour=colour)
pygame.display.update()
if input:
currentEvent = self.input.input()
while not self.input.checkInput(currentEvent):
currentEvent = self.input.input()
if not secs:
self.timer.wait(secs)
##### HIGH-LEVEL INTERACTION #####
def arrowMoveObject(self, event, cell, spaces=1):
"""
Checks for input from the arrow keys and moves an item in the
specified direction
"""
direction = self.input.checkDirectionInput(event)
try:
x = cell[0]
y = cell[1]
except TypeError:
print "arrowMoveObject: cell is of type", type(cell)
return None
try:
if direction == 1:
if y + spaces <= self.rows - 1:
cell = (x, y+spaces)
else:
cell = (x, self.rows-1)
elif direction == 2:
if y -spaces >= 0:
cell = (x, y-spaces)
else:
cell = (x, 0)
elif direction == 3:
if x - spaces >= 0:
cell = (x-spaces, y)
else:
cell = (0, y)
elif direction == 4:
if x + spaces <= self.columns - 1:
cell = (x+spaces, y)
else:
cell = (self.columns-1, y)
except IndexError:
print "no dice"
return cell
self.moveObject((x, y), cell)
return cell
def clickCell(self, event):
"""
Returns a tuple containing (x, y) coordinates of a mouse-clicked
cell
"""
position = self.input.checkMouseInput(event)
if not position:
return None
x = math.floor(position[0] / self.imageWidth)
y = math.floor(position[1] / self.imageHeight)
return (int(x), int(y))
def quit(self):
"""
Return a pygame.QUIT event
"""
return pygame.event.Event(pygame.QUIT)
##### MISC #####
def __iter__(self):
for r, c in self.gameGrid:
yield (r, c) |
10,742 | 42c002086f0d94cc52e683019a496451e3f35623 | #Python learning exercises
# functions
def echo(thing):
    """Return the given argument unchanged (identity function)."""
    return thing
def swap(n1, n2):
    """Return the two arguments as a tuple in reversed order."""
    return (n2, n1)
def main_function():
print"testing echo('marco'): ", echo('marco')
print"testing swap('1, 2'):",swap('1, 2')
#Arithmetic functions
def reverse(x):
    """Return the arithmetic negation of x."""
    return -1 * x
def main_arithmetic():
print "test reverse(3): ",reverse(3)
print "test reverse(-3): ",reverse(-3)
def main():
main_function()
main_arithmetic()
main() |
10,743 | 591448b76980f48afb20e4001b1d7f6198bc80ba | # _*_ coding: utf-8 _*_
# Recursive pre-order, in-order and post-order binary tree traversals
class Tree(object):
    """
    Binary tree node with recursive pre-order, in-order and post-order
    traversals.
    """
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None

    def convert(self, root):
        """
        Traverse *root* and return [preorder, inorder, postorder] lists.

        Bug fixes vs. the original:
        * ``[[]] * 3`` aliased one shared list three times; build three
          independent lists instead.
        * the traversal methods were invoked by their old names
          (xian/zhong/hou) and with ``self`` passed twice, which raised
          at runtime.
        * the collected lists were discarded; they are now returned.
        """
        _array = [[], [], []]
        self.pre(root, _array[0])
        self.mid(root, _array[1])
        self.post(root, _array[2])
        return _array

    def pre(self, root, _array):
        """Append values to _array in pre-order: node, left, right."""
        if not root:
            return 0
        _array.append(root.value)
        self.pre(root.left, _array)
        self.pre(root.right, _array)

    def mid(self, root, _array):
        """Append values to _array in in-order: left, node, right."""
        if not root:
            return 0
        # Bug fix: recurse on root's children, not on self's.
        self.mid(root.left, _array)
        _array.append(root.value)
        self.mid(root.right, _array)

    def post(self, root, _array):
        """Append values to _array in post-order: left, right, node."""
        if not root:
            return 0
        # Bug fix: recurse on root's children, not on self's.
        self.post(root.left, _array)
        self.post(root.right, _array)
        _array.append(root.value)
10,744 | 85b8bb4036ed125f5aa128c99c4fb7c366679b61 | #!/usr/bin/python3.6
# -*- coding: utf-8 -*-
import collections
import collections.abc
import math
import pdb
import random
import time
from itertools import repeat

import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Function as F
def _ntuple(n):
def parse(x):
if isinstance(x, collections.Iterable):
return x
return tuple(repeat(x, n))
return parse
_pair = _ntuple(2)
def quant_max(tensor):
"""
Returns the max value for symmetric quantization.
"""
return torch.abs(tensor.detach()).max() + 1e-8
def TorchRound():
    """
    Build a rounding op with a straight-through estimator (STE) backward:
    forward rounds to the nearest integer, backward passes the incoming
    gradient through unchanged.
    """
    class identity_quant(torch.autograd.Function):
        @staticmethod
        def forward(ctx, input):
            out = torch.round(input)
            return out

        @staticmethod
        def backward(ctx, grad_output):
            # STE: treat round() as the identity for gradients.
            return grad_output

    # Bug fix: the original returned ``identity_quant().apply`` --
    # instantiating an autograd.Function is a legacy pattern that raises
    # in modern PyTorch; ``.apply`` must be taken from the class itself.
    return identity_quant.apply
class quant_weight(nn.Module):
    """
    Symmetric k-bit weight quantizer: scales weights by their max absolute
    value, rounds with a straight-through gradient, then rescales back so
    the output stays in the original value range ("fake quantization").
    """
    def __init__(self, k_bits):
        super(quant_weight, self).__init__()
        self.k_bits = k_bits
        # Largest representable level of a signed k-bit integer.
        self.qmax = 2. ** (k_bits -1) - 1.
        # Round op with STE backward (see TorchRound).
        self.round = TorchRound()
    def forward(self, input):
        # Per-tensor scale: max |w| plus epsilon (see quant_max).
        max_val = quant_max(input)
        weight = input * self.qmax / max_val
        q_weight = self.round(weight)
        # De-quantize back to the original range.
        q_weight = q_weight * max_val / self.qmax
        return q_weight
class pams_quant_act(nn.Module):
    """
    k-bit activation quantizer with a learnable clipping threshold alpha
    (PAMS: parameterized max scale).  During the first ``ema_epoch`` epochs
    alpha tracks an exponential moving average of observed activation
    maxima; afterwards alpha is trained and activations are clipped to
    [-alpha, alpha] before fake quantization.
    """
    def __init__(self, k_bits, ema_epoch=1, decay=0.9997):
        super(pams_quant_act, self).__init__()
        # EMA smoothing factor for the warm-up max tracking.
        self.decay = decay
        self.k_bits = k_bits
        # Largest representable level of a signed k-bit integer.
        self.qmax = 2. ** (self.k_bits -1) -1.
        self.round = TorchRound()
        # Learnable clipping threshold.
        self.alpha = nn.Parameter(torch.Tensor(1))
        self.ema_epoch = ema_epoch
        # NOTE(review): self.epoch is never incremented in this class --
        # presumably advanced by the training loop; confirm.
        self.epoch = 1
        # Running max kept as a (non-trainable) buffer.
        self.register_buffer('max_val', torch.ones(1))
        self.reset_parameter()
    def reset_parameter(self):
        nn.init.constant_(self.alpha, 10)
    def _ema(self, x):
        # Mean over the batch of each sample's max |activation|.
        # NOTE(review): the triple max-over-dim-1 reduction assumes a 4-D
        # NCHW input -- confirm against callers.
        max_val = torch.mean(torch.max(torch.max(torch.max(abs(x),dim=1)[0],dim=1)[0],dim=1)[0])
        if self.epoch == 1:
            self.max_val = max_val
        else:
            self.max_val = (1.0-self.decay) * max_val + self.decay * self.max_val
    def forward(self, x):
        if self.epoch > self.ema_epoch or not self.training:
            # Normal operation: clip to [-alpha, alpha].
            act = torch.max(torch.min(x, self.alpha), -self.alpha)
        elif self.epoch <= self.ema_epoch and self.training:
            # Warm-up: pass through unclipped, update EMA, and pin alpha
            # to the running max.
            act = x
            self._ema(x)
            self.alpha.data = self.max_val.unsqueeze(0)
        # Fake-quantize: scale to integer levels, round (STE), scale back.
        act = act * self.qmax / self.alpha
        q_act = self.round(act)
        q_act = q_act * self.alpha / self.qmax
        return q_act
class QuantConv2d(nn.Module):
    """
    A convolution layer with quantized weight.

    Weights are re-quantized (symmetric, k-bit, max-scaled) on every
    forward pass via ``quant_weight``; the convolution runs on the
    de-quantized values ("fake quantization").
    """
    def __init__(self, in_channels, out_channels, kernel_size, stride=1,
                padding=0, dilation=1, groups=1, bias=False,k_bits=32,):
        super(QuantConv2d, self).__init__()
        # Square kernel assumed: weight is (out, in, k, k).
        self.weight = nn.Parameter(torch.Tensor(out_channels,in_channels,kernel_size,kernel_size))
        self.stride = stride
        self.padding = padding
        self.dilation = dilation
        self.groups = groups
        self.in_channels = in_channels
        # Normalized to a (kh, kw) pair for the fan-in computation below.
        self.kernel_size = _pair(kernel_size)
        self.bias_flag = bias
        if self.bias_flag:
            self.bias = nn.Parameter(torch.Tensor(out_channels))
        else:
            self.register_parameter('bias',None)
        self.k_bits = k_bits
        self.quant_weight = quant_weight(k_bits = k_bits)
        # NOTE(review): self.output is never assigned elsewhere in this
        # class -- possibly a leftover; confirm before removing.
        self.output = None
        self.reset_parameters()
    def reset_parameters(self):
        # Fan-in uniform initialisation: stdv = 1/sqrt(in * kh * kw).
        n = self.in_channels
        for k in self.kernel_size:
            n *= k
        stdv = 1. / math.sqrt(n)
        self.weight.data.uniform_(-stdv, stdv)
        if self.bias is not None:
            self.bias.data.uniform_(-stdv, stdv)
    def reset_parameter(self):
        # NOTE(review): near-duplicate of reset_parameters() and not called
        # from __init__ -- confirm whether external code relies on it.
        stdv = 1.0/ math.sqrt(self.weight.size(0))
        self.weight.data.uniform_(-stdv,stdv)
        if self.bias_flag:
            nn.init.constant_(self.bias,0.0)
    def forward(self, input, order=None):
        # Quantize weights on the fly; ``order`` is accepted but unused.
        return nn.functional.conv2d(input, self.quant_weight(self.weight), self.bias, self.stride, self.padding, self.dilation, self.groups)
def conv3x3(in_channels, out_channels, kernel_size=3, stride=1, padding=1, bias=True):
    """Build a plain (non-quantized) ``nn.Conv2d``, 3x3 by default.

    Bug fix: ``kernel_size`` is now honoured instead of being silently
    overridden by a hard-coded 3, so callers requesting another size get
    what they asked for.  Defaults are unchanged, so existing callers that
    relied on the 3x3 default behave identically.
    """
    return nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                     stride=stride, padding=padding, bias=bias)
def quant_conv3x3(in_channels, out_channels, kernel_size=3, padding=1, stride=1, k_bits=32, bias=False):
    """Convenience factory for a ``QuantConv2d`` (3x3 by default) with
    weight quantization at ``k_bits`` bits."""
    layer = QuantConv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        k_bits=k_bits,
        bias=bias,
    )
    return layer
|
10,745 | 62eac6c0b7535055320d3b5a07db036dee37d90e | # Stub file
from __future__ import absolute_import
from tsr.kin import * |
10,746 | d61bf7d08a2914fa6a530d9511e9c78409ae99b7 | import json
from unittest.mock import patch
import pytest
from workspace.techsupport.jobs import (
out_of_office_off,
out_of_office_on,
out_of_office_status,
)
@pytest.fixture
def config_path(tmp_path):
    # Fresh, non-existent JSON config path for each test.
    yield tmp_path / "test_ooo.json"
@pytest.mark.parametrize(
    "config,message",
    [
        (None, "Tech support out of office OFF"),  # OOO not on
        (
            {"start": "2022-01-01", "end": "3033-01-01"},  # OOO on
            "Tech support out of office OFF",
        ),
        (
            {"start": "3033-01-01", "end": "3033-01-01"},  # OOO scheduled
            "Scheduled tech support out of office cancelled",
        ),
    ],
)
def test_out_of_office_off(config_path, config, message):
    """Switching OOO off reports correctly whether the config is absent,
    currently active, or only scheduled for the future."""
    # Write the pre-existing OOO config, when the case defines one.
    if config is not None:
        with open(config_path, "w") as f_out:
            json.dump(config, f_out)
    # Point the job at the temp config path instead of the real location.
    with patch("workspace.techsupport.jobs.config_file", return_value=config_path):
        assert out_of_office_off() == message
@pytest.mark.parametrize(
    "config,message",
    [
        (None, "Tech support out of office is currently OFF."),  # OOO not on
        (
            {"start": "2000-01-01", "end": "2001-01-01"},  # OOO past
            "Tech support out of office is currently OFF.",
        ),
        (
            {"start": "2022-01-01", "end": "3033-01-01"},  # OOO on
            "Tech support out of office is currently ON until 3033-01-01.",
        ),
        (
            {"start": "3033-01-01", "end": "3033-01-01"},  # OOO scheduled
            "Tech support out of office is currently OFF.\n"
            "Scheduled out of office is from 3033-01-01 until 3033-01-01.",
        ),
    ],
)
def test_out_of_office_status(config_path, config, message):
    """Status reports OFF/ON/scheduled for absent, expired, active and
    future-dated configurations."""
    if config is not None:
        with open(config_path, "w") as f_out:
            json.dump(config, f_out)
    # Point the job at the temp config path instead of the real location.
    with patch("workspace.techsupport.jobs.config_file", return_value=config_path):
        assert out_of_office_status() == message
@pytest.mark.parametrize(
    "start,end,message",
    [
        (
            "2020-12-01",
            "3033-12-01",
            "Tech support out of office now ON until 3033-12-01",
        ),
        (
            "3033-12-01",
            "3034-12-01",
            "Tech support out of office scheduled from 3033-12-01 until 3034-12-01",
        ),
    ],
)
def test_out_of_office_on(config_path, start, end, message):
    """A valid date range writes the config file and reports active vs
    scheduled depending on whether the range has started."""
    # The job itself is responsible for creating the file.
    assert not config_path.exists()
    with patch("workspace.techsupport.jobs.config_file", return_value=config_path):
        assert out_of_office_on(start, end) == message
    # The written config round-trips the requested dates unchanged.
    assert config_path.exists()
    with open(config_path, "r") as f_in:
        config = json.load(f_in)
    assert config == {"start": start, "end": end}
@pytest.mark.parametrize(
    "start,end,message",
    [
        # trying to set OOO in the past
        ("2020-12-01", "2020-12-02", "Error: Can't set out of office in the past"),
        # start date after end date
        ("3033-12-01", "3033-11-01", "Error: start date must be before end date"),
    ],
)
def test_out_of_office_on_errors(config_path, start, end, message):
    """Logically invalid ranges return an error message and write no file."""
    assert not config_path.exists()
    with patch("workspace.techsupport.jobs.config_file", return_value=config_path):
        assert out_of_office_on(start, end) == message
    assert not config_path.exists()
@pytest.mark.parametrize(
    "start,end",
    [
        ("2020-02-30", "2020-12-02"),  # bad start
        ("3033-12-01", "3033-13-01"),  # bad end
    ],
)
def test_out_of_office_on_invalid_dates(config_path, start, end):
    """Unparseable dates raise ValueError and write no config file."""
    assert not config_path.exists()
    with patch("workspace.techsupport.jobs.config_file", return_value=config_path):
        with pytest.raises(ValueError):
            out_of_office_on(start, end)
    assert not config_path.exists()
|
10,747 | b2a0a080663b75490332c6dd1b8471ca06aa3001 | Testing creation of file on new branch MyBranch
|
10,748 | e82b1cf4d234e1b6cda9482cb3239ab5db8cf32c | class Solution:
def equationsPossible(self, equations: 'List[str]') -> 'bool':
def find(val):
if dic[val] != val:
dic[val] = find(dic[val])
return dic[val]
equations = sorted(equations, key=lambda x: x[1] =='!')
dic = {}
for code in range(ord('a'), ord('z')+1):
dic[chr(code)] = chr(code)
for e in equations:
if e[1] == '=':
dic[find(e[0])] = find(e[3])
if e[1] == '!' and find(e[0]) == find(e[3]):
return False
return True
if __name__ == '__main__':
    # Quick manual check; expected output: False
    # ("a==b" and "b!=c" force c != a, contradicting "c==a").
    s = Solution()
    print(s.equationsPossible(["a==b","b!=c","c==a"]))
|
10,749 | a95e4b82d19627fd808927083ddda5f3fb8ba7e7 | from wtforms import Form, StringField, PasswordField, SubmitField, validators
class RegisterFormContent(Form):
    """Registration form: mobile number, password with confirmation, SMS code.

    Bug fixes relative to the original:
    * ``EqualTo('confirm')`` referenced a field that does not exist on this
      form (a leftover from the WTForms docs example); it now points at
      ``password_repeat`` so password confirmation actually validates.
    * the ``mobile_code`` label read 'I accept the TOS' (same copy-paste
      source); corrected to describe the field.
    """
    # 7..11 digits roughly covers fixed-line through mobile numbers.
    mobile = StringField('mobile', [validators.Length(min=7, max=11)])
    password = PasswordField('New Password', [validators.DataRequired(), validators.EqualTo('password_repeat', message='Passwords must match')])
    password_repeat = PasswordField('Repeat Password')
    # Verification code sent to the mobile number.
    mobile_code = StringField('mobile code', [validators.DataRequired()])
    submit = SubmitField('Submit')
|
10,750 | b2e246bbe21c96e0b194c265863d81afc39c7663 | # -*- coding: utf-8 -*-
"""GP_project_Colab
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1MGNXlydcYgFRoAd6NC_EZtLdbv5MM7kk
# New Section
"""
from google.colab import drive
drive.mount('/content/drive')
!pip install dominate
# Commented out IPython magic to ensure Python compatibility.
# %cd /content/drive/My\ Drive/GP_Project/Realistic_Face/
# Commented out IPython magic to ensure Python compatibility.
# %mkdir trained
# %mkdir trained/w
# %mkdir datasets/w/test_A
!python test.py --dataroot /content/drive/My\ Drive/pix2pixHD-colab/pix2pixHD/datasets/w --name w --netG global --resize_or_crop none --label_nc 0 --no_instance --how_many 13 --checkpoints_dir ./trained --which_epoch 30
!python train.py --label_nc 0 --no_instance --name w --dataroot /content/drive/My\ Drive/pix2pixHD-colab/pix2pixHD/datasets/w --continue_train --display_freq 100 --checkpoints_dir /content/drive/My\ Drive/pix2pixHD-colab/pix2pixHD/checkpoints --which_epoch latest --save_epoch_freq 10 |
10,751 | 18c70bb0c10a640a883a41d0267dee503075e900 | from typing import Dict, List
from profileNode import Profiler, RacketDefineError, RacketDefineSucess, genesis
from evalexpr import evalexpr, lam
from printing import pttyobj
from structops import makestruct
def handle_define(gc, code, pf_node: Profiler = genesis()):
    """Dispatch a (define ...) form to struct, variable, or function handling.

    NOTE(review): the default Profiler is created once at import time and
    shared by every call that omits ``pf_node`` -- confirm this is intended.
    """
    target = code[1]
    if code[0] == "define-struct":
        return makestruct(gc, {}, code, pf_node)
    # A bare symbol means a variable binding; a list means a function header.
    if type(target) is str:
        return parsevariable(gc, code, pf_node)
    return parsefunction(gc, code, pf_node)
def parsevariable(context: Dict, code: List, profiler: Profiler) -> Dict:
    """Handle a variable definition form ``(define name value)``.

    Returns ``context`` updated with the new binding.  Raises (after logging
    a profiler event) on dynamic names, double definitions, or nonsense
    values.
    """
    name = code[1]
    value = code[2]
    if type(name) != str:
        profiler.add_event(RacketDefineError(code, name, "racket doesnt allow dynamic naming of identifers:"))
        raise Exception("racket doesnt allow dynamic naming of identifers: %s" % name)
    if name in context:
        # Log the event before raising, consistent with the other error paths.
        profiler.add_event(RacketDefineError(code, name, "tried to double define %s" % name))
        raise Exception("tried to double define %s" % name)
    if type(value) == str:
        context[name] = value
        # Bug fix: the format string takes two arguments.  Previously only
        # ``name`` was interpolated (raising TypeError at runtime) and
        # ``value`` was passed as a stray second constructor argument.
        profiler.add_event(RacketDefineSucess("added the name: %s w/ value %s to context." % (name, value)))
        return context
    elif type(value) == list:
        # Compound expression: evaluate it in the current context first.
        refinedvalue = evalexpr(context, [], value, profiler)
        profiler.add_event(RacketDefineSucess("Successfully evaled for %s to val %s" % (name, pttyobj(refinedvalue))))
        context[name] = refinedvalue
        return context
    else:
        profiler.add_event(RacketDefineError(code, name, f"tried to assign nonsense {value} to {name}"))
        raise Exception(f"tried to assign nonsense {value} to {name}")
def parsefunction(context: Dict, code: List, profiler: Profiler) -> Dict:
    """Handle a function definition form ``(define (name params...) body)``.

    Returns ``context`` with ``name`` bound to a lambda closure.
    """
    header = code[1]
    body = code[2]
    # pop(0) deliberately mutates the parsed form so only the parameter
    # names remain in the header list (mirrors the original behaviour).
    name = header.pop(0)
    if not header:
        profiler.add_event(RacketDefineError(code, name, "need a non-zero number of parameters for the function."))
        raise Exception("need a non-zero number of parameters for function: %s" % name)
    if name in context:
        profiler.add_event(RacketDefineError(code, name, "tried to double define function %s " % name))
        raise Exception("tried to double define function %s" % name)
    context[name] = lam(context, [], header, body)
    profiler.add_event(RacketDefineSucess("succesfully inst. %s" % name))
    return context
|
10,752 | 2430d824f20c0fe2ad1789b9aeb51fe8a124c3e7 | # routines for multiplying matrices
from __future__ import print_function
import numpy
def mult_Ax(A, x):
    """ return the product of matrix A and vector x: Ax = b """
    # Validate that x is one-dimensional.
    if x.ndim != 1:
        print("ERROR: x should be a vector")
        return None
    n = x.shape[0]
    # Validate that A is n-by-n.
    if A.shape != (n, n):
        print("ERROR: A should be square with each dim of same length as x")
        return None
    # b[i] is the dot product of row i of A with x.
    b = numpy.zeros(n, dtype=A.dtype)
    for row in range(n):
        b[row] = numpy.dot(A[row, :], x)
    return b
|
10,753 | 79177baef4e5d639efe409e743d4d694f8c03810 | from tkinter import *
from mainconnection import *
def complen():
    """Open the complaint-registration window (blocks in the Tk mainloop)."""
    root = Tk()
    root.geometry("650x500")
    root.title("Complain")
    # Two stacked frames: title row (f1) and form body (f2).
    f1 = Frame(root)
    f1.grid(row=0, column=0)
    f2 = Frame(root)
    f2.grid(row=1, column=0)
    Label(f1, text=" COMPLAIN REGISTRATION FORM", anchor="c").pack()
    l1 = Label(f2, text="Department Address", width=30, pady=10)
    l1.grid(row=0, column=0)
    e1 = Entry(f2, width=40)
    e1.grid(row=0, column=1)
    # Pre-fill the department address from the database when the table
    # exists (sql_table returning 1 -- presumably "table ready"; confirm).
    con = sql_connection()
    what = sql_table(con)
    if (what == 1):
        # NOTE(review): a second connection is opened here and the first is
        # never closed -- confirm sql_connection() semantics.
        con = sql_connection()
        cursorObj = con.cursor()
        cursorObj.execute('select address from data where id=1')
        rows = cursorObj.fetchall()
        for row in rows:
            e1.insert(0, row[0])
        con.commit()
    l2 = Label(f2, text="Enter Your Complain Statement", width=30, pady=10)
    l2.grid(row=1, column=0)
    ta1 = Text(f2, height=6, width=30)
    ta1.grid(row=1, column=1)
    root.mainloop()
|
10,754 | 00414ab2914473be6366dca65190a2a7619f3ad4 | from django.urls import path, re_path
from apps.main import views
urlpatterns = [
    # Product listing (site index).
    path('', views.ProductList.as_view(), name='index'),
    # Listing filtered by an arbitrary catalog filter string.
    re_path(r'^catalog/(?P<filter>.+)/$',
            views.ProductList.as_view(),
            name='catalog_filter'),
    # Detail page for a single product, addressed by numeric primary key.
    re_path(r'^detail/(?P<pk>\d+)/$',
            views.ProductView.as_view(),
            name='product_detail'),
]
|
10,755 | 14895c0b90b86eecb4dc386aa96bb48f570495b6 | from sys import argv
from itertools import islice
# Print bakery.csv to stdout.  Optional 1-based line-number arguments:
#   no args      -> print the whole file
#   START        -> print from line START to the end
#   START END    -> print lines START through END inclusive
with open('bakery.csv') as f:
    if len(argv) == 1:
        for line in f:
            print(line.strip())
    elif len(argv) == 2:
        for line in islice(f, int(argv[1]) - 1, None):
            print(line.strip())
    elif len(argv) == 3:
        for line in islice(f, int(argv[1]) - 1, int(argv[2])):
            print(line.strip())
|
10,756 | 343c608ad0db774f05adf3204f3cd673bf07304a | # PSDLayerExporter
# Copyright (c) 2016 Under the Weather, LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies
# or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import sys
import subprocess
import string
class PSDLayerExporter:
    """Exports the "base"/"detail" layers of a PSD as individual PNGs by
    driving ImageMagick, using PSDLayerInfo.py to enumerate layer names."""
    def process(self, argv):
        """List the PSD's layers and export every matching one.

        argv: sys.argv-style list; argv[1] is the input .psd path.
        """
        print("Layer exporter")
        if len(argv) < 2:
            print('No file specified.')
            return
        self.inputFile = argv[1]
        # NOTE(review): check_output returns bytes on Python 3 -- splitting
        # on a str assumes Python 2; confirm the target interpreter.
        out = subprocess.check_output('PSDLayerInfo.py ' + self.inputFile, shell=True)
        layers = out.split('\n')
        index = 0
        for layer in layers:
            index += 1
            # Bug fix: 'is not -1' compared object identity, not value (it
            # only works by CPython's small-int caching accident and raises
            # SyntaxWarning on 3.8+); '!=' is the correct comparison.
            if layer and (layer.find("base") != -1 or layer.find("detail") != -1):
                print("layer: " + layer)
                self.export_layer(index, layer)
    def export_layer(self, psdIndex, layer_name):
        """Export PSD layer number psdIndex as <input basename>_<layer_name>.png."""
        extractedFilename = ""
        extIndex = self.inputFile.rfind(".psd")
        if extIndex != -1:  # bug fix: was 'is not -1' (identity comparison)
            extractedFilename = self.inputFile[:extIndex]
        extractedFilename += "_" + layer_name + ".png"
        # Composite the requested layer over a transparent canvas the size
        # of the flattened image so the PNG preserves layer placement.
        cmd = self.inputFile + "[0] " + self.inputFile + "[" + str(psdIndex) + "] ( -clone 0 -alpha transparent ) -swap 0 +delete -coalesce -compose src-over -composite " + extractedFilename
        commandStr = 'convert ' + cmd
        subprocess.call(commandStr, shell=True)
def main():
    """Entry point: forward the command-line arguments to an exporter."""
    exporter = PSDLayerExporter()
    exporter.process(sys.argv)
if __name__ == "__main__":
    main()  # run only when executed as a script
10,757 | 9c8d2c2c32c6846e75983038479f58c4043bc101 | lst_1 = [1, 2, 3, 4, 5] * 8
lst_2 = [2, 1, 2, 3, 2, 4, 2, 5] * 5  # student 2's guess cycle, padded to 40
lst_3 = [3, 3, 1, 1, 2, 2, 4, 4, 5, 5] * 4  # student 3's guess cycle, padded to 40
def solution(answers):
    """Return the 1-based numbers of the guessers who score the most.

    Three students answer a multiple-choice exam with fixed repeating
    patterns; count each one's matches against ``answers`` and return all
    students tied for the top score, in ascending order.

    Improvement: the guess patterns are now local to the function instead
    of hidden module-level globals, so the function is self-contained;
    behaviour is unchanged (including returning [1, 2, 3] for empty input).
    """
    # Each pattern expanded to a common cycle length of 40 so one
    # ``i % 40`` index works for all three students.
    patterns = [
        [1, 2, 3, 4, 5] * 8,
        [2, 1, 2, 3, 2, 4, 2, 5] * 5,
        [3, 3, 1, 1, 2, 2, 4, 4, 5, 5] * 4,
    ]
    scores = [
        sum(1 for i, ans in enumerate(answers) if ans == pattern[i % 40])
        for pattern in patterns
    ]
    best = max(scores)
    return [who + 1 for who, score in enumerate(scores) if score == best]
|
10,758 | 611248fe498ea77260416fa1d534f23196856a8e | #!/usr/bin/env python
#import json module
import json
# variable 'f' is set as file stream (handle)
# Read era.json and regroup its flat (pid, datastream, field, value)
# records into one nested record per pid, printed as pretty JSON.
# NOTE: this file uses the Python 2 print statement (last line).
f = open('era.json', 'r')
json_string = f.read()
original = json.loads(json_string)
# pid -> {'collection': [...], 'pid': [...],
#         <datastream>: {<field>: [values...]}}
intermediate = {}
arr = original['objects']
for obj in arr:
    pid = obj['pid']
    collection = obj['collection']
    datastream = obj['datastream']
    field = obj['field']
    value = obj['value']
    # First record for this pid: seed the nested entry.
    if not pid in intermediate:
        intermediate[pid] = {
            'collection': [collection],
            'pid': [pid]
        }
    # Create the datastream bucket on first sight.
    if not datastream in intermediate[pid]:
        intermediate[pid][datastream]={}
    # First value for this field starts the list; later ones are appended.
    if not field in intermediate[pid][datastream]:
        intermediate[pid][datastream][field] = [value]
    else:
        intermediate[pid][datastream][field].append(value)
# format: wrap every pid entry in a {'data': ...} envelope.
result = []
for pidkey in intermediate:
    entry = {
        'data': intermediate[pidkey]
    }
    result.append(entry)
# Pretty-print the regrouped records deterministically (sorted keys).
print json.dumps(result, sort_keys=True, indent=2, separators=(',', ': '))
|
10,759 | 0af5448823bdf2e5509a9b000716bd773acb685f | '''
Demonstrate how easy it is to use GameWindow, and show contents of
DummyScene.
'''
from demoscenes import DummyScene
from app import GameWindow
if __name__ == '__main__':
    # Build a 640x480 window, register the demo scene under the key
    # 'dummy', and enter the main loop.
    game = GameWindow(title='[Demo] Headcrabs!!!', screen_size=(640, 480))
    game.register_scene(DummyScene(), 'dummy')
    game.start()
|
10,760 | 5409acd57a2c2aa2717511bfbb43f952097810e6 | #!/usr/bin/env python
"""Command line program to access build information.
"""
__author__ = 'Paul Landes'
from typing import List, Dict, Union, Iterable, Tuple
from dataclasses import dataclass, field
import logging
import re
import sys
import json
import plac
from pathlib import Path
logger = logging.getLogger(__name__)
DEBUG = False
@dataclass
class BuildInfoFetcher(object):
    """This class uses the project configuration and git metadata to output
    configuration useful for executing builds.  The build information is
    created as a JSON file using ``SetupUtil`` and persisted to disk for
    subsequent fast retrieval.

    :param path: points to somewhere in the temporary ``target`` directory

    :param format: indicates how to format the output (``val``, ``make`` or
                   ``shell``)

    :py:meth:`zensols.pybuild.SetupUtil`

    """
    DEFAULT_SETUP = Path('src/python/setup.py')
    # Splits one attribute-path component into (name, optional index),
    # e.g. 'author[0]' -> ('author', '0').
    ATTTR_REGEX = re.compile(r'^([^\[]+?)?(?:\[([0-9]+?)\])?$')
    # Strips '[N]' index markers when deriving environment-variable names.
    KEY_REGEX = re.compile(r'\[([0-9]+?)\]_?')

    path: Path
    rel_setup_path: Path = field(default=DEFAULT_SETUP)
    format: str = field(default='val')
    exist_strict: bool = field(default=False)
    index_strict: bool = field(default=False)
    type_strict: bool = field(default=False)

    def _assert_build_info(self):
        """Create the build info JSON file by using ``SetupUtil`` instance's
        ``to_json`` method.

        :py:function:`zensols.pybuild.SetupUtil.to_json`

        """
        if not self.path.exists():
            # Imported lazily so reading a pre-built JSON file does not
            # require the zensols package at all.
            from zensols.pybuild import SetupUtil
            self.path.parent.mkdir(parents=True, exist_ok=True)
            if not self.rel_setup_path.exists():
                raise OSError('configuration file does not ' +
                              f'exist: {self.rel_setup_path}')
            su = SetupUtil.source(rel_setup_path=self.rel_setup_path)
            logger.info(f'saving build info to {self.path}')
            with open(self.path, 'w') as f:
                su.to_json(writer=f)

    @property
    def build_info(self) -> Dict[str, Union[str, dict]]:
        """Return the build information tree of dicts, creating the JSON file
        first if it does not exist.  The parsed tree is memoized on the
        instance.

        :py:meth:`BuildInfoFetcher._assert_build_info`

        """
        self._assert_build_info()
        logger.info(f'loading build info from {self.path}')
        if not hasattr(self, '_build_info'):
            with open(self.path) as f:
                self._build_info = json.load(f)
        return self._build_info

    def _get_attrib_by_path(self, attrib_path: List[str], binfo: dict) -> \
            Union[str, dict]:
        """Recursively traverse the build information tree using path
        ``attrib_path``.  Return the data in the tree, usually a string.

        """
        if len(attrib_path) > 0:
            name = attrib_path.pop(0)
            # single dot case: skip the empty component
            if len(name) == 0:
                binfo = self._get_attrib_by_path(attrib_path, binfo)
            else:
                name, index = self.ATTTR_REGEX.match(name).groups()
                if name is not None:
                    binfo = binfo.get(name)
                if binfo is not None:
                    if index is not None:
                        index = int(index)
                        if index >= len(binfo):
                            # Out-of-range indexes either raise or resolve
                            # to None, depending on index_strict.
                            if self.index_strict:
                                raise ValueError(f'no attriubte at index {index}')
                            else:
                                binfo = None
                        else:
                            binfo = binfo[index]
                    binfo = self._get_attrib_by_path(attrib_path, binfo)
        return binfo

    def _get_attrib(self, attrib_path: str, binfo: dict) -> str:
        """Return a value with dotted jq like path ``attrib_path`` for an
        attribute treating ``binfo`` as a tree data structure.

        """
        apath = attrib_path.split('.')
        return self._get_attrib_by_path(apath, binfo)

    def get_attribs(self, attribs: List[str]) -> Iterable[Tuple[str, str]]:
        """Return an iterable of attributes as (name, value) tuples.

        """
        binfo = self.build_info
        for attrib in attribs:
            try:
                val = self._get_attrib(attrib, binfo)
            except Exception as e:
                logger.error(f'could not get attribute {attrib}: {e}')
                raise e
            if self.type_strict and not isinstance(val, str):
                raise ValueError(f'wrong value found for attribute: {attrib}')
            if val is not None:
                yield (attrib, val)
            elif self.exist_strict:
                raise ValueError(f'no such attribute: {attrib}')

    def get_attrib_dict(self, attribs: Tuple[str]) -> Dict[str, str]:
        """Return a set of key attributes as a dict where keys are ``attribs``
        with their leading character (assumed to be '.') stripped.

        :see: :meth:`get_attribs`

        """
        attrs = self.get_attribs(attribs)
        attrs = tuple(map(lambda a: (a[0][1:], a[1]), attrs))
        return dict(attrs)

    def _format_key(self, k: str) -> str:
        """Format a key from the dot path information as an upper-case,
        underscore-separated environment-variable style name.

        """
        if k[0] == '.':
            k = k[1:]
        k = k.replace('.', '_')
        k = k.upper()
        k = re.sub(self.KEY_REGEX, '', k)
        return k

    def __call__(self, attribs: List[str]):
        """Print out attribute ``attribs`` key values one per line.

        """
        fmt = self.format
        for k, v in self.get_attribs(attribs):
            if fmt == 'shell' or fmt == 'make':
                k = self._format_key(k)
            if fmt == 'shell':
                # Bug fix: POSIX shell assignment must not have spaces
                # around '='; the previous 'export {k} = "{v}"' produced an
                # invalid export statement.
                v = f'export {k}="{v}"'
            else:
                v = f'{k} = {v}'
            print(v)
@plac.annotations(
    path=plac.Annotation('The path to the JSON build.json blob.', type=Path),
    strict=plac.Annotation('Be strict and exit on failures', 'flag', 's'),
    setup=plac.Annotation('The path to the setup.py file.', 'option', 'p',
                          type=Path),
    format=plac.Annotation('The format of the output',
                           'option', 'f', str, ['val', 'make', 'shell']),
    attribs=plac.Annotation('Path to the JSON data desired', type=str))
def main(path: Path, strict: bool,
         setup: Path = BuildInfoFetcher.DEFAULT_SETUP,
         format: str = 'val', *attribs: List[str]):
    """Access build information made available by the git and setuptools
    metadata.  This uses ``zensols.pybuild.SetupUtil`` to access the git
    metadata and ``setup.py`` module metadata."""
    logger.info(f'parsing {path} using format {format}')
    try:
        # The single --strict flag drives all three strictness modes.
        fetcher = BuildInfoFetcher(path, setup, format, strict, strict, strict)
        fetcher(attribs)
    except Exception as e:
        # Full traceback only in debug builds; otherwise log briefly and
        # exit non-zero so shell callers can detect failure.
        if DEBUG:
            import traceback
            traceback.print_exc()
        logger.error(e)
        sys.exit(1)
if __name__ == '__main__':
    # Keep logging quiet (warnings only) unless DEBUG is flipped on above.
    logging.basicConfig(
        level=logging.DEBUG if DEBUG else logging.WARNING,
        format='buildinfo: %(levelname)s: %(message)s')
    plac.call(main)
|
10,761 | 85bfa30cc8620ba6dd289406d46c499c509eb2ea | from sqlalchemy import create_engine
import pandas as pd
import csv
import psycopg2
# setup psycopg2 and engine
try:
    conn = psycopg2.connect("dbname='dbbuurt' user='buurtuser' host='localhost' password='123456'")
    print("Database connection established")
except:
    # NOTE(review): bare except hides the real failure, and `conn` stays
    # undefined, so the cursor() call below will raise NameError anyway.
    print("Database connection failed")
cur = conn.cursor()
engine = create_engine('postgresql+psycopg2://buurtuser:123456@localhost/dbbuurt')
# Load the 2017->2018 and 2018->2019 region-code correspondence tables into
# dicts mapping the newer code to the list of older codes (';'-separated).
pullmatchold = """SELECT current, old FROM match1718"""
pullmatchnew = """SELECT current, old FROM match1819"""
dict1718 = {}
cur.execute(pullmatchold)
comp1718 = cur.fetchall()
for comp in comp1718:
    dict1718[comp[0]] = comp[1].split(";")
dict1819 = {}
cur.execute(pullmatchnew)
comp1819 = cur.fetchall()
for comp in comp1819:
    dict1819[comp[0]] = comp[1].split(";")
# Match 2017 codes to 2019 codes by composing the two mappings.
comp1719 = []
for code19, codes18 in dict1819.items():
    # Region (neighbourhood/district/municipality) unchanged in 2018:
    if len(codes18) == 1:
        code18 = codes18[0]
        try:
            codes17 = dict1718[code18]
            if len(codes17) == 1:
                code17 = codes17[0]
                newline = [code19, code17]
            # Region changed in 2017: join all predecessor codes.
            elif len(codes17) > 1:
                code17 = []
                for code in codes17:
                    code17.append(code)
                newline = [code19, ";".join(code17)]
        except:
            # NOTE(review): on a missing 2018 code this prints and falls
            # through, so the *previous* iteration's `newline` is appended
            # again below (or NameError on the first iteration) -- confirm.
            print(code18)
    # Region changed in 2018: resolve each 2018 predecessor separately.
    elif len(codes18) > 1:
        print(code19, codes18)
        code17 = []
        for code in codes18:
            try:
                codes17 = dict1718[code]
                if len(codes17) == 1:
                    code17.append(codes17[0])
                # Region changed in both 2017 and 2018.
                # NOTE(review): the inner loop variable shadows the outer
                # `code`; harmless here but fragile.
                elif len(codes17) > 1:
                    for code in codes17:
                        code17.append(code)
            except:
                print(code)
        newline = [code19, ";".join(code17)]
    comp1719.append(newline)
# Persist the composed 2017->2019 mapping as table `match1719`.
df = pd.DataFrame(comp1719)
columnlist = ['current', 'old']
df.columns = columnlist
print(df)
df.to_sql('match1719', engine)
print('Success')
#if key == dict1718[value]:
#    newline = [key, value]
#    print(newline)
"""
outputtable = []
for row in inserttable[1:]:
    specelements = []
    for element in row[2:]:
        if element:
            specelements.append(element)
    outputtable.append([row[1], ])
df = pd.DataFrame(outputtable)
columnlist = ['current', 'old']
df.columns = columnlist
print(df)
df.to_sql('matcheightnine', engine)
print('Success')
for key, value in dict1718.items():
    if key.startswith("GM"):
        print(key, value)
"""
10,762 | 4ac9c79ad4ed797071ef4e3275bc233dfb391d2e | # Generated by Django 3.0.3 on 2020-08-09 05:37
from django.db import migrations, models
def forwards_func(apps, schema_editor):
    """Seed the City table with the eight divisions of Bangladesh."""
    # Use the historical model so the migration stays valid as the live
    # model evolves, and write through the migration's database alias.
    City = apps.get_model("job", "City")
    db_alias = schema_editor.connection.alias
    division_names = [
        "Bangladesh,Barishal",
        "Bangladesh,Chattagram",
        "Bangladesh,Dhaka",
        "Bangladesh,Khulna",
        "Bangladesh,Mymensingh",
        "Bangladesh,Rajshahi",
        "Bangladesh,Rangpur",
        "Bangladesh,Sylhet",
    ]
    City.objects.using(db_alias).bulk_create(
        [City(name=name) for name in division_names]
    )
class Migration(migrations.Migration):
    """Create the `cities` table and seed it with Bangladesh's divisions."""
    dependencies = [
        ('job', '0096_auto_20200808_1711'),
    ]
    operations = [
        migrations.CreateModel(
            name='City',
            fields=[
                # Audit-trail columns (creation / modification / archival
                # metadata) mirroring the app's other models.
                ('created_by', models.CharField(max_length=255, null=True)),
                ('created_at', models.DateTimeField(null=True)),
                ('created_from', models.CharField(max_length=255, null=True)),
                ('modified_by', models.CharField(max_length=255, null=True)),
                ('modified_at', models.DateTimeField(null=True)),
                ('modified_from', models.CharField(max_length=255, null=True)),
                ('is_archived', models.BooleanField(default=False)),
                ('archived_by', models.CharField(max_length=255, null=True)),
                ('archived_at', models.DateTimeField(null=True)),
                ('archived_from', models.CharField(max_length=255, null=True)),
                # The "Country,Division" string doubles as the primary key.
                ('name', models.CharField(max_length=255, primary_key=True, serialize=False)),
            ],
            options={
                'db_table': 'cities',
            },
        ),
        # Seed the table in the same migration that creates it.
        migrations.RunPython(forwards_func)
    ]
|
10,763 | b6c56f19132de32555a077482badd27c00e48c43 | from bs4 import BeautifulSoup
import requests
import random
import re
import nltk
from py2casefold import casefold
from nltk.corpus import stopwords, words
"""
Utility Crawler class which is called for all the different
types of crawler (i.e. DFS, BFS or Focused) This class is open to customization
based on the arguements passed. Furthermore, it can be extended by child classes to
make it more customizable
"""
class Utility:
    def __init__(self):
        # Decorative separator for callers' console output.
        self.line_break = '************************'
    def process_url(self, url, html):
        """
        Process a URL to get all the Links available on the page
        """
        # Only look inside the element with id 'content' so navigation
        # chrome and sidebars are ignored.
        html = self.getHtmlContent(html, 'content')
        new_urls = self.getValidUrlsFromHtml(html)
        return new_urls
    def getValidUrlsFromHtml(self, content):
        """
        Get all the valid URLs from the given html content
        """
        a_tags = content.find_all('a')
        urls = []
        for a_tag in a_tags:
            url = a_tag.get('href')
            if self.isUrlValid(url):
                urls.append(self.getFilteredUrl(url.lower()))
        return urls
    def isUrlValid(self, url):
        """
        Returns true iff and only if the url passed is valid according to
        the conditions given in the question
        """
        if url is None:
            return False
        elif url.startswith('//'):
            # Protocol-relative links are rejected outright.
            return False
        elif ':' in url:
            # Excludes absolute URLs and namespaced wiki pages (File:, Talk:).
            return False
        elif url.startswith('/wiki'):
            return True
        elif 'en.wikipedia.org/wiki/' not in url:
            return False
        return True
    def getFilteredUrl(self, url):
        """
        Filter the URL to return it in it's correct form.
        Removing things like hyperlink on a different section of a page
        or missing https://
        """
        # Drop any '#section' fragment.
        url = url.split('#')[0]
        if url.startswith('/wiki'):
            return ('https://en.wikipedia.org' + url)
        if 'en.wikipedia.org/wiki/' not in url:
            # NOTE(review): no '/' is inserted between '/wiki' and the url --
            # confirm inputs here always begin with a slash.
            return ('https://en.wikipedia.org/wiki' + url)
        return url
    def getUrlHeader(self, head):
        # Fall back to a random 10-char name when the page has no title.
        if head.string is None:
            print('Header not found. Generating a random string.\n')
            return ''.join(random.choice('abcdnefiwnfnwe356435234fgrbeirfnd23435t') for _ in range(10))
        return head.string
    def getHtml(self, url):
        """
        Get HTML Contents from the crawled url.
        Returns the content with the content block only.
        """
        r = requests.get(url)
        html = r.content
        return html
    def getAllHTMLTags(self, html, tag):
        """
        Get HTML Contents from the crawled url.
        Returns all data for the given tag
        """
        soup = BeautifulSoup(html, 'html.parser')
        content = soup.find_all(tag)
        return content
    def getHTMLTag(self, html, tag):
        """
        Get HTML Contents from the crawled url.
        Returns all the p tags
        """
        soup = BeautifulSoup(html, 'html.parser')
        content = soup.find(tag)
        return content
    """
    Gets HTML Content for the given id
    """
    def getHtmlContent(self, html, id = 'body'):
        # Returns the single element whose id attribute matches `id`.
        soup = BeautifulSoup(html, 'html.parser')
        content = soup.find(id=id)
        return content
    """
    Parse the given data. Performs Casefolding, and punctation removal
    """
    def parse(self, data):
        # NOTE(review): py2casefold plus a str-pattern regex over encoded
        # bytes implies Python 2; under Python 3 the re.sub below would
        # raise on the bytes produced by encode().
        # case-fold handled
        data = casefold(data)
        # encode to utf-8
        data = data.encode('utf-8')
        # punctation removed
        data = re.sub(r'\W+', ' ', data)
        # lowercase
        return data.lower().strip()
    """
    Tokenize the data using NLTK Library
    """
    def tokenize(self, data):
        print('Tokenizing...')
        if data is None:
            return ''
        tokens = nltk.word_tokenize(data)
        # Re-join with single spaces so downstream ngram code can split().
        return ' '.join(tokens)
    """
    Generate trigram for the given data using NLTK Library
    """
    def get_and_process_ngrams(self, data, grams):
        print('Generating ' + str(grams) + '-grams...')
        ngrams = nltk.ngrams(data.split(), grams)
        processed_ngrams = []
        for ng in ngrams:
            if len(ng) > 0:
                processed_ngrams.append(ng)
        return processed_ngrams
    """
    Initialize a dict with the given length and value
    """
    def init_dict(self, keys, value):
        # Every key gets the *same* value object (shared if mutable).
        idict = {}
        for k in keys:
            idict[k] = value
        return idict
    def get_random_string(self):
        # 10 characters drawn from a fixed arbitrary alphabet.
        return ''.join(random.choice('abcdnefiwnfnwe356435234fgrbeirfnd23435t') for _ in range(10))
    def get_stop_list(self):
        # English stopwords from NLTK, as a set for O(1) membership tests.
        return set(stopwords.words('english'))
    def check_word_exist(self, word):
        # True when `word` is in NLTK's English word corpus (linear scan).
        return word in words.words()
10,764 | f09f17e2e7f3d445e0732dd3da188c924ce8ebca | import os
# allFiles=[]
# def begin_new_listfile():
# global allFiles
# allFiles=[]
# return
def list_all_fm_file(filepath, suffix):
    """Recursively collect files under ``filepath`` whose full path contains
    ``suffix`` (at any position past index 0).

    Returns a list of full paths.  Bug fix: matches found by the recursive
    calls into sub-directories were previously discarded (the recursion
    appended to its own local list and the result was ignored); they are
    now collected into the caller's list.
    """
    allFiles = []
    for fi in os.listdir(filepath):
        fi_d = os.path.join(filepath, fi)
        if os.path.isdir(fi_d):
            # Bug fix: keep the matches found inside the sub-directory.
            allFiles.extend(list_all_fm_file(fi_d, suffix))
        elif fi_d.find(suffix) > 0:
            # NOTE(review): substring test mirrors the original; use
            # endswith() if a true extension match is wanted.
            allFiles.append(fi_d)
    return allFiles
|
10,765 | ed455c529a02d7f55d847ae4d94e1ecdcdf72fd2 | import io
from unittest import TestCase
from unittest.mock import patch
from game import character_attack_description
class TestCharacterAttackDescription(TestCase):
    @patch('random.randint', side_effect=[0])
    @patch('sys.stdout', new_callable=io.StringIO)
    def test_character_attack_description_sorcerer_lv1(self, mock_stdout, random_number_generator):
        """A level-1 Sorcerer announces the first lv-1 skill when the RNG
        picks index 0."""
        level = 1
        character_class = 'Sorcerer'
        sorcerer_lv1_skill = ['fire ball', 'ice beam', 'water ball']
        character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
        foe = {'name': 'monster'}
        character_attack_description(character, foe)
        expected = f"{character['name']} used {sorcerer_lv1_skill[0]} to {foe['name']}\n"
        actual = mock_stdout.getvalue()
        self.assertEqual(actual, expected)
    @patch('random.randint', side_effect=[0])
    @patch('sys.stdout', new_callable=io.StringIO)
    def test_character_attack_description_sorcerer_lv2(self, mock_stdout, random_number_generator):
        """A level-2 Sorcerer announces the first lv-2 skill when the RNG
        picks index 0."""
        level = 2
        character_class = 'Sorcerer'
        sorcerer_lv2_skill = ['magic claw', 'blizzard', 'holy beam']
        character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
        foe = {'name': 'monster'}
        character_attack_description(character, foe)
        expected = f"{character['name']} used {sorcerer_lv2_skill[0]} to {foe['name']}\n"
        actual = mock_stdout.getvalue()
        self.assertEqual(actual, expected)
    @patch('random.randint', side_effect=[0])
    @patch('sys.stdout', new_callable=io.StringIO)
    def test_character_attack_description_sorcerer_lv3(self, mock_stdout, random_number_generator):
        """A level-3 Sorcerer announces the first lv-3 skill when the RNG
        picks index 0."""
        level = 3
        character_class = 'Sorcerer'
        sorcerer_lv3_skill = ['meteor', 'god bless', 'thunder storm']
        character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
        foe = {'name': 'monster'}
        character_attack_description(character, foe)
        expected = f"{character['name']} used {sorcerer_lv3_skill[0]} to {foe['name']}\n"
        actual = mock_stdout.getvalue()
        self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_thief_lv1(self, mock_stdout, random_number_generator):
level = 1
character_class = 'Thief'
sorcerer_lv1_skill = ['fire in the hole', 'stabbing', 'nut cracking']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv1_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_thief_lv2(self, mock_stdout, random_number_generator):
level = 2
character_class = 'Thief'
sorcerer_lv2_skill = ['double attack', 'shadow punch', 'shuriken burst']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv2_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_thief_lv3(self, mock_stdout, random_number_generator):
level = 3
character_class = 'Thief'
sorcerer_lv3_skill = ['triple throw', 'dark flare', 'shadow knife']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv3_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_bowman_lv1(self, mock_stdout, random_number_generator):
level = 1
character_class = 'Bowman'
sorcerer_lv1_skill = ['double shot', 'bomb arrow', 'sling shot']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv1_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_bowman_lv2(self, mock_stdout, random_number_generator):
level = 2
character_class = 'Bowman'
sorcerer_lv2_skill = ['fire arrow', 'lightning arrow', 'crossbow shot']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv2_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_bowman_lv3(self, mock_stdout, random_number_generator):
level = 3
character_class = 'Bowman'
sorcerer_lv3_skill = ["Dragon breath", "bullseye shot", "terra ray"]
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv3_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_fighter_lv1(self, mock_stdout, random_number_generator):
level = 1
character_class = 'Fighter'
sorcerer_lv1_skill = ['dirty boxing', 'low sweep', 'bat swing']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv1_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_fighter_lv2(self, mock_stdout, random_number_generator):
level = 2
character_class = 'Fighter'
sorcerer_lv2_skill = ['kendo slash', 'tornado kick', 'dragon sword']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv2_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
@patch('random.randint', side_effect=[0])
@patch('sys.stdout', new_callable=io.StringIO)
def test_character_attack_description_fighter_lv3(self, mock_stdout, random_number_generator):
level = 3
character_class = 'Fighter'
sorcerer_lv3_skill = ['sword dance', 'divine crash', 'critical hammer shot']
character = {'name': 'player', 'hp': 20, 'max_hp': 20, 'level': level, 'class': character_class}
foe = {'name': 'monster'}
character_attack_description(character, foe)
expected = f"{character['name']} used {sorcerer_lv3_skill[0]} to {foe['name']}\n"
actual = mock_stdout.getvalue()
self.assertEqual(actual, expected)
|
# Member states of SAARC (spellings kept as in the original script).
saarc = [
    "Bangladesh",
    "India",
    "Nepal",
    "Afganistan",
    "Pakistan",
    "Bhutan",
    "Srilanka",
]
for member in saarc:
    print(member, "is a member of saarc")
#!/usr/bin/python
"""Small scratch script demonstrating tuple return values."""

lk = [1, 23]
s = lk  # `s` aliases the list until rebound below


def f():
    """Return a fixed 3-tuple."""
    return 1, 2, 3


# Bug fix: the original `s, = f()` tried to unpack a 3-tuple into a single
# target and raised "ValueError: too many values to unpack".  Bind the whole
# tuple instead.
s = f()
print(s)
10,768 | 811d9b75181a1ec8f22cf9ff349d2de1d3345737 | '''
Input: a List of integers where every int except one shows up twice
Returns: an integer
'''
def single_number(arr):
    """Return the one integer in *arr* that appears an odd number of times.

    Uses XOR folding: pairs cancel out, leaving the lone value.  This fixes
    two defects in the pairwise-scan original: it raised IndexError when the
    single value was the last element (arr[cursor + 1] out of range), and it
    silently required equal values to be adjacent.  Returns 0 for an empty
    list.
    """
    result = 0
    for value in arr:
        result ^= value
    return result
if __name__ == '__main__':
    # Use the main function to test your implementation
    # (9 is the only value that appears an odd number of times below)
    arr = [1, 1, 4, 4, 5, 5, 3, 3, 9, 0, 0]
    print(f"The odd-number-out is {single_number(arr)}")
|
10,769 | 21e5ef6b455e08a1bfcf9e0325b9e4d2a3300b33 | """
# DAO 클래스
data access object
데이터베이스의 데이터에 접근하기 위한 역활 담당
MVC 패턴에서는 서비스클래스와 DAO 객체로 나눠 프로그래밍함
DAO : 주로 DB를 사용해서 데이터를 조죄하거나 조작하는 기능 담당
서비스 : DB 작업전 데이터를 처리하는 기능을 담당
성적처리 프로그램에서의 MVC
Model (데이터) : VO 클래스
View (데이터 출력/입력) : 화면출력
Controller (흐름제어) : service + dao
"""
class Student:
    """Value object (VO) holding one student's scores and computed results.

    The raw inputs (name/kor/eng/mat) come from the constructor; tot, mean,
    and grd start at neutral defaults and are filled in later by
    SungJukService.computeSungJuk().  All attributes are name-mangled
    privates exposed through plain pass-through properties.
    """
    def __init__(self,name,kor,eng,mat):
        # Raw input scores
        self.__name = name
        self.__kor = kor
        self.__eng = eng
        self.__mat = mat
        # Computed results, filled in by the service layer
        self.__tot = 0
        self.__mean = 0.0
        self.__grd = '가'
    # setter/getter properties for every field
    @property
    def name(self):
        return self.__name
    @name.setter
    def name(self, value):
        self.__name=value
    @property
    def kor(self):
        return self.__kor
    @kor.setter
    def kor(self, value):
        self.__kor = value
    @property
    def eng(self):
        return self.__eng
    @eng.setter
    def eng(self, value):
        self.__eng = value
    @property
    def mat(self):
        return self.__mat
    @mat.setter
    def mat(self, value):
        self.__mat = value
    @property
    def tot(self):
        return self.__tot
    @tot.setter
    def tot(self, value):
        self.__tot = value
    @property
    def mean(self):
        return self.__mean
    @mean.setter
    def mean(self, value):
        self.__mean = value
    @property
    def grd(self):
        return self.__grd
    @grd.setter
    def grd(self, value):
        self.__grd = value
    # Render the name and the three raw scores (not the computed fields)
    def __str__(self):
        msg ='%s %d %d %d' % (self.__name,self.__kor,self.__eng,self.__mat)
        return msg
#성적 처리 서비스 클래스
class SungJukService:
    """Service layer of the grade program: input, computation, output,
    and (stubbed) CRUD operations against a future DAO/DB layer."""
    def readSungJuk(self):
        # Read grade data from the user and build a Student object
        name = input('이름은?')
        kor = int(input('국어는?'))
        eng = int(input('영어는?'))
        mat = int(input('수학은?'))
        return Student(name,kor,eng,mat)
    def computeSungJuk(self,std):
        # Compute total, mean, and letter grade on the given Student
        std.tot = std.kor + std.eng + std.mat
        std.mean = std.tot / 3
        std.grd = '가'
        if std.mean >= 90:
            std.grd = '수'
        elif std.mean >= 80:
            std.grd = '우'
        elif std.mean >= 70:
            std.grd = '미'
        elif std.mean >= 60:
            std.grd = '양'
    def printSungJuk(self,std):
        # %d truncates the float mean toward zero when formatting
        msg = "%s %d %d %s" % (std.name,std.tot,std.mean,std.grd)
        print(msg)
    def saveSungJuk(self): # save grades to the DB (stub)
        pass
    def readOneSungJuk(self): # look up a single grade record (stub)
        pass
    def readAllSungJuk(self): # look up all grade records (stub)
        pass
    def modifySungJuk(self): # update a grade record (stub)
        pass
    def removeSungJuk(self): # delete a grade record (stub)
        pass
#oop로 만든 성적 처리 프로그램 실행 성적데이터 생성(1)
# std1= Student("혜교",89,97,95)
# print(std1)
# #성적데이터생성2
# name = input('이름은?')
# kor = int(input('국어는?'))
# eng = int(input('영어는?'))
# mat = int(input('수학은?'))
# std2 = Student(name,kor,eng,mat)
# print(std2)
#성적데이터 생성 (3)
# sjsrv = SungJukService()
# std3 = sjsrv.readSungJuk()
# print(std3)
#
# sjsrv.computeSungJuk(std3)
# sjsrv.printSungJuk(std3)
"""
#객체 지향 개념 정리
클래스는 데이터와 기능을 함꼐 묶어
프로그램을 효율적으로 작성하는 것을 도와준다.
한편 파이썬에서 제공하는 모든 클래스는 계층구조로 이뤄져 있으며 사용자가 작성한 클래스도
사실 파이썬이 미리 정의해 둔 클래스를 상속해서 만드는 것이다.
이썬이 미리 정의해 둔 클래스를 조상 클래스라 한다.
__str__ 함수 : 조상클래스에서 미리 정의해 둔 특수한 함수이다
객체가 가지고 있는 정보나 값을 문자열로 만들어 return 하는 기능을 담당한다.
"""
class HellWorld:
    """Minimal class used to demonstrate the default __str__/__repr__."""
    pass

# Instantiate the class.  (The original `hw = HellWorld` was missing the
# parentheses and bound the class object itself; the comments below describe
# printing an *instance's* memory address, so instantiation was the intent.)
hw = HellWorld()
print(hw)
# The created object's default representation (type and memory address) is
# printed.  That is why developers override __str__ to return a meaningful
# string — i.e. a string that represents the object.
# Meanwhile, print() renders whatever it is given in string form: regardless
# of the variable's type it is converted to a string, and the object's
# __str__ is called automatically to do so.
|
10,770 | 75f91c09fa7813e687ea55f14214d96f21e5cb39 | #!/home/sourabh/anaconda3/bin/python
"""
The reducer script for the same job.
It runs computation on the data received by the mapper.
"""
import sys
import csv
def reducer():
    """
    MapReduce Reducer.

    Streams tab-separated rows (id, <ignored>, node_type, body_length) from
    stdin, grouped by id, and emits one summary row per question through
    write_record.  Assumes the mapper output is sorted by id so each group
    is contiguous.
    """
    reader = csv.reader(sys.stdin, delimiter='\t')
    writer = \
        csv.writer(
            sys.stdout, delimiter='\t', quotechar='"',
            quoting=csv.QUOTE_MINIMAL)

    answer_count = 0
    answer_total_length = 0
    question_body_length = None
    current_id = None

    for line in reader:
        if len(line) != 4:
            continue  # skip malformed rows (original ignored them too)
        the_id = line[0]
        if the_id != current_id:
            # New group: flush the previous one (if any) and reset counters.
            if current_id is not None:
                write_record(
                    current_id, question_body_length, answer_count,
                    answer_total_length, writer)
            answer_count = 0
            answer_total_length = 0
            question_body_length = None
            current_id = the_id
        node_type = line[2]
        body_length = int(line[3])
        if node_type == "question":
            question_body_length = body_length
        else:
            answer_count += 1
            answer_total_length += body_length

    # Flush the final group (runs even for empty input, as before).
    write_record(
        current_id, question_body_length, answer_count, answer_total_length,
        writer)
def write_record(
        the_id, question_body_length, answer_count, answer_total_length, writer):
    """
    Outputs
        Question Node ID | Question Length | Average Answer Length

    Emits the literal string "0" when there are no answers, otherwise the
    float average of the answer lengths.
    """
    if answer_count == 0:
        writer.writerow([the_id, question_body_length, "0"])
        return
    average = float(answer_total_length) / float(answer_count)
    writer.writerow([the_id, question_body_length, average])
if __name__ == "__main__":
reducer()
|
10,771 | 1c2846b0921caabe71ea220a1f798117f729419d | # --- Day 16: Ticket Translation ---
# As you're walking to yet another connecting flight, you realize that one of the legs of your re-routed trip coming up is on a high-speed train. However, the train ticket you were given is in a language you don't understand. You should probably figure out what it says before you get to the train station after the next flight.
# Unfortunately, you can't actually read the words on the ticket. You can, however, read the numbers, and so you figure out the fields these tickets must have and the valid ranges for values in those fields.
# You collect the rules for ticket fields, the numbers on your ticket, and the numbers on other nearby tickets for the same train service (via the airport security cameras) together into a single document you can reference (your puzzle input).
# The rules for ticket fields specify a list of fields that exist somewhere on the ticket and the valid ranges of values for each field. For example, a rule like class: 1-3 or 5-7 means that one of the fields in every ticket is named class and can be any value in the ranges 1-3 or 5-7 (inclusive, such that 3 and 5 are both valid in this field, but 4 is not).
# Each ticket is represented by a single line of comma-separated values. The values are the numbers on the ticket in the order they appear; every ticket has the same format. For example, consider this ticket:
# .--------------------------------------------------------.
# | ????: 101 ?????: 102 ??????????: 103 ???: 104 |
# | |
# | ??: 301 ??: 302 ???????: 303 ??????? |
# | ??: 401 ??: 402 ???? ????: 403 ????????? |
# '--------------------------------------------------------'
# Here, ? represents text in a language you don't understand. This ticket might be represented as 101,102,103,104,301,302,303,401,402,403; of course, the actual train tickets you're looking at are much more complicated. In any case, you've extracted just the numbers in such a way that the first number is always the same specific field, the second number is always a different specific field, and so on - you just don't know what each position actually means!
# Start by determining which tickets are completely invalid; these are tickets that contain values which aren't valid for any field. Ignore your ticket for now.
# For example, suppose you have the following notes:
# class: 1-3 or 5-7
# row: 6-11 or 33-44
# seat: 13-40 or 45-50
# your ticket:
# 7,1,14
# nearby tickets:
# 7,3,47
# 40,4,50
# 55,2,20
# 38,6,12
# It doesn't matter which position corresponds to which field; you can identify invalid nearby tickets by considering only whether tickets contain values that are not valid for any field. In this example, the values on the first nearby ticket are all valid for at least one field. This is not true of the other three nearby tickets: the values 4, 55, and 12 are are not valid for any field. Adding together all of the invalid values produces your ticket scanning error rate: 4 + 55 + 12 = 71.
# Consider the validity of the nearby tickets you scanned. What is your ticket scanning error rate?
import copy
def fileInput():
    """Read the puzzle input file and return it as a list of lines.

    Bug fix: the original opened the file twice — the first handle was never
    used and leaked until the trailing close; a single `with` block now
    handles open/close.
    """
    with open(inputFile) as f:
        return f.read().split('\n')
def splitData(data):
    """Group consecutive lines into sub-lists, splitting on empty strings.

    A trailing (possibly empty) group is always included, matching the
    original behaviour.
    """
    groups = [[]]
    for entry in data:
        if entry == '':
            groups.append([])
        else:
            groups[-1].append(entry)
    return groups
def orgRules(rulesData):
    """Parse rule lines like 'class: 1-3 or 5-7' into a flat list of
    (low, high) int tuples — two tuples per rule line."""
    pairs = []
    for line in rulesData:
        ranges_text = line.split(': ')[1]  # drop the rule name
        tokens = ranges_text.split(' ')
        del tokens[1]                      # drop the literal 'or'
        pairs.extend(token.split('-') for token in tokens)
    return [(int(low), int(high)) for low, high in pairs]
def orgTickets(ticketData):
    """Convert ticket lines ('7,3,47', ...) into lists of ints.

    The first element (the 'nearby tickets:' header) is popped off the
    caller's list, preserving the original in-place mutation.
    """
    ticketData.pop(0)
    return [[int(value) for value in line.split(',')] for line in ticketData]
def invalidTickets(rules, tickets):
    """Return the ticket scanning error rate: the sum of every ticket value
    that is not valid for any (low, high) rule range."""
    all_values = [value for ticket in tickets for value in ticket]
    invalid = [
        value
        for value in all_values
        if not any(low <= value <= high for low, high in rules)
    ]
    return sum(invalid)
#///////////////////////////////////////////////////
inputFile = 'day16-input.txt'
if __name__ == "__main__":
    data = fileInput()
    data = splitData(data)
    # print(data[0])
    data[0] = orgRules(data[0]) #Rules
    data[2] = orgTickets(data[2]) #Nearby Tickets
    # NOTE(review): this rebinds the name `invalidTickets` from the function
    # to its integer result, so the function cannot be called again after
    # this line.
    invalidTickets = invalidTickets(data[0],data[2])
    print(invalidTickets)
|
10,772 | ae526637c324284dbcd9c29eacd1655c497f83cc | from bayes import Bayes
def die_likelihood(roll, die):
    """
    Args:
        roll (int): result of a single die roll
        die (int): number of sides of the die that produced the roll

    Returns:
        likelihood (float): the probability of the roll given the die —
        1/die for a face the die actually has, 0 otherwise.
    """
    faces = range(1, die + 1)
    if roll not in faces:
        return 0
    return 1 / die
if __name__ == '__main__':
    # NOTE(review): the values in `uniform_prior` are NOT uniform (they scale
    # with die size) while `unbalanced_prior` IS flat — the two names look
    # swapped relative to their contents; confirm the intended labelling.
    uniform_prior = {
        4: .08,
        6: .12,
        8: .16,
        12: .24,
        20: .40
    }
    unbalanced_prior = {
        4: .2,
        6: .2,
        8: .2,
        12: .2,
        20: .2
    }
    # Two Bayes updaters sharing the same likelihood, differing only in prior.
    die_bayes_1 = Bayes(uniform_prior, die_likelihood)
    die_bayes_2 = Bayes(unbalanced_prior, die_likelihood)
    experiment = [8,2,1,2,5,8,2,4,3,7,6,5,1,6,2,5,8,8,5,
                3,4,2,4,3,8,8,7,8,8,8,5,5,1,3,8,7,8,5,
                2,5,1,4,1,2,1,3,1,5]
    # experiment1/experiment2 are alternative roll sequences; unused below.
    experiment1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
    experiment2 = [20,20,20,20,20,20,20,20,20,20,20]
    # Feed each roll to both updaters and print the evolving posteriors.
    for i, roll in enumerate(experiment):
        print ("*" * 50)
        print ("ROLL#: {}, ROLL: {}".format(i+1, roll))
        print ("***** UNIFORM ******")
        die_bayes_1.update(roll)
        die_bayes_1.print_distribution()
        print ("**** UNBALANCED ****")
        die_bayes_2.update(roll)
        die_bayes_2.print_distribution()
10,773 | e7ce25be6e64a0a60e3662856c7819f588fc2feb |
from south.db import db
from django.db import models
from lfs.criteria.models import *
class Migration:
    """South schema migration for the `criteria` app.

    `forwards` creates the criterion tables (weight, user, height, country,
    length, cart price, width), the generic `CriteriaObjects` join table and
    the two implicit many-to-many tables; `backwards` drops them all in the
    same order.  The `models` dict is a frozen snapshot of the ORM taken
    when the migration was generated — do not edit it by hand.
    """

    def forwards(self, orm):
        """Apply the migration: create every criterion table and signal South."""
        # Adding model 'WeightCriterion'
        db.create_table('criteria_weightcriterion', (
            ('operator', models.PositiveIntegerField(_(u"Operator"), null=True, blank=True)),
            ('id', models.AutoField(primary_key=True)),
            ('weight', models.FloatField(_(u"Weight"), default=0.0)),
        ))
        db.send_create_signal('criteria', ['WeightCriterion'])
        # Adding model 'UserCriterion'
        db.create_table('criteria_usercriterion', (
            ('id', models.AutoField(primary_key=True)),
        ))
        db.send_create_signal('criteria', ['UserCriterion'])
        # Adding model 'HeightCriterion'
        db.create_table('criteria_heightcriterion', (
            ('operator', models.PositiveIntegerField(null=True, blank=True)),
            ('id', models.AutoField(primary_key=True)),
            ('height', models.FloatField(_(u"Height"), default=0.0)),
        ))
        db.send_create_signal('criteria', ['HeightCriterion'])
        # Adding model 'CriteriaObjects'
        db.create_table('criteria_criteriaobjects', (
            ('criterion_id', models.PositiveIntegerField(_(u"Content id"))),
            ('content_type', models.ForeignKey(orm['contenttypes.ContentType'], related_name="content_type", verbose_name=_(u"Content type"))),
            ('position', models.PositiveIntegerField(_(u"Position"), default=999)),
            ('content_id', models.PositiveIntegerField(_(u"Content id"))),
            ('id', models.AutoField(primary_key=True)),
            ('criterion_type', models.ForeignKey(orm['contenttypes.ContentType'], related_name="criterion", verbose_name=_(u"Criterion type"))),
        ))
        db.send_create_signal('criteria', ['CriteriaObjects'])
        # Adding model 'CountryCriterion'
        db.create_table('criteria_countrycriterion', (
            ('operator', models.PositiveIntegerField(_(u"Operator"), null=True, blank=True)),
            ('id', models.AutoField(primary_key=True)),
        ))
        db.send_create_signal('criteria', ['CountryCriterion'])
        # Adding model 'LengthCriterion'
        db.create_table('criteria_lengthcriterion', (
            ('operator', models.PositiveIntegerField(_(u"Operator"), null=True, blank=True)),
            ('length', models.FloatField(_(u"Length"), default=0.0)),
            ('id', models.AutoField(primary_key=True)),
        ))
        db.send_create_signal('criteria', ['LengthCriterion'])
        # Adding model 'CartPriceCriterion'
        db.create_table('criteria_cartpricecriterion', (
            ('operator', models.PositiveIntegerField(_(u"Operator"), null=True, blank=True)),
            ('price', models.FloatField(_(u"Price"), default=0.0)),
            ('id', models.AutoField(primary_key=True)),
        ))
        db.send_create_signal('criteria', ['CartPriceCriterion'])
        # Adding model 'WidthCriterion'
        db.create_table('criteria_widthcriterion', (
            ('operator', models.PositiveIntegerField(_(u"Operator"), null=True, blank=True)),
            ('width', models.FloatField(_(u"Width"), default=0.0)),
            ('id', models.AutoField(primary_key=True)),
        ))
        db.send_create_signal('criteria', ['WidthCriterion'])
        # Adding ManyToManyField 'UserCriterion.users'
        db.create_table('criteria_usercriterion_users', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('usercriterion', models.ForeignKey(UserCriterion, null=False)),
            ('user', models.ForeignKey(User, null=False))
        ))
        # Adding ManyToManyField 'CountryCriterion.countries'
        db.create_table('criteria_countrycriterion_countries', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('countrycriterion', models.ForeignKey(CountryCriterion, null=False)),
            ('country', models.ForeignKey(Country, null=False))
        ))

    def backwards(self, orm):
        """Revert the migration: drop every table created in forwards()."""
        # Deleting model 'WeightCriterion'
        db.delete_table('criteria_weightcriterion')
        # Deleting model 'UserCriterion'
        db.delete_table('criteria_usercriterion')
        # Deleting model 'HeightCriterion'
        db.delete_table('criteria_heightcriterion')
        # Deleting model 'CriteriaObjects'
        db.delete_table('criteria_criteriaobjects')
        # Deleting model 'CountryCriterion'
        db.delete_table('criteria_countrycriterion')
        # Deleting model 'LengthCriterion'
        db.delete_table('criteria_lengthcriterion')
        # Deleting model 'CartPriceCriterion'
        db.delete_table('criteria_cartpricecriterion')
        # Deleting model 'WidthCriterion'
        db.delete_table('criteria_widthcriterion')
        # Dropping ManyToManyField 'UserCriterion.users'
        db.delete_table('criteria_usercriterion_users')
        # Dropping ManyToManyField 'CountryCriterion.countries'
        db.delete_table('criteria_countrycriterion_countries')

    # Frozen ORM snapshot used by South to reconstruct model state.
    models = {
        'criteria.weightcriterion': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'operator': ('models.PositiveIntegerField', ['_(u"Operator")'], {'null': 'True', 'blank': 'True'}),
            'weight': ('models.FloatField', ['_(u"Weight")'], {'default': '0.0'})
        },
        'criteria.heightcriterion': {
            'height': ('models.FloatField', ['_(u"Height")'], {'default': '0.0'}),
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'operator': ('models.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        'auth.user': {
            '_stub': True,
            'id': ('models.AutoField', [], {'primary_key': 'True'})
        },
        'criteria.criteriaobjects': {
            'Meta': {'ordering': '["position"]'},
            'content_id': ('models.PositiveIntegerField', ['_(u"Content id")'], {}),
            'content_type': ('models.ForeignKey', ['ContentType'], {'related_name': '"content_type"', 'verbose_name': '_(u"Content type")'}),
            'criterion_id': ('models.PositiveIntegerField', ['_(u"Content id")'], {}),
            'criterion_type': ('models.ForeignKey', ['ContentType'], {'related_name': '"criterion"', 'verbose_name': '_(u"Criterion type")'}),
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'position': ('models.PositiveIntegerField', ['_(u"Position")'], {'default': '999'})
        },
        'criteria.usercriterion': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'users': ('models.ManyToManyField', ['User'], {})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label','model'),)", 'db_table': "'django_content_type'"},
            '_stub': True,
            'id': ('models.AutoField', [], {'primary_key': 'True'})
        },
        'criteria.lengthcriterion': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'length': ('models.FloatField', ['_(u"Length")'], {'default': '0.0'}),
            'operator': ('models.PositiveIntegerField', ['_(u"Operator")'], {'null': 'True', 'blank': 'True'})
        },
        'core.country': {
            'Meta': {'ordering': '("name",)'},
            '_stub': True,
            'id': ('models.AutoField', [], {'primary_key': 'True'})
        },
        'criteria.countrycriterion': {
            'countries': ('models.ManyToManyField', ['Country'], {'verbose_name': '_(u"Countries")'}),
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'operator': ('models.PositiveIntegerField', ['_(u"Operator")'], {'null': 'True', 'blank': 'True'})
        },
        'criteria.cartpricecriterion': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'operator': ('models.PositiveIntegerField', ['_(u"Operator")'], {'null': 'True', 'blank': 'True'}),
            'price': ('models.FloatField', ['_(u"Price")'], {'default': '0.0'})
        },
        'criteria.widthcriterion': {
            'id': ('models.AutoField', [], {'primary_key': 'True'}),
            'operator': ('models.PositiveIntegerField', ['_(u"Operator")'], {'null': 'True', 'blank': 'True'}),
            'width': ('models.FloatField', ['_(u"Width")'], {'default': '0.0'})
        }
    }

    complete_apps = ['criteria']
|
from flask import jsonify, request

from app import db, app
from app.models import Participant, Event, Location, Enrollment
# Book is used by the /books/ endpoints below but was missing from the imports.
from app.models import Book
@app.route("/locations/", methods=["GET"])
def api_get_locations():
""" GET /locations/ – выводит список городов или локаций, пока выведите [] """
locations = []
return jsonify(locations)
@app.route("/events/", methods=["GET"])
def api_get_events():
""" GET /events/ – выводит список ближайших событий в городе, пока выведите []
"""
events = []
return jsonify(events)
@app.route("/enrollments/<int:event_id>/", methods=["POST"])
def api_post_enrollments(event_id):
""" POST /enrollments/?id=event_id – принимает заявку на участие в событии, пока выведите {"status":"success"}
"""
enrollment = {"status": "enrollment success"}
return jsonify(enrollment)
@app.route("/enrollments/<int:event_id>/", methods=["DELETE"])
def api_delete_enrollments(event_id):
""" DELETE /enrollments/?id=event_id – отзывает заявку на участие в событии, пока выведите {"status":"success"}
"""
enrollment = {"status": "enrollment deleted successfully"}
return jsonify(enrollment)
@app.route("/register/", methods=["POST"])
def api_post_register():
""" POST /register/ – регистрирует пользователя, пока выведите {"status":"ok","id":1}
"""
user = {"status": "ok", "id": 1}
return jsonify(user)
@app.route("/auth/", methods=["POST"])
def api_post_auth():
""" POST /auth/ – проводит аутентификацию пользователя, пока выведите {"status":"success","key":111111111}
"""
auth = {"status": "success", "key": 111111111}
return jsonify(auth)
@app.route("/profile/", methods=["GET"])
def api_get_profile():
""" GET /profile/ – возвращает информацию о профиле пользователя,
пока выведите {"id":1,"picture":"","city":"nsk","about":"", enrollments:[]}
"""
user_profile = {"id": 1, "picture": "", "city": "nsk", "about": "", 'enrollments': []}
return jsonify(user_profile)
@app.route("/books/<int:book_id>/", methods=["GET"])
def api_get(book_id):
""" Получить одну книгу по ID """
print('Получить одну книгу по ID')
book = db.session.query(Book).get(book_id)
if book:
return jsonify(book.serialize)
return jsonify(), 404
@app.route("/books/all/", methods=["GET"])
def api_books_list():
""" Получить список всех книг """
print('Получить список всех книг')
books = db.session.query(Book)
books_dict = []
for book in books:
books_dict.append(book.serialize)
return jsonify(books_dict), 404
@app.route("/books/filtered/", methods=["GET"])
def api_books_filtered_list():
""" Получить список всех книг с фильтрацией по параметру language """
print('Получить список всех книг с фильтрацией по параметру language')
language = request.args.get("language")
print(language)
books_dict = []
if language:
books = db.session.query(Book)
books = books.filter(Book.language == language).all()
for book in books:
books_dict.append(book.serialize)
print(books_dict)
return jsonify(books_dict), 404
@app.route("/books/sorted/", methods=["GET"])
def api_books_sorted_list():
sort = request.args.get("sort")
if not sort:
return jsonify({'Error': 'Нет такого параметра!'}), 404
if not hasattr(Book, sort):
return jsonify(), 500
books = db.session.query(Book)
books = books.order_by(getattr(Book, sort))
books_dict = []
for book in books:
books_dict.append(book.serialize)
return jsonify(books_dict)
|
class Monstruo:
    """A monster with weaknesses, resistances, a description, and a free-form
    comment, exposed through classic accessor methods."""

    def __init__(self, nombre: str, debilidades: list, resistencia: list, descripcion: str):
        self.nombre = nombre
        self.debilidades = debilidades
        self.resistencia = resistencia
        self.descripcion = descripcion
        self.comentario = ""

    def getNombre(self):
        return self.nombre

    def getDebilidad(self):
        return self.debilidades

    def getResistencia(self):
        return self.resistencia

    def getDescripcion(self):
        return self.descripcion

    def getComentario(self):
        return self.comentario

    def setComentario(self, comentario):
        self.comentario = comentario

    def __str__(self) -> str:
        # One tab-indented line per weakness/resistance.
        cadDebilidades = "".join("\t{}\n".format(d) for d in self.debilidades)
        cadResistencias = "".join("\t{}\n".format(r) for r in self.resistencia)
        # The assembled text is also cached on self.cadena, as the original did.
        self.cadena = (
            "Nombre:\n\t{}\n".format(self.nombre)
            + "Debilidades:\n{}".format(cadDebilidades)
            + "Resistencias:\n{}".format(cadResistencias)
            + "Descripción:\n\t{}\n".format(self.descripcion)
            + "Comentario:\n\t{}\n".format(self.comentario)
        )
        return self.cadena
|
10,776 | 8f2f955b45a4ec7a910cabc11ae3c58f50024c53 | __author__ = 'Peiman'
import nltk
import re
import time
exampleArray = ['the incredibly intimidating NLP scares people away who are sissies']
# NOTE: this file is Python 2 (print statements, `except Exception, e`).
def processlanguage():
    # Tokenize/POS-tag each sentence, then chunk it with a regex grammar
    # (adverbs* verbs* proper-noun verbs*) and draw the resulting tree.
    try:
        for item in exampleArray:
            tokenized = nltk.word_tokenize(item)
            tagged = nltk.pos_tag(tokenized)
            print tagged
            chunkGram = r"""Chunk: {<RB\w?>*<VB\w?>*<NNP><VB\w>*}"""
            chunkParsser = nltk.RegexpParser(chunkGram)
            chunked = chunkParsser.parse(tagged)
            print chunked
            chunked.draw()
            #? = 0 or 1 rep
            #* = 0 or more rep
            #+ = 1 or more rep
            # keep the drawing window open for a while
            time.sleep(555)
    except Exception, e:
        print str(e)
processlanguage()
10,777 | 24bd0e17144177ec5635e1583a5dc8f044139f79 | #!/usr/bin/python
import re, sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), "requests-2.5.1"))
import requests
class BadHTTPCodeError(Exception):
    """Raised when an HTTP request returns a non-200 status code."""
    def __init__(self, code):
        # Store and forward the status code so str(err) and err.args carry it
        # (the original only printed it, leaving the exception message empty).
        Exception.__init__(self, code)
        self.code = code
        print(code)
class GaanaDownloader():
    def __init__(self):
        """Build the table of Gaana endpoint URL templates used by the other
        methods; `{...}` placeholders are filled via str.format."""
        self.urls = {
                        'search' : 'http://gaana.com/search/songs/{query}',
                        'get_token' : 'http://gaana.com//streamprovider/get_stream_data_v1.php',
                        'search_album' : 'http://gaana.com/search/albums/{query}',
                        'search_artist' : 'http://gaana.com/search/artists/{query}',
                        'album' : 'http://gaana.com/album/{name}',
                        'artist' : 'http://gaana.com/artist/{name}',
                        'search_songs_new' : 'http://api.gaana.com/index.php?type=search&subtype=search_song&content_filter=2&key={query}',
                        'search_albums_new' : 'http://api.gaana.com/index.php?type=search&subtype=search_album&content_filter=2&key={query}',
                        'get_song_url' : 'http://api.gaana.com/getURLV1.php?quality=medium&album_id={album_id}&delivery_type=stream&hashcode={hashcode}&isrc=0&type=rtmp&track_id={track_id}',
                        'album_details' : 'http://api.gaana.com/index.php?type=album&subtype=album_detail&album_id={album_id}'
                    }
def _get_url_contents(self, url):
url = url.replace(' ','%20')
response = requests.get(url)
if response.status_code == 200:
return response
else:
raise BadHTTPCodeError(response.status_code)
    def _create_hashcode(self, track_id):
        """Return the HMAC hex digest Gaana's stream API expects for *track_id*.

        NOTE(review): hmac.new with a str key and the implicit default digest
        is Python-2-only behaviour (needs bytes and an explicit digestmod on
        Python 3); the hard-coded key presumably mirrors the official client —
        confirm before porting.
        """
        from base64 import b64encode as en
        import hmac
        key = 'ec9b7c7122ffeed819dc1831af42ea8f'
        hashcode = hmac.new(key, en(track_id)).hexdigest()
        return hashcode
    def _get_song_url(self, track_id, album_id):
        """Resolve a track's stream URL: query the get_song_url endpoint with a
        hashcode, then base64-decode the 'data' field of the JSON reply.
        (Python 2: uses print statements.)"""
        from base64 import b64decode as dec
        url = self.urls['get_song_url']
        hashcode = self._create_hashcode(track_id)
        url = url.format(track_id = track_id, album_id = album_id, hashcode = hashcode)
        response = requests.get(url , headers = {'deviceType':'GaanaAndroidApp', 'appVersion':'V5'})
        song_url_b64 = response.json()['data']
        print song_url_b64
        song_url = dec(song_url_b64)
        print song_url
        return song_url
def get_song_url(self, track_id, album_id):
from base64 import b64decode as dec
url = self.urls['get_song_url']
hashcode = self._create_hashcode(track_id)
url = url.format(track_id = track_id, album_id = album_id, hashcode = hashcode)
response = requests.get(url , headers = {'deviceType':'GaanaAndroidApp', 'appVersion':'V5'})
song_url_b64 = response.json()['data']
print song_url_b64
song_url = dec(song_url_b64)
print song_url
return song_url
def _download_track(self, song_url, track_name, dir_name):
    """Download *song_url* into *dir_name*, naming the file after the track.

    The extension is guessed from the URL: Gaana serves either an mp3 or
    an mp4 container.  Fix: Python 3 print() and os.path.join instead of
    manual '/' concatenation.
    """
    import os
    ext = '.mp3' if 'mp3' in song_url else '.mp4'
    file_path = os.path.join(dir_name, track_name.strip() + ext)
    print('Downloading to', file_path)
    response = self._get_url_contents(song_url)
    with open(file_path, 'wb') as f:
        f.write(response.content)
def _check_path(self, _dir):
import os
if not os.path.exists(_dir):
os.system('mkdir %s'%_dir)
def _check_input(self, ids, len_of_tracks):
ids = map(lambda x:x.strip(),ids.split(','))
for i in ids:
if not i.isdigit():
return False
if int(i) > len_of_tracks:
return False
return True
def search_songs_api(self, query):
    """Search tracks matching *query*.

    Returns rows of [title, track_id, album_id, album_title, artists,
    duration], or [] when nothing matched.  Fix: Python 3 print() and a
    list comprehension (Python 3's ``map`` would have leaked a lazy
    iterator to callers that expect a list).
    """
    url = self.urls['search_songs_new'].format(query=query)
    response = self._get_url_contents(url)
    tracks = response.json()['tracks']
    if not tracks:
        print('Ooopsss!!! Sorry no track found matching your query')
        print('Why not try another Song? :)')
        return []
    return [[t['track_title'], t['track_id'], t['album_id'],
             t['album_title'],
             ','.join(a['name'] for a in t['artist']),
             t['duration']]
            for t in tracks]
def search_albums_api(self, query):
    """Search albums matching *query*.

    Returns rows of [album_id, title, language, seokey, release_date,
    artists (first two), trackcount], or [] when nothing matched.
    Fix: Python 3 print() and an eager list comprehension instead of map.
    """
    url = self.urls['search_albums_new'].format(query=query)
    response = self._get_url_contents(url)
    albums = response.json()['album']
    if not albums:
        print('No such album found')
        return []
    return [[a['album_id'], a['title'], a['language'], a['seokey'],
             a['release_date'],
             ','.join(ar['name'] for ar in a.get('artists', [])[:2]),
             a['trackcount']]
            for a in albums]
def get_songs_list_from_album(self, album_id):
    """Return the track rows of *album_id* via the album_detail API.

    Each row is [title (stripped), track_id, album_id, album_title,
    artists, duration].  Fix: eager list comprehension instead of a lazy
    Python 3 ``map`` object.
    """
    url = self.urls['album_details'].format(album_id=album_id)
    # The mobile-app headers are required by the album_detail endpoint.
    response = requests.get(url, headers={'deviceType': 'GaanaAndroidApp',
                                          'appVersion': 'V5'})
    tracks = response.json()['tracks']
    return [[t['track_title'].strip(), t['track_id'], t['album_id'],
             t['album_title'],
             ','.join(a['name'] for a in t['artist']),
             t['duration']]
            for t in tracks]
def search_albums(self, query, _dir = None):
    """Interactive flow: search albums, let the user pick one, list its
    tracks and download the selected tracks (Python 2 I/O throughout)."""
    url = self.urls['search_albums_new']
    url = url.format(query = query)
    response = self._get_url_contents(url)
    albums = response.json()['album']
    if albums:
        # Flatten the JSON into rows: id, title, language, seokey (used
        # as the default download folder), release date, first two
        # artists, track count.
        albums_list = map(lambda x:[x['album_id'],x['title'], x['language'], x['seokey'], x['release_date'],','.join(map(lambda y:y['name'], x.get('artists',[])[:2])) ,x['trackcount']], albums)
        tabledata = [['S No.', 'Album Title', 'Album Language', 'Release Date', 'Artists', 'Track Count']]
        for idx, value in enumerate(albums_list):
            tabledata.append([str(idx), value[1], value[2], value[4], value[5], value[6]])
        table = AsciiTable(tabledata)
        print table.table
        idx = int(raw_input('Which album do you wish to download? Enter S No. :'))
        album_details_url = self.urls['album_details']
        album_details_url = album_details_url.format(album_id = albums_list[idx][0])
        # Mobile-app headers are required by the album_detail endpoint.
        response = requests.get(album_details_url , headers = {'deviceType':'GaanaAndroidApp', 'appVersion':'V5'})
        tracks = response.json()['tracks']
        tracks_list = map(lambda x:[x['track_title'].strip(),x['track_id'],x['album_id'],x['album_title'], ','.join(map(lambda y:y['name'], x['artist'])), x['duration']], tracks)
        print 'List of tracks for ', albums_list[idx][1]
        tabledata = [['S No.', 'Track Title', 'Track Artist']]
        for idy, value in enumerate(tracks_list):
            tabledata.append([str(idy), value[0], value[4]])
        # Extra sentinel row: entering idy+1 downloads the whole album.
        tabledata.append([str(idy+1), 'Enter this to download them all.',''])
        table = AsciiTable(tabledata)
        print table.table
        print 'Downloading tracks to %s folder'%albums_list[idx][3]
        ids = raw_input('Please enter csv of S no. to download:')
        # Re-prompt until the CSV parses and every index is in range.
        while not self._check_input(ids, len(tracks_list)) or not ids:
            print 'Oops!! You made some error in entering input'
            ids = raw_input('Please enter csv of S no. to download:')
        if not _dir:
            _dir = albums_list[idx][3]
        self._check_path(_dir)
        ids = map(int,map(lambda x:x.strip(),ids.split(',')))
        if len(ids) == 1 and ids[0] == idy + 1:
            # Sentinel selected: fetch every track of the album.
            for item in tracks_list:
                song_url = self._get_song_url(item[1], item[2])
                self._download_track(song_url, item[0].replace(' ','-').strip(), _dir)
        else:
            for i in ids:
                item = tracks_list[i]
                song_url = self._get_song_url(item[1], item[2])
                self._download_track(song_url, item[0].replace(' ','-').strip(), _dir)
    else:
        print 'Ooopsss!!! Sorry no such album found.'
        print 'Why not try another Album? :)'
|
10,778 | f94dbb3a4a6964c5b39147ecb11b4dd2ffff2423 | # -*- coding:utf-8 -*-
from base import BaseHandler
class ErrHandler(BaseHandler):
    """Fallback handler: renders the error page with a 404 status."""
    def get(self):
        # Serve error.html for any unmatched GET request.
        self.render('error.html', status_code=404)
|
10,779 | d7808235d83bc603ac76522b22980075a8863539 | from flask import Flask, render_template,request, session, redirect, url_for
from pymongo import MongoClient
#from flask.ext.pymongo import PyMongo
from reply_rec import botResponse, chat_history
from flask_pymongo import PyMongo
from flask_login import logout_user
import bcrypt
app = Flask(__name__)
#client=MongoClient("mongodb+srv://saksham:saksham@cluster0-nvuma.mongodb.net/test?retryWrites=true&w=majority")
#db=client.get_database('db')
#records=db.chats
#print(records.count_documents({}))
#chats_two={
#'How is the weather' : 'Sunny',
#}
#records.insert(chats_two)
app.config['MONGO_DBNAME'] = 'db'
app.config['MONGO_URI'] = 'mongodb+srv://saksham:saksham@cluster0-nvuma.mongodb.net/test?retryWrites=true&w=majority'
mongo=PyMongo(app)
@app.route('/')
def index():
    """Landing page; greets an already-logged-in user by name."""
    user = session.get('username')
    if user is not None:
        return 'You are logged in as ' + user
    return render_template('index.html')
@app.route('/login', methods=['POST'])
def login():
    """Log a user in against the MongoDB ``chat`` collection.

    Returns the chat page on success, 'Wrong Credentials' otherwise.
    Fix: the original fell off the end and returned None (an HTTP 500)
    when the username did not exist at all.
    """
    users = mongo.db.chat
    present_user = users.find_one({'name': request.form['username']})
    # SECURITY NOTE(review): passwords are stored and compared in plain
    # text; the commented-out bcrypt check should be restored.
    if present_user and request.form['pass'] == present_user['password']:
        session['username'] = request.form['username']
        return render_template('chat.html')
    return 'Wrong Credentials'
@app.route('/register', methods=['POST', 'GET'])
def register():
    """Create a new account (POST) or show the registration form (GET)."""
    if request.method == 'POST':
        users = mongo.db.chat
        if users.find_one({'name': request.form['username']}) is None:
            # Fix: Collection.insert is removed in pymongo 4; insert_one
            # is the supported single-document API.
            # SECURITY NOTE(review): password stored unhashed; restore
            # the commented-out bcrypt hashing.
            users.insert_one({'name': request.form['username'],
                              'E-mail': request.form['E-mail'],
                              'password': request.form['pass']})
            session['username'] = request.form['username']
            return render_template('chat.html')
        return 'username taken'
    return render_template('register.html')
@app.route("/reply_rec", methods = ["POST"] )
def resp():
    # JSON body carries the user's message under "user"; the bot reply is
    # generated per logged-in session user.
    # NOTE(review): raises KeyError/500 when no user is in the session.
    data = request.json
    #chat_history(session["username"])
    return botResponse(data["user"], session["username"])
#@app.route("/hellogreeting", methods = ["GET","POST"])
#def greeting():
# name= session["username"]
# return 'Hi'+name
@app.route("/get_uname", methods = ["GET", "POST"])
def uname():
    """Return the session user's name plus their chat history, or "" when
    nobody is logged in.

    Fix: use session.get — the original indexed session["username"] and
    raised KeyError (HTTP 500) whenever the key was absent.
    """
    username = session.get("username", "")
    if username:
        history = chat_history(username)
        return {"username": username,
                "bot": history['bot'],
                "user": history['user']}
    return ""
@app.route('/logout')
def logout():
    """End the session and bounce back to the landing page."""
    if 'username' not in session:
        return 'User already logged out.'
    session.pop('username', None)
    return redirect(url_for('index'))
if __name__=='__main__':
    # NOTE(review): hard-coded secret key and debug=True are development
    # settings; move both to configuration before deploying.
    app.secret_key = 'mysecret'
    app.run(debug=True)
|
10,780 | 2dd8181a26c6f415245c5fad15b88fe8e971280a | # -*- coding: utf-8 -*-
"""
Zerodha Kite Connect - candlestick pattern scanner
@author: Mayank Rasu (http://rasuquant.com/wp/)
"""
from kiteconnect import KiteConnect, KiteTicker
import pandas as pd
import datetime as dt
import os
import time
import numpy as np
import sys
cwd = os.chdir("/home/rajkp/code/Projects/Django-Dashboard/boilerplate-code-django-dashboard/app/algos")
#generate trading session
access_token = open("access_token.txt",'r').read()
key_secret = open("api_key.txt",'r').read().split()
kite = KiteConnect(api_key=key_secret[0])
kite.set_access_token(access_token)
#get dump of all NSE instruments
instrument_dump = kite.instruments("NSE")
instrument_df = pd.DataFrame(instrument_dump)
def instrumentLookup(instrument_df, symbol):
    """Return the instrument token for *symbol*, or -1 if it is unknown.

    Fix: catch IndexError specifically — the original bare ``except``
    hid every failure (including typos in column names) behind -1.
    """
    try:
        return instrument_df[instrument_df.tradingsymbol == symbol].instrument_token.values[0]
    except IndexError:
        return -1
def tokenLookup(instrument_df, symbol_list):
    """Return the instrument tokens (plain ints) for every symbol in
    *symbol_list*, in the same order."""
    symbols = instrument_df.tradingsymbol
    return [int(instrument_df[symbols == sym].instrument_token.values[0])
            for sym in symbol_list]
def fetchOHLC(ticker,interval,duration):
    """extracts historical data and outputs in the form of dataframe"""
    # Resolve the ticker to a Kite instrument token, then pull *duration*
    # days of candles at *interval* granularity, indexed by candle date.
    instrument = instrumentLookup(instrument_df,ticker)
    data = pd.DataFrame(kite.historical_data(instrument,dt.date.today()-dt.timedelta(duration), dt.date.today(),interval))
    data.set_index("date",inplace=True)
    return data
def doji(ohlc_df):
    """Add a boolean 'doji' column: real body no larger than 5% of the
    window's median body size."""
    df = ohlc_df.copy()
    body = (df["close"] - df["open"]).abs()
    df["doji"] = body <= 0.05 * body.median()
    return df
def maru_bozu(ohlc_df):
    """returns dataframe with maru bozu candle column"""
    df = ohlc_df.copy()
    # Median real-body size of the window, used as the scale reference.
    avg_candle_size = abs(df["close"] - df["open"]).median()
    # Shadow lengths: h-c / l-o belong to a green candle, h-o / l-c to a
    # red candle.
    df["h-c"] = df["high"]-df["close"]
    df["l-o"] = df["low"]-df["open"]
    df["h-o"] = df["high"]-df["open"]
    df["l-c"] = df["low"]-df["close"]
    # Marubozu: body at least twice the median with (near) no shadows.
    # The resulting column is mixed-type: a string label or boolean False.
    # NOTE(review): l-o is <= 0 by construction, so the green branch only
    # constrains the upper shadow (the red branch wraps abs()); this is
    # probably meant to be abs() too — confirm before changing.
    df["maru_bozu"] = np.where((df["close"] - df["open"] > 2*avg_candle_size) & \
    (df[["h-c","l-o"]].max(axis=1) < 0.005*avg_candle_size),"maru_bozu_green",
    np.where((df["open"] - df["close"] > 2*avg_candle_size) & \
    (abs(df[["h-o","l-c"]]).max(axis=1) < 0.005*avg_candle_size),"maru_bozu_red",False))
    # Drop the helper shadow columns before returning.
    df.drop(["h-c","l-o","h-o","l-c"],axis=1,inplace=True)
    return df
def hammer(ohlc_df):
    """Add a boolean 'hammer' column: small real body sitting near the top
    of a long lower shadow (0.001 guards against zero-range candles)."""
    df = ohlc_df.copy()
    rng = df["high"] - df["low"]
    long_range = rng > 3 * (df["open"] - df["close"])
    close_near_high = (df["close"] - df["low"]) / (.001 + rng) > 0.6
    open_near_high = (df["open"] - df["low"]) / (.001 + rng) > 0.6
    real_body = abs(df["close"] - df["open"]) > 0.1 * rng
    df["hammer"] = (long_range & close_near_high & open_near_high) & real_body
    return df
def shooting_star(ohlc_df):
    """Add a boolean 'sstar' column: small real body sitting near the
    bottom of a long upper shadow (0.001 guards zero-range candles)."""
    df = ohlc_df.copy()
    rng = df["high"] - df["low"]
    long_range = rng > 3 * (df["open"] - df["close"])
    close_near_low = (df["high"] - df["close"]) / (.001 + rng) > 0.6
    open_near_low = (df["high"] - df["open"]) / (.001 + rng) > 0.6
    real_body = abs(df["close"] - df["open"]) > 0.1 * rng
    df["sstar"] = (long_range & close_near_low & open_near_low) & real_body
    return df
def levels(ohlc_day):
    """Classic floor-trader pivot plus three resistance/support levels.

    Uses the last row of *ohlc_day* (the most recent completed session);
    every value is rounded to 2 decimals.
    Returns (pivot, r1, r2, r3, s1, s2, s3).
    """
    high = round(ohlc_day["high"][-1], 2)
    low = round(ohlc_day["low"][-1], 2)
    close = round(ohlc_day["close"][-1], 2)
    pivot = round((high + low + close) / 3, 2)
    day_range = high - low
    r1 = round(2 * pivot - low, 2)
    r2 = round(pivot + day_range, 2)
    r3 = round(high + 2 * (pivot - low), 2)
    s1 = round(2 * pivot - high, 2)
    s2 = round(pivot - day_range, 2)
    s3 = round(low - 2 * (high - pivot), 2)
    return (pivot, r1, r2, r3, s1, s2, s3)
def trend(ohlc_df,n):
    "function to assess the trend by analyzing each candle"
    # A candle counts as "up" when its low is no lower than the previous
    # low, and as "dn" when its high is no higher than the previous high.
    df = ohlc_df.copy()
    df["up"] = np.where(df["low"]>=df["low"].shift(1),1,0)
    df["dn"] = np.where(df["high"]<=df["high"].shift(1),1,0)
    # Uptrend: last candle is green and at least 70% of the last n candles
    # are "up"; downtrend is symmetric.  Every other path returns None
    # (implicitly when the 70% test fails, explicitly for a flat candle).
    if df["close"][-1] > df["open"][-1]:
        if df["up"][-1*n:].sum() >= 0.7*n:
            return "uptrend"
    elif df["open"][-1] > df["close"][-1]:
        if df["dn"][-1*n:].sum() >= 0.7*n:
            return "downtrend"
    else:
        return None
def res_sup(ohlc_df, ohlc_day):
    """Return the (closest-resistance, closest-support) level values for
    the last candle of *ohlc_df*.

    The reference price is the average of the candle's body midpoint and
    range midpoint.  Fix: look the chosen level up in a dict instead of
    the original ``eval('{}'.format(name))`` round-trip through local
    variable names.

    NOTE(review): the caller unpacks this as ``sup, res = res_sup(...)``
    although the tuple is (resistance, support); the downstream test is
    symmetric so results are unaffected, but the naming should be
    reconciled.
    """
    level = ((ohlc_df["close"][-1] + ohlc_df["open"][-1]) / 2
             + (ohlc_df["high"][-1] + ohlc_df["low"][-1]) / 2) / 2
    p, r1, r2, r3, s1, s2, s3 = levels(ohlc_day)
    values = {"p": p, "r1": r1, "r2": r2, "r3": r3,
              "s1": s1, "s2": s2, "s3": s3}
    distances = pd.Series({name: level - value for name, value in values.items()})
    # Closest level below the price (positive distance) is support;
    # closest level above (negative distance) is resistance.
    sup = distances[distances > 0].idxmin()
    res = distances[distances < 0].idxmax()
    return (values[res], values[sup])
def candle_type(ohlc_df):
    """Classify the last candle of *ohlc_df*.

    Later checks deliberately override earlier ones, so the effective
    precedence is hammer > shooting_star > marubozu (green/red) > doji,
    falling back to None.
    """
    candle = None
    if doji(ohlc_df)["doji"][-1]:
        candle = "doji"
    marubozu = maru_bozu(ohlc_df)["maru_bozu"][-1]
    if marubozu == "maru_bozu_green":
        candle = "maru_bozu_green"
    elif marubozu == "maru_bozu_red":
        candle = "maru_bozu_red"
    if shooting_star(ohlc_df)["sstar"][-1]:
        candle = "shooting_star"
    if hammer(ohlc_df)["hammer"][-1]:
        candle = "hammer"
    return candle
def candle_pattern(ohlc_df,ohlc_day):
    """returns the candle pattern identified"""
    # Combines the single-candle classifiers with the short-term trend and
    # the previous candle to name a two-candle pattern; later checks win.
    pattern = None
    signi = "low"
    avg_candle_size = abs(ohlc_df["close"] - ohlc_df["open"]).median()
    # NOTE(review): res_sup returns (resistance, support) but is unpacked
    # as (sup, res) here; the significance test below is symmetric so the
    # outcome is unchanged, but the names are swapped.
    sup, res = res_sup(ohlc_df,ohlc_day)
    # A close within 1.5 median bodies of a support/resistance level makes
    # the signal high-significance.
    if (sup - 1.5*avg_candle_size) < ohlc_df["close"][-1] < (sup + 1.5*avg_candle_size):
        signi = "HIGH"
    if (res - 1.5*avg_candle_size) < ohlc_df["close"][-1] < (res + 1.5*avg_candle_size):
        signi = "HIGH"
    # Doji colored by direction vs. the previous close and its own open.
    if candle_type(ohlc_df) == 'doji' \
    and ohlc_df["close"][-1] > ohlc_df["close"][-2] \
    and ohlc_df["close"][-1] > ohlc_df["open"][-1]:
        pattern = "doji_bullish"
    if candle_type(ohlc_df) == 'doji' \
    and ohlc_df["close"][-1] < ohlc_df["close"][-2] \
    and ohlc_df["close"][-1] < ohlc_df["open"][-1]:
        pattern = "doji_bearish"
    if candle_type(ohlc_df) == "maru_bozu_green":
        pattern = "maru_bozu_bullish"
    if candle_type(ohlc_df) == "maru_bozu_red":
        pattern = "maru_bozu_bearish"
    # Hammer shape in an uptrend is a hanging man; in a downtrend a hammer.
    if trend(ohlc_df.iloc[:-1,:],7) == "uptrend" and candle_type(ohlc_df) == "hammer":
        pattern = "hanging_man_bearish"
    if trend(ohlc_df.iloc[:-1,:],7) == "downtrend" and candle_type(ohlc_df) == "hammer":
        pattern = "hammer_bullish"
    if trend(ohlc_df.iloc[:-1,:],7) == "uptrend" and candle_type(ohlc_df) == "shooting_star":
        pattern = "shooting_star_bearish"
    # Harami cross: a doji contained inside the previous candle's body.
    if trend(ohlc_df.iloc[:-1,:],7) == "uptrend" \
    and candle_type(ohlc_df) == "doji" \
    and ohlc_df["high"][-1] < ohlc_df["close"][-2] \
    and ohlc_df["low"][-1] > ohlc_df["open"][-2]:
        pattern = "harami_cross_bearish"
    if trend(ohlc_df.iloc[:-1,:],7) == "downtrend" \
    and candle_type(ohlc_df) == "doji" \
    and ohlc_df["high"][-1] < ohlc_df["open"][-2] \
    and ohlc_df["low"][-1] > ohlc_df["close"][-2]:
        pattern = "harami_cross_bullish"
    # Engulfing: the current body swallows the previous candle's range.
    if trend(ohlc_df.iloc[:-1,:],7) == "uptrend" \
    and candle_type(ohlc_df) != "doji" \
    and ohlc_df["open"][-1] > ohlc_df["high"][-2] \
    and ohlc_df["close"][-1] < ohlc_df["low"][-2]:
        pattern = "engulfing_bearish"
    if trend(ohlc_df.iloc[:-1,:],7) == "downtrend" \
    and candle_type(ohlc_df) != "doji" \
    and ohlc_df["close"][-1] > ohlc_df["high"][-2] \
    and ohlc_df["open"][-1] < ohlc_df["low"][-2]:
        pattern = "engulfing_bullish"
    return "Significance - {}, Pattern - {}".format(signi,pattern)
##############################################################################################
tickers = ["BHEL",
"CONCOR",
"ASTRAL",
"INDHOTEL",
"DALBHARAT",
"COFORGE",
"ITI",
"IPCALAB",
"SUMICHEM",
"DHANI",
"DIXON",
"SUNTV",
"FEDERALBNK",
"OFSS",
"COROMANDEL",
"RECLTD",
"VOLTAS",
"ISEC",
"AUBANK",
"BALKRISIND",
"GSPL",
"HAL",
"POLYCAB",
"TATACHEM",
"SUPREMEIND",
"LTTS",
"BHARATFORG",
"HATSUN",
"TVSMOTOR",
"GMRINFRA",
"TRENT",
"MOTILALOFS",
"L&TFH",
"ATUL",
"AIAENG",
"GLAXO",
"JSWENERGY",
"SKFINDIA",
"IDBI",
"PRESTIGE",
"NHPC",
"ATGL",
"TIINDIA",
"SJVN",
"MINDAIND",
"CANBK",
"VINATIORGA",
"BANKINDIA",
"OIL",
"BBTC",
"PFC",
"GODREJAGRO",
"AAVAS",
"EXIDEIND",
"WHIRLPOOL",
"MAXHEALTH",
"GODREJPROP",
"VBL",
"3MINDIA",
"METROPOLIS",
"ASTRAZEN",
"MGL",
"SRF",
"APOLLOTYRE",
"MFSL",
"BATAINDIA",
"UNIONBANK",
"VGUARD",
"ZYDUSWELL",
"PFIZER",
"BAYERCROP",
"IRCTC",
"CASTROLIND",
"SANOFI",
"ABFRL",
"FORTIS",
"CESC",
"PERSISTENT",
"GODREJIND",
"MPHASIS",
"PHOENIXLTD",
"CHOLAHLDNG",
"DEEPAKNTR",
"HONAUT",
"TATACOMM",
"JMFINANCIL",
"LICHSGFIN",
"CUMMINSIND",
"GICRE",
"THERMAX",
"SOLARINDS",
"SRTRANSFIN",
"LAURUSLABS",
"IDFCFIRSTB",
"CUB",
"NIACL",
"NAVINFLUOR",
"OBEROIRLTY",
"TATAELXSI",
"RELAXO",
"MANAPPURAM",
"CRISIL",
"AMARAJABAT",
"GUJGASLTD",
"BANKBARODA",
"AARTIIND",
"M&MFIN",
"ASHOKLEY",
"PGHL",
"PIIND",
"GILLETTE",
"ABCAPITAL",
"APLLTD",
"CROMPTON",
"NAM-INDIA",
"ABB",
"TTKPRESTIG",
"SUVENPHAR",
"IDEA",
"BEL",
"SCHAEFFLER",
"ZEEL",
"RBLBANK",
"RAMCOCEM",
"GLENMARK",
"RAJESHEXPO",
"SUNDRMFAST",
"EMAMILTD",
"ENDURANCE",
"SYNGENE",
"AKZOINDIA",
"LALPATHLAB",
"HINDZINC",
"TATAPOWER",
"JKCEMENT",
"ESCORTS",
"SUNDARMFIN",
"IIFLWAM",
"IBULHSGFIN",
"CREDITACC",
"KANSAINER",
"MINDTREE",
"PAGEIND",
"CHOLAFIN",
"AJANTPHARM",
"NATCOPHARM",
"JINDALSTEL",
"TORNTPOWER",
"SAIL",
"INDIAMART",
"GAIL",
"HINDPETRO",
"JUBLFOOD",
"ADANITRANS",
"BOSCHLTD",
"IGL",
"SIEMENS",
"PETRONET",
"ICICIPRULI",
"ACC",
"MARICO",
"AMBUJACEM",
"BERGEPAINT",
"PIDILITIND",
"INDUSTOWER",
"ABBOTINDIA",
"BIOCON",
"MCDOWELL-N",
"PGHH",
"DMART",
"MRF",
"DLF",
"GODREJCP",
"COLPAL",
"HDFCAMC",
"YESBANK",
"VEDL",
"BAJAJHLDNG",
"DABUR",
"INDIGO",
"ALKEM",
"CADILAHC",
"MOTHERSUMI",
"HAVELLS",
"ADANIENT",
"UBL",
"SBICARD",
"PEL",
"BANDHANBNK",
"MUTHOOTFIN",
"TORNTPHARM",
"ICICIGI",
"LUPIN",
"LTI",
"APOLLOHOSP",
"ADANIGREEN",
"NAUKRI",
"NMDC",
"PNB",
"AUROPHARMA",
"COALINDIA",
"IOC",
"NTPC",
"ULTRACEMCO",
"BPCL",
"TATASTEEL",
"TATACONSUM",
"SUNPHARMA",
"TATAMOTORS",
"GRASIM",
"SHREECEM",
"SBIN",
"EICHERMOT",
"RELIANCE",
"BAJAJ-AUTO",
"INDUSINDBK",
"BRITANNIA",
"SBILIFE",
"UPL",
"ONGC",
"ADANIPORTS",
"POWERGRID",
"NESTLEIND",
"BHARTIARTL",
"TITAN",
"HEROMOTOCO",
"ASIANPAINT",
"MARUTI",
"ITC",
"ICICIBANK",
"HCLTECH",
"M&M",
"LT",
"INFY",
"BAJAJFINSV",
"DRREDDY",
"HDFCBANK",
"CIPLA",
"HDFCLIFE",
"TCS",
"AXISBANK",
"HINDUNILVR",
"JSWSTEEL",
"TECHM",
"BAJFINANCE",
"WIPRO",
"DIVISLAB",
"KOTAKBANK",
"HINDALCO",
"HDFC"]
#####################################################################################################
def main(capital=None):
    """One scan pass: refresh positions/orders from Kite, then report the
    candle pattern of every ticker.

    Parameters
    ----------
    capital : optional position size, accepted (currently unused) so the
        websocket callback can pass it — the original ``main(capital)``
        call there raised TypeError because main() took no arguments.
    """
    pos_df = ord_df = None
    # Kite REST calls fail transiently; retry each up to 10 times with a
    # narrow handler instead of the original bare except.
    for _ in range(10):
        try:
            pos_df = pd.DataFrame(kite.positions()["day"])
            break
        except Exception:
            print("can't extract position data..retrying")
    for _ in range(10):
        try:
            ord_df = pd.DataFrame(kite.orders())
            break
        except Exception:
            print("can't extract order data..retrying")
    for ticker in tickers:
        try:
            ohlc = fetchOHLC(ticker, '5minute', 5)
            ohlc_day = fetchOHLC(ticker, 'day', 30)
            # Drop today's still-forming daily candle.
            ohlc_day = ohlc_day.iloc[:-1, :]
            cp = candle_pattern(ohlc, ohlc_day)
            print(ticker, ": ", cp)
        except Exception:
            print("skipping for ", ticker)
# Continuous execution
# starttime=time.time()
# timeout = time.time() + 60*60*1 # 60 seconds times 60 meaning the script will run for 1 hr
# while time.time() <= timeout:
# try:
# print("passthrough at ",time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
# main()
# time.sleep(300 - ((time.time() - starttime) % 300.0)) # 300 second interval between each new execution
# except KeyboardInterrupt:
# print('\n\nKeyboard exception received. Exiting.')
# exit()
capital = 3000 #position size
# macd_xover = {}
# renko_param = {}
# for ticker in tickers:
# renko_param[ticker] = {"brick_size":renkoBrickSize(ticker),"upper_limit":None, "lower_limit":None,"brick":0}
# macd_xover[ticker] = None
#create KiteTicker object
kws = KiteTicker(key_secret[0],kite.access_token)
tokens = tokenLookup(instrument_df,tickers)
start_minute = dt.datetime.now().minute
def on_ticks(ws, ticks):
    """Websocket tick callback: trigger a scan pass every 5 minutes.

    Fix: the original called ``main(capital)`` although main() accepts no
    arguments, so every scheduled scan raised TypeError.
    """
    global start_minute
    now_minute = dt.datetime.now().minute
    # Minute values wrap at the hour; abs() keeps the 5-minute gate sane.
    if abs(now_minute - start_minute) >= 5:
        start_minute = now_minute
        main()
def on_connect(ws,response):
    # On (re)connect, subscribe to the full token universe in LTP-only
    # mode — the lightest payload, since only last-traded-price ticks are
    # needed to drive the 5-minute scan timer.
    ws.subscribe(tokens)
    ws.set_mode(ws.MODE_LTP,tokens)
def pattern_scanner():
    # Busy-waits until the market opens (09:00), then attaches the
    # callbacks and starts the websocket loop; intended to stop at 14:30.
    # NOTE(review): kws.connect() blocks, so the 14:30 sys.exit() check is
    # only reached if connect() returns — confirm the intended shutdown.
    while True:
        now = dt.datetime.now()
        if (now.hour >= 9):
            kws.on_ticks=on_ticks
            kws.on_connect=on_connect
            kws.connect()
        if (now.hour >= 14 and now.minute >= 30):
            sys.exit()
10,781 | 016ba775ac09f6ac546f298eca92a01307a391e0 | import random
from random import choice
import discord
import requests
from bs4 import BeautifulSoup
from discord.ext import commands
class Fun(commands.Cog):
    """Miscellaneous fun commands: dice rolls, coin/user flips, chat
    topics, uwu-speak and a couple of reaction easter eggs."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def ping(self, ctx):
        """
        Pong!
        """
        await ctx.send("Pong!")

    @commands.command()
    async def topic(self, ctx):
        """
        Gets a random chat topic to keep the chat going.
        """
        # Scrape the generator page; the topic text sits in the #random node.
        website = requests.get('https://www.conversationstarters.com/generator.php').content
        soup = BeautifulSoup(website, 'html.parser')
        topic = soup.find(id="random").text
        await ctx.send(topic)

    @commands.command(aliases=['r'])
    async def roll(self, ctx, upper_bound=20):
        """
        Roll a d20 or a d[upper_bound]
        :param upper_bound: the highest you can roll.
        :return: Your die roll
        """
        # Fix: discord.py passes untyped arguments as strings, so the
        # original `msg == upper_bound` comparison (int vs str) could
        # never announce a critical hit; normalise to int once up front.
        upper_bound = int(upper_bound)
        msg = random.randint(1, upper_bound)
        if msg == upper_bound:
            msg = "***Critical Hit!*** " + str(msg)
        elif msg == 1:
            msg = "***Critical Fail!*** " + str(msg)
        await ctx.send(f":game_die: You rolled a {msg}")

    @commands.command()
    @commands.is_owner()
    async def changegame(self, ctx, game):
        """
        Changes my displayed game. Only for privileged users!
        :param ctx: message context.
        :param game: a string of the game I am playing.
        :return: "Game Changed Successfully"
        """
        game = discord.Game(game)
        await self.bot.change_presence(status=discord.Status.online, activity=game)
        embedMsg = discord.Embed(color=0x90ee90, title=":video_game: Game changed successfully!")
        await ctx.send(embed=embedMsg)

    @commands.command()
    async def flip(self, ctx, user : discord.Member=None):
        """
        Flips a coin ... or a user. But not me.
        :param user: the user you are flipping
        :return: either a flipped coin or user
        """
        if user is not None:
            msg = ""
            # Trying to flip the bot gets the caller flipped instead.
            if user.id == self.bot.user.id:
                user = ctx.author
                msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
            # Map ASCII letters (lowercase, then uppercase) onto their
            # upside-down lookalikes, then reverse the whole name.
            char = "abcdefghijklmnopqrstuvwxyz"
            tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
            table = str.maketrans(char, tran)
            name = user.display_name.translate(table)
            char = char.upper()
            tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
            table = str.maketrans(char, tran)
            name = name.translate(table)
            await ctx.send(msg + "(╯°□°)╯︵ " + name[::-1])
        else:
            await ctx.send("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))

    @commands.command()
    async def ded(self, ctx):
        # Send the "ded chat" gif as an embed.
        embed = discord.Embed()
        embed.set_image(url="https://i.imgur.com/X6pMtG4.gif")
        await ctx.channel.send(embed=embed)

    @commands.command()
    async def uwu(self, ctx, *, message):
        # Fix: the original list was missing a comma between '☆w☆' and
        # '✧w✧', silently concatenating them into a single entry.
        uwus = ['UwU', 'Uwu', 'uwU', 'ÚwÚ', 'uwu', '☆w☆', '✧w✧', '♥w♥', '︠uw ︠u', '(uwu)', 'OwO', 'owo', 'Owo', 'owO']
        res = message.replace("r", "w").replace("l", "w")
        await ctx.send(res + ' ' + random.choice(uwus))

    @commands.Cog.listener()
    async def on_message(self, message):
        # Regional-indicator reactions: F to pay respects, X to doubt.
        if message.content.lower() == "f":
            await message.add_reaction(u"\U0001F1EB")
        if message.content.lower() == "press x to doubt":
            await message.add_reaction(u"\U0001F1FD")
def setup(bot):
    # discord.py extension entry point: register the Fun cog on load.
    bot.add_cog(Fun(bot))
|
10,782 | 7e801fc8bdf3a42f097f38ee5303dd690f43a8f4 | import numpy as np
import yaml
import matplotlib.pyplot as plt
import scipy.signal
import glob
def plotJointState(path, test_joint):
    """Plot position, filtered velocity and acceleration for one 1-based
    joint of the 700 g-payload test run.

    Fix: yaml.safe_load — yaml.load without a Loader is unsafe and is
    rejected by PyYAML >= 6.
    """
    base = path['robotCalibration'] + 'test/test_with_700g_payload/'
    with open(base + 'jsposition.yaml') as f:
        joint_pos = np.array(yaml.safe_load(f))[test_joint - 1]
    with open(base + 'js_time.yaml') as f:
        time = yaml.safe_load(f)
    # Moving-average FIR filter; larger n gives a smoother velocity curve.
    n = 30
    b = [1.0 / n] * n
    a = 1
    pos_rad = np.radians(joint_pos)
    vel = np.gradient(pos_rad, time)
    vel_filtered = scipy.signal.lfilter(b, a, vel)
    acc = np.gradient(vel_filtered, time)
    plt.figure()
    plt.plot(time, joint_pos, '.r')
    plt.figure()
    plt.plot(time, vel_filtered)
    plt.figure()
    plt.plot(time, acc)
def plotTrajectoryFeedback(path, test_joint):
    """Plot desired vs actual position/velocity/acceleration and the
    position error for one 1-based joint of the 700 g-payload test.

    Fixes: yaml.safe_load (plain yaml.load is unsafe and needs a Loader
    on modern PyYAML); removed the unused jsposition/js_time loads and
    cut_i/cut_f leftovers.
    """
    base = path['robotCalibration'] + 'test/test_with_700g_payload/'

    def _load(name):
        # One small helper so every file goes through safe_load.
        with open(base + name) as f:
            return np.array(yaml.safe_load(f))

    col = test_joint - 1
    Jip_desired = _load('desired_position.yaml')[:, col]
    Jiv_desired = _load('desired_velocity.yaml')[:, col]
    Jia_desired = _load('desired_acceleration.yaml')[:, col]
    Jip_actual = _load('actual_position.yaml')[:, col]
    Jip_error = _load('error_position.yaml')[:, col]
    tf_time = _load('tf_time.yaml')

    # Numerical derivatives cross-check the planner-reported profiles.
    Jiv_desired_m = np.gradient(Jip_desired, tf_time)
    Jia_desired_m = np.gradient(Jiv_desired_m, tf_time)
    Jiv_actual = np.gradient(Jip_actual, tf_time)
    Jia_actual = np.gradient(Jiv_actual, tf_time)

    plt.figure()
    plt.plot(tf_time, Jip_desired, '.k')
    plt.plot(tf_time, Jip_actual, '.r')
    plt.figure()
    plt.plot(tf_time, Jiv_desired, 'k')
    plt.plot(tf_time, Jiv_actual, 'r')
    plt.plot(tf_time, Jiv_desired_m, 'b')
    plt.figure()
    plt.plot(tf_time, Jia_desired, 'k')
    plt.plot(tf_time, Jia_actual, 'r')
    plt.plot(tf_time, Jia_desired_m, 'b')
    plt.figure()
    plt.plot(tf_time, Jip_error, 'k')
def plotMultiTrajectory(path, test_joint, series_number='*'):
    """Print the steady-state repeatability error over a series of runs.

    *series_number* is a glob prefix of the form %y%m%d%H%M%S (default:
    every series).  For each matching run the joint's final actual
    position is taken as the settled position; the printed array is each
    run's deviation from the mean settled position.

    Fixes: yaml.safe_load, and the dead commented-out loads/plots were
    dropped.
    """
    test_dir = path['robotCalibration'] + 'test/'
    actual_paths = sorted(glob.glob(test_dir + series_number + '_actual_position.yaml'))

    final_positions = []
    for actual_path in actual_paths:
        with open(actual_path) as f:
            joint_positions = np.array(yaml.safe_load(f))[:, test_joint - 1]
        final_positions.append(joint_positions[-1])

    plt.figure()
    sys_error = np.array(final_positions) - np.mean(final_positions)
    print(sys_error)
def plotMultiJointState(path, test_joint, series_number='*'):
    """Overlay the joint-state position traces of every matching run for
    one 1-based joint.

    *series_number* is a %y%m%d%H%M%S glob prefix.  Fix: yaml.safe_load
    instead of the unsafe, PyYAML>=6-incompatible yaml.load.
    """
    test_dir = path['robotCalibration'] + 'test/'
    position_paths = sorted(glob.glob(test_dir + series_number + '_jsposition.yaml'))
    time_paths = sorted(glob.glob(test_dir + series_number + '_js_time.yaml'))

    plt.figure()
    for position_path, time_path in zip(position_paths, time_paths):
        with open(position_path) as f:
            positions = np.array(yaml.safe_load(f))[:, test_joint - 1]
        with open(time_path) as f:
            times = np.array(yaml.safe_load(f))
        plt.plot(times, positions, 'o')
if __name__ == "__main__":
    CONFIG = 'config.yaml'
    with open(CONFIG) as f:
        path = yaml.safe_load(f)   # safe_load: yaml.load needs a Loader on PyYAML >= 6
    # Joints are 1-based throughout this module; the original range(6)
    # passed test_joint=0, which silently plotted the *last* column and
    # never plotted joint 6.
    for test_joint in range(1, 7):
        plotMultiJointState(path, test_joint=test_joint, series_number='190807*')
    plt.show()
10,783 | 63d32511f0147afc902aad0e81fcbe9de3b63ade | """
大乐透和双色球随机选号程序
Version: 0.1
Author: 姚春敏
Date: 2021-08-18
"""
from random import randrange, randint, sample
# import tkinter
def display(balls):
    """
    Print the numbers in the list.
    """
    # Branches on the module-level game flag ``s`` set in main():
    #   s == 2 -> 大乐透 (5 red + 2 blue; separator before the last two)
    #   else   -> 双色球 (6 red + 1 blue; separator before the last one)
    if s == 2:
        for index, ball in enumerate(balls):
            if index == len(balls) - 2:
                print('|', end=' ')
            print('%02d' % ball, end=' ')
        print()
    else:
        for index, ball in enumerate(balls):
            if index == len(balls) - 1:
                print('|', end=' ')
            print('%02d' % ball, end=' ')
        print()
def random_select():
    """Randomly pick one ticket according to the module-level flag ``s``.

    s == 2 (大乐透): 5 sorted numbers from 1-35 plus 2 sorted from 1-12.
    otherwise (双色球): 6 sorted numbers from 1-32 plus 1 from 1-15.
    NOTE(review): real 双色球 pools are 1-33 red / 1-16 blue; the original
    ranges are kept unchanged here — confirm before correcting.

    Returns the red balls followed by the blue balls as one list.
    Fix: a single function-level return covers both branches (the
    original relied on branch-local flow with a commented-out return),
    and the dead commented-out selection loop was removed.
    """
    if s == 2:
        red_pool, red_count, blue_pool, blue_count = 36, 5, 13, 2
    else:
        red_pool, red_count, blue_pool, blue_count = 33, 6, 16, 1
    selected_balls = sorted(sample(range(1, red_pool), red_count))
    selected_balls += sorted(sample(range(1, blue_pool), blue_count))
    return selected_balls
def main():
    # Ask how many tickets to generate, then which game to play; ``s`` is
    # deliberately global because display()/random_select() branch on it.
    n = int(input('机选几注: '))
    global s
    s = int(input("请选择类型:1、双色球;2、大乐透"))
    for _ in range(n):
        display(random_select())
if __name__ == '__main__':
    # Script entry point: run the interactive ticket picker.
    main()
|
10,784 | d86919d7d0de021a542df878f78f5f47473b4fc6 | from token_auth.authentication import BaseTokenAuthBackend
from .models import Candidate
class CandidateTokenAuthBackend(BaseTokenAuthBackend):
    # Token-auth backend that resolves tokens against Candidate records.
    model_class = Candidate
|
10,785 | 747df4f70f9217b29a4dc43f8761b0678904bd7e | from item_category import ItemCategory
class BackstagePass(ItemCategory):
    """Quality rules for backstage-pass items."""

    def update_expired(self, item):
        # A pass is worthless once its sell-by date has passed.
        item.quality = 0

    def update_quality(self, item):
        # Value climbs as the date approaches: +1 normally, +2 within
        # 10 days, +3 within 5 days (same increment calls as before).
        increments = 1
        if item.sell_in <= 10:
            increments += 1
        if item.sell_in <= 5:
            increments += 1
        for _ in range(increments):
            self.increase_quality(item)
|
10,786 | 979ca3e3115370af0a0e04baf5a66c108af174c2 | def int_sp():
return map(int, input().split())
def li_int_sp():
    """Read one stdin line of whitespace-separated integers into a list."""
    return [int(token) for token in input().split()]
def trans_li_int_sp():
    # Read N lines of integers and return their transpose as a list of lists.
    # NOTE(review): depends on a global ``N`` that is never defined in this
    # file — confirm the caller is expected to set it before use.
    return list(map(list, (zip(*[li_int_sp() for _ in range(N)]))))
import pdb
import math
# Read the three values A B C from one stdin line.
ABC = li_int_sp()
# Reduce all three to their common GCD; each value takes value//gcd - 1 steps.
gcds = math.gcd(math.gcd(ABC[0], ABC[1]), ABC[2])
# (Removed a leftover pdb.set_trace() that halted the program at this point.)
output = sum(value // gcds - 1 for value in ABC)
print(output)
|
10,787 | 0ddded1a605c7d9a8a22de7f818c699236ac158e |
SEPARATOR = "------------------------"

# Three rows of ascending "1 2 3".
for _ in range(3):
    for value in range(1, 4):
        print(value, end=" ")
    print()
print(SEPARATOR)

# Three rows of descending "3 2 1".
for _ in range(3):
    for value in range(3, 0, -1):
        print(value, end=" ")
    print()
print(SEPARATOR)

# Row i repeats i+1 three times: "1 1 1", "2 2 2", "3 3 3".
for row in range(3):
    for _ in range(3):
        print(row + 1, end=" ")
    print()
print(SEPARATOR)

# Each character of the name except the last, printed three times per row.
name = "Naveen Kumar"
for index in range(len(name) - 1):
    for _ in range(3):
        print(name[index], end=" ")
    print()
print(SEPARATOR)
10,788 | df05b986a350a232e2287c8bd158323dd7e9d03f | class Solution:
def letterCasePermutation(self, S: str) -> List[str]:
S = S.lower()
n = len(S)
ans = []
def perm(i, res):
if i < n:
perm(i+1, res + S[i])
if S[i].islower(): perm(i+1, res + S[i].upper())
else:
ans.append(res)
perm(0,'')
return ans
# EOF #
|
10,789 | 5a2474543d5d2607392d728b8206b66003c89efb | import os
# Simple launcher: greet, then run the tic-tac-toe script in-process.
print("Hello World!")
# NOTE(review): exec-of-file runs the target inside this module's globals;
# confirm that is intended over ``import tictactoe`` or a subprocess.
exec(open("./tictactoe.py").read())
#exec(open("./rainbow.py").read())
#exec(open("./vuemeter.py").read())
10,790 | d1ff1bae4418429288c28e20478f7318696757d5 | # Generated by Django 2.2.1 on 2019-05-26 15:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``Profile.files`` to a FileField
    uploading into ``pdf/`` with a placeholder default of ``default.txt``."""

    dependencies = [
        ('users', '0003_profile_files'),
    ]
    operations = [
        migrations.AlterField(
            model_name='profile',
            name='files',
            field=models.FileField(default='default.txt', upload_to='pdf'),
        ),
    ]
|
10,791 | 082bc32f7fa00d1f07b2a97a56e1a6fdebce7cab | from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils import timezone
class Account(models.Model):
    """Per-user profile carrying the accumulated karma score."""

    # on_delete added explicitly: it matches the CASCADE used by every
    # other relation in this module (it was already the implicit default
    # here, and it is mandatory from Django 2.0 on).
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    karma = models.IntegerField(default=0)

    def __unicode__(self):
        return self.user.username
class Link(models.Model):
    """A submitted URL with voting score and ranking metadata."""

    url = models.URLField()
    title = models.CharField(max_length=128)
    # Submitting account; distinct from voters.
    creator = models.ForeignKey(Account, related_name='creator', on_delete=models.CASCADE)
    # Accounts that voted, with per-vote data on the Vote through-model.
    voters = models.ManyToManyField(Account, related_name='voters', through='Vote')
    score = models.IntegerField(default=0)
    post_time = models.DateTimeField(default=timezone.now, blank=True)
    view = models.IntegerField(default=0)
    # NOTE(review): no default — every Link must be created with an explicit
    # hot value; confirm the ranking code always supplies it.
    hot = models.FloatField()
    def __unicode__(self):
        return self.title
class Vote(models.Model):
    """Through-model joining an Account to a Link it voted on."""

    link = models.ForeignKey(Link, on_delete=models.CASCADE)
    account = models.ForeignKey(Account, on_delete=models.CASCADE)
    # Vote direction/weight; 0 by default.
    up = models.IntegerField(default=0)
    def __unicode__(self):
        return self.account.user.username + ': ' + self.link.title
|
10,792 | db8ff393a3593ae1b056d6e6bca027126dd467af | import re
import sys
from PyQt5.QtWidgets import QApplication, QDialog, QInputDialog
from PyQt5 import QtCore, QtGui, QtWidgets
from collections import *
import copy
"""
LOLCode Interpreter Class
"""
class Interpreter(QtCore.QObject):
    class Lexeme:
        """One token definition: a regex and the label(s) for its capture groups."""
        def __init__(self, regex, type):
            self.regex = regex  # pattern containing one capture group per label
            self.type = type    # list of labels, parallel to the groups
"""
Lexemes Definitions
Lexemes are defined in the Interpreter class.
Each lexeme contains a regex expression
and a list of labels that correspond to
the groups in the regex exression.
"""
LEXEMES = [
#Start and end delimiters of source code
Lexeme(r"(HAI)[ \t]*",
["Code Starting Delimiter"]),
Lexeme(r"(KTHXBYE)",
["Code Ending Delimiter"]),
#Input and Output
Lexeme(r"(GIMMEH)[ \t]+",
["Input"]),
Lexeme(r"(VISIBLE)[ \t]+",
["Output"]),
#Variable Declaration and Assignment
Lexeme(r"(I[ \t]+HAS[ \t]+A)[ \t]+",
["Variable Declaration"]),
Lexeme(r"(ITZ)[ \t]+",
["Declaration Assignment"]),
Lexeme(r"(R)[ \t]+",
["Value Assignment"]),
#Arithmetic Operations
Lexeme(r"(SUM[ \t]+OF)[ \t]+",
["Addition"]),
Lexeme(r"(DIFF[ \t]+OF)[ \t]+",
["Subtraction"]),
Lexeme(r"(PRODUKT[ \t]+OF)[ \t]+",
["Multiplication"]),
Lexeme(r"(QUOSHUNT[ \t]+OF)[ \t]+",
["Division"]),
Lexeme(r"(MOD[ \t]+OF)[ \t]+",
["Modulo"]),
Lexeme(r"(BIGGR[ \t]+OF)[ \t]+",
["Maximum"]),
Lexeme(r"(SMALLR[ \t]+OF)[ \t]+",
["Minimum"]),
#Boolean Operations
Lexeme(r"(BOTH[ \t]+OF)[ \t]+",
["AND Operator"]),
Lexeme(r"(EITHER[ \t]+OF)[ \t]+",
["OR Operator"]),
Lexeme(r"(WON[ \t]+OF)[ \t]+",
["XOR Operator"]),
Lexeme(r"(NOT)[ \t]+",
["NOT Operator"]),
Lexeme(r"(BOTH[ \t]+SAEM)[ \t]+",
["Equal Operator"]),
Lexeme(r"(DIFFRINT)[ \t]+",
["Not Equal Operator"]),
#Boolean Arity Operations
Lexeme(r"(ALL[ \t]+OF)[ \t]+",
["Infinite Arity AND Operator"]),
Lexeme(r"(ANY[ \t]+OF)[ \t]+",
["Infinite Arity OR Operator"]),
Lexeme(r"(SMOOSH)[ \t]+",
["Infinite Arity Concatenation"]),
Lexeme(r"(MKAY)[ \t]*",
["Infinite Arity Terminator"]),
#changed + to * in number of white spaces
Lexeme(r"(IS NOW A)[ \t]+",
["Type Cast Keyword"]),
Lexeme(r"(MAEK)[ \t]+",
["Type Cast Keyword 2"]),
Lexeme(r"(A)[ \t]+",
["A Delimiter"]),
#Connector
Lexeme(r"(AN)[ \t]+",
["Connector"]),
#Conditional Statements
Lexeme(r"(O[ \t]+RLY\?)[ \t]*",
["If Else Statement Init"]),
Lexeme(r"(YA[ \t]+RLY)[ \t]*",
["If Statement"]),
Lexeme(r"(NO[ \t]+WAI)[ \t]*",
["Else Statement"]),
Lexeme(r"(MEBBE)[ \t]+",
["Else If Statement"]),
Lexeme(r"(WTF\?)[ \t]*",
["Switch Statement Init"]),
Lexeme(r"(OMG)[ \t]+",
["Switch Statement Conditions"]),
Lexeme(r"(OMGWTF)[ \t]*",
["Switch Statement Default Condition"]),
Lexeme(r"(GTFO)[ \t]*",
["Case Terminator"]),
Lexeme(r"(OIC)[ \t]*",
["Conditional Statement Terminator"]),
Lexeme(r"(BTW)[ \t]+([^\n]*)",
["Single Line Comment Init", "Comment"]),
Lexeme(r"(OBTW)\s*([\s\S]*)\s+(TLDR)[ \t]*",
["Multiline Comment Init", "Comment", "Multiline Comment Terminator"]),
#Loop Statement
Lexeme(r"(IM IN YR)[ \t]+([a-zA-Z][a-zA-Z0-9_]*)[ \t]+",
["Loop Init", "Loop Name"]),
Lexeme(r"(UPPIN|NERFIN)[ \t]+(YR)[ \t]+",
["Loop Operation", "Loop YR Delimiter"]),
Lexeme(r"(TIL|WILE)",
["Loop Condition Delimiter"]),
Lexeme(r"(IM OUTTA YR)[ \t]+([a-zA-Z][a-zA-Z0-9_]*)[ \t]*",
["Loop Terminate", "Loop Name"]),
#Data Types
Lexeme("(\")([^\"]*)(\")",
["String Starting Delimiter", "YARN", "String Ending Delimiter"]),
Lexeme(r"(NOOB|TROOF|NUMBAR|NUMBR|YARN)[ \t]*",
["Data Type"]),
Lexeme(r"(WIN|FAIL)(?=[\s]+)",
["TROOF"]),
Lexeme(r"([a-zA-Z][a-zA-Z0-9_]*)[ \t]*",
["Variable Name"]),
Lexeme(r"(-?\d+\.\d*)[ \t]*",
["NUMBAR"]),
Lexeme(r"(-?\d+)[ \t]*",
["NUMBR"]),
#Other Characters
Lexeme(r"(!)[ \t]*",
["New Line Suppressor"]),
Lexeme(r"(\n)",
["New Line"]),
Lexeme(r"(,)\s*",
["Soft Line Break"])
]
sourceCode = deque([])
"""
Combined Regex expression
Each regex expression is joined using OR symbol in tokenizer.
"""
tokenizer = re.compile("|".join([lexeme.regex for lexeme in LEXEMES]))
"""
Types/labels
A list of labels for group in the combined regex expression
"""
types = sum([lexeme.type for lexeme in LEXEMES], [])
    def __init__(self, inp, gui):
        """Bind the interpreter to a GUI front-end and the raw source text."""
        # NOTE(review): super(self.__class__, ...) recurses if this class is
        # ever subclassed — confirm plain super(Interpreter, self) is safe.
        super(self.__class__, self).__init__(None)
        self.gui = gui
        self.inp = inp  # source code entered by user/ read from file
        self.lex_table = []  # (token_text, label) pairs produced by the lexer
        self.sym_table = {}  # variable name -> (value, lol_type)
"""
Lexer
Generates a lexical table from the input source code
"""
    def make_lex_table(self):
        """Tokenize ``self.inp`` into ``self.lex_table`` as (text, label) pairs."""
        self.lex_table = []
        for match in self.tokenizer.finditer(self.inp):
            # How many groups this particular lexeme captured.
            cnt = len(match.groups()) - match.groups().count(None)
            # The captured groups occupy the tail of the combined pattern's
            # group list; pair each with its parallel label from self.types.
            for i in range(match.lastindex-cnt,match.lastindex):
                key = match.group(i+1)
                type1 = self.types[i]
                if key in [None]: continue
                # Collapse internal whitespace except where it is significant.
                if type1 not in ["YARN", "Comment", "New Line"]: key = " ".join(key.split())
                self.lex_table.append((key, type1))
"""
Prefix Expression Evaluator
"""
def eval(self):
# lol types
lol_types = ["TROOF", "YARN", "NUMBR", "NUMBAR"]
# python types allowed in operations
arithmetic_types = [float, int]
bool_types = [bool]
equality_types = [str, float, bool, int]
is_bool = lambda x : x[1] == "TROOF"
is_float = lambda x : x[1] == "NUMBAR"
is_integer = lambda x : x[1] == "NUMBR"
is_connector = lambda x : x[1] == "Connector"
is_string = lambda x : x[1] == "YARN"
is_mkay = lambda x : x[0] == "MKAY"
is_operand = lambda x : is_float(x) or is_integer(x) or is_string(x) or is_bool(x)
is_binary_op = lambda x : x[0] in op["binary"]["arithmetic"] or x[0] in op["binary"]["logical"] or x[0] in op["binary"]["equality"]
is_infinite_op = lambda x : x[0] in op["infinite"]
is_unary_op = lambda x : x[0] in op["unary"]
is_operator = lambda x : is_binary_op(x) or is_infinite_op(x) or is_unary_op(x) or x[0] == "MAEK"
is_lol_type = lambda x : x[0] in lol_types
def to_string(s):
if type(s) == bool:
if s == True:
return "WIN"
else:
return "FAIL"
else:
return str(s)
def to_bool(s):
if type(s) == bool:
return s
else:
return True
def isfloat(s):
try:
t = float(s)
return True
except:
return False
def to_arithmetic(s, t = None):
if type(s) == str:
if s.isnumeric():
return int(s)
elif isfloat(s):
return float(s)
else:
None
elif type(s) == int or type(s) == float:
return s
elif type(s) == bool:
return int(s)
uncast_type = {
int : lambda x: (str(x), "NUMBR"),
float : lambda x: (str(x), "NUMBAR"),
bool : lambda x: ({ True : "WIN", False : "FAIL" } [x], "TROOF"),
str : lambda x: (str(x), "YARN"),
}
cast_type = {
"NUMBR" : int,
"NUMBAR" : float,
"TROOF" : lambda x: { "WIN" : True, "FAIL" : False } [x],
"YARN": str
}
uncast = lambda x: uncast_type[type(x)](x)
cast = lambda x: cast_type[x[1]](x[0])
op = {
# Binary operations
"binary" : {
"arithmetic" : {
'SUM OF' : lambda x,y : x + y,
'PRODUKT OF' : lambda x,y : x * y,
'QUOSHUNT OF': lambda x,y : x / y,
'DIFF OF' : lambda x,y : x - y,
'MOD OF' : lambda x,y : x % y,
'BIGGR OF' : lambda x,y : max(x,y),
'SMALLR OF' : lambda x,y : min(x,y)
},
"equality" : {
'BOTH SAEM' : lambda x,y : x == y,
'DIFFRINT' : lambda x,y : x != y
},
"logical" : {
'BOTH OF' : lambda x,y : x and y,
'EITHER OF' : lambda x,y : x or y,
'WON OF' : lambda x,y : (x or y) and (x != y)
}
},
# Infinite Arity Operatons
"infinite" : {
'SMOOSH' : lambda *a: "".join(map(str,a)),
'ALL OF' : lambda *a: all(map(bool,a)),
'ANY OF' : lambda *a: any(map(bool,a))
},
"unary" : {
'NOT' : lambda x: not x
}
}
if not is_operator(self.sourceCode[0]):
if self.sourceCode[0][1] == 'Variable Name': #literals
key = self.sourceCode.popleft()[0]
return (self.getVarValue(key), self.getVarType(key)) # variable name
else:
return self.sourceCode.popleft()
else:
stack = []
while not (len(stack) == 1 and is_operand(stack[0])):
token = self.sourceCode.popleft()
if token[1] == "Variable Name": stack.append((self.getVarValue(token[0]), self.getVarType(token[0])))
else: stack.append(token)
def is_binary_operation():
if len(stack) >= 4 and is_operand(stack[-3]) and is_operand(stack[-1]) and is_connector(stack[-2]) and is_binary_op(stack[-4]):
opc = stack[-4][0]
op1 = cast(stack[-3])
op2 = cast(stack[-1])
if opc in op["binary"]["arithmetic"]:
if type(op1) not in arithmetic_types: op1 = to_arithmetic(op1)
if type(op2) not in arithmetic_types: op2 = to_arithmetic(op2)
res = op["binary"]["arithmetic"][opc](op1, op2)
elif opc in op["binary"]["equality"]:
res = op["binary"]["equality"][opc](op1, op2)
elif opc in op["binary"]["logical"]:
if type(op1) not in bool_types: op1 = to_bool(op1)
if type(op2) not in bool_types: op2 = to_bool(op2)
res = op["binary"]["logical"][opc](op1, op2)
for i in range(4): stack.pop()
return uncast(res)
elif len(stack) >= 3 and is_operand(stack[-2]) and is_operand(stack[-1]) and is_binary_op(stack[-3]):
opc = stack[-3][0]
op1 = cast(stack[-2])
op2 = cast(stack[-1])
if opc in op["binary"]["arithmetic"]:
if type(op1) not in arithmetic_types: op1 = to_arithmetic(op1)
if type(op2) not in arithmetic_types: op2 = to_arithmetic(op2)
res = op["binary"]["arithmetic"][opc](op1, op2)
elif opc in op["binary"]["equality"]:
res = op["binary"]["equality"][opc](op1, op2)
elif opc in op["binary"]["logical"]:
if type(op1) not in bool_types: op1 = to_bool(op1)
if type(op2) not in bool_types: op2 = to_bool(op2)
res = op["binary"]["logical"][opc](op1, op2)
for i in range(3): stack.pop()
return uncast(res)
else:
return False
def is_infinite_operation():
operands = []
to_pop = 0
if len(stack) >= 3 and is_mkay(stack[-1]):
to_pop += 1 # will pop mkay
i = len(stack) - 2
while i > 0 and is_operand(stack[i]):
operands.append(cast(stack[i]))
to_pop += 1 # will pop operand
if is_connector(stack[i-1]):
to_pop += 1 # will pop connector
i -= 2
elif is_infinite_op(stack[i-1]):
to_pop += 1 # pop operator
operands.reverse()
res = op["infinite"][stack[i-1][0]](*operands)
for i in range(to_pop): stack.pop()
return uncast(res)
else:
i -= 1
elif len(stack) >= 2 and self.sourceCode[0][0] in ['\n', ','] and not is_operand(self.sourceCode[0]) and not is_connector(self.sourceCode[0]):
i = len(stack) - 1
while i > 0 and is_operand(stack[i]):
operands.append(cast(stack[i]))
to_pop += 1 # will pop operand
if is_connector(stack[i-1]):
to_pop += 1 # will pop connector
i -= 2
elif is_infinite_op(stack[i-1]):
to_pop += 1 # pop operator
operands.reverse()
res = op["infinite"][stack[i-1][0]](*operands)
for i in range(to_pop): stack.pop()
return uncast(res)
else:
i -= 1
return False
def is_maek_operation():
if len(stack) >= 4 and stack[-4][0] == "MAEK" and stack[-2][0] == "A" and is_lol_type(stack[-1]):
exp = cast(stack[-3])
if stack[-1][0] == "TROOF":
exp = to_bool(exp)
elif stack[-1][0] == "YARN":
exp = to_string(exp)
elif stack[-1][0] == "NUMBR" or stack[-1][0] == "NUMBAR":
exp = cast_type[stack[-1][0]](exp)
else:
return False
for i in range(4): stack.pop()
return uncast(exp)
def is_unary_operation():
if len(stack) >= 2 and is_unary_op(stack[-2]) and is_operand(stack[-1]):
res = op["unary"][stack[-2][0]](cast(stack[-1]));
for i in range(2): stack.pop()
return uncast(res)
else:
return False
while True:
valid = is_unary_operation()
if not valid: valid = is_binary_operation()
if not valid: valid = is_infinite_operation()
if not valid: valid = is_maek_operation()
if valid:
stack.append(valid)
else:
break
return stack[0]
"""
String Parser
interprets special characters in the string
"""
def parse_string(self, s):
return s.replace(":)","\n").replace(":>", "\t").replace(":o", "\g").replace(":\"", "\"").replace("::", ":")
"""
VISIBLE statement
"""
    def output_decl(self): # VISIBLE: print expressions until end of line
        """Evaluate and print expressions; '!' suppresses the trailing newline."""
        printText = ''
        # Concatenate every expression up to a line break, comma, or bang.
        while self.sourceCode[0][0] not in ['\n', ',', '!']: printText = printText + self.parse_string(str(self.eval()[0]))
        if self.sourceCode[0][0] == '!': self.sourceCode.popleft() #pop bang sign
        else: printText = printText + '\n'
        self.gui.printConsole(printText)
"""
GIMMEH statement
"""
    def input_decl(self):
        """Handle ``I HAS A <var> [ITZ <expr>]`` — variable declaration.

        NOTE(review): despite the name, this is declaration, not input
        (GIMMEH is handled by user_input); consider renaming.
        """
        key = self.sourceCode.popleft()[0]
        # Declared variables start out as untyped NOOB.
        self.addSymbol(key, "NOOB", None)
        if self.sourceCode[0][0] == "ITZ":
            self.sourceCode.popleft() #pops the ITZ keyword
            value, type = self.eval()
            self.addSymbol(key, value, type)
"""
<var> R <expression>
"""
def assignment(self):
# get varname
varname = self.sourceCode.popleft()[0]
# pop R
self.sourceCode.popleft()[0]
# eval expression and assign result
value, type = self.eval()
self.addSymbol(varname, value, type)
"""
Input from GUI
Calls the GUI to get user input
"""
    def user_input(self):
        """Handle ``GIMMEH <var>``: prompt via the GUI and store as a YARN."""
        varname = self.sourceCode.popleft()[0]
        # All console input is stored as a string (YARN).
        value, type = self.gui.showDialog(), "YARN"
        self.gui.printConsole('LOL>> Enter Input: ' + value + '\n')
        self.addSymbol(varname, value, type)
"""
OMGWTF statement
"""
def switch_case(self):
while self.sourceCode[0][0] in ['\n', ',']: self.sourceCode.popleft() #pops newline or comma
while self.sourceCode[0][0] != 'OIC':
if self.sourceCode.popleft()[0] == 'OMGWTF': #pops OMG or OMGWTF
self.sourceCode.popleft() #pop newline or comma
while self.sourceCode[0][0] != 'OIC':self.execute_keywords()
else:
key, type = self.eval()
self.sourceCode.popleft()
if key == self.getVarValue('IT'):
while self.sourceCode[0][0] not in ['OMG', 'OMGWTF', 'GTFO', 'OIC']:
self.execute_keywords()
while self.sourceCode[0][0] not in ['GTFO', 'OIC']:
if self.sourceCode[0][0] in ['OMG', 'OMGWTF']:
while self.sourceCode[0][0] not in ['\n', ',']: self.sourceCode.popleft()
self.sourceCode.popleft()
if self.sourceCode[0][0] not in ['OMG', 'OMGWTF', 'OIC']: self.execute_keywords()
break
else:
while self.sourceCode[0][0] not in ['OMG', 'OMGWTF', 'OIC']: self.sourceCode.popleft()
while self.sourceCode[0][0] != 'OIC': self.sourceCode.popleft()
self.sourceCode.popleft() #pops OIC
"""
O RLY? statment
"""
    def if_else(self):
        """Execute an ``O RLY?`` block: YA RLY / MEBBE / NO WAI / OIC."""
        while self.sourceCode[0][0] in ['\n', ',']: self.sourceCode.popleft() #pop newline or comma
        while self.sourceCode[0][0] != 'OIC':
            key = self.sourceCode.popleft()[0]
            # MEBBE carries its own condition; evaluating it via
            # execute_keywords stores the result in IT.
            if key == 'MEBBE': self.execute_keywords() #pop YA RLY/NO WAI/MEBBE , if MEBBE assign to IT
            else: self.sourceCode.popleft() #pop newline or comma
            if key == 'NO WAI': # It reached the else statement
                while self.sourceCode[0][0] != 'OIC': self.execute_keywords()
            elif self.getVarValue('IT') == 'WIN': # checks if the value of IT is true
                # Run this branch, then skip everything up to OIC.
                while self.sourceCode[0][0] not in ['NO WAI', 'MEBBE', 'OIC']: self.execute_keywords()
                while self.sourceCode[0][0] != 'OIC': self.sourceCode.popleft()
            else:
                # Condition false: skip ahead to the next branch keyword.
                while self.sourceCode[0][0] not in ['NO WAI', 'MEBBE', 'OIC']: self.sourceCode.popleft()
        self.sourceCode.popleft() # pop oic
"""
Loop Code Block
"""
def loop(self):
loop_name = self.sourceCode.popleft()[0]
if self.sourceCode[0][0] in ['UPPIN', 'NERFIN']:
loop_operation = self.sourceCode.popleft()[0]
self.sourceCode.popleft() # pop YR
loop_variable = self.sourceCode.popleft()[0] # pop variable
loop_condition = self.sourceCode.popleft()[0] # wile or til
else:
loop_operation = None
loop_body = deque([])
while not (self.sourceCode[0][0] == 'IM OUTTA YR' and self.sourceCode[1][0] == loop_name):
loop_body.appendleft(self.sourceCode.popleft()) # loop body creation
while True:
self.sourceCode.extendleft(copy.deepcopy(loop_body))
if loop_operation != None:
x = self.eval()
if loop_condition == "WILE" and x[0] == 'FAIL':
break
elif loop_condition == "TIL" and x[0] == 'WIN':
break
while not (self.sourceCode[0][0] == 'IM OUTTA YR' and self.sourceCode[1][0] == loop_name):
self.execute_keywords()
if loop_operation == "UPPIN":
self.addSymbol(loop_variable, int(self.getVarValue(loop_variable)) + 1, "NUMBR")
elif loop_operation == "NERFIN":
self.addSymbol(loop_variable, int(self.getVarValue(loop_variable)) - 1, "NUMBR")
while not (self.sourceCode[0][0] == 'IM OUTTA YR' and self.sourceCode[1][0] == loop_name):
self.sourceCode.popleft()
def type_cast(self):
def to_string(s):
if type(s) == bool:
if s == True:
return "WIN"
else:
return "FAIL"
else:
return str(s)
def to_bool(s):
if type(s) == bool:
return s
else:
return True
def isfloat(s):
try:
t = float(s)
return True
except:
return False
def to_arithmetic(s, t = None):
if type(s) == str:
if s.isnumeric():
return int(s)
elif isfloat(s):
return float(s)
else:
None
elif type(s) == int or type(s) == float:
return s
elif type(s) == bool:
return int(s)
uncast_type = {
int : lambda x: (str(x), "NUMBR"),
float : lambda x: (str(x), "NUMBAR"),
bool : lambda x: ({ True : "WIN", False : "FAIL" } [x], "TROOF"),
str : lambda x: (str(x), "YARN"),
}
cast_type = {
"NUMBR" : int,
"NUMBAR" : float,
"TROOF" : lambda x: { "WIN" : True, "FAIL" : False } [x],
"YARN": str
}
uncast = lambda x: uncast_type[type(x)](x)
cast = lambda x: cast_type[x[1]](x[0])
varname = self.sourceCode.popleft()[0]
self.sourceCode.popleft() # pop IS NOW A
vartype = self.sourceCode.popleft()[0] # pop data type
exp = cast((self.getVarValue(varname), self.getVarType(varname)))
if vartype == "TROOF":
exp = to_bool(exp)
elif vartype == "YARN":
exp = to_string(exp)
elif vartype == "NUMBR" or vartype == "NUMBAR":
exp = cast_type[vartype](exp)
self.addSymbol(varname, *uncast(exp))
"""
Mapping of statement_name to coressponding function
"""
keywords = {
'VISIBLE' : output_decl,
'I HAS A' : input_decl,
'GIMMEH' : user_input,
'WTF?' : switch_case,
'O RLY?' : if_else,
'IM IN YR' : loop
}
"""
Code Block
handles execution of code block
"""
    def execute_keywords(self):
        """Dispatch one statement from the head of the token stream."""
        if self.sourceCode[0][0] in ['\n', ',']:
            None
        elif self.sourceCode[0][0] in self.keywords.keys():
            # Statement keyword (VISIBLE, GIMMEH, ...): call its handler.
            self.keywords[self.sourceCode.popleft()[0]](self)
        elif self.sourceCode[0][0] in self.sym_table.keys() and self.sourceCode[1][0] == 'R':
            self.assignment()
        elif self.sourceCode[0][0] in self.sym_table.keys() and self.sourceCode[1][0] == 'IS NOW A':
            self.type_cast()
        else: #assignment of value to IT
            # Bare expression: its value lands in the implicit IT variable.
            value, type = self.eval()
            self.addSymbol('IT', value, type)
        self.sourceCode.popleft() #pop newline or comma
"""
Execution begins here
"""
    def run_program(self):
        """Strip comment/string-delimiter tokens, then execute HAI..KTHXBYE."""
        comments = ["Single Line Comment Init", "Comment","Multiline Comment Init", "Comment", "Multiline Comment Terminator", "String Starting Delimiter", "String Ending Delimiter"]
        self.sourceCode = deque(filter(lambda tup: tup[1] not in comments, self.lex_table)) # to allow popleft()
        # Skip anything before the program-start delimiter.
        while self.sourceCode[0][0] != 'HAI': self.sourceCode.popleft()
        self.sourceCode.popleft() #pops the keyword HAI
        self.sourceCode.popleft() #pops new line
        while self.sourceCode[0][0] != 'KTHXBYE':
            if self.sourceCode[0][0] not in ['\n', ',']: self.execute_keywords()
            else: self.sourceCode.popleft()
"""
Symbol Table Getter/Setters
"""
    def addSymbol(self, varname, value, type):
        """Create/update a variable and refresh the GUI's symbol-table view."""
        self.sym_table[varname] = (value, type)
        self.gui.updateSymbolTable()
    def getVarValue(self, varname):
        # Raw value of a declared variable (KeyError if undeclared).
        return self.sym_table[varname][0]
    def getVarType(self, varname):
        # LOLCode type tag of a declared variable.
        return self.sym_table[varname][1]
|
10,793 | 55ae62165f757289721c3796b3aaa2206ff3ad2a | #!/usr/bin/python3
import statistics as stats
class Encoder:
    """Encode values one-hot (categorical) or min-max scaled into [-1, 1]."""

    def __init__(self, unique_values, is_categorical):
        """Remember the observed value domain and how it should be encoded.

        Args:
            unique_values: the distinct raw values, in encoding order.
            is_categorical: True for one-hot output, False for continuous
                scaling into [-1, 1].
        """
        self.is_categorical = is_categorical
        self.is_binary = len(unique_values) == 2
        self.unique_values = unique_values
        self.min = min(unique_values)
        self.max = max(unique_values)

    def __get_stdev_band(self, unique_values):
        """Return [mean - stdev, mean + stdev] for a continuous domain."""
        center = stats.mean(unique_values)
        spread = stats.stdev(unique_values)
        return [center - spread, center + spread]

    def __normalize(self, value, lower_bound, upper_bound):
        """Min-max scale *value* into [lower_bound, upper_bound]."""
        source_span = self.max - self.min
        target_span = upper_bound - lower_bound
        return (value - self.min) / source_span * target_span + lower_bound

    def encode(self, value):
        """Encode *value* as a list of numbers.

        Binary categorical -> one 0/1 element; multi-class categorical ->
        a one-hot vector over ``unique_values``; continuous -> one float
        scaled into [-1, 1].
        """
        if not self.is_categorical:
            return [self.__normalize(value, -1, 1)]
        if self.is_binary:
            return [0 if value == self.unique_values[0] else 1]
        return [int(value == unique) for unique in self.unique_values]
10,794 | 7ccb6332f248137a4a0738e8581f376e782a6e26 | from alg3 import *
# Build a small labelled test graph and run the d1_alt2 routine from alg3.
g=Graph()
a1=g.addVertex(1)
a2=g.addVertex(2)
a3=g.addVertex(3)
a4=g.addVertex(4)
a5=g.addVertex(5)
a6=g.addVertex(6)
a7=g.addVertex(7)
# Path 1-2-3-4 labelled x1/y1/z1, plus edges into vertex 5 from 2, 6 and 7.
g.addEdge(a1,a2,'x1')
g.addEdge(a2,a3,'y1')
g.addEdge(a3,a4,'z1')
g.addEdge(a2,a5,'x1')
g.addEdge(a6,a5,'z1')
g.addEdge(a7,a5,'y1')
# NOTE(review): alg3_pre() presumably builds the auxiliary structures
# (F, Z, ...) consumed by the d1 variants — confirm against alg3.
(F,Z,H1,H2,flower1,double1,forest1,flower2,double2,forest2)=alg3_pre()
#d1(g,F,Z)
#d1_alt(g,F,Z)
d1_alt2(g,F,Z)
|
10,795 | 9e0d427bcfa735d90cf3719744fc505a849f1210 | import numpy as np
from scipy.optimize import leastsq, curve_fit
import matplotlib.pyplot as plt
def lorentzian(width, central, height, x):
    """Lorentzian line shape: FWHM *width*, centre *central*, scaled by *height*."""
    half_width = width / 2
    denominator = (x - central) ** 2 + half_width ** 2
    return height * width / (2 * np.pi) / denominator
def error_func(p, x, y):
    """Residual of the Lorentzian with parameters *p* against data *y*."""
    width, central, height = p
    return lorentzian(width, central, height, x) - y
def find_r_squared(f, p, x, y):
    """Coefficient of determination R^2 of model *f* with parameters *p* on (x, y)."""
    predicted = f(*p, x)
    residuals = y - predicted
    ss_res = np.sum(residuals ** 2)
    ss_tot = np.sum((y - np.mean(y)) ** 2)
    return 1 - ss_res / ss_tot
def compare_plot(x, y, p):
    """Show the data *y* and the Lorentzian with parameters *p* on one axis."""
    fy = lorentzian(*p, x)
    fig, ax = plt.subplots(1)
    ax.plot(x, y)
    ax.plot(x, fy)
    plt.show()  # blocks until the window is closed
def fit_lorentzian(scattering, wavelength, split=False):
    """Fit a Lorentzian line shape to a scattering spectrum.

    NaN samples are removed first. If the single-peak fit explains the data
    poorly (R^2 < 0.9), the spectrum is split at its minimum and each half
    is fitted separately; the best-scoring valid result wins.

    Args:
        scattering: intensity samples (may contain NaN).
        wavelength: wavelengths matching ``scattering``.
        split: True when called recursively on a half spectrum (disables
            further splitting).

    Returns:
        tuple: ``(params, r_squared)`` where ``params`` is
        ``[width, central, height]``; ``params`` is all-NaN and
        ``r_squared`` is 0.0 when fewer than 5 valid points remain.
    """
    # Drop NaNs from both arrays in lockstep.
    valid = ~np.isnan(scattering)
    scattering = scattering[valid]
    wavelength = wavelength[valid]
    # Not enough points for a 3-parameter fit. This early return previously
    # produced a 3-tuple containing FOUR NaNs, which crashed the two-value
    # unpacking done by the recursive callers below; it now matches the
    # normal (params, r_squared) shape with one NaN per parameter.
    if len(scattering) < 5:
        return np.array([np.nan, np.nan, np.nan]), 0.0
    # Peak position seeds the fit; the minimum is the candidate split point.
    max_sca = np.max(scattering)
    idx_max = np.argmax(scattering)
    idx_min = np.argmin(scattering)
    init_guess = [100, wavelength[idx_max], max_sca]
    result, cov_x, res_dict, mesg, ier = leastsq(
        error_func, init_guess, args=(wavelength, scattering), full_output=True)
    result[0] = abs(result[0])  # width enters the model squared, so fix its sign
    r_squared = find_r_squared(lorentzian, result, wavelength, scattering)
    # Poor fit: try each side of the minimum on its own (one level deep).
    if r_squared < 0.9 and split is False:
        wavelength_low = wavelength[:idx_min]
        wavelength_high = wavelength[idx_min:]
        scattering_low = scattering[:idx_min]
        scattering_high = scattering[idx_min:]
        result_low, r_squared_low = fit_lorentzian(scattering_low, wavelength_low, split=True)
        result_high, r_squared_high = fit_lorentzian(scattering_high, wavelength_high, split=True)
        if r_squared_high > r_squared and ~np.isnan(np.sum(result_high)):
            result = result_high
            r_squared = r_squared_high
        if r_squared_low > r_squared and ~np.isnan(np.sum(result_low)):
            result = result_low
            r_squared = r_squared_low
    # NOTE(review): plotting inside the fitting routine blocks the caller
    # until the window is closed — confirm this debug aid is intentional.
    compare_plot(wavelength, scattering, result)
    return result, r_squared
# Demo: synthesise a noise-free Lorentzian (width 50, centre 700, height 100),
# display it, then check that fit_lorentzian recovers the parameters.
wavelength = np.arange(570, 740, 10)
params = [50, 700, 100]
scattering = lorentzian(*params, wavelength)
fig, ax = plt.subplots(1)
ax.plot(wavelength, scattering)
plt.show()
result, r_squared = fit_lorentzian(scattering, wavelength)
10,796 | fcfe33b6b2984e07821d6a2f246d3eb65a7172f2 | from .deposit import Deposit
from .withdrawal import Withdrawal
from .transfer import Transfer
from .get_balances import GetBalances
|
10,797 | a95b3e1e0728e54f15872b4ba064cad6761d460d | import pytest
import brownie
@pytest.fixture(autouse=True, scope="module")
def set_approval(adam, beth, token):
    # Every test in this module runs with beth pre-approved to spend
    # 10**18 of adam's tokens.
    token.approve(beth, 10 ** 18, {"from": adam})


# NOTE(review): several test names below carry typos ("descreases",
# "transfrom", "tranfer") — renaming them would be a safe cleanup.
def test_transferfrom_descreases_owner_balance(adam, beth, token, accounts):
    # transferFrom moves tokens out of the owner's (adam's) balance.
    owner_initial_balance = token.balanceOf(adam)
    token.transferFrom(adam, accounts[2], 10 ** 9, {"from": beth})
    assert token.balanceOf(adam) == owner_initial_balance - 10 ** 9


def test_transferfrom_increases_recipient_balance(adam, beth, token, accounts):
    # ...and into the recipient's balance.
    recipient_initial_balance = token.balanceOf(accounts[2])
    token.transferFrom(adam, accounts[2], 10 ** 9, {"from": beth})
    assert token.balanceOf(accounts[2]) == recipient_initial_balance + 10 ** 9


def test_transferfrom_decreases_spender_allowance(adam, beth, token, accounts):
    # The spender's allowance shrinks by the transferred amount.
    token.transferFrom(adam, accounts[2], 10 ** 9, {"from": beth})
    assert token.allowance(adam, beth) == (10 ** 18) - (10 ** 9)


def test_transfrom_emits_tranfer_event(adam, beth, token, accounts):
    # A Transfer(owner, recipient, amount) event must be emitted.
    tx = token.transferFrom(adam, accounts[2], 10 ** 9, {"from": beth})
    assert "Transfer" in tx.events
    assert tx.events["Transfer"].values() == [adam, accounts[2], 10 ** 9]


def test_transfrom_returns_boolean(adam, beth, token, accounts):
    # ERC-20 transferFrom returns True on success.
    tx = token.transferFrom(adam, accounts[2], 10 ** 9, {"from": beth})
    assert tx.return_value is True


def test_transferfrom_reverts_due_to_insufficient_owner_balance(
    adam, beth, token, accounts
):
    # Allowance is raised above the owner's balance so the balance check,
    # not the allowance check, is what trips.
    token.approve(beth, 10 ** 24, {"from": adam})
    with brownie.reverts("dev: Insufficient balance"):
        token.transferFrom(adam, accounts[2], 10 ** 21 + 1, {"from": beth})


def test_transferfrom_reverts_due_to_insufficient_allowance(
    adam, beth, token, accounts
):
    # Spending one token more than the approved amount must revert.
    with brownie.reverts("dev: Insufficient allowance"):
        token.transferFrom(adam, accounts[2], 10 ** 18 + 1, {"from": beth})
|
10,798 | 4a3b711331c1cbaa6152f3a1900a4dd3bbe247e3 | # -*- coding: utf-8 -*-
"""
Created on Thu Dec 29 21:28:10 2016
downsample pacbio data by assign each read a probablity based on the read length
@author: Nan
"""
from Bio import SeqIO
import numpy as np
from scipy.stats import lognorm
import matplotlib.pyplot as plt
import random
random.seed(0)
# Target: enough read bases for 15x coverage of a 4,641,652 bp genome.
target_coverage = 15
genome_length = 4641652
target_read_length = genome_length*target_coverage
# First pass: collect every read length and mark all reads as unused.
records=SeqIO.parse("D:/Data/20161125/filtered_subreads_first1k.fastq", "fastq")
read_length = []
read_dict = {}
for record in records:
    read_length.append(len(record.seq))
    read_dict[record.id] = False
data = np.array(read_length)
read_length_sum = np.sum(data)
print read_length_sum
# Fraction of the total bases we want to keep.
ratio = float(target_read_length)/read_length_sum
# Model the read-length distribution as log-normal (location fixed at 0).
sigma, loc, scale = lognorm.fit(data, floc=0)
# print sigma, loc, scale
# print lognorm.mean(sigma, loc=loc, scale=scale)
read_length_count = 0
"""
y_value = lognorm.pdf(data, sigma, loc, scale)
background = np.median(y_value)
"""
# Central 50% interval of the fitted distribution; a uniform density over
# that interval is the reference each read's pdf value is compared to.
end_point = lognorm.interval(0.5, sigma, loc, scale)
print end_point
# calculate the homogenesous distribution as a comparable reference
background = 0.5/(end_point[1] - end_point[0])
print background
# Second pass: sweep the reads repeatedly, accepting each unused read with
# probability proportional to its length-density, until enough bases accrue.
record_dict = SeqIO.index("D:/Data/20161125/filtered_subreads_first1k.fastq", "fastq")
target_seq = []
i= 0
id_list = list(record_dict.keys())
seq_num = len(id_list)
while read_length_count <= target_read_length:
    print read_length_count
    if i == seq_num:
        i = 0
    record_id = id_list[i]
    record = record_dict[record_id]
    rand = random.random()
    if (not(read_dict[record.id])):
        #print "haha"
        print record.id
        dist_value = lognorm.pdf(len(record.seq), sigma, loc, scale)
        if 0<=rand<= ratio*dist_value/background:
            # print ratio*dist_value/background
            target_seq.append(record)
            read_dict[record.id] = True
            read_length_count += len(record.seq)
    i += 1
SeqIO.write(target_seq, "D:/Data/20161229/target.fastq", "fastq")
"""
x_fit = np.linspace(data.min(),data.max(),100)
pdf_fitted = lognorm.pdf(x_fit, sigma, loc, scale)
print lognorm.pdf(10000, sigma, loc, scale)
plt.plot(x_fit, pdf_fitted)
plt.show()
""" |
10,799 | 7f46abbaa67f5f458fbb5ecb73c6ddbc37bfd11e | from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileRequired, FileAllowed
from wtforms import PasswordField, BooleanField, SubmitField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired
class PhotoForm(FlaskForm):
    """Upload form accepting one required jpg/png image."""
    # FileRequired rejects empty submissions; FileAllowed whitelists extensions.
    photo = FileField('Add image', validators=[FileRequired(), FileAllowed(['jpg', 'png'], 'Images(png, jpg) only')])
    submit = SubmitField('Submit image')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.