index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
76,902 | jhugon/lariatPionAbs | refs/heads/master | /plotCompareReco.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
import sys
if __name__ == "__main__":
cuts = ""
cuts += "*(nTracks == 1)"
cuts += "*( iBestMatch >= 0)" # primary Track found
cosmicCuts = cuts
cosmicCuts += "*((!isMC) || (trueHitCosmic1 && trueHitCosmic2) || (trueHitCosmic3 && trueHitCosmic4))"
cosmicCuts += "*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)" # only angles that match MC
cosmicPhiGeq0Cuts = cosmicCuts + "*(primTrkStartPhi >= 0.)"
cosmicPhiLt0Cuts = cosmicCuts + "*(primTrkStartPhi < 0.)"
beamCuts = "*pzWeight"+cuts
beamPionCuts = beamCuts + "*((((!isMC) && pWC > 100 && pWC < 1100) || (isMC && trueStartMom > 100 && trueStartMom < 1100)) && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) < 5e4))" + "*(primTrkLength > 85.)"
beamProtonCuts = beamCuts + "*((((!isMC) && pWC > 100 && pWC < 1100) || (isMC && trueStartMom > 100 && trueStartMom < 1100)) && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) > 7e5))" + "*(primTrkLength < 60.)"
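# Note on the mass-squared cuts above: for a beamline particle with momentum pWC
# [MeV/c] and time of flight firstTOF [ns], m^2 = pWC^2*((c*firstTOF/L)^2 - 1),
# so 0.00201052122 is presumably (c/L)^2 with c = 29.9792458 cm/ns, i.e. a flight
# path L = c/sqrt(0.00201052122) ~ 6.7 m (an inference from the constant, not
# stated here). m^2 < 5e4 MeV^2 keeps pi/mu/e candidates; m^2 > 7e5 MeV^2 keeps protons.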
hitCuts = "*(primTrkXs > 3. && primTrkXs < 46. && primTrkYs < 18. && primTrkYs > -18. && primTrkZs > 3. && primTrkZs < 87.)"
cosmicHitCuts = hitCuts
beamHitCuts = hitCuts+"*(primTrkZs > 5. && primTrkZs < 10.)"
beamProtonHitCuts = hitCuts+"*(primTrkZs > 2. && primTrkZs < 6.)"
logy = True
scaleFactor = 0.066
c = root.TCanvas()
NMAX=1000000000
#NMAX=100
baseDir="/scratch/jhugon/"
baseDir=""
########################################################
## Beam Pions Definitions ##############################
########################################################
fileConfigs = [
{
'fn': baseDir+"cosmicBeamData_v2/new/cosmicAna_beam_Pos_RunII_current100_v02_a.root",
'addFriend': ["friend", baseDir+"cosmicBeamData_v2/new/friendTrees/cosmicAna_beam_Pos_RunII_current100_v02_a.root"],
'name': "BeamRunIIP100A_PiMuE",
'title': "Run II Beam +100 A a #pi/#mu/e",
'caption': "Run II Beam +100 A a #pi/#mu/e",
'isData': True,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_pip_v6.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_pip_v6.root"],
'name': "BeamMC_pip",
'title': "Beam #pi MC",
'caption': "Beam #pi MC",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_pip_presmear10_v6.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_pip_presmear10_v6.root"],
'name': "BeamMC_pip_presmear10",
'title': "Beam #pi MC 10% Smearing",
'caption': "Beam #pi MC 10% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
{
'fn': baseDir+"caloAmpFiles/CosmicAna_data_Pos_RunII_current100_a_caloAmp.root",
'addFriend': ["friend", baseDir+"caloAmpFiles/friendTrees/CosmicAna_data_Pos_RunII_current100_a_caloAmp.root"],
'name': "BeamRunIIP100A_PiMuE_CaloAmp",
'title': "Run II Beam +100 A a #pi/#mu/e Amp",
'caption': "Run II Beam +100 A a #pi/#mu/e Amp",
'isData': True,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
{
'fn': baseDir+"caloAmpFiles/CosmicAna_pip_flat_caloAmp.root",
'addFriend': ["friend", baseDir+"caloAmpFiles/friendTrees/CosmicAna_pip_flat_caloAmp.root"],
'name': "BeamMC_pip_CaloAmp",
'title': "Beam #pi MC Amp",
'caption': "Beam #pi MC Amp",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
m2SF = 1.
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,1.,2.5],
'var': "primTrkdEdxs",
'cuts': "1",
'normalize': True,
},
{
'name': "pWC",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Events / bin",
'binning': [40,100,1100],
'var': "(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkLength",
'xtitle': "Primary Track Length [cm]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "primTrkLength",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkKinInteract",
'xtitle': "Interaction Kinetic Energy [MeV]",
'ytitle': "Events / bin",
'binning': [50,0,800],
'var': "primTrkKinInteract",
'cuts': "1",
'normalize': True,
},
{
'name': "beamlineMass",
'xtitle': "Beamline Mass Squared [1000#times (MeV^{2})]",
'ytitle': "Events / bin",
'binning': [100,-5e5*m2SF,2e6*m2SF],
'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)",
'cuts': "1",
#'normalize': True,
'logy': True,
'drawvlines':[105.65**2*m2SF,139.6**2*m2SF,493.677**2*m2SF,938.272046**2*m2SF],
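# reference lines at m^2 for mu (105.65), pi (139.6), K (493.677), p (938.272), masses in MeV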
},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_PiMuE_",nMax=NMAX)
histConfigs = [
{
'name': "primTrkdEdxsVbeamlineMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,300,1100,50,1.,2.5],
'var': "primTrkdEdxs:(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
},
{
'name': "primTrkdEdxsVResRange",
'xtitle': "Residual Range [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,100,50,1.,2.5],
'var': "primTrkdEdxs:primTrkResRanges",
'cuts': "1",
},
{
'name': "primTrkdEdxsVRangeSoFar",
'xtitle': "Track Distance from Start [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,100,50,1.,2.5],
'var': "primTrkdEdxs:primTrkRangeSoFars",
'cuts': "1",
},
{
'name': "primTrkLengthVkinWCInTPC",
'xtitle': "Kinetic Energy at TPC Start [MeV]",
'ytitle': "Primary TPC Track Length [cm]",
'binning': [50,0,600,50,0,100],
'var': "primTrkLength:kinWCInTPC",
'cuts': "1",
},
#{
# 'name': "beamline_TOFVMom",
# 'xtitle': "Beamline Momentum [MeV/c]",
# 'ytitle': "Time Of Flight [ns]",
# 'binning': [100,0,2000,100,0,100],
# 'var': "firstTOF:pWC",
# 'cuts': "1",
# 'normalize': True,
#},
#{
# 'name': "beamline_TOFVMom",
# 'xtitle': "Beamline Momentum [MeV/c]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [100,100,1100,50,1,3.5],
# 'var': "primTrkdEdxs:pWC",
# 'cuts': "1",
#},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_PiMuE_",nMax=NMAX)
########################################################
## Beam Protons Definitions ############################
########################################################
fileConfigs = [
{
'fn': baseDir+"cosmicBeamData_v2/new/cosmicAna_beam_Pos_RunII_current100_v02_a.root",
'addFriend': ["friend", baseDir+"cosmicBeamData_v2/new/friendTrees/cosmicAna_beam_Pos_RunII_current100_v02_a.root"],
'name': "BeamRunIIP100A_Proton",
'title': "Run II Beam +100 A a p",
'caption': "Run II Beam +100 A a p",
'isData': True,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_lariat_PiAbsAndChEx_flat_p_v5.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_v5.root"],
'name': "BeamMC_pip",
'title': "Beam p MC",
'caption': "Beam p MC",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/newv5/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear30_v5.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/newv5/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear30_v5.root"],
'name': "BeamMC_p_presmear30",
'title': "Beam p MC 30% Smearing",
'caption': "Beam p MC 30% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
{
'fn': baseDir+"caloAmpFiles/CosmicAna_data_Pos_RunII_current100_a_caloAmp.root",
'addFriend': ["friend", baseDir+"caloAmpFiles/friendTrees/CosmicAna_data_Pos_RunII_current100_a_caloAmp.root"],
'name': "BeamRunIIP100Aa_Proton_CaloAmp",
'title': "Run II Beam +100 A a p Amp",
'caption': "Run II Beam +100 A a p Amp",
'isData': True,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
{
'fn': baseDir+"caloAmpFiles/CosmicAna_p_flat_caloAmp.root",
'addFriend': ["friend", baseDir+"caloAmpFiles/friendTrees/CosmicAna_p_flat_caloAmp.root"],
'name': "BeamMC_pip_CaloAmp",
'title': "Beam p MC Amp",
'caption': "Beam p MC Amp",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,0,10.],
'var': "primTrkdEdxs",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkdEdxs_zoom4",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,3,8.],
'var': "primTrkdEdxs",
'cuts': "1",
'normalize': True,
},
{
'name': "pWC",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Events / bin",
'binning': [40,0,2000],
'var': "(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkKinInteract",
'xtitle': "Interaction Kinetic Energy [MeV]",
'ytitle': "Events / bin",
'binning': [50,0,800],
'var': "primTrkKinInteractProton",
'cuts': "1",
'normalize': True,
},
#{
# 'name': "beamlineMass",
# 'xtitle': "Beamline Mass Squared [1000#times (MeV^{2})]",
# 'ytitle': "Events / bin",
# 'binning': [100,-5e5*m2SF,2e6*m2SF],
# 'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)",
# 'cuts': "1",
# #'normalize': True,
# 'logy': True,
# 'drawvlines':[105.65**2*m2SF,139.6**2*m2SF,493.677**2*m2SF,938.272046**2*m2SF],
#},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_P_",nMax=NMAX)
histConfigs = [
{
'name': "primTrkdEdxsVbeamlineMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,300,1100,100,0.,10.],
'var': "primTrkdEdxs:(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
},
{
'name': "beamline_TOFVMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Time of Flight [ns]",
'binning': [100,100,1100,100,0,100],
'var': "firstTOF:pWC",
'cuts': "1",
},
{
'name': "primTrkdEdxsVResRange",
'xtitle': "Residual Range [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,100,50,1.,2.5],
'var': "primTrkdEdxs:primTrkResRanges",
'cuts': "1",
},
{
'name': "primTrkLengthVkinWCInTPCProton",
'xtitle': "Kinetic Energy at TPC Start [MeV]",
'ytitle': "Primary TPC Track Length [cm]",
'binning': [50,0,600,50,0,100],
'var': "primTrkLength:kinWCInTPCProton",
'cuts': "1",
},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_P_",nMax=NMAX)
########################################################
## Cosmics Definitions #################################
########################################################
fileConfigs = [
{
'fn': [baseDir+"cosmicData/CosmicAna_RIIP100_64a_v01.root"],
'name': "CosmicsRunIIPos100a",
'title': "Run II +100 A Cosmics a",
'caption': "Run II +100 A Cosmics a",
'isData': True,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_v04.root",
'name': "CosmicMC",
'title': "Cosmic MC",
'caption': "Cosmic MC",
'isData': False,
},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing20_v01.root",
# 'name': "CosmicMC_presmear20perc",
# 'title': "Cosmic MC Pre-smear 20% ",
# 'caption': "Cosmic MC Pre-smear 20%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing70_v01.root",
# 'name': "CosmicMC_presmear70perc",
# 'title': "Cosmic MC Pre-smear 70% ",
# 'caption': "Cosmic MC Pre-smear 70%",
# 'isData': False,
#},
{
'fn': [baseDir+"caloAmpFiles/CosmicAna_cosmics_data_Pos_RunII_current100_a_caloAmp.root"],
'name': "CosmicsRunIIPos100aAmp",
'title': "Run II +100 A Cosmics a Amp",
'caption': "Run II +100 A Cosmics a Amp",
'isData': True,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,1.,3.5],
#'var': "primTrkdEdxs*((1.05-1.)*isMC + 1.)",
'var': "primTrkdEdxs",
'cuts': "1"+cosmicPhiGeq0Cuts,
'normalize': True,
'caption':"Cosmics #phi #geq 0",
},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareReco_Cosmic_phiGeq0_",nMax=NMAX)
fileConfigs = [
{
'fn': [baseDir+"cosmicData/CosmicAna_RIIP100_64a_v01.root"],
'name': "CosmicsRunIIPos100a",
'title': "Run II +100 A Cosmics a",
'caption': "Run II +100 A Cosmics a",
'isData': True,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_v04.root",
'name': "CosmicMC",
'title': "Cosmic MC",
'caption': "Cosmic MC",
'isData': False,
},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing20_v01.root",
# 'name': "CosmicMC_presmear20perc",
# 'title': "Cosmic MC Pre-smear 20% ",
# 'caption': "Cosmic MC Pre-smear 20%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing70_v01.root",
# 'name': "CosmicMC_presmear70perc",
# 'title': "Cosmic MC Pre-smear 70% ",
# 'caption': "Cosmic MC Pre-smear 70%",
# 'isData': False,
#},
{
'fn': [baseDir+"caloAmpFiles/CosmicAna_cosmics_data_Pos_RunII_current100_a_caloAmp.root"],
'name': "CosmicsRunIIPos100aAmp",
'title': "Run II +100 A Cosmics a Amp",
'caption': "Run II +100 A Cosmics a Amp",
'isData': True,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,1.,3.5],
#'var': "primTrkdEdxs*((0.91-1.)*isMC + 1.)",
'var': "primTrkdEdxs",
'cuts': "1"+cosmicPhiLt0Cuts,
'normalize': True,
'caption':"Cosmics #phi < 0",
},
{
'name': "primTrkPitches",
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0.3,1.5],
'var': "primTrkPitches",
'cuts': "1"+cosmicPhiLt0Cuts,
'caption':"Cosmics #phi < 0",
#'normalize': True,
'logy': True,
},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareReco_Cosmic_phiLt0_",nMax=NMAX)
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,903 | jhugon/lariatPionAbs | refs/heads/master | /plotWires.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
import sys
import math
import numpy
import matplotlib.pyplot as mpl
import matplotlib.patches as patches
import matplotlib.lines as lines
import matplotlib.colors
import matplotlib.gridspec as gridspec
def getHitName(wireName,start=True):
firstPart = wireName.split("_")[0]
if start:
return wireName.replace(firstPart,"wireHitStarts")
else:
return wireName.replace(firstPart,"wireHitEnds")
def getBranchNames(tree,branchPrefix,branchSuffix):
branchNames = []
for branch in tree.GetListOfBranches():
branchName = branch.GetName()
if branchPrefix == branchName[:len(branchPrefix)] and branchSuffix == branchName[-1*len(branchSuffix):]:
branchNames.append(branchName)
return branchNames
def makeWireHists(tree,maxEvents,cutFunc,nBefore=150,nAfter=150,yMin=-400,yMax=400,nBins=800):
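# Builds "persistence" histograms: whenever a wire's peak significance (max
# amplitude / RMS) exceeds 8, a window of nBefore+nAfter ticks around the
# waveform maximum is stacked into a 2D (time offset, amplitude) histogram,
# both as-is and normalized to the peak, for raw and deconvolved signals.
# Hit start/end ticks are collected relative to the maximum. One set of
# outputs per plane suffix: "C" (collection), then "I" (induction).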
nSamples = 4096
dataArray = numpy.zeros(nSamples)
rawDataArray = numpy.zeros(nSamples)
arange = numpy.arange(nBefore+nAfter,dtype="float64") - nBefore
xedges = None
yedges = None
yedgesNorm = None
yedgesRaw = None
yedgesNormRaw = None
rawHists = []
deconvHists = []
rawAtDeconvHists = []
rawHistNorms = []
deconvHistNorms = []
rawAtDeconvHistNorms = []
hitStartsHists = []
hitEndsHists = []
allHitStarts = []
allHitStartsRaw = []
allHitEnds = []
allHitEndsRaw = []
for suffix in ["C","I"]:
wireBranchNames = getBranchNames(tree,"wireData",suffix)
rawWireBranchNames = getBranchNames(tree,"rawWireData",suffix)
assert(len(wireBranchNames)==len(rawWireBranchNames))
nEvents = min(tree.GetEntries(),maxEvents)
rawHist = None
deconvHist = None
rawAtDeconvHist = None
rawHistNorm = None
deconvHistNorm = None
rawAtDeconvHistNorm = None
hitStartsHist = None
hitEndsHist = None
hitStarts = []
hitStartsRaw = []
hitEnds = []
hitEndsRaw = []
for iEvent in range(nEvents):
tree.GetEntry(iEvent)
if not cutFunc(tree):
continue
for iWire in range(len(wireBranchNames)):
dataArray[:] = 0
rawDataArray[:] = 0
wireData = getattr(tree,wireBranchNames[iWire])
rawWireData = getattr(tree,rawWireBranchNames[iWire])
for i in range(wireData.size()):
dataArray[i] = wireData[i]
for i in range(rawWireData.size()):
rawDataArray[i] = rawWireData[i]
signif = numpy.max(dataArray) / numpy.std(dataArray)
signifRaw = numpy.max(rawDataArray) / numpy.std(rawDataArray)
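# peak significance: maximum amplitude over the RMS of the whole trace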
if signifRaw > 8.:
iMax = numpy.argmax(rawDataArray)
if iMax > nBefore and iMax < (nSamples - nAfter):
iStart = max(iMax-nBefore,0)
iEnd = min(iMax+nAfter,nSamples)
data = rawDataArray[iStart:iEnd]
hist, xedges, yedgesRaw = numpy.histogram2d(arange,data,bins=[nBefore+nAfter,int(yMax-yMin)],range=[[-nBefore,nAfter],[yMin,yMax]])
rawMax = numpy.max(data)
data /= rawMax
histNorm, xedges, yedgesNormRaw = numpy.histogram2d(arange,data,bins=[nBefore+nAfter,int(yMax-yMin)],range=[[-nBefore,nAfter],[-2.,2.]])
if rawHist is None:
rawHist = hist
rawHistNorm = histNorm
else:
rawHist += hist
rawHistNorm += histNorm
tmpHitStarts = numpy.array(getattr(tree,getHitName(rawWireBranchNames[iWire],start=True)))-iStart-nBefore
tmpHitEnds = numpy.array(getattr(tree,getHitName(rawWireBranchNames[iWire],start=False)))-iStart-nBefore
hitStartsRaw.extend(tmpHitStarts)
hitEndsRaw.extend(tmpHitEnds)
if signif > 8.:
iMax = numpy.argmax(dataArray)
if iMax > nBefore and iMax < (nSamples - nAfter):
iStart = max(iMax-nBefore,0)
iEnd = min(iMax+nAfter,nSamples)
data = dataArray[iStart:iEnd]
rawData = rawDataArray[iStart:iEnd]
hist, xedges, yedges = numpy.histogram2d(arange,data,bins=[nBefore+nAfter,nBins],range=[[-nBefore,nAfter],[yMin,yMax]])
histRaw, xedges, yedges = numpy.histogram2d(arange,rawData,bins=[nBefore+nAfter,nBins],range=[[-nBefore,nAfter],[yMin,yMax]])
deconvMax = numpy.max(data)
rawMax = numpy.max(rawData)
data /= deconvMax
rawData /= rawMax
histNorm, xedges, yedgesNorm = numpy.histogram2d(arange,data,bins=[nBefore+nAfter,nBins],range=[[-nBefore,nAfter],[-2.,2.]])
histRawNorm, xedges, yedgesNorm = numpy.histogram2d(arange,rawData,bins=[nBefore+nAfter,nBins],range=[[-nBefore,nAfter],[-2.,2.]])
if deconvHist is None:
deconvHist = hist
rawAtDeconvHist = histRaw
deconvHistNorm = histNorm
rawAtDeconvHistNorm = histRawNorm
else:
deconvHist += hist
rawAtDeconvHist += histRaw
deconvHistNorm += histNorm
rawAtDeconvHistNorm += histRawNorm
tmpHitStarts = numpy.array(getattr(tree,getHitName(wireBranchNames[iWire],start=True)))-iStart-nBefore
tmpHitEnds = numpy.array(getattr(tree,getHitName(wireBranchNames[iWire],start=False)))-iStart-nBefore
hitStarts.extend(tmpHitStarts)
hitEnds.extend(tmpHitEnds)
hist, xedges, yedges = numpy.histogram2d(tmpHitStarts,deconvMax*numpy.ones(len(tmpHitStarts)),bins=[nBefore+nAfter,nBins],range=[[-nBefore,nAfter],[yMin,yMax]])
if hitStartsHist is None:
hitStartsHist = hist
else:
hitStartsHist += hist
hist, xedges, yedges = numpy.histogram2d(tmpHitEnds,deconvMax*numpy.ones(len(tmpHitEnds)),bins=[nBefore+nAfter,nBins],range=[[-nBefore,nAfter],[yMin,yMax]])
if hitEndsHist is None:
hitEndsHist = hist
else:
hitEndsHist += hist
hitStarts = numpy.array(hitStarts)
hitEnds = numpy.array(hitEnds)
hitStartsRaw = numpy.array(hitStartsRaw)
hitEndsRaw = numpy.array(hitEndsRaw)
rawHists.append(rawHist)
deconvHists.append(deconvHist)
rawAtDeconvHists.append(rawAtDeconvHist)
rawHistNorms.append(rawHistNorm)
deconvHistNorms.append(deconvHistNorm)
rawAtDeconvHistNorms.append(rawAtDeconvHistNorm)
hitStartsHists.append(hitStartsHist)
hitEndsHists.append(hitEndsHist)
allHitStarts.append(hitStarts)
allHitStartsRaw.append(hitStartsRaw)
allHitEnds.append(hitEnds)
allHitEndsRaw.append(hitEndsRaw)
return rawHists, rawHistNorms, deconvHists, deconvHistNorms, rawAtDeconvHists, rawAtDeconvHistNorms, xedges, yedges, yedgesNorm, yedgesRaw, yedgesNormRaw, allHitStarts, allHitEnds, allHitStartsRaw, allHitEndsRaw, hitStartsHists, hitEndsHists
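# Minimal illustrative sketch (hypothetical helper, not called anywhere) of the
# persistence-histogram technique makeWireHists uses: stack a fixed window
# around each waveform's maximum into one 2D (time-offset, amplitude) histogram.
def _persistenceHistSketch(waveforms, nBefore=150, nAfter=150, yMin=-400, yMax=400, nBins=800):
    offsets = numpy.arange(nBefore + nAfter, dtype="float64") - nBefore
    total = None
    for w in waveforms:
        iMax = numpy.argmax(w)
        if iMax <= nBefore or iMax >= len(w) - nAfter:  # keep the full window in range
            continue
        window = w[iMax - nBefore:iMax + nAfter]
        h, xed, yed = numpy.histogram2d(offsets, window, bins=[nBefore + nAfter, nBins], range=[[-nBefore, nAfter], [yMin, yMax]])
        total = h if total is None else total + h
    return total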
def makeWireHistsAndPkl(filePrefix, tree, maxEvents, cutFunc, **kargs):
    fn = "{0}_{1:d}.pkl".format(filePrefix, maxEvents)
    result = None
    try:
        with open(fn, 'rb') as infile:
            result = cPickle.load(infile)
    except IOError:
        result = makeWireHists(tree, maxEvents, cutFunc, **kargs)
        with open(fn, 'wb') as outfile:
            cPickle.dump(result, outfile)
    return result
def justDraw(hist,hitStarts,hitEnds,xedges,yedges,fn,xMin,xMax,yMin,yMax,xLabel,yLabel,title,labels=[],compare=False):
gs = {'height_ratios':[4,1],'hspace':0}
fig, (ax1,ax2) = mpl.subplots(nrows=2,sharex=True,gridspec_kw=gs)
patchList = []
if compare:
if len(hist) != len(labels):
raise Exception("Length of hist doesn't equal length of labels. Maybe you forgot to add labels")
if len(hist) != len(hitStarts):
raise Exception("Length of hist doesn't equal length of hitStarts")
if len(hist) != len(hitEnds):
raise Exception("Length of hist doesn't equal length of hitEnds")
transparent_cmaps = []
for cmap in [mpl.cm.Greens,mpl.cm.Blues,mpl.cm.Reds,mpl.cm.Purples,mpl.cm.Oranges]:
frac_transparent = 0.5
cmap_colors = cmap(numpy.arange(cmap.N))
cmap_colors[:int(frac_transparent*cmap.N),-1] = numpy.linspace(0,1,int(frac_transparent*cmap.N)) # bottom frac linearly increases opacity
transparent_cmap = matplotlib.colors.ListedColormap(cmap_colors)
transparent_cmaps.append(transparent_cmap)
colors = ['g','b','r','purple','orange']
for h, xed, yed, starts, ends, label, t_cmap, col in zip(hist, xedges , yedges, hitStarts, hitEnds, labels, transparent_cmaps[:len(hist)], colors[:len(hist)]):
if h is None:
print "Error: element of hist is None, no events passed amplitude cut for: ",fn," label: ",label
else:
histToPlot = numpy.array(h)
histToPlot[histToPlot == 0.] = 0.5
x, y = numpy.meshgrid(xed, yed)
norm = matplotlib.colors.LogNorm(vmin=0.5, vmax=histToPlot.max())
p = ax1.pcolormesh(x,y,histToPlot.T,norm=norm,cmap=t_cmap)
ax2.hist(starts,range=[xMin,xMax],bins=100,normed=True,histtype="step",color=col)
ax2.hist(ends,range=[xMin,xMax],bins=100,normed=True,histtype="step",color=col,ls=':')
patchList.append(
patches.Patch(color=col, label=label)
)
else:
if hist is None:
print "Error: hist is None, no events passed amplitude cut for: ",fn
else:
histToPlot = numpy.array(hist)
histToPlot[histToPlot == 0.] = 0.5
x, y = numpy.meshgrid(xedges, yedges)
norm = matplotlib.colors.LogNorm(vmin=0.5, vmax=histToPlot.max())
p = ax1.pcolormesh(x,y,histToPlot.T,norm=norm,cmap="Blues_r")
ax2.hist(hitStarts,range=[xMin,xMax],bins=100,normed=True,histtype="step",color="b")
ax2.hist(hitEnds,range=[xMin,xMax],bins=100,normed=True,histtype="step",color="b",ls=':')
ax1.set_xlim(xMin,xMax)
ax1.set_ylim(yMin,yMax)
yLabelSuffix = ""
ax1.set_ylabel(yLabel)
ax2.set_xlabel(xLabel)
ax2.set_ylabel("Hits / Bin")
ax2.set_yticks([])
ax2.set_ylim(0,ax2.get_ylim()[1]*1.5)
##
line1 = lines.Line2D([],[],color='k',label="Hit Start")
line2 = lines.Line2D([],[],color='k',ls=":",label="Hit End")
ax2.legend(handles=[line1,line2],ncol=2,fontsize='small')
if len(patchList) > 0:
ax1.legend(handles=patchList)
ax1.set_title(title)
fig.savefig(fn)
mpl.close()
def drawHitVAmpHists(hist,xedges,yedges,fn,xMin,xMax,yMin,yMax,xLabel,yLabel,title,labels=[],compare=False):
fig, ax1 = mpl.subplots()
patchList = []
if compare:
if len(hist) != len(labels):
raise Exception("Length of hist doesn't equal length of labels. Maybe you forgot to add labels")
transparent_cmaps = []
for cmap in [mpl.cm.Greens,mpl.cm.Blues,mpl.cm.Reds,mpl.cm.Purples,mpl.cm.Oranges]:
frac_transparent = 0.5
cmap_colors = cmap(numpy.arange(cmap.N))
cmap_colors[:int(frac_transparent*cmap.N),-1] = numpy.linspace(0,1,int(frac_transparent*cmap.N)) # bottom frac linearly increases opacity
transparent_cmap = matplotlib.colors.ListedColormap(cmap_colors)
transparent_cmaps.append(transparent_cmap)
colors = ['g','b','r','purple','orange']
for h, xed, yed, label, t_cmap, col in zip(hist, xedges , yedges, labels, transparent_cmaps[:len(hist)], colors[:len(hist)]):
if h is None:
print "Error: element of hist is None, no events passed amplitude cut for: ",fn," label: ",label
else:
histToPlot = numpy.array(h)
histToPlot[histToPlot == 0.] = 0.5
x, y = numpy.meshgrid(xed, yed)
norm = matplotlib.colors.LogNorm(vmin=0.5, vmax=histToPlot.max())
p = ax1.pcolormesh(x,y,histToPlot.T,norm=norm,cmap=t_cmap)
patchList.append(
patches.Patch(color=col, label=label)
)
else:
if hist is None:
print "Error: hist is None, no events passed amplitude cut for: ",fn
else:
histToPlot = numpy.array(hist).T
histToPlot[histToPlot == 0.] = 0.5
x, y = numpy.meshgrid(xedges, yedges)
norm = matplotlib.colors.LogNorm(vmin=0.5, vmax=histToPlot.max())
p = ax1.pcolormesh(x,y,histToPlot,norm=norm,cmap="Blues_r")
ax1.set_xlim(xMin,xMax)
ax1.set_ylim(yMin,yMax)
yLabelSuffix = ""
ax1.set_ylabel(yLabel)
ax1.set_xlabel(xLabel)
ax1.set_title(title)
if len(patchList) > 0:
ax1.legend(handles=patchList)
fig.savefig(fn)
mpl.close()
def plotWireHists(*args,**kargs):
if len(args) != 17:
print "plotWireHists: expected 17 args, got ", len(args)
sys.exit(1)
rawHists = args[0]
rawHistNorms = args[1]
deconvHists = args[2]
deconvHistNorms = args[3]
rawAtDeconvHists = args[4]
rawAtDeconvHistNorms = args[5]
xedges = args[6]
yedges = args[7]
yedgesNorm = args[8]
yedgesRaw = args[9]
yedgesNormRaw = args[10]
allHitStarts = args[11]
allHitEnds = args[12]
allHitStartsRaw = args[13]
allHitEndsRaw = args[14]
hitStartsHists = args[15]
hitEndsHists = args[16]
filePrefix=""
fileSuffixes=["C","I"]
xMins=[-150,-150]
xMaxs=[150,150]
yMins=[-50,-250]
yMaxs=[400,300]
yMinNorms=[-0.2,-1.8]
yLabels=["Collection Wire Response","Induction Wire Response"]
xLabel="Time Tick - Time Tick of Max"
title=""
try:
filePrefix = kargs["filePrefix"]
except KeyError:
raise Exception("plotWireHists: filePrefix=<prefix> argument required")
try:
fileSuffixes = kargs["fileSuffixes"]
except:
pass
try:
title = kargs["title"]
except:
pass
try:
xMins = kargs["xMins"]
except:
pass
try:
yMins = kargs["yMins"]
except:
pass
try:
xMins = kargs["xMaxs"]
except:
pass
try:
yMaxs = kargs["yMaxs"]
except:
pass
nRawHists = len(rawHists)
if len(fileSuffixes) != nRawHists:
raise ValueError("fileSuffixes length should be: ", nRawHists, " is ", len(fileSuffixes))
if len(xMins) != nRawHists:
raise ValueError("xMins length should be: ", nRawHists, " is ", len(xMins))
if len(yMins) != nRawHists:
raise ValueError("yMins length should be: ", nRawHists, " is ", len(yMins))
if len(xMaxs) != nRawHists:
raise ValueError("xMaxs length should be: ", nRawHists, " is ", len(xMaxs))
if len(yMaxs) != nRawHists:
raise ValueError("yMaxs length should be: ", nRawHists, " is ", len(yMaxs))
if len(yLabels) != nRawHists:
raise ValueError("yLabels length should be: ", nRawHists, " is ", len(yLabels))
for rawHist, rawHistNorm, deconvHist, deconvHistNorm, rawAtDeconvHist, \
rawAtDeconvHistNorm, hitStarts, hitEnds, hitStartsRaw, hitEndsRaw, \
fileSuffix, xMin, xMax, yMin, yMax, yMinNorm, yLabel, \
hitStartsHist, hitEndsHist in zip(
rawHists, rawHistNorms, deconvHists, deconvHistNorms, rawAtDeconvHists,
rawAtDeconvHistNorms, allHitStarts, allHitEnds, allHitStartsRaw, allHitEndsRaw,
fileSuffixes, xMins, xMaxs, yMins, yMaxs, yMinNorms, yLabels,
hitStartsHists, hitEndsHists
):
justDraw(rawHist,hitStartsRaw,hitEndsRaw,xedges,yedgesRaw,"{}_raw_{}.png".format(filePrefix,fileSuffix),xMin,xMax,yMin,yMax,xLabel,yLabel,title)
justDraw(rawHistNorm,hitStartsRaw,hitEndsRaw,xedges,yedgesNormRaw,"{}_raw_norm_{}.png".format(filePrefix,fileSuffix),xMin,xMax,yMinNorm,1.2,xLabel,yLabel+" Normalized to Max",title)
justDraw(deconvHist,hitStarts,hitEnds,xedges,yedges,"{}_deconv_{}.png".format(filePrefix,fileSuffix),xMin,xMax,yMin,yMax,xLabel,yLabel,title)
justDraw(deconvHistNorm,hitStarts,hitEnds,xedges,yedgesNorm,"{}_deconv_norm_{}.png".format(filePrefix,fileSuffix),xMin,xMax,-0.8,1.1,xLabel,yLabel+" Normalized to Max",title)
#justDraw(rawAtDeconvHist,hitStarts,hitEnds,xedges,yedges,"{}_raw_on_deconv_{}.png".format(filePrefix,fileSuffix),xMin,xMax,yMin,yMax,xLabel,yLabel,title)
#justDraw(rawAtDeconvHistNorm,hitStarts,hitEnds,xedges,yedgesNorm,"{}_raw_norm_on_deconv_{}.png".format(filePrefix,fileSuffix),xMin,xMax,-2,2,xLabel,yLabel+" Normalized to Max",title)
drawHitVAmpHists(hitStartsHist,xedges,yedges,"{}_hitStartVAmp_{}.png".format(filePrefix,fileSuffix),xMin,xMax,0,400,"Hit Start Time - Time of Hit Maximum","Amplitude of "+yLabel,title)
drawHitVAmpHists(hitEndsHist,xedges,yedges,"{}_hitEndVAmp_{}.png".format(filePrefix,fileSuffix),xMin,xMax,0,400,"Hit End Time - Time of Hit Maximum","Amplitude of "+yLabel,title)
def compareWireHists(*cases,**kargs):
filePrefix=""
fileSuffixes=["C","I"]
xMins=[-150,-150]
xMaxs=[150,150]
yMins=[-50,-150]
yMaxs=[400,300]
yLabels=["Collection Wire Response","Induction Wire Response"]
xLabel="Time Tick - Time Tick of Max"
title=""
labels=[]
try:
filePrefix = kargs["filePrefix"]
except KeyError:
raise Exception("plotWireHists: filePrefix=<prefix> argument required")
try:
fileSuffixes = kargs["fileSuffixes"]
except:
pass
try:
title = kargs["title"]
except:
pass
try:
xMins = kargs["xMins"]
except:
pass
try:
yMins = kargs["yMins"]
except:
pass
try:
xMins = kargs["xMaxs"]
except:
pass
try:
yMaxs = kargs["yMaxs"]
except:
pass
try:
labels = kargs["labels"]
except KeyError:
raise Exception("plotWireHists: labels=[<label1>,<label2>,...] argument required")
rawHists = []
rawHistNorms = []
deconvHists = []
deconvHistNorms = []
rawAtDeconvHists = []
rawAtDeconvHistNorms = []
xedges = []
yedges = []
yedgesNorm = []
yedgesRaw = []
yedgesNormRaw = []
allHitStarts = []
allHitEnds = []
allHitStartsRaw = []
allHitEndsRaw = []
hitStartsHists = []
hitEndsHists = []
for args in cases:
if len(args) != 17:
print "compareWireHists: expected 17 args, got ", len(args)
sys.exit(1)
rawHists.append(args[0])
rawHistNorms.append(args[1])
deconvHists.append(args[2])
deconvHistNorms.append(args[3])
rawAtDeconvHists.append(args[4])
rawAtDeconvHistNorms.append(args[5])
xedges.append(args[6])
yedges.append(args[7])
yedgesNorm.append(args[8])
yedgesRaw.append(args[9])
yedgesNormRaw.append(args[10])
allHitStarts.append(args[11])
allHitEnds.append(args[12])
allHitStartsRaw.append(args[13])
allHitEndsRaw.append(args[14])
hitStartsHists.append(args[15])
hitEndsHists.append(args[16])
nRawHists = len(args[0])
if len(fileSuffixes) != nRawHists:
raise ValueError("fileSuffixes length should be: ", nRawHists, " is ", len(fileSuffixes))
if len(xMins) != nRawHists:
raise ValueError("xMins length should be: ", nRawHists, " is ", len(xMins))
if len(yMins) != nRawHists:
raise ValueError("yMins length should be: ", nRawHists, " is ", len(yMins))
if len(xMaxs) != nRawHists:
raise ValueError("xMaxs length should be: ", nRawHists, " is ", len(xMaxs))
if len(yMaxs) != nRawHists:
raise ValueError("yMaxs length should be: ", nRawHists, " is ", len(yMaxs))
if len(yLabels) != nRawHists:
raise ValueError("yLabels length should be: ", nRawHists, " is ", len(yLabels))
for iPlane in range(len(fileSuffixes)):
justDraw([x[iPlane] for x in rawHists],
[x[iPlane] for x in allHitStartsRaw],
[x[iPlane] for x in allHitEndsRaw],
xedges,
yedgesRaw,
"{}_raw_{}.png".format(filePrefix,fileSuffixes[iPlane]),
xMins[iPlane],xMaxs[iPlane],yMins[iPlane],yMaxs[iPlane],
xLabel,yLabels[iPlane],title,labels=labels,compare=True)
yMinNorm = [-0.2,-1.8][iPlane]
yMaxNorm = [1.8,2.2][iPlane]
justDraw([x[iPlane] for x in rawHistNorms],
[x[iPlane] for x in allHitStartsRaw],
[x[iPlane] for x in allHitEndsRaw],
xedges,
yedgesNormRaw,
"{}_raw_norm_{}.png".format(filePrefix,fileSuffixes[iPlane]),
xMins[iPlane],xMaxs[iPlane],yMinNorm,yMaxNorm,
xLabel,yLabels[iPlane]+" Normalized to Max",title,labels=labels,compare=True)
justDraw([x[iPlane] for x in deconvHists],
[x[iPlane] for x in allHitStarts],
[x[iPlane] for x in allHitEnds],
xedges,
yedges,
"{}_deconv_{}.png".format(filePrefix,fileSuffixes[iPlane]),
xMins[iPlane],xMaxs[iPlane],yMins[iPlane],yMaxs[iPlane],
xLabel,yLabels[iPlane],title,labels=labels,compare=True)
justDraw([x[iPlane] for x in deconvHistNorms],
[x[iPlane] for x in allHitStarts],
[x[iPlane] for x in allHitEnds],
xedges,
yedgesNorm,
"{}_deconv_norm_{}.png".format(filePrefix,fileSuffixes[iPlane]),
xMins[iPlane],xMaxs[iPlane],-0.5,1.6,
xLabel,yLabels[iPlane]+" Normalized to Max",title,labels=labels,compare=True)
#justDraw(rawAtDeconvHist,hitStarts,hitEnds,xedges,yedges,"{}_raw_on_deconv_{}.png".format(filePrefix,fileSuffix),xMin,xMax,yMin,yMax,xLabel,yLabel,title,labels=labels,compare=True)
#justDraw(rawAtDeconvHistNorm,hitStarts,hitEnds,xedges,yedgesNorm,"{}_raw_norm_on_deconv_{}.png".format(filePrefix,fileSuffix),xMin,xMax,-2,2,xLabel,yLabel+" Normalized to Max",title,labels=labels,compare=True)
drawHitVAmpHists([x[iPlane] for x in hitStartsHists],xedges,yedges,"{}_hitStartVAmp_{}.png".format(filePrefix,fileSuffixes[iPlane]),xMins[iPlane],xMaxs[iPlane],0,400,"Hit Start Time - Time of Hit Maximum","Amplitude of "+yLabels[iPlane],title,labels=labels,compare=True)
drawHitVAmpHists([x[iPlane] for x in hitEndsHists],xedges,yedges,"{}_hitEndVAmp_{}.png".format(filePrefix,fileSuffixes[iPlane]),xMins[iPlane],xMaxs[iPlane],0,400,"Hit End Time - Time of Hit Maximum","Amplitude of "+yLabels[iPlane],title,labels=labels,compare=True)
def plotAllWholeWires(tree,fileprefix,maxEvents=100,cutFunc=lambda x: True,branchNamePrefix="wireData",getHits=True):
collectionWireBranchNames = []
inductionWireBranchNames = []
for branch in tree.GetListOfBranches():
branchName = branch.GetName()
if branchNamePrefix == branchName[:len(branchNamePrefix)]:
if branchName[-1] == "C":
collectionWireBranchNames.append(branchName)
else:
inductionWireBranchNames.append(branchName)
nEvents = min(maxEvents,tree.GetEntries())
for iEvent in range(nEvents):
tree.GetEntry(iEvent)
if not cutFunc(tree):
continue
fig, (axc,axi) = mpl.subplots(nrows=2,figsize=(8.5,11),dpi=200)
nSamples = 4096
nWiresC = len(collectionWireBranchNames)
nWiresI = len(inductionWireBranchNames)
dataArrayC = numpy.zeros((nWiresC,nSamples))
dataArrayI = numpy.zeros((nWiresI,nSamples))
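# Event-display style: each wire's trace is drawn offset vertically by
# dataWidth*iWire; the ',g'/',r' pixel markers below flag hit start/end ticks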
for iWire in range(nWiresC):
wireData = getattr(tree,collectionWireBranchNames[iWire])
for i in range(wireData.size()):
dataArrayC[iWire,i] = wireData[i]
for iWire in range(nWiresI):
wireData = getattr(tree,inductionWireBranchNames[iWire])
for i in range(wireData.size()):
dataArrayI[iWire,i] = wireData[i]
dataMaxC = dataArrayC.max()
dataMinC = dataArrayC.min()
dataWidthC = (dataMaxC - dataMinC)*0.75
dataMaxI = dataArrayI.max()
dataMinI = dataArrayI.min()
dataWidthI = (dataMaxI - dataMinI)*0.75
zScoreC = []
zScoreI = []
for iWire in range(nWiresC):
axc.plot(dataArrayC[iWire]+dataWidthC*iWire,'-b',lw=0.2)
if getHits:
hitStartsC = getattr(tree,getHitName(collectionWireBranchNames[iWire],start=True))
hitEndsC = getattr(tree,getHitName(collectionWireBranchNames[iWire],start=False))
axc.plot(hitStartsC,dataWidthC*iWire*numpy.ones(len(hitStartsC)),',g')
axc.plot(hitEndsC,dataWidthC*iWire*numpy.ones(len(hitEndsC)),',r')
amp = numpy.max(dataArrayC[iWire])
rms = numpy.std(dataArrayC[iWire])
zScoreC.append(amp/rms)
for iWire in range(nWiresI):
axi.plot(dataArrayI[iWire]+dataWidthI*iWire,'-b',lw=0.2)
if getHits:
hitStartsI = getattr(tree,getHitName(inductionWireBranchNames[iWire],start=True))
hitEndsI = getattr(tree,getHitName(inductionWireBranchNames[iWire],start=False))
axi.plot(hitStartsI,dataWidthI*iWire*numpy.ones(len(hitStartsI)),',g')
axi.plot(hitEndsI,dataWidthI*iWire*numpy.ones(len(hitEndsI)),',r')
amp = numpy.max(dataArrayI[iWire])
rms = numpy.std(dataArrayI[iWire])
zScoreI.append(amp/rms)
axc.set_xlim(0,4096)
axc.set_ylim(dataMinC,dataMinC+dataWidthC*nWiresC)
axi.set_xlim(0,4096)
axi.set_ylim(dataMinI,dataMinI+dataWidthI*nWiresI)
axi.set_xlabel("Time Tick")
axc.set_ylabel("Collection Wire Response")
axi.set_ylabel("Induction Wire Response")
title = "Run {} Subrun {} Event {}\n $\phi$: {:.1f}$^\circ$, Track Length: {:.1f} cm".format(tree.runNumber,tree.subRunNumber,tree.eventNumber,tree.primTrkStartPhi*180/math.pi,tree.primTrkLength)
isMCStr = ""
if tree.isMC:
title = "MC " + title
isMCStr = "_MC"
fig.suptitle(title)
fig.savefig("{}{}_r{:04d}_sr{:03d}_e{:04d}.pdf".format(fileprefix,isMCStr,tree.runNumber,tree.subRunNumber,tree.eventNumber))
axc.cla()
axi.cla()
axc.hist(zScoreC,bins=20)
axi.hist(zScoreI,bins=20)
axi.set_xlabel("Max amplitude / RMS")
axc.set_ylabel("N Collection Wires / bin")
axi.set_ylabel("N Induction Wires / bin")
fig.savefig("ZScore_{}{}_r{:04d}_sr{:03d}_e{:04d}.pdf".format(fileprefix,isMCStr,tree.runNumber,tree.subRunNumber,tree.eventNumber))
mpl.close()
if __name__ == "__main__":
import matplotlib
import cPickle
#f = root.TFile("WireData_RIIP100_64a.root")
#f = root.TFile("WireData_RIIP100_64a_nocrct.root")
#f = root.TFile("WireData_RIIP60_64a.root")
f = root.TFile("Wires_RIIP60a_v3.root")
fBeam100A = root.TFile("Wires_Lovely1_Pos_RunII_jhugon_current100_secondary64_d_v1_v01.root")
#fMC = root.TFile("WiresMC_v3.root")
#f.ls()
tree = f.Get("cosmicanalyzer/tree")
treeBeam100A = fBeam100A.Get("cosmicanalyzer/tree")
#treeMC = fMC.Get("cosmicanalyzer/tree")
#tree.Print()
def makeCuts(tree,phiGeq0=False,phiLt0=False,beam=False,tofLt25=False,tofGeq25=False):
pi = math.pi
result = True
if tree.nTracks != 1:
return False
if tree.iBestMatch < 0:
return False
if phiGeq0 and not tree.primTrkStartPhi >= 0.:
return False
if phiLt0 and not tree.primTrkStartPhi < 0.:
return False
if not (tree.isMC or beam or ((tree.triggerBits >> 10) & 1)):
return False
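# triggerBits bit 10 is the COSMICON trigger (matches the labeled cut string in plotCosmics.py)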
if not ((not tree.isMC) or (tree.trueHitCosmic1 and tree.trueHitCosmic2) or (tree.trueHitCosmic3 and tree.trueHitCosmic4)):
return False
if (not beam) and not ((tree.primTrkStartTheta > 27*pi/180.) and (tree.primTrkStartTheta < 42*pi/180.) and (tree.primTrkStartPhi > -57*pi/180. and tree.primTrkStartPhi < 60*pi/180.) and (tree.primTrkStartPhi < -15*pi/180. or tree.primTrkStartPhi > 22*pi/180.)):
return False
if tofLt25 and not (tree.firstTOF < 25.):
return False
if tofGeq25 and not (tree.firstTOF >= 25.):
return False
return True
nMax = 100
dataAllHists = makeWireHistsAndPkl("dataAllHists",tree,nMax,makeCuts)
dataPhiLt0Hists = makeWireHistsAndPkl("dataPhiLt0Hists",tree,nMax,lambda x: makeCuts(x,phiLt0=True))
dataPhiGeq0Hists = makeWireHistsAndPkl("dataPhiGeq0Hists",tree,nMax,lambda x: makeCuts(x,phiGeq0=True))
dataBeam100AHists = makeWireHistsAndPkl("dataBeam100AHists",treeBeam100A,nMax,lambda x: makeCuts(x,beam=True))
dataBeam100ATOFLt25Hists = makeWireHistsAndPkl("dataBeam100ATOFLt25Hists",treeBeam100A,nMax,lambda x: makeCuts(x,beam=True,tofLt25=True))
dataBeam100ATOFGeq25Hists = makeWireHistsAndPkl("dataBeam100ATOFGeq25Hists",treeBeam100A,nMax,lambda x: makeCuts(x,beam=True,tofGeq25=True))
#mcAllHists = makeWireHistsAndPkl("mcAllHists",treeMC,nMax,makeCuts)
#mcPhiLt0Hists = makeWireHistsAndPkl("mcPhiLt0Hists",treeMC,nMax,lambda x: makeCuts(x,phiLt0=True))
#mcPhiGeq0Hists = makeWireHistsAndPkl("mcPhiGeq0Hists",treeMC,nMax,lambda x: makeCuts(x,phiGeq0=True))
#plotWireHists(*dataAllHists,filePrefix="Scope_All")
plotWireHists(*dataPhiLt0Hists,filePrefix="Scope_PhiLt0")
plotWireHists(*dataPhiGeq0Hists,filePrefix="Scope_PhiGeq0")
plotWireHists(*dataBeam100ATOFLt25Hists,filePrefix="Scope_Beam100ATOFLt25")
plotWireHists(*dataBeam100ATOFGeq25Hists,filePrefix="Scope_Beam100ATOFGeq25")
compareWireHists(dataPhiLt0Hists,dataPhiGeq0Hists,filePrefix="ScopeCompare_Phi",
labels=["$\phi < 0$", "$\phi \geq 0$"])
compareWireHists(dataPhiLt0Hists,dataPhiGeq0Hists,dataBeam100ATOFLt25Hists,filePrefix="ScopeCompare_PhiBeam",
labels=["$\phi < 0$", "$\phi \geq 0$",r"+100A TOF < 25 ns"])
compareWireHists(dataBeam100ATOFGeq25Hists,dataBeam100ATOFLt25Hists,filePrefix="ScopeCompare_Beam",
labels=["+100A TOF $\geq$ 25 ns",r"+100A TOF < 25 ns"])
compareWireHists(dataBeam100ATOFLt25Hists,dataBeam100ATOFGeq25Hists,filePrefix="ScopeCompare_TOF",
labels=[r"100A TOF < 25 ns",r"+100A TOF $\geq$ 25 ns"])
# dataPhiGeq0Hists = makeWireHists(tree,nMax,lambda x: makeCuts(x,phiGeq0=True))
# dataPhiLt0Hists = makeWireHists(tree,nMax,lambda x: makeCuts(x,phiLt0=True))
# mcAllHists = makeWireHists(treeMC,nMax,makeCuts)
# mcPhiGeq0Hists = makeWireHists(treeMC,nMax,lambda x: makeCuts(x,phiGeq0=True))
# mcPhiLt0Hists = makeWireHists(treeMC,nMax,lambda x: makeCuts(x,phiLt0=True))
# plotAllWholeWires(tree,"all",100,cutFunc=makeCuts)
# plotAllWholeWires(tree,"rawAll",100,cutFunc=makeCuts,branchNamePrefix="rawWireData")
# plotAroundMaxWires(tree,"allMax",100,cutFunc=makeCuts)
# plotAroundMaxWires(tree,"rawAllMax",100,cutFunc=makeCuts,branchNamePrefix="rawWireData")
# plotMultiEventAroundMaxWires(tree,"allHist",20,cutFunc=makeCuts)
# plotMultiEventAroundMaxWires(tree,"rawAllHist",100,cutFunc=makeCuts,branchNamePrefix="rawWireData",nAfterC=150,nAfterI=150,yMinC=-50,yMinI=-200,yMaxC=400,yMaxI=250,nBinsC=450,nBinsI=450)
# plotMultiEventAroundMaxWires(tree,"rawAllHistNorm",10,normToAmp=True,cutFunc=makeCuts,branchNamePrefix="rawWireData",nAfterC=150,nAfterI=150,yMinC=-50,yMinI=-200,yMaxC=400,yMaxI=250,nBinsC=450,nBinsI=450)
# plotAllWholeWires(tree,"phiLt0",20,cutFunc=lambda x: makeCuts(x,phiLt0=True))
# plotAroundMaxWires(tree,"phiLt0Max",20,cutFunc=lambda x: makeCuts(x,phiLt0=True))
# plotAroundMaxWires(tree,"phiLt0MaxNorm",20,cutFunc=lambda x: makeCuts(x,phiLt0=True),normToAmp=True)
# plotAllWholeWires(tree,"phiGeq0",20,cutFunc=lambda x: makeCuts(x,phiGeq0=True))
# plotAroundMaxWires(tree,"phiGeq0Max",20,cutFunc=lambda x: makeCuts(x,phiGeq0=True))
# plotAroundMaxWires(tree,"phiGeq0MaxNorm",20,cutFunc=lambda x: makeCuts(x,phiGeq0=True),normToAmp=True)
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,904 | jhugon/lariatPionAbs | refs/heads/master | /plotCosmics.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
import sys
from lookAtMonicaLifetime import getLifetimeGraphs
if __name__ == "__main__":
cuts = ""
#cuts += "*(isMC || ((triggerBits >> 4) & 1))" # BEAMON trigger
cuts += "*(isMC || ((triggerBits >> 10) & 1))" # COSMICON trigger
#cuts += "*(isMC || !((triggerBits >> 10) & 1))" # Not COSMICON trigger
#cuts += "*(isMC || ((triggerBits >> 11) & 1))" # COSMIC trigger
#cuts += "*(isMC || (nWCTracks ==0 && nTOFs ==0))"
cuts += "*( iBestMatch >= 0)" # primary Track found
#cuts += "*(acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi < 5. || acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi > 175.)" # theta vertical
#cuts += "*((!isMC) || (trueStartMom>3000. && trueStartMom < 8000.))"
#cuts += "*enterExitYm*enterExitYp"
#cuts += "*(primTrkXs > 10. && primTrkXs < 38. && primTrkYs > 15. && primTrkZs > 10. && primTrkZs > 80.)"
#cuts += "*(primTrkYs > 15.)"
cuts += "*((!isMC) || (trueHitCosmic1 && trueHitCosmic2) || (trueHitCosmic3 && trueHitCosmic4))"
cuts += "*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)" # only angles that match MC
#cuts += "*(primTrkLength > 10.)" # didn't seem to make a difference
cuts += "*(nTracks == 1)"
#cuts += "*(primTrkLength > 80. && primTrkLength < 85.)"
hitExtraCuts = "*(primTrkXs > 3. && primTrkXs < 46. && primTrkYs < 18. && primTrkYs > -18. && primTrkZs > 3. && primTrkZs < 87.)"
hitExtraCutsInduct = "*(primTrkXsInduct > 3. && primTrkXsInduct < 46. && primTrkYsInduct < 18. && primTrkYsInduct > -18. && primTrkZsInduct > 3. && primTrkZsInduct < 87.)"
#hitExtraCuts += "*((primTrkStartPhi >= 0 && primTrkPitches >= 0.45 && primTrkPitches < 0.47) || (primTrkStartPhi < 0 && primTrkPitches >= 0.68 && primTrkPitches < 0.70))" #small pitch region
weightStr = "1"+cuts
logy = True
scaleFactor = 0.066
c = root.TCanvas()
NMAX=1000000000
#NMAX=100
lifetimeGraph = getLifetimeGraphs()
lifetimeGraph.SetMarkerSize(0.7)
lifetimeGraph.SetLineWidth(1)
lifetimeGraphs = [lifetimeGraph]
########################################################
## File Definitions ####################################
########################################################
baseDir="/scratch/jhugon/"
baseDir=""
fileConfigs = [
{
'fn': [baseDir+"cosmicData/CosmicAna_RIIP60_64a_v02.root",
baseDir+"cosmicData/CosmicAna_RIIP60_64b_v02.root",
baseDir+"cosmicData/CosmicAna_RIIP60_64c_v02.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64a_v01.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64b_v01.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64c_v01.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64d_v01.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64e_v01.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64f_v01.root",
baseDir+"cosmicData/CosmicAna_RIIP100_64g_v01.root",
baseDir+"cosmicData/CosmicAna_RIIM20_64abc.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64a.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64b.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64c.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64d.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64e.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64f.root",
baseDir+"cosmicData/CosmicAna_RIIM60_64g.root",
baseDir+"cosmicData/CosmicAna_RIIM100_64a.root",
baseDir+"cosmicData/CosmicAna_RIIM100_64b.root",
baseDir+"cosmicData/CosmicAna_RIIM100_64c.root"],
'name': "RunIICosmics",
'title': "Run II Cosmics",
'caption': "Run II Cosmics",
'color': root.kBlack,
'isData': True,
},
#{
# 'fn': [
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_a_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_b_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_c_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_d_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_e_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_f_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_g_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_a_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_b_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_c_cosmics_nocorrections.root",
# baseDir+"cosmicData/CosmicAna_RIIM20_64abc_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64a_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64b_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64c_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64d_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64e_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64f_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64g_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64a_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64b_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64c_nocrct.root"],
# 'name': "RunIINocrct",
# 'title': "Uncorrected Run II",
# 'caption': "Uncorrected Run II",
# 'color': root.kGray+2,
# 'isData': True,
#},
#{
# 'fn': [baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_a_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_b_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_c_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_a_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_b_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_c_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_d_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_e_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_f_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current100_secondary64_g_cosmics.root"],
# 'name': "RunIIPos",
# 'title': "Run II Positive Polarity",
# 'caption': "Run II Positive Polarity",
# 'color': root.kBlack,
# 'isData': True,
#},
#{
# 'fn': [baseDir+"cosmicData/CosmicAna_RIIM20_64abc.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64a.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64b.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64c.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64d.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64e.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64f.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64g.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64a.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64b.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64c.root"],
# 'name': "RunIINeg",
# 'title': "Run II Negative Polarity",
# 'caption': "Run II Negative Polarity",
# 'color': root.kGreen+3,
# 'isData': True,
#},
#{
# 'fn': [baseDir+"cosmicData/CosmicAna_RIIM20_64abc_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64a_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64b_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64c_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64d_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64e_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64f_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM60_64g_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64a_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64b_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIM100_64c_nocrct.root"],
# 'name': "RunIINegNocrct",
# 'title': "Uncorrected Run II Negative Polarity",
# 'caption': "Uncorrected Run II Negative Polarity",
# 'color': root.kRed-4,
# 'isData': True,
#},
#{
# 'fn': [baseDir+"cosmicData/CosmicAna_RIIP60_64a_v02_v2_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIP60_64b_v02_v2_nocrct.root",
# baseDir+"cosmicData/CosmicAna_RIIP60_64c_v02_v2_nocrct.root"],
# 'name': "RunIIP60Uncorr",
# 'title': "Run II+ 60 A Uncorrected",
# 'caption': "Run II+ 60 A Uncorrected",
# 'color': root.kGray+2,
# 'isData': True,
#},
#{
# 'fn': [baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_a_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_b_cosmics.root",
# baseDir+"cosmicData/CosmicAna_data_Pos_RunII_current60_secondary64_c_cosmics.root"],
# 'name': "RunIIP60",
# 'title': "Run II+ 60 A",
# 'caption': "Run II+ 60 A",
# 'color': root.kGray+2,
# 'isData': True,
#},
#{
# 'fn': [baseDir+"cosmicData/CosmicAna_RIIP100_64a_v01.root",
# baseDir+"cosmicData/CosmicAna_RIIP100_64b_v01.root",
# baseDir+"cosmicData/CosmicAna_RIIP100_64c_v01.root",
# baseDir+"cosmicData/CosmicAna_RIIP100_64d_v01.root",
# baseDir+"cosmicData/CosmicAna_RIIP100_64e_v01.root",
# baseDir+"cosmicData/CosmicAna_RIIP100_64f_v01.root",
# baseDir+"cosmicData/CosmicAna_RIIP100_64g_v01.root"],
# 'name': "RunIIP100",
# 'title': "Run II+ 100 A",
# 'caption': "Run II+ 100 A",
# 'color': root.kGray+2,
# 'isData': True,
#},
{
'fn': baseDir+"cosmicMC/cosmicAna_v04.root",
'name': "CosmicMC",
'title': "Cosmic MC",
'caption': "Cosmic MC",
'isData': False,
'scaleFactor': scaleFactor,
},
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing10_v01.root",
# 'name': "CosmicMC_presmear10perc",
# 'title': "Cosmic MC Pre-smear 10% ",
# 'caption': "Cosmic MC Pre-smear 10%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing20_v01.root",
# 'name': "CosmicMC_presmear20perc",
# 'title': "Cosmic MC Pre-smear 20% ",
# 'caption': "Cosmic MC Pre-smear 20%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing30_v01.root",
# 'name': "CosmicMC_presmear30perc",
# 'title': "Cosmic MC Pre-smear 30% ",
# 'caption': "Cosmic MC Pre-smear 30%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing40_v01.root",
# 'name': "CosmicMC_presmear40perc",
# 'title': "Cosmic MC Pre-smear 40% ",
# 'caption': "Cosmic MC Pre-smear 40%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing45_v01.root",
# 'name': "CosmicMC_presmear45perc",
# 'title': "Cosmic MC Pre-smear 45% ",
# 'caption': "Cosmic MC Pre-smear 45%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing50_v01.root",
# 'name': "CosmicMC_presmear50perc",
# 'title': "Cosmic MC Pre-smear 50% ",
# 'caption': "Cosmic MC Pre-smear 50%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing55_v01.root",
# 'name': "CosmicMC_presmear55perc",
# 'title': "Cosmic MC Pre-smear 55% ",
# 'caption': "Cosmic MC Pre-smear 55%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing60_v01.root",
# 'name': "CosmicMC_presmear60perc",
# 'title': "Cosmic MC Pre-smear 60% ",
# 'caption': "Cosmic MC Pre-smear 60%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
# {
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing70_v01.root",
# 'name': "CosmicMC_presmear70perc",
# 'title': "Cosmic MC Pre-smear 70% ",
# 'caption': "Cosmic MC Pre-smear 70%",
# 'isData': False,
# 'scaleFactor': scaleFactor,
# },
]
for i in range(len(fileConfigs)):
if not ('isData' in fileConfigs[i]) or not fileConfigs[i]['isData']:
fileConfigs[i]['color'] = COLORLIST[i-1]
########################################################
## Compare Files #######################################
########################################################
histConfigs = [
{
'name': "primTrkHitAmpsCollection",
'xtitle': "Collection Plane Hit Amplitudes [ADC]",
'ytitle': "Hits / bin",
'binning': [150,0,150],
'var': "primTrkHitAmps*((0.62-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)",
'normalize': logy,
'logy': not logy,
},
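# The ((s-1.)*isMC + 1.) factor above applies a scale s (here 0.62) to MC only
# and leaves data (isMC = 0) untouched; the same idiom appears below with
# 0.47, 0.67, and 0.52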
{
'name': "primTrkHitAmpsInduction",
'xtitle': "Induction Plane Hit Amplitudes [ADC]",
'ytitle': "Hits / bin",
'binning': [150,0,150],
'var': "primTrkHitAmps*((0.47-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitIntsCollection",
'xtitle': "Collection Plane Hit Integrals [ADC us]",
'ytitle': "Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkHitIntegrals*((0.67-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitIntsInduction",
'xtitle': "Induction Plane Hit Integrals [ADC us]",
'ytitle': "Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkHitIntegrals*((0.52-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitAmpsCollection_phiLt0",
'xtitle': "Collection Plane Hit Amplitudes [ADC]",
'ytitle': "Hits / bin",
'binning': [150,0,150],
'var': "primTrkHitAmps*((0.62-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)"+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitAmpsInduction_phiLt0",
'xtitle': "Induction Plane Hit Amplitudes [ADC]",
'ytitle': "Hits / bin",
'binning': [150,0,150],
'var': "primTrkHitAmps*((0.47-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)"+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitIntsCollection_phiLt0",
'xtitle': "Collection Plane Hit Integrals [ADC us]",
'ytitle': "Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkHitIntegrals*((0.67-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)"+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitIntsInduction_phiLt0",
'xtitle': "Induction Plane Hit Integrals [ADC us]",
'ytitle': "Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkHitIntegrals*((0.52-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)"+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitAmpsCollection_phiGeq0",
'xtitle': "Collection Plane Hit Amplitudes [ADC]",
'ytitle': "Hits / bin",
'binning': [150,0,150],
'var': "primTrkHitAmps*((0.62-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)"+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitAmpsInduction_phiGeq0",
'xtitle': "Induction Plane Hit Amplitudes [ADC]",
'ytitle': "Hits / bin",
'binning': [150,0,150],
'var': "primTrkHitAmps*((0.47-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)"+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitIntsCollection_phiGeq0",
'xtitle': "Collection Plane Hit Integrals [ADC us]",
'ytitle': "Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkHitIntegrals*((0.67-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)"+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkHitIntsInduction_phiGeq0",
'xtitle': "Induction Plane Hit Integrals [ADC us]",
'ytitle': "Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkHitIntegrals*((0.52-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)"+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
# {
# 'name': "trackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,0,50],
# 'var': "trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackYFront",
# 'xtitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,-50,50],
# 'var': "trackYFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackMatchLowestZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [40,0,20],
# 'var': "trackMatchLowestZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "nTOFs",
# 'xtitle': "Number of TOF Objects",
# 'ytitle': "Events / bin",
# 'binning': [11,0,10],
# 'var': "nTOFs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartX",
# 'xtitle': "TPC Track Start X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackStartX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartY",
# 'xtitle': "TPC Track Start Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackStartY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,110],
# 'var': "trackStartZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndX",
# 'xtitle': "TPC Track End X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackEndX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndY",
# 'xtitle': "TPC Track End Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackEndY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndZ",
# 'xtitle': "TPC Track End Z [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,110],
# 'var': "trackEndZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackLength",
# 'xtitle': "TPC Track Length [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-10,100],
# 'var': "trackLength",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
#{
# 'name': "trackCaloKin",
# 'xtitle': "TPC Calo Estimate of KE [MeV]",
# 'ytitle': "Tracks / bin",
# 'binning': [50,0,2500],
# 'var': "trackCaloKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkLength",
'xtitle': "Primary TPC Track Length [cm]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "primTrkLength",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
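# 'printIntegral' presumably tells the plotting helper to print each
# histogram's integral (an event-count cross-check); its exact behavior is
# defined in helpers.py.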
{
'name': "primTrkStartTheta",
'xtitle': "Primary TPC Track #theta [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "primTrkStartTheta*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartTheta_phiGeq0",
'xtitle': "Primary TPC Track #theta [deg]",
'ytitle': "Events / bin",
'binning': [120,0,60],
'var': "primTrkStartTheta*180/pi",
'cuts': weightStr+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkStartTheta_phiLt0",
'xtitle': "Primary TPC Track #theta [deg]",
'ytitle': "Events / bin",
'binning': [120,0,60],
'var': "primTrkStartTheta*180/pi",
'cuts': weightStr+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkStartCosTheta",
'xtitle': "Primary TPC Track cos(#theta)",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "cos(primTrkStartTheta)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "primTrkStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartThetaY",
'xtitle': "Primary TPC Track #theta_{y} [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartCosThetaY",
'xtitle': "Primary TPC Track cos(#theta_{y})",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "sin(primTrkStartTheta)*sin(primTrkStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhiZX",
'xtitle': "Primary TPC Track #phi_{zx} [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartThetaX",
'xtitle': "Primary TPC Track #theta_{x} [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartCosThetaX",
'xtitle': "Primary TPC Track cos(#theta_{x})",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "sin(primTrkStartTheta)*cos(primTrkStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhiZY",
'xtitle': "Primary TPC Track #phi_{zy} [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
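# The derived angles above follow from the unit track direction
# (sin(theta)*cos(phi), sin(theta)*sin(phi), cos(theta)):
#   cos(theta_x) = sin(theta)*cos(phi)               angle from the x axis
#   cos(theta_y) = sin(theta)*sin(phi)               angle from the y axis
#   phi_zx = atan2(sin(theta)*cos(phi), cos(theta))  angle in the z-x plane
#   phi_zy = atan2(sin(theta)*sin(phi), cos(theta))  angle in the z-y plane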
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
{
'name': "primTrkdEdxsInduct",
'xtitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,20],
'var': "primTrkdEdxsInduct*((2.65-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct,
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
{
'name': "primTrkdEdxs_zoom",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,10],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts,
'normalize': not logy,
'logy': logy,
},
{
'name': "primTrkdEdxs_zoom2",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,10],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts,
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxs_zoom3",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized hits / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts,
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxsInduct_zoom3",
'xtitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,0,4],
'var': "primTrkdEdxsInduct*((2.65-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct,
#'normalize': True,
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxs_zoom3_phiGeq0",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized hits / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxs_zoom3_phiLt0",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized hits / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxsInduct_zoom3_phiGeq0",
'xtitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized hits / bin",
'binning': [50,0,5],
'var': "primTrkdEdxsInduct*((2.65-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxsInduct_zoom3_phiLt0",
'xtitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized hits / bin",
'binning': [50,0,5],
'var': "primTrkdEdxsInduct*((2.65-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
# {
# 'name': "primTrkTruedEdxs",
# 'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,50],
# 'var': "primTrkTruedEdxs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkTruedEdxs_zoom",
# 'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkTruedEdxs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logy': logy,
# },
{
'name': "primTrkdQdxs",
'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Events / bin",
'binning': [300,0,3e4],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts,
'normalize': not logy,
'logy': logy,
},
{
'name': "primTrkdQdxsInduct",
'xtitle': "Induction Plane -- Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Events / bin",
'binning': [300,0,1e4],
'var': "primTrkdQdxsInduct*((0.70-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct,
'normalize': not logy,
'logy': logy,
},
{
'name': "primTrkdQdxs_zoom",
'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Events / bin",
'binning': [100,0,8e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts,
'normalize': not logy,
'logy': logy,
'printIntegral' : True,
},
{
'name': "primTrkdQdxs_zoom2",
'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Events / bin",
'binning': [100,0,8e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts,
'normalize': logy,
'logy': not logy,
'printIntegral' : True,
},
{
'name': "primTrkdQdxs_zoom3",
'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Events / bin",
'binning': [100,0,5e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts,
'normalize': logy,
'logy': not logy,
'printIntegral' : True,
},
{
'name': "primTrkdQdxs_zoom3_phiGeq0",
'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Normalized hits / bin",
'binning': [100,0,5e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdQdxs_zoom3_phiLt0",
'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Normalized hits / bin",
'binning': [100,0,5e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdQdxsInduct_zoom3_phiGeq0",
'xtitle': "Induct: Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Normalized hits / bin",
'binning': [100,0,2e3],
'var': "primTrkdQdxsInduct*((0.70-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi >= 0)",
'caption': "Track #phi #geq 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdQdxsInduct_zoom3_phiLt0",
'xtitle': "Induct: Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Normalized hits / bin",
'binning': [100,0,2e3],
'var': "primTrkdQdxsInduct*((0.70-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi < 0)",
'caption': "Track #phi < 0",
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdQs",
'xtitle': "Primary TPC Track dQ [ADC]",
'ytitle': "Normalized Hits / bin",
'binning': [100,0,5e3],
'var': "primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.)",
'cuts': weightStr+hitExtraCuts,
'normalize': logy,
'logy': not logy,
'printIntegral' : True,
},
# {
# 'name': "primTrkTruedQdxs",
# 'xtitle': "Primary TPC Track True dQ/dx [e^{-}/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,5e6],
# 'var': "primTrkTruedQdxs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkTruedQdxs_zoom",
# 'xtitle': "Primary TPC Track True dQ/dx [e^{-}/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,1e5],
# 'var': "primTrkTruedQdxs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkTruedQs",
# 'xtitle': "Primary TPC Track Q [e^{-}]",
# 'ytitle': "Events / bin",
# #'binning': [200,0,1e5],
# 'binning': getLogBins(100,1e3,1e7),
# 'var': "primTrkTruedQs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logy': logy,
# 'logx': True,
# },
# {
# 'name': "primTrkTruedQs2",
# 'xtitle': "Primary TPC Track Q [e^{-}]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,2e5],
# 'var': "primTrkTruedQs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logy': not logy,
# 'logx': False,
# },
# {
# 'name': "primTrkdEdxs_Q1000to1500_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+hitExtraCuts+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 1000. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 1500.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "1000 ADC < Q < 1500 ADC",
# },
# {
# 'name': "primTrkdEdxs_Q1500to2000_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+hitExtraCuts+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 1500. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 2000.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "1500 ADC < Q < 2000 ADC",
# },
# {
# 'name': "primTrkdEdxs_Q2000to3000_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+hitExtraCuts+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 2000. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 3000.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "2000 ADC < Q < 3000 ADC",
# },
# {
# 'name': "primTrkdEdxs_Q3000to4000_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+hitExtraCuts+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 3000. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 4000.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "3000 ADC < Q < 4000 ADC",
# },
#{
# 'name': "primTrkdEdxsFidCut",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,50],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+hitExtraCuts+"*primTrkInFids",
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkResRanges",
'xtitle': "Primary TPC Track Residual Range [cm]",
'ytitle': "Hits / bin",
'binning': [200,0,100],
'var': "primTrkResRanges",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkResRangesInduct",
'xtitle': "Induction Plane -- Primary TPC Track Residual Range [cm]",
'ytitle': "Hits / bin",
'binning': [200,0,100],
'var': "primTrkResRangesInduct",
'cuts': weightStr+hitExtraCutsInduct,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkPitches",
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0,5],
'var': "primTrkPitches",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkPitches_phiGeq0",
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0,2],
'var': "primTrkPitches",
'captionright1': "Track #phi #geq 0",
'cuts': weightStr+hitExtraCuts +" * (primTrkStartPhi >= 0)",
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkPitches_phiLt0",
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0,2],
'var': "primTrkPitches",
'captionright1': "Track #phi < 0",
'cuts': weightStr+hitExtraCuts +" * (primTrkStartPhi < 0)",
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkPitchesInduct_phiGeq0",
'xtitle': "Induction Plane -- Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0,2],
'var': "primTrkPitchesInduct",
'captionright1': "Track #phi #geq 0",
'cuts': weightStr+hitExtraCutsInduct +" * (primTrkStartPhi >= 0)",
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkPitchesInduct_phiLt0",
'xtitle': "Induction Plane -- Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0,2],
'var': "primTrkPitchesInduct",
'captionright1': "Track #phi < 0",
'cuts': weightStr+hitExtraCutsInduct +" * (primTrkStartPhi < 0)",
#'normalize': True,
'logy': logy,
},
#{
# 'name': "primTrkEndKin",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkEndKinFid",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKinFid",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueEndProcess",
# 'xtitle': "trueEndProcess",
# 'ytitle': "Events / bin",
# 'binning': [17,0,17],
# 'var': "trueEndProcess",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueStartTheta",
# 'xtitle': "True Start #theta [deg]",
# 'binning': [90,0,180],
# 'var': "trueStartTheta*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
#},
#{
# 'name': "trueStartPhi",
# 'xtitle': "True Start #phi",
# 'binning': [90,-180,180],
# 'var': "trueStartPhi*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
#},
#{
# 'name': "trueStartThetaY",
# 'xtitle': "True Start #theta_{y} [deg]",
# 'ytitle': "Events / bin",
# 'binning': [180,0,180],
# 'var': "acos(sin(trueStartTheta)*sin(trueStartPhi))*180./pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueStartPhiZX",
# 'xtitle': "True Start #theta_{zx} [deg]",
# 'ytitle': "Events / bin",
# 'binning': [180,-180,180],
# 'var': "atan2(sin(trueStartTheta)*cos(trueStartPhi),cos(trueStartTheta))*180./pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueStartThetaX",
# 'xtitle': "True Start #theta_{x} [deg]",
# 'ytitle': "Events / bin",
# 'binning': [180,0,180],
# 'var': "acos(sin(trueStartTheta)*cos(trueStartPhi))*180./pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueStartPhiZY",
# 'xtitle': "True Start #theta_{zy} [deg]",
# 'ytitle': "Events / bin",
# 'binning': [180,-180,180],
# 'var': "atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180./pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "trueStartE",
'xtitle': "Muon True Initial Momentum [GeV]",
'ytitle': "Events / bin",
'binning': [100,0,300],
'var': "1e-3*trueStartE",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
#'printIntegral': True,
},
{
'name': "trueStartE_zoom",
'xtitle': "Muon True Initial Momentum [GeV]",
'ytitle': "Events / bin",
'binning': [40,0,10],
'var': "1e-3*trueStartE",
'cuts': weightStr,
#'normalize': True,
'logy': False,
#'printIntegral': True,
},
]
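# plotManyFilesOnePlot (from helpers) presumably draws one plot per histConfig
# with every sample in fileConfigs overlaid, writing images with the
# "Cosmics_" prefix.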
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_")
# fileConfigMCs = copy.deepcopy(fileConfigs)
# fileConfigData = None
# for i in reversed(range(len(fileConfigMCs))):
# if 'isData' in fileConfigMCs[i] and fileConfigMCs[i]['isData']:
# fileConfigData = fileConfigMCs.pop(i)
# DataMCStack(fileConfigData,fileConfigMCs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX)
########################################################
## Single Hists -- All Samples #########################
########################################################
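# plotOneHistOnePlot presumably makes a separate plot per sample per
# histConfig; the 2D amplitude-vs-integral maps let the independent MC scale
# factors for amplitudes and integrals be checked against each other.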
histConfigs = [
{
'name': "primTrkHitAmpsVIntsCollection",
'xtitle': "Collection Plane Hit Integrals [ADC us]",
'ytitle': "Collection Plane Hit Amplitudes [ADC]",
'binning': [50,0,5e3,50,0,150],
'var': "primTrkHitAmps*((0.62-1.)*isMC + 1.):primTrkHitIntegrals*((0.67-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)",
'logz': True,
},
{
'name': "primTrkHitAmpsVIntsInduction",
'xtitle': "Induction Plane Hit Integrals [ADC us]",
'ytitle': "Induction Plane Hit Amplitudes [ADC]",
'binning': [50,0,5e3,50,0,150],
'var': "primTrkHitAmps*((0.47-1.)*isMC + 1.):primTrkHitIntegrals*((0.52-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)",
'logz': True,
},
{
'name': "primTrkHitAmpsVIntsCollection_phiLt0",
'xtitle': "Collection Plane Hit Integrals [ADC us]",
'ytitle': "Collection Plane Hit Amplitudes [ADC]",
'binning': [50,0,5e3,50,0,150],
'var': "primTrkHitAmps*((0.62-1.)*isMC + 1.):primTrkHitIntegrals*((0.67-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)"+"*(primTrkStartPhi < 0)",
'captionleft1': "Track #phi < 0",
'logz': True,
},
{
'name': "primTrkHitAmpsVIntsInduction_phiLt0",
'xtitle': "Induction Plane Hit Integrals [ADC us]",
'ytitle': "Induction Plane Hit Amplitudes [ADC]",
'binning': [50,0,5e3,50,0,150],
'var': "primTrkHitAmps*((0.47-1.)*isMC + 1.):primTrkHitIntegrals*((0.52-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)"+"*(primTrkStartPhi < 0)",
'captionleft1': "Track #phi < 0",
'logz': True,
},
{
'name': "primTrkHitAmpsVIntsCollection_phiGeq0",
'xtitle': "Collection Plane Hit Integrals [ADC us]",
'ytitle': "Collection Plane Hit Amplitudes [ADC]",
'binning': [50,0,5e3,50,0,150],
'var': "primTrkHitAmps*((0.62-1.)*isMC + 1.):primTrkHitIntegrals*((0.67-1.)*isMC + 1.)",
'cuts': weightStr+"*(primTrkHitIsCollections)"+"*(primTrkStartPhi >= 0)",
'captionleft1': "Track #phi #geq 0",
'logz': True,
},
{
'name': "primTrkHitAmpsVIntsInduction_phiGeq0",
'xtitle': "Induction Plane Hit Integrals [ADC us]",
'ytitle': "Induction Plane Hit Amplitudes [ADC]",
'binning': [50,0,5e3,50,0,150],
'var': "primTrkHitAmps*((0.47-1.)*isMC + 1.):primTrkHitIntegrals*((0.52-1.)*isMC + 1.)",
'cuts': weightStr+"*(!primTrkHitIsCollections)"+"*(primTrkStartPhi >= 0)",
'captionleft1': "Track #phi #geq 0",
'logz': True,
},
{
'name': "primTrkdEdxs_zoom3",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts,
'writeImage': False,
},
{
'name': "primTrkdEdxs_zoom3_phiGeq0",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'writeImage': False,
},
{
'name': "primTrkdEdxs_zoom3_phiLt0",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
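# 'writeImage': False presumably suppresses the image file for these configs;
# the histograms are still returned by the helper and end up in the ROOT file
# written below.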
# {
# 'name': "primTrkdEdxVRange",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxVRangeFidCut",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "trackYFrontVtrackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'binning': [40,0,40,40,-20,20],
# 'var': "trackYFront:trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxsVyFromCenter",
# 'xtitle': "Hit |y| [cm]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [40,0,25,200,0,20],
# 'var': "primTrkdEdxs:fabs(primTrkYs)",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVzFromCenter",
# 'xtitle': "Hit |z-45| [cm]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [40,0,50,200,0,20],
# 'var': "primTrkdEdxs:fabs(primTrkZs-45.)",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "hitYVhitX",
# 'xtitle': "Hit x [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [60,-5,55,60,-30,30],
# 'var': "primTrkYs:primTrkXs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "trueStartThetaVtrueStartPhi",
# 'xtitle': "True Start #phi [deg]",
# 'ytitle': "True Start #theta [deg]",
# 'binning': [90,-180,180,90,0,180],
# 'var': "trueStartTheta*180/pi:trueStartPhi*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "trueStartThetaYVtrueStartPhiZX",
# 'xtitle': "True Start #phi_{zx} [deg]",
# 'ytitle': "True Start #theta_{y} [deg]",
# 'binning': [90,-180,180,90,0,180],
# 'var': "acos(sin(trueStartTheta)*sin(trueStartPhi))*180/pi:atan2(sin(trueStartTheta)*cos(trueStartPhi),cos(trueStartTheta))*180./pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "trueStartThetaXVtrueStartPhiZY",
# 'xtitle': "True Start #phi_{zy} [deg]",
# 'ytitle': "True Start #theta_{x} [deg]",
# 'binning': [90,-180,180,90,0,180],
# 'var': "acos(sin(trueStartTheta)*cos(trueStartPhi))*180/pi:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180./pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': False,
# },
]
hists = plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_")
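# The rename-and-write loop below recurs three times in this script; a small
# helper would make the intent explicit. A sketch only (writeHists is
# hypothetical, not something defined in helpers.py):
#   def writeHists(hists, tfile):
#       tfile.cd()
#       for var in hists:
#           for ds in hists[var]:
#               h = hists[var][ds]
#               h.SetName(var + "_" + ds)  # one histogram per (var, dataset)
#               h.Write()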
outfile = root.TFile("cosmics_hists.root","recreate")
outfile.cd()
for var in hists:
for ds in hists[var]:
newname = var+"_"+ds
hist = hists[var][ds]
hist.SetName(newname)
hist.Print()
hist.Write()
########################################################
## Single Hists -- Not Smear Samples ###################
########################################################
histConfigs = [
{
'name': "primTrkStartThetaVPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Primary TPC Track #theta [deg]",
'binning': [90,-180,180,90,0,180],
'var': "primTrkStartTheta*180/pi:primTrkStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkStartThetaYVprimTrkStartPhiZX",
'xtitle': "Primary TPC Track #phi_{zx} [deg]",
'ytitle': "Primary TPC Track #theta_{y} [deg]",
'binning': [90,-180,180,90,0,180],
'var': "acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180/pi:atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartThetaYVprimTrkStartPhiZX_Zoom",
'xtitle': "Primary TPC Track #phi_{zx} [deg]",
'ytitle': "Primary TPC Track #theta_{y} [deg]",
'binning': [45,0,45,80,50,130],
'var': "acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180/pi:atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartThetaXVprimTrkStartPhiZY",
'xtitle': "Primary TPC Track #phi_{zy} [deg]",
'ytitle': "Primary TPC Track #theta_{x} [deg]",
'binning': [90,-180,180,90,0,180],
'var': "acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))*180/pi:atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVx",
'xtitle': "Hit x [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,0,50,200,0,20],
'var': "primTrkdEdxs:primTrkXs",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVy",
'xtitle': "Hit y [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,-25,25,200,0,20],
'var': "primTrkdEdxs:primTrkYs",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVz",
'xtitle': "Hit z [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,-5,95,200,0,20],
'var': "primTrkdEdxs:primTrkZs",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVx_phiGeq0",
'xtitle': "Hit x [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,0,50,200,0,20],
'var': "primTrkdEdxs:primTrkXs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVy_phiGeq0",
'xtitle': "Hit y [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,-25,25,200,0,20],
'var': "primTrkdEdxs:primTrkYs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsInductVy_phiGeq0",
'xtitle': "Hit y [cm]",
'ytitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,-25,25,100,0,10],
'var': "primTrkdEdxsInduct:primTrkYsInduct",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVz_phiGeq0",
'xtitle': "Hit z [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,-5,95,200,0,20],
'var': "primTrkdEdxs:primTrkZs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVx_phiLt0",
'xtitle': "Hit x [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,0,50,200,0,20],
'var': "primTrkdEdxs:primTrkXs",
'captionright1': "Track #phi < 0",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsInductVx_phiLt0",
'xtitle': "Hit x [cm]",
'ytitle': "Induct: Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,0,50,100,0,10],
'var': "primTrkdEdxsInduct:primTrkXsInduct",
'captionright1': "Track #phi < 0",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi < 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVy_phiLt0",
'xtitle': "Hit y [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,-25,25,200,0,20],
'var': "primTrkdEdxs:primTrkYs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'captionright1': "Track #phi < 0",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsInductVy_phiLt0",
'xtitle': "Hit y [cm]",
'ytitle': "Induct: Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,-25,25,100,0,10],
'var': "primTrkdEdxsInduct:primTrkYsInduct",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi < 0)",
'captionright1': "Track #phi < 0",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVz_phiLt0",
'xtitle': "Hit z [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,-5,95,200,0,20],
'var': "primTrkdEdxs:primTrkZs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'captionright1': "Track #phi < 0",
#'normalize': True,
'logz': True,
},
]
hists = plotOneHistOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],
histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_")
outfile.cd()
for var in hists:
for ds in hists[var]:
newname = var+"_"+ds
hist = hists[var][ds]
hist.SetName(newname)
hist.Print()
hist.Write()
########################################################
## Single Hists -- Data Only ###########################
########################################################
histConfigs = [
{
'name': "primTrkdQdxs",
'xtitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'ytitle': "Hits / bin",
'binning': [100,0,1e4],
'var': "primTrkdQdxs",
'cuts': weightStr+hitExtraCuts,
'writeImage': False,
},
{
'name': "primTrkdQdxs_phiGeq0",
'xtitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'ytitle': "Hits / bin",
'binning': [100,0,1e4],
'var': "primTrkdQdxs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'writeImage': False,
},
{
'name': "primTrkdQdxs_phiLt0",
'xtitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'ytitle': "Hits / bin",
'binning': [100,0,1e4],
'var': "primTrkdQdxs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
{
'name': "primTrkdQs",
'xtitle': "Primary TPC Track dQ [ADC ns]",
'ytitle': "Hits / bin",
'binning': [200,0,8e3],
'var': "primTrkdQdxs*primTrkPitches",
'cuts': weightStr+hitExtraCuts,
'writeImage': False,
},
{
'name': "primTrkdQs_phiGeq0",
'xtitle': "Primary TPC Track dQ [ADC ns]",
'ytitle': "Hits / bin",
'binning': [200,0,8e3],
'var': "primTrkdQdxs*primTrkPitches",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'writeImage': False,
},
{
'name': "primTrkdQs_phiLt0",
'xtitle': "Primary TPC Track dQ [ADC ns]",
'ytitle': "Hits / bin",
'binning': [200,0,8e3],
'var': "primTrkdQdxs*primTrkPitches",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
{
'name': "primTrkStartPhiVrun",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track #phi [deg]",
'binning': [1400,8200,9600,45,-180,180],
'var': "primTrkStartPhi*180/pi:runNumber",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVrun",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [1400,8200,9600,200,0,20],
'var': "primTrkdEdxs:runNumber",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVrun_phiGeq0",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [1400,8200,9600,200,0,20],
'var': "primTrkdEdxs:runNumber",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
'writeImage': False,
},
{
'name': "primTrkdEdxsVrun_phiLt0",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [1400,8200,9600,200,0,20],
'var': "primTrkdEdxs:runNumber",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
#'normalize': True,
'logz': True,
'writeImage': False,
},
{
'name': "primTrkdEdxsInductVrun_phiGeq0",
'xtitle': "Run Number",
'ytitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'binning': [1400,8200,9600,100,0,10],
'var': "primTrkdEdxsInduct:runNumber",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsInductVrun_phiLt0",
'xtitle': "Run Number",
'ytitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'binning': [1400,8200,9600,100,0,10],
'var': "primTrkdEdxsInduct:runNumber",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi < 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxVwire",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
'binning': [240,0,240,100,0,10],
'var': "primTrkdEdxs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxVwire_phiGeq0",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
'binning': [240,0,240,100,0,10],
'var': "primTrkdEdxs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
#'logz': True,
'writeImage': False,
},
{
'name': "primTrkdEdxVwire_phiLt0",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
'binning': [240,0,240,100,0,10],
'var': "primTrkdEdxs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
#'normalize': True,
#'logz': True,
'writeImage': False,
},
{
'name': "primTrkdEdxInductVwire_phiGeq0",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Induction: Primary Track Hit dE/dx [MeV/cm]",
'binning': [240,0,240,100,0,10],
'var': "primTrkdEdxsInduct:primTrkTrueWiresInduct",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxInductVwire_phiLt0",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Induction: Primary Track Hit dE/dx [MeV/cm]",
'binning': [240,0,240,100,0,10],
'var': "primTrkdEdxsInduct:primTrkTrueWiresInduct",
'cuts': weightStr+hitExtraCutsInduct+"*(primTrkStartPhi < 0)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdQdxsVrun",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'binning': [1400,8200,9600,100,0,1e4],
'var': "primTrkdQdxs:runNumber",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
'logz': True,
'graphs': lifetimeGraphs,
},
{
'name': "primTrkdQdxsVrun_phiGeq0",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'binning': [1400,8200,9600,100,0,1e4],
'var': "primTrkdQdxs:runNumber",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
'logz': True,
'writeImage': False,
},
{
'name': "primTrkdQdxsVrun_phiLt0",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'binning': [1400,8200,9600,100,0,1e4],
'var': "primTrkdQdxs:runNumber",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
#'normalize': True,
'logz': True,
'writeImage': False,
},
{
'name': "primTrkdQdxsVrun_phiLt0_xLt10",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'binning': [1400,8200,9600,100,0,1e4],
'var': "primTrkdQdxs:runNumber",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)*(primTrkXs < 10)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdQdxsVrun_phiLt0_xGeq30",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'binning': [1400,8200,9600,100,0,1e4],
'var': "primTrkdQdxs:runNumber",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)*(primTrkXs >= 30)",
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdQdxVwire",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Primary Track Hit dQ/dx [ADC ns / cm]",
'binning': [240,0,240,100,0,1e4],
'var': "primTrkdQdxs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdQdxVwire_phiGeq0",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Primary Track Hit dQ/dx [ADC ns / cm]",
'binning': [240,0,240,100,0,1e4],
'var': "primTrkdQdxs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
#'normalize': True,
#'logz': True,
'writeImage': False,
},
{
'name': "primTrkdQdxVwire_phiLt0",
'xtitle': "Primary Track Hit Wire Number",
'ytitle': "Primary Track Hit dQ/dx [ADC ns / cm]",
'binning': [240,0,240,100,0,1e4],
'var': "primTrkdQdxs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
#'normalize': True,
#'logz': True,
'writeImage': False,
},
# {
# 'name': "primTrkdEdxsVprimTrkStartCosTheta",
# 'xtitle': "Primary TPC Track cos(#theta)",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [50,0,1,10000,0,50],
# 'var': "primTrkdEdxs:cos(primTrkStartTheta)",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartPhi",
# 'xtitle': "Primary TPC Track #phi",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [30,-180,180,10000,0,50],
# 'var': "primTrkdEdxs:primTrkStartPhi*180/pi",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartCosThetaX",
# 'xtitle': "Primary TPC Track cos(#theta_{x})",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [50,0,1,10000,0,50],
# 'var': "primTrkdEdxs:sin(primTrkStartTheta)*cos(primTrkStartPhi)",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartCosThetaX_zoom",
# 'xtitle': "Primary TPC Track cos(#theta_{x})",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [50,0,1,100,0,5],
# 'var': "primTrkdEdxs:sin(primTrkStartTheta)*cos(primTrkStartPhi)",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartPhiZY",
# 'xtitle': "Primary TPC Track #phi_{zy}",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [60,-180,180,10000,0,50],
# 'var': "primTrkdEdxs:atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartPhiZY_zoom",
# 'xtitle': "Primary TPC Track #phi_{zy}",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [60,-180,180,100,0,5],
# 'var': "primTrkdEdxs:atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartPhiZY_zoom_logy",
# 'xtitle': "Primary TPC Track #phi_{zy}",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [60,-180,180,100,0,5],
# 'var': "primTrkdEdxs:atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartCosThetaY",
# 'xtitle': "Primary TPC Track cos(#theta_{y})",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [50,0,1,10000,0,50],
# 'var': "primTrkdEdxs:sin(primTrkStartTheta)*sin(primTrkStartPhi)",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVprimTrkStartPhiZX",
# 'xtitle': "Primary TPC Track #phi_{zx}",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [30,-180,180,10000,0,50],
# 'var': "primTrkdEdxs:atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
# 'cuts': weightStr+hitExtraCuts,
# #'normalize': True,
# 'logz': True,
# },
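# The three-variable 'var' strings below are ordered "z:y:x", matching the
# "y:x" convention of the 2D plots above, and 'binning' is laid out as
# [nx, xlo, xhi, ny, ylo, yhi, nz, zlo, zhi].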
{
'name': "primTrkdEdxsVHitWireAndHitY_phiLt0",
'xtitle': "Hit Wire Number",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx [MeV/cm]",
'binning': [240,0,240,10,-25,25,50,0,5],
'var': "primTrkdEdxs:primTrkYs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
{
'name': "primTrkdEdxsVHitWireAndHitY_phiGeq0",
'xtitle': "Hit Wire Number",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx [MeV/cm]",
'binning': [240,0,240,10,-25,25,50,0,5],
'var': "primTrkdEdxs:primTrkYs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'writeImage': False,
},
{
'name': "primTrkdQdxsVHitWireAndHitY_phiLt0",
'xtitle': "Hit Wire Number",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx dQ/dx [ADC ns / cm]",
'binning': [240,0,240,10,-25,25,50,0,1e4],
'var': "primTrkdQdxs:primTrkYs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
{
'name': "primTrkdQdxsVHitWireAndHitY_phiGeq0",
'xtitle': "Hit Wire Number",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx dQ/dx [ADC ns / cm]",
'binning': [240,0,240,10,-25,25,50,0,1e4],
'var': "primTrkdQdxs:primTrkYs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'writeImage': False,
},
{
'name': "primTrkdQdxsVrunAndHitX_phiLt0",
'xtitle': "Run Number",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx dQ/dx [ADC ns / cm]",
'binning': [20,8200,9600,20,-5,55,50,0,1e4],
'var': "primTrkdQdxs:primTrkXs:primTrkTrueWires",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
{
'name': "primTrkdEdxsVHitZAndHitY_phiLt0",
'xtitle': "Hit z position [cm]",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx [MeV/cm]",
'binning': [60,3,87,60,-18,18,50,0,5],
'var': "primTrkdEdxs:primTrkYs:primTrkZs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi < 0)",
'writeImage': False,
},
{
'name': "primTrkdEdxsVHitZAndHitY_phiGeq0",
'xtitle': "Hit z position [cm]",
'ytitle': "Hit y position [cm]",
'ztitle': "dE/dx [MeV/cm]",
'binning': [60,3,87,60,-18,18,50,0,5],
'var': "primTrkdEdxs:primTrkYs:primTrkZs",
'cuts': weightStr+hitExtraCuts+"*(primTrkStartPhi >= 0)",
'writeImage': False,
},
]
hists = plotOneHistOnePlot([x for x in fileConfigs if x["isData"]],
histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_")
outfile.cd()
for var in hists:
for ds in hists[var]:
newname = var+"_"+ds
hist = hists[var][ds]
hist.SetName(newname)
hist.Print()
hist.Write()
outfile.Close()
######################################################################################
## Compare Cuts -- True Paddle Hits ##################################################
######################################################################################
# histConfigs = [
# {
# 'title': "All",
# 'cuts': "( iBestMatch >= 0)",
# },
# {
# 'title': "Hit Cosmic 1",
# 'cuts': "( iBestMatch >= 0)*trueHitCosmic1",
# },
# {
# 'title': "Hit Cosmic 2",
# 'cuts': "( iBestMatch >= 0)*trueHitCosmic2",
# },
# {
# 'title': "Hit Cosmic 3",
# 'cuts': "( iBestMatch >= 0)*trueHitCosmic3",
# },
# {
# 'title': "Hit Cosmic 4",
# 'cuts': "( iBestMatch >= 0)*trueHitCosmic4",
# },
# {
# 'title': "Hit Cosmic 1 & 2",
# 'cuts': "( iBestMatch >= 0)*trueHitCosmic1*trueHitCosmic2",
# },
# {
# 'title': "Hit Cosmic 3 & 4",
# 'cuts': "( iBestMatch >= 0)*trueHitCosmic3*trueHitCosmic4",
# },
# ]
# for i in range(len(histConfigs)):
# histConfigs[i]["color"] = COLORLIST[i]
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Muon True Initial Energy [GeV]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,300],
# 'var': "1e-3*trueStartE",
# #'normalize': True,
# 'logy': logy,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_trueStartE")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Muon True Initial Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [10,0,2000],
# 'var': "trueStartE",
# 'normalize': False,
# 'logy': False,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs[1:],
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_trueStartE_zoom2")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Muon True Initial Energy [GeV]",
# 'ytitle': "Events / bin",
# 'binning': [40,0,10],
# 'var': "1e-3*trueStartE",
# 'normalize': False,
# 'logy': False,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs[1:],
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_trueStartE_zoom")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Muon True Initial Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [40,0,10],
# 'var': "1e-3*trueStartE",
# 'normalize': False,
# 'logy': True,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_trueStartE_zoom_logy")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Muon True Initial Energy [MeV]",
# 'ytitle': "Normalized events / bin",
# 'binning': [40,0,10],
# 'var': "1e-3*trueStartE",
# 'normalize': True,
# 'logy': False,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_trueStartE_zoom_norm")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track #phi_{zx} [deg]",
# 'ytitle': "Events / bin",
# 'binning': [90,-180,180],
# 'var': "atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180./pi",
# #'normalize': True,
# 'logy': logy,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_primTrkStartPhiZX")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track #theta_{y} [deg]",
# 'ytitle': "Events / bin",
# 'binning': [90,0,180],
# 'var': "acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi",
# #'normalize': True,
# 'logy': logy,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_primTrkStartThetaY")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'normalize': False,
# 'logy': True,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_primTrkdEdxs_zoom")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,5],
# 'var': "primTrkdEdxs",
# 'normalize': True,
# 'logy': False,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_paddles_primTrkdEdxs_zoom3")
######################################################################################
## Compare Cuts -- Trigger Bits ######################################################
######################################################################################
# histConfigs = [
# {
# 'title': "All",
# 'cuts': "( iBestMatch >= 0)",
# },
# #{
# # 'title': "!Trigger Bit 12",
# # 'cuts': "( iBestMatch >= 0) && (!((triggerBits >> 12) & 1))",
# #},
# #{
# # 'title': "!Trigger Bit 13",
# # 'cuts': "( iBestMatch >= 0) && (!((triggerBits >> 13) & 1))",
# #},
# #{
# # 'title': "!Trigger Bit 14",
# # 'cuts': "( iBestMatch >= 0) && (!((triggerBits >> 14) & 1))",
# #},
# #{
# # 'title': "!Trigger Bit 4",
# # 'cuts': "( iBestMatch >= 0) && (!((triggerBits >> 4) & 1))",
# #},
# #{
# # 'title': "!Trigger Bit 9",
# # 'cuts': "( iBestMatch >= 0) && (!((triggerBits >> 9) & 1))",
# #},
# #{
# # 'title': "!Trigger Bit 10",
# # 'cuts': "( iBestMatch >= 0) && (!((triggerBits >> 10) & 1))",
# #},
# {
# 'title': "Trigger Bit 10",
# 'cuts': "( iBestMatch >= 0) && ( ((triggerBits >> 10) & 1))",
# },
# ]
# for i in range(len(histConfigs)):
# histConfigs[i]["color"] = COLORLIST[i]
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track #phi [deg]",
# 'ytitle': "Events / bin",
# 'binning': [60,-180,180],
# 'var': "primTrkStartPhi*180./pi",
# 'logy': logy,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_triggers_primTrkStartPhi")
######################################################################################
## Compare Cuts -- Position ##########################################################
######################################################################################
histConfigs = [
{
'title': "All",
'cuts': "( iBestMatch >= 0) && (nTracks == 1)",
},
{
'title': " 40 cm < x < 45 cm",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) && (primTrkXs > 40 && primTrkXs < 45)",
},
{
'title': "x > 45 cm",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) && (primTrkXs > 45)",
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'normalize': True,
'logy': False,
},
)
# plotManyHistsOnePlot([x for x in fileConfigs if x["isData"]],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_regions_primTrkdEdxs_zoom3")
######################################################################################
## Compare Cuts -- Phi >= or < 0 #####################################################
######################################################################################
histConfigs = [
{
'title': "Track #phi #geq 0",
'cuts': weightStr+hitExtraCuts+" * (primTrkStartPhi >= 0)",
},
{
'title': "Track #phi < 0",
'cuts': weightStr+hitExtraCuts+" * (primTrkStartPhi < 0)",
},
]
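# TTree::Draw treats the selection string as a per-entry weight, so boolean
# sub-cuts chain together with '*' and any weight factor inside weightStr
# multiplies in as a genuine event weight.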
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track Length [cm]",
'ytitle': "Events / bin",
'binning': [40,0,120],
'var': "primTrkLength",
'normalize': True,
'logy': False,
},
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkLength")
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'normalize': True,
'logy': False,
},
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkdEdxs_zoom3")
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track dQ/dx [ADC ns / cm]",
'ytitle': "Events / bin",
'binning': [100,0,10e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
'normalize': True,
'logy': False,
},
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkdQdxs_zoom")
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track dQ [ADC ns]",
'ytitle': "Events / bin",
'binning': [200,0,8e3],
'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)*primTrkPitches",
'normalize': True,
'logy': False,
},
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkdQs_zoom")
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [200,0,2],
'var': "primTrkPitches",
'normalize': False,
'logy': True,
},
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkPitches_logy")
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Normalized hits / bin",
'binning': [200,0,2],
'var': "primTrkPitches",
'normalize': True,
'logy': False,
},
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkPitches")
histConfigs = [
{
'title': "Track #phi #geq 0",
'cuts': weightStr+hitExtraCutsInduct+" * (primTrkStartPhi >= 0)",
},
{
'title': "Track #phi < 0",
'cuts': weightStr+hitExtraCutsInduct+" * (primTrkStartPhi < 0)",
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Induction: Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,0,4],
'var': "primTrkdEdxsInduct*((2.65-1.)*isMC + 1.)",
'normalize': True,
'logy': False,
}
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkdEdxsInduct_zoom3")
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Induction: Primary TPC Track dQ/dx [ADC/cm]",
'ytitle': "Events / bin",
'binning': [100,0,2e3],
'var': "primTrkdQdxsInduct*((0.70-1.)*isMC + 1.)",
'normalize': True,
'logy': False,
}
)
plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhi_primTrkdQdxsInduct_zoom3")
######################################################################################
## Compare Cuts -- Phi >= or < 0 && other angle cuts #################################
######################################################################################
# histConfigs = [
# {
# 'title': "Track #phi #geq 0",
# 'cuts': "( iBestMatch >= 0) * (primTrkStartPhi >= 0)",
# },
# {
# 'title': "Track #phi <0",
# 'cuts': "( iBestMatch >= 0) * (primTrkStartPhi < 0)",
# },
# {
# 'title': "Track #phi #geq 0 & Angle Cuts",
# 'cuts': "( iBestMatch >= 0) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
# },
# {
# 'title': "Track #phi <0 & Angle Cuts",
# 'cuts': "( iBestMatch >= 0) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
# },
# ]
# for i in range(len(histConfigs)):
# histConfigs[i]["color"] = COLORLIST[i]
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Hits / bin",
# 'binning': [50,0,5],
# 'var': "primTrkdEdxs",
# 'normalize': True,
# 'logy': False,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhiCuts_primTrkdEdxs_zoom3")
#
# for i in range(len(histConfigs)):
# histConfigs[i].update(
# {
# 'xtitle': "Primary TPC Track Pitch [cm]",
# 'ytitle': "Hits / bin",
# 'binning': [200,0,2],
# 'var': "primTrkPitches",
# 'normalize': False,
# 'logy': True,
# },
# )
# plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_trackPhiCuts_primTrkPitches")
######################################################################################
## Hit Locations -- Phi >= or < 0 && other angle cuts #################################
######################################################################################
histConfigs = [
{
'name': "hitYVhitX_phiGeq0",
'xtitle': "Hit x [cm]",
'ytitle': "Hit y [cm]",
'binning': [60,-5,55,60,-30,30],
'var': "primTrkYs:primTrkXs",
'captionright1': "Track #phi #geq 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitYVhitZ_phiGeq0",
'xtitle': "Hit z [cm]",
'ytitle': "Hit y [cm]",
'binning': [120,-10,110,60,-30,30],
'var': "primTrkYs:primTrkZs",
'captionright1': "Track #phi #geq 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitXVhitZ_phiGeq0",
'xtitle': "Hit z [cm]",
'ytitle': "Hit x [cm]",
'binning': [120,-10,110,60,-5,55],
'var': "primTrkXs:primTrkZs",
'captionright1': "Track #phi #geq 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitYVhitX_phiLt0",
'xtitle': "Hit x [cm]",
'ytitle': "Hit y [cm]",
'binning': [60,-5,55,60,-30,30],
'var': "primTrkYs:primTrkXs",
'captionright1': "Track #phi < 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitYVhitZ_phiLt0",
'xtitle': "Hit z [cm]",
'ytitle': "Hit y [cm]",
'binning': [120,-10,110,60,-30,30],
'var': "primTrkYs:primTrkZs",
'captionright1': "Track #phi < 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitXVhitZ_phiLt0",
'xtitle': "Hit z [cm]",
'ytitle': "Hit x [cm]",
'binning': [120,-10,110,60,-5,55],
'var': "primTrkXs:primTrkZs",
'captionright1': "Track #phi < 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitXVwire_phiLt0",
'xtitle': "Wire Number",
'ytitle': "Hit x [cm]",
'binning': [240,0,240,60,-5,55],
'var': "primTrkXs:primTrkTrueWires",
'captionright1': "Track #phi < 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitXVwire_phiGeq0",
'xtitle': "Wire Number",
'ytitle': "Hit x [cm]",
'binning': [240,0,240,60,-5,55],
'var': "primTrkXs:primTrkTrueWires",
'captionright1': "Track #phi #geq 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitYVwire_phiLt0",
'xtitle': "Wire Number",
'ytitle': "Hit y [cm]",
'binning': [240,0,240,60,-30,30],
'var': "primTrkYs:primTrkTrueWires",
'captionright1': "Track #phi < 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitYVwire_phiGeq0",
'xtitle': "Wire Number",
'ytitle': "Hit y [cm]",
'binning': [240,0,240,60,-30,30],
'var': "primTrkYs:primTrkTrueWires",
'captionright1': "Track #phi #geq 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitZVwire_phiLt0",
'xtitle': "Wire Number",
'ytitle': "Hit z [cm]",
'binning': [240,0,240,120,-10,110],
'var': "primTrkZs:primTrkTrueWires",
'captionright1': "Track #phi < 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi < 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
{
'name': "hitZVwire_phiGeq0",
'xtitle': "Wire Number",
'ytitle': "Hit z [cm]",
'binning': [240,0,240,120,-10,110],
'var': "primTrkZs:primTrkTrueWires",
'captionright1': "Track #phi #geq 0 & Angle Cuts",
'cuts': "( iBestMatch >= 0) && (nTracks == 1) * (primTrkStartPhi >= 0)*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)",
#'normalize': True,
#'logz': True,
},
]
#plotOneHistOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_HitPos_")
######################################################################################
## Compare Cuts -- Phi >= or < 0 and restrict pitch ##################################
######################################################################################
histConfigs = [
{
'title': "Track #phi #geq 0",
'cuts': weightStr+hitExtraCuts+" * (primTrkStartPhi >= 0)",
},
{
'title': "Track #phi < 0",
'cuts': weightStr+hitExtraCuts+" * (primTrkStartPhi < 0)",
},
{
'title': "Track #phi #geq 0 & 0.45 #leq Pitch < 0.47",
'cuts': weightStr+hitExtraCuts+" * (primTrkStartPhi >= 0 && primTrkPitches >= 0.45 && primTrkPitches < 0.47)",
},
{
'title': "Track #phi < 0 & 0.68 #leq Pitch < 0.70",
'cuts': weightStr+hitExtraCuts+" * (primTrkStartPhi < 0 && primTrkPitches >= 0.68 && primTrkPitches < 0.70)",
},
]
#hitExtraCuts += "*((primTrkStartPhi >= 0) || (primTrkStartPhi < 0))" #small pitch region
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
for i in range(len(histConfigs)):
histConfigs[i].update(
{
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'normalize': True,
'logy': False,
},
)
# plotManyHistsOnePlot([x for x in fileConfigs if not ("smear" in x["name"])],histConfigs,
# c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_pitchCuts_primTrkdEdxs_zoom3")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,905 | jhugon/lariatPionAbs | refs/heads/master | /plotCalibrate.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
import fitCosmicHalo
import bethe
root.gROOT.SetBatch(True)
import sys
import numpy
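# Slab thickness (presumably in cm) used for the Bethe-Bloch MPV prediction at
# the end of this script.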
SLABTHICKNESS = 1.
if __name__ == "__main__":
cuts = ""
#cuts += "*( pWC > 100 && pWC < 1100 && (isMC || (firstTOF > 0 && firstTOF < 25)))" # old pions
piMassCuts = "*( pWC > 100 && pWC < 1100 && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) < 5e4))" # pions
#cuts += "*( pWC > 450 && pWC < 1100 && (isMC || (firstTOF > 28 && firstTOF < 55)))" # old protons
protonMassCuts = "*( pWC > 450 && pWC < 1100 && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) > 7e5))" # protons
#cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
cuts += "*(primTrkStartZ < 2.)" # tpc tracks
cuts += "*( iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
#cuts += "*(primTrkEndInFid == 1)"
cuts += "*(primTrkEndX > 5.4 && primTrkEndX < 42.7)"
cuts += "*(primTrkEndY > -15. && primTrkEndY < 15.)"
cuts += "*(primTrkEndZ > 5. && primTrkEndZ < 85.)"
# From dE/dx calibration tech note
cuts += "*(primTrkLength > 10.)"
cuts += "*(nTracksLengthLt[5] < 3.)"
hitcuts = "*(Iteration$ < 12)"
logy = True
nData = 224281.0
c = root.TCanvas()
NMAX=1000000000
#NMAX=100
baseDir="/scratch/jhugon/"
baseDir=""
########################################################
## Beam Pions Definitions ##############################
########################################################
fileConfigs = [
{
'fn': "piAbs_v2/piAbsSelector_Pos_RunII_current100_v02_all.root",
'addFriend': ["friend", "piAbs_v2/friendTrees/friendTree_piAbsSelector_Pos_RunII_current100_v02_all.root"],
'name': "RunII_Pos_100",
'title': "Run II +100A",
'caption': "Run II +100A",
'isData': True,
},
{
'fn': "piAbs_v2/piAbsSelector_Pos_RunII_current60_v02_all.root",
'addFriend': ["friend", "piAbs_v2/friendTrees/friendTree_piAbsSelector_Pos_RunII_current60_v02_all.root"],
'name': "RunII_Pos_60",
'title': "Run II +60A",
'caption': "Run II +60A",
'isData': True,
},
{
'fn': "billMC1/MC1_PDG_211.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_211.root"],
'name': "pip_weighted",
'title': "#pi^{+} MC Weighted",
'caption': "#pi^{+} MC Weighted",
'scaleFactor': 1./25000*nData,
'cuts': "*pzWeight",
},
{
'fn': "billMC1/MC1_PDG_211.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_211.root"],
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'scaleFactor': 1./25000*nData,
},
{
'fn': "mcSmearedForCalibration/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear10_v6.root",
'addFriend': ["friend", "mcSmearedForCalibration/friendTrees/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear10_v6.root"],
'name': "pip_presmear10",
'title': "#pi^{+} MC Smear 10%",
'caption': "#pi^{+} MC Smear 10%",
'scaleFactor': 1./25000*nData,
},
{
'fn': "mcSmearedForCalibration/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear10_v6.root",
'addFriend': ["friend", "mcSmearedForCalibration/friendTrees/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear10_v6.root"],
'name': "pip_presmear10_weighted",
'title': "#pi^{+} MC Smear 10%",
'caption': "#pi^{+} MC Smear 10%",
'scaleFactor': 1./25000*nData,
'cuts': "*pzWeight",
},
{
'fn': "mcSmearedForCalibration/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear30_v6.root",
'addFriend': ["friend", "mcSmearedForCalibration/friendTrees/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear30_v6.root"],
'name': "pip_presmear30",
'title': "#pi^{+} MC Smear 30%",
'caption': "#pi^{+} MC Smear 30%",
'scaleFactor': 1./25000*nData,
},
{
'fn': "mcSmearedForCalibration/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear30_v6.root",
'addFriend': ["friend", "mcSmearedForCalibration/friendTrees/PiAbsSelector_lariat_PiAbsAndChEx_flat_pip_presmear30_v6.root"],
'name': "pip_presmear30_weighted",
'title': "#pi^{+} MC Smear 30%",
'caption': "#pi^{+} MC Smear 30%",
'scaleFactor': 1./25000*nData,
'cuts': "*pzWeight",
},
# {
# 'fn': "billMC1/MC1_PDG_2212.root",
# 'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_2212.root"],
# 'name': "p",
# 'title': "proton MC",
# 'caption': "proton MC",
# 'color': root.kRed-4,
# 'scaleFactor': 1./10000*nData,
# },
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
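# Append the common event selection and pion mass cuts, creating the 'cuts' key
# for file configs that do not already define one.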
try:
fileConfigs[i]['cuts'] += cuts+piMassCuts
except KeyError:
fileConfigs[i]['cuts'] = cuts+piMassCuts
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,1.,5.0],
'var': "primTrkdEdxs",
'cuts': "1"+hitcuts,
'normalize': True,
},
{
'name': "primTrkPitches",
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Hits / bin",
'binning': [100,0.,2.0],
'var': "primTrkPitches",
'cuts': "1"+hitcuts,
'normalize': True,
},
{
'name': "nTracksLengthLt3",
'xtitle': "N Tracks with Length < 5 cm",
'ytitle': "Events / bin",
'binning': [20,0,20],
'var': "nTracksLengthLt[5]",
'cuts': "1",
'normalize': True,
},
#{
# 'name': "pWC",
# 'xtitle': "Beamline Momentum [MeV/c]",
# 'ytitle': "Events / bin",
# 'binning': [40,100,1100],
# 'var': "(!isMC)*pWC+isMC*trueStartMom",
# 'cuts': "1",
# 'normalize': True,
#},
#{
# 'name': "primTrkLength",
# 'xtitle': "Primary Track Length [cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,100],
# 'var': "primTrkLength",
# 'cuts': "1",
# 'normalize': True,
#},
#{
# 'name': "beamlineMass",
# 'xtitle': "Beamline Mass Squared [MeV^{2}]",
# 'ytitle': "Events / bin",
# 'binning': [100,-5e5,2e6],
# 'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)",
# 'cuts': "1",
# #'normalize': True,
# 'logy': True,
# 'drawvlines':[105.65**2,139.6**2,493.677**2,938.272046**2],
#},
#{
# 'name': "primTrkRangeSoFars",
# 'ytitle': "Hits / bin",
# 'xtitle': "Primary Track Range so Far [cm]",
# 'binning': [100,0,50],
# 'var': "primTrkLength-primTrkResRanges",
# 'cuts': "1"+hitcuts,
# 'normalize': True,
#},
#{
# 'name': "primTrkZs",
# 'ytitle': "Hits / bin",
# 'xtitle': "Primary Track Hit z [cm]",
# 'binning': [120,-10,110],
# 'var': "primTrkZs",
# 'cuts': "1"+hitcuts,
# 'normalize': True,
#},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",outPrefix="Calibrate_PiMuE_",nMax=NMAX)
histConfigs = [
{
'name': "primTrkdEdxsVbeamlineMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,100,1100,100,0,10.0],
'var': "primTrkdEdxs:(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1"+hitcuts,
},
#{
# 'name': "primTrkdEdxsVResRange",
# 'xtitle': "Residual Range [cm]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [50,0,100,50,1.,2.5],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': "1"+hitcuts,
#},
#{
# 'name': "primTrkLengthVkinWCInTPC",
# 'xtitle': "Kinetic Energy at TPC Start [MeV]",
# 'ytitle': "Primary TPC Track Length [cm]",
# 'binning': [50,0,600,50,0,100],
# 'var': "primTrkLength:kinWCInTPC",
# 'cuts': "1",
#},
#{
# 'name': "beamline_TOFVMom",
# 'xtitle': "Beamline Momentum [MeV/c]",
# 'ytitle': "Time Of Flight [ns]",
# 'binning': [100,0,2000,100,0,100],
# 'var': "firstTOF:pWC",
# 'cuts': "1",
# 'logz': True,
#},
#{
# 'name': "primTrkRangeSoFarsVIteration",
# 'xtitle': "Primary Track Hit Iteration",
# 'ytitle': "Primary Track Range so Far [cm]",
# 'binning': [20,0,20,40,0,20],
# 'var': "primTrkLength-primTrkResRanges:Iteration$",
# 'cuts': "1"+hitcuts,
# 'logz': True,
#},
]
hists = plotOneHistOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",outPrefix="Calibrate_PiMuE_",nMax=NMAX)
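# plotOneHistOnePlot evidently returns the filled histograms keyed as
# hists[histName][sampleName]; fit Landaus to each momentum slice below.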
for histname in hists:
mpvGraphs = []
wGraphs = []
labels = []
for samplename in sorted(hists[histname]):
hist = hists[histname][samplename]
print "justin:", histname, samplename, hist, hists[histname]
mpvGraph, wGraph = fitCosmicHalo.fitSlicesLandaus(c,hist,samplename,fracMax=0.4)
mpvGraphs.append(mpvGraph)
wGraphs.append(wGraph)
label = samplename
for fileConfig in fileConfigs:
if fileConfig['name'] == samplename:
label = fileConfig['title']
labels.append(label)
#fitCosmicHalo.fitSlicesLandauCore(c,hist,samplename)
c.Clear()
for i in range(len(mpvGraphs)):
mpvGraphs[i].SetLineColor(COLORLIST[i])
mpvGraphs[i].SetMarkerColor(COLORLIST[i])
predictor = bethe.Bethe()
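# Overlay the Bethe-Bloch Landau-MPV prediction for pions; numpy.linspace(100,1500)
# defaults to 50 evenly spaced momentum points.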
pionPredGraph = root.TGraph()
for iPoint, mom in enumerate(numpy.linspace(100,1500)):
mpvPred = predictor.mpv(SLABTHICKNESS,mom,bethe.PIONMASS)
pionPredGraph.SetPoint(iPoint,mom,mpvPred)
ax = drawGraphs(c,mpvGraphs+[pionPredGraph],"Beamline Momentum [MeV/c]","Landau MPV [MeV/cm]",xlims=[0,1200],ylims=[0,5],freeTopSpace=0.5)
#ax = drawGraphs(c,mpvGraphs,"Beamline Momentum [MeV/c]","Landau MPV [MeV/cm]",xlims=[400,1200],ylims=[0,10],freeTopSpace=0.5)
leg = drawNormalLegend(mpvGraphs+[pionPredGraph],labels+["Bethe #pi^{+}"],"lep")
c.SaveAs("Calibrate_mpvs.png")
c.SaveAs("Calibrate_mpvs.pdf")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,906 | jhugon/lariatPionAbs | refs/heads/master | /plotNMinusOne.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cutConfigs = [
{
'name': "pWC",
'xtitle': "Momentum from WC [MeV/c]",
'ytitle': "Events / bin",
'binning': [100,0,2000],
'var': "pWC",
'cut': "pWC > 100 && pWC < 1100",
},
{
'name': "firstTOF",
'xtitle': "TOF [ns]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "firstTOF",
'cut': "isMC || (firstTOF > 0 && firstTOF < 25)",
},
{
'name': "iBestMatch",
'xtitle': "iBestMatch",
'ytitle': "Events / bin",
'binning': [21,-1,20],
'var': "iBestMatch",
'cut': "iBestMatch >= 0",
},
{
'name': "nMatchedTracks",
'xtitle': "nMatchedTracks",
'ytitle': "Events / bin",
'binning': [21,-1,20],
'var': "nMatchedTracks",
'cut': "nMatchedTracks == 1",
},
{
'name': "nTracksInFirstZ2",
'xtitle': "Number of TPC Tracks in first 2 cm / Event",
'ytitle': "Events / bin",
'binning': [16,0,15],
'var': "nTracksInFirstZ[2]",
'cut': "nTracksInFirstZ[2] >= 1",
},
{
'name': "nTracksInFirstZ14",
'xtitle': "Number of TPC Tracks in first 14 cm / Event",
'ytitle': "Events / bin",
'binning': [16,0,15],
'var': "nTracksInFirstZ[14]",
'cut': "nTracksInFirstZ[14] < 4",
},
{
'name': "nTracksLengthLt5",
'xtitle': "Number of TPC Tracks with length < 5 cm / Event",
'ytitle': "Events / bin",
'binning': [16,0,15],
'var': "nTracksLengthLt[5]",
'cut': "nTracksLengthLt[5] < 3",
},
{
'name': "primTrkStartZ",
'xtitle': "Primary Track Start Z Postion [cm]",
'ytitle': "Events / bin",
'binning': [40,-5,5],
'var': "primTrkStartZ",
'cut': "primTrkStartZ >= -1 && primTrkStartZ < 2.",
},
{
'name': "primTrkEndInFid",
'xtitle': "Primary Track End in Fiducial Region",
'ytitle': "Events / bin",
'binning': [2,0,2],
'var': "primTrkEndInFid",
'cut': "primTrkEndInFid == 1",
},
{
'name': "primTrkEndX",
'xtitle': "Primary Track End X Postion [cm]",
'ytitle': "Events / bin",
'binning': [55,-5,50],
'var': "primTrkEndX",
'cut': "primTrkEndX > 5.4 && primTrkEndX < 42.7",
},
{
'name': "primTrkEndY",
'xtitle': "Primary Track End Y Postion [cm]",
'ytitle': "Events / bin",
'binning': [50,-25,25],
'var': "primTrkEndY",
'cut': "primTrkEndY > -15. && primTrkEndY < 15",
},
{
'name': "primTrkEndZ",
'xtitle': "Primary Track End Z Postion [cm]",
'ytitle': "Events / bin",
'binning': [120,-10,110],
'var': "primTrkEndZ",
'cut': "primTrkEndZ > 5 && primTrkEndZ < 85",
},
]
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
nData = 224281.0
fileConfigData = [
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorData/Pos_RunII_100A_v02_all.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorData/friendTrees/friend_Pos_RunII_100A_v02_all.root"],
'name': "RunII_Pos_100",
'title': "Run II +100A",
'caption': "Run II +100A",
'color': root.kBlack,
'isData': True,
},
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorData/Pos_RunII_60A_v02_all.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorData/friendTrees/friend_Pos_RunII_60A_v02_all.root"],
'name': "RunII_Pos_60",
'title': "Run II +60A",
'caption': "Run II +60A",
'color': root.kGray+2,
'isData': True,
},
]
fileConfigsMC = [
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorMC1/MC1_PDG_211.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorMC1/friendTrees/friend_MC1_PDG_211.root"],
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'color': root.kBlue-7,
'scaleFactor': 1./25000*nData,
},
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorMC1/MC1_PDG_2212.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorMC1/friendTrees/friend_MC1_PDG_2212.root"],
'name': "p",
'title': "proton MC",
'caption': "proton MC",
'color': root.kRed-4,
'scaleFactor': 1./10000*nData,
},
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorMC1/MC1_PDG_-11.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorMC1/friendTrees/friend_MC1_PDG_-11.root"],
'name': "ep",
'title': "e^{+} MC",
'caption': "e^{+} MC",
'color': root.kGreen,
'scaleFactor': 1./10000*nData,
},
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorMC1/MC1_PDG_-13.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorMC1/friendTrees/friend_MC1_PDG_-13.root"],
'name': "mup",
'title': "#mu^{+} MC",
'caption': "#mu^{+} MC",
'color': root.kMagenta-4,
'scaleFactor': 1./10000*nData,
},
{
'fn': "/scratch/jhugon/lariat/pionAbsSelectorMC1/MC1_PDG_321.root",
'addFriend': ["friend", "/scratch/jhugon/lariat/pionAbsSelectorMC1/friendTrees/friend_MC1_PDG_321.root"],
'name': "kp",
'title': "K^{+} MC",
'caption': "K^{+} MC",
'color': root.kOrange-3,
'scaleFactor': 1./10000*nData,
},
]
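# N-minus-one plots: presumably NMinusOnePlot draws each cut variable with all
# of the other cuts applied, comparing data to the pzWeight-weighted MC.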
NMinusOnePlot(fileConfigData,fileConfigsMC,cutConfigs,c,"PiAbsSelectorTC/tree",outPrefix="NM1_",nMax=NMAX,weight="pzWeight")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,907 | jhugon/lariatPionAbs | refs/heads/master | /compareBill.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
def plotHists(canvas,files,labels,histNames):
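# Overlay the histogram named histName from each file in files; save raw-count
# versions as <histName>.png and area-normalized versions as <histName>_norm.png.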
for iHistName, histName in enumerate(histNames):
hists = []
mylabels = []
mycolors = []
for iFile, f in enumerate(files):
hist = f.Get(histName)
if hist:
hist.UseCurrentStyle()
hist.SetLineColor(COLORLIST[iFile])
hist.SetMarkerColor(COLORLIST[iFile])
hists.append(hist)
mycolors.append(COLORLIST[iFile])
mylabels.append(labels[iFile])
axisHist = makeStdAxisHist(hists)
setHistTitles(axisHist,histName,"Counts / bin")
axisHist.Draw()
for hist in hists:
if "PDG" in histName:
hist.Draw("Phistsame")
else:
hist.Draw("histsame")
leg = drawNormalLegend(hists,mylabels,wide=True)
canvas.SaveAs(histName+".png")
canvas.Clear()
for hist in hists:
integral = hist.Integral()
if integral != 0.:
hist.Scale(1./integral)
axisHist = makeStdAxisHist(hists)
setHistTitles(axisHist,histName,"Normalized Counts / bin")
axisHist.Draw()
for hist in hists:
if "PDG" in histName:
hist.Draw("Phistsame")
else:
hist.Draw("histsame")
leg = drawNormalLegend(hists,mylabels,wide=True)
canvas.SaveAs(histName+"_norm.png")
canvas.Clear()
if __name__ == "__main__":
canvas = root.TCanvas("canvas")
filenames = [
"/scratch/metcalf/lariat/pip_LC5_histos.root",
"/scratch/metcalf/lariat/pip_TC5_histos.root",
"/scratch/metcalf/lariat/pip_TCEl5_histos.root",
]
labels = ["Default linecluster","Default trajcluster","Elena's trajcluster"]
files = [root.TFile(fn) for fn in filenames]
histNames = set()
for f in files:
#f.ls()
for key in f.GetListOfKeys():
name = key.GetName()
histNames.add(name)
histNames = list(histNames)
histNames.sort()
histNames_nt = [h for h in histNames if ("_NT" == h[-3:])]
histNames_not_nt = [h for h in histNames if not ("_NT" == h[-3:] or "_T" == h[-2:])]
plotHists(canvas,files,labels,histNames)
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,908 | jhugon/lariatPionAbs | refs/heads/master | /landau.py | #!/usr/bin/env python2
import uuid
import ROOT as root
from ROOT import gStyle as gStyle
root.gROOT.SetBatch(True)
class LandauMaker(object):
def __init__(self,rooObservable,rooMpv,rooXi):
self.mpv = rooMpv
self.xi = rooXi
self.observable = rooObservable
self.name = "landau_"+uuid.uuid4().get_hex()
self.p2 = root.RooFormulaVar(self.name+"p2","second landau param","4*@0",root.RooArgList(self.xi))
self.p1 = root.RooFormulaVar(self.name+"p1","first landau param","@0+0.22278*@1",root.RooArgList(self.mpv,self.p2))
self.landau = root.RooLandau(self.name,"landau",rooObservable,self.p1,self.p2)
def getPdf(self):
return self.landau
if __name__ == "__main__":
t = root.RooRealVar("t","dE/dx [MeV/cm]",-10,50)
observables = root.RooArgSet(t)
# MIP Muon
mpv = root.RooRealVar("mpv","mpv landau",1.7,-20,20)
xi = root.RooRealVar("xi","xi landau",0.105,0,20)
landauObj = LandauMaker(t,mpv,xi)
landau = landauObj.getPdf()
mg = root.RooRealVar("mg","mg",0)
sg = root.RooRealVar("sg","sg",0.1)
gauss = root.RooGaussian("gauss","gauss",t,mg,sg)
# 500 MeV proton
mpv2 = root.RooRealVar("mpv2","mpv landau",6.5,-20,20)
xi2 = root.RooRealVar("xi2","xi landau",0.375,0,20)
wl2 = root.RooFormulaVar("wl2","second landau param","4*@0",root.RooArgList(xi2))
ml2 = root.RooFormulaVar("ml2","first landau param","@0+0.22278*@1",root.RooArgList(mpv2,wl2))
landau2 = root.RooLandau("lx2","lx",t,ml2,wl2)
t.setBins(10000,"cache")
langaus = root.RooFFTConvPdf("langaus","landau (X) gauss",t,landau,gauss)
langaus2 = root.RooFFTConvPdf("langaus2","landau2 (X) gauss",t,landau2,gauss)
ratio = root.RooRealVar("ratio","ratio",0.18,0,1)
twolandaus = root.RooAddPdf("twolandaus","twolandaus",langaus,langaus2,ratio)
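# In a two-pdf RooAddPdf the coefficient multiplies the first pdf, so ratio is
# the fraction of the MIP-muon (langaus) component.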
model = twolandaus
data = model.generate(observables,10000)
#model.fitTo(data)
frame = t.frame(root.RooFit.Title("landau (x) gauss convolution"))
data.plotOn(frame)
model.plotOn(frame)
model.plotOn(frame,root.RooFit.Components("langaus"),root.RooFit.LineStyle(root.kDashed))
model.plotOn(frame,root.RooFit.Components("langaus2"),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(root.kRed))
c = root.TCanvas("rf208_convolution","rf208_convolution",600,600)
root.gPad.SetLeftMargin(0.15)
frame.GetYaxis().SetTitleOffset(1.4)
frame.Draw("same")
axisHist = root.TH2F("axisHist","",1,0,50,1,0,1000)
#axisHist = root.TH2F("axisHist","",1,-5,5,1,0,1200)
axisHist.Draw()
frame.Draw("same")
c.SaveAs("roofit.pdf")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,909 | jhugon/lariatPionAbs | refs/heads/master | /plotIsoMuon.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
#cuts += "*(isMC || ((triggerBits >> 4) & 1))" # BEAMON trigger
#cuts += "*(isMC || ((triggerBits >> 10) & 1))" # COSMICON trigger
#cuts += "*(isMC || !((triggerBits >> 10) & 1))" # Not COSMICON trigger
#cuts += "*(isMC || ((triggerBits >> 11) & 1))" # COSMIC trigger
#cuts += "*(isMC || (nWCTracks ==0 && nTOFs ==0))"
cuts += "*(iBestMatch >= 0)" # primary Track found
#cuts += "*(acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi < 5. || acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi > 175.)" # theta vertical
#cuts += "*((!isMC) || (trueStartMom>3000. && trueStartMom < 8000.))"
#cuts += "*(primTrkResRanges[0] > 1.)"
#cuts += "*(Iteration$ < 10)"
cuts += "*(primTrkXs > 10. && primTrkXs < 38. && primTrkYs > -10. && primTrkYs < 10. && primTrkZs > 10. && primTrkZs < 80.)"
cuts += "*(sqrt(pow(primTrkXs - trueStartX,2)+pow(primTrkYs - trueStartY,2)+pow(primTrkZs - trueStartZ,2)) < 3.)"
#cuts += "*(trueStartTheta*180/pi < 90.)"
weightStr = "1"+cuts
nData = 30860.0
logy = True
c = root.TCanvas()
NMAX=1000000000
#NMAX=100
fileConfigs = [
{
#'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/cosmicAna/lariat_PiAbsAndChEx_flat_isoInTPC_mup_test_v1/anahist.root",
'fn': "lariat_PiAbsAndChEx_flat_isoInTPC_mup_test_v1_cosmicAna.root",
'name': "UniformIsoMuon",
'title': "#mu^{+} MC",
'caption': "Uniform,Isotropic #mu^{+} MC",
'isData': False,
},
]
histConfigs = [
# {
# 'name': "trueStartMom",
# 'xtitle': "True Start Momentum [MeV/c]",
# 'ytitle': "Events / bin",
# 'binning': [150,0,1500],
# 'var': "trueStartMom",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trueLength",
# 'xtitle': "True Trajectory Length [cm]",
# 'ytitle': "Events / bin",
# 'binning': [150,0,1500],
# 'var': "sqrt(pow(trueEndX-trueStartX,2)+pow(trueEndY-trueStartY,2)+pow(trueEndZ-trueStartZ,2))",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,0,50],
# 'var': "trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackYFront",
# 'xtitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,-50,50],
# 'var': "trackYFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackMatchLowestZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [40,0,20],
# 'var': "trackMatchLowestZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "nTOFs",
# 'xtitle': "Number of TOF Objects",
# 'ytitle': "Events / bin",
# 'binning': [11,0,10],
# 'var': "nTOFs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartX",
# 'xtitle': "TPC Track Start X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackStartX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartY",
# 'xtitle': "TPC Track Start Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackStartY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,110],
# 'var': "trackStartZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndX",
# 'xtitle': "TPC Track End X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackEndX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndY",
# 'xtitle': "TPC Track End Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackEndY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndZ",
# 'xtitle': "TPC Track End Z [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,110],
# 'var': "trackEndZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackLength",
# 'xtitle': "TPC Track Length [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-10,100],
# 'var': "trackLength",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
#{
# 'name': "trackCaloKin",
# 'xtitle': "TPC Calo Estimate of KE [MeV]",
# 'ytitle': "Tracks / bin",
# 'binning': [50,0,2500],
# 'var': "trackCaloKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkLength",
'xtitle': "Primary TPC Track Length [cm]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "primTrkLength",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
{
'name': "primTrkStartTheta",
'xtitle': "Primary TPC Track #theta [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "primTrkStartTheta*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartCosTheta",
'xtitle': "Primary TPC Track cos(#theta)",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "cos(primTrkStartTheta)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "primTrkStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartThetaY",
'xtitle': "Primary TPC Track #theta_{y} [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartCosThetaY",
'xtitle': "Primary TPC Track cos(#theta_{y})",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "sin(primTrkStartTheta)*sin(primTrkStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhiZX",
'xtitle': "Primary TPC Track #phi_{zx} [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartThetaX",
'xtitle': "Primary TPC Track #theta_{x} [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartCosThetaX",
'xtitle': "Primary TPC Track cos(#theta_{x})",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "sin(primTrkStartTheta)*cos(primTrkStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhiZY",
'xtitle': "Primary TPC Track #phi_{zy} [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [100,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs_zoom",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [100,0,10],
'var': "primTrkdEdxs",
'cuts': weightStr,
'normalize': not logy,
'logy': logy,
},
{
'name': "primTrkdEdxs_zoom2",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [100,0,10],
'var': "primTrkdEdxs",
'cuts': weightStr,
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkdEdxs_zoom3",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [50,0,5],
'var': "primTrkdEdxs",
'cuts': weightStr,
'normalize': logy,
'logy': not logy,
},
{
'name': "primTrkTruedEdxs",
'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [100,0,50],
'var': "primTrkTruedEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkTruedEdxs_zoom",
'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [100,0,10],
'var': "primTrkTruedEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
# {
# 'name': "primTrkdQdxs",
# 'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
# 'ytitle': "Events / bin",
# 'binning': [300,0,3e4],
# 'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkdQdxs_zoom",
# 'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,8e3],
# 'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# 'printIntegral' : True,
# },
# {
# 'name': "primTrkdQdxs_zoom2",
# 'xtitle': "Primary TPC Track dQ/dx [ADC/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,8e3],
# 'var': "primTrkdQdxs*((0.5-1.)*isMC + 1.)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': not logy,
# 'printIntegral' : True,
# },
# {
# 'name': "primTrkdQs",
# 'xtitle': "Primary TPC Track dQ [ADC]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,8e3],
# 'var': "primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': not logy,
# 'printIntegral' : True,
# },
# {
# 'name': "primTrkTruedQdxs",
# 'xtitle': "Primary TPC Track True dQ/dx [e^{-}/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,5e6],
# 'var': "primTrkTruedQdxs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkTruedQdxs_zoom",
# 'xtitle': "Primary TPC Track True dQ/dx [e^{-}/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,1e5],
# 'var': "primTrkTruedQdxs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkTruedQs",
# 'xtitle': "Primary TPC Track Q [e^{-}]",
# 'ytitle': "Events / bin",
# #'binning': [200,0,1e5],
# 'binning': getLogBins(100,1e3,1e7),
# 'var': "primTrkTruedQs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# 'logx': True,
# },
# {
# 'name': "primTrkTruedQs2",
# 'xtitle': "Primary TPC Track Q [e^{-}]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,2e5],
# 'var': "primTrkTruedQs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': not logy,
# 'logx': False,
# },
# {
# 'name': "primTrkdEdxs_Q1000to1500_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 1000. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 1500.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "1000 ADC < Q < 1500 ADC",
# },
# {
# 'name': "primTrkdEdxs_Q1500to2000_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 1500. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 2000.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "1500 ADC < Q < 2000 ADC",
# },
# {
# 'name': "primTrkdEdxs_Q2000to3000_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 2000. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 3000.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "2000 ADC < Q < 3000 ADC",
# },
# {
# 'name': "primTrkdEdxs_Q3000to4000_zoom2",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,10],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*(primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) > 3000. && primTrkdQdxs*primTrkPitches*((0.5-1.)*isMC + 1.) < 4000.)",
# #'normalize': True,
# 'logy': not logy,
# 'caption': "3000 ADC < Q < 4000 ADC",
# },
#{
# 'name': "primTrkdEdxsFidCut",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,50],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*primTrkInFids",
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkResRanges",
'xtitle': "Primary TPC Track Residual Range [cm]",
'ytitle': "Events / bin",
'binning': [200,0,100],
'var': "primTrkResRanges",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkPitches",
'xtitle': "Primary TPC Track Pitch [cm]",
'ytitle': "Events / bin",
'binning': [100,0,10],
'var': "primTrkPitches",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "primTrkEndKin",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkEndKinFid",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKinFid",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueEndProcess",
# 'xtitle': "trueEndProcess",
# 'ytitle': "Events / bin",
# 'binning': [17,0,17],
# 'var': "trueEndProcess",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "trueStartTheta",
'xtitle': "True Start #theta [deg]",
'binning': [90,0,180],
'var': "trueStartTheta*180/pi",
'cuts': weightStr,
#'normalize': True,
},
{
'name': "trueStartCosTheta",
'xtitle': "True Start cos(#theta)",
'binning': [100,0,1],
'var': "cos(trueStartTheta)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueStartPhi",
'xtitle': "True Start #phi",
'binning': [90,-180,180],
'var': "trueStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
},
{
'name': "trueStartThetaY",
'xtitle': "True Start #theta_{y} [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "acos(sin(trueStartTheta)*sin(trueStartPhi))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueStartCosThetaY",
'xtitle': "True Start cos(#theta_{y})",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "sin(trueStartTheta)*sin(trueStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueStartPhiZX",
'xtitle': "True Start #phi_{zx} [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "atan2(sin(trueStartTheta)*cos(trueStartPhi),cos(trueStartTheta))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueStartThetaX",
'xtitle': "True Start #theta_{x} [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "acos(sin(trueStartTheta)*cos(trueStartPhi))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueStartCosThetaX",
'xtitle': "True Start cos(#theta_{x})",
'ytitle': "Events / bin",
'binning': [100,0,1],
'var': "sin(trueStartTheta)*cos(trueStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueStartPhiZY",
'xtitle': "True Start #phi_{zy} [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180./pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trueCosThetaPitch",
'xtitle': "True Start cos(#theta_{pitch})",
'ytitle': "Events / bin",
'binning': [50,0,1],
'var': "fabs(cos(trueStartTheta)*cos(trueStartPhi)+sin(trueStartTheta)*cos(trueStartPhi))",
'cuts': weightStr,
#'normalize': True,
'logy': False,
},
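# Presumably 0.4 cm is the wire pitch, so 0.4/primTrkPitches below reconstructs
# |cos(theta_pitch)| from the measured track pitch.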
{
'name': "recoCosThetaPitch",
'xtitle': "Reconstructed cos(#theta_{pitch})",
'ytitle': "Events / bin",
'binning': [50,0,1],
'var': "0.4/primTrkPitches",
'cuts': weightStr,
#'normalize': True,
'logy': False,
},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="UnifIso_")
# fileConfigMCs = copy.deepcopy(fileConfigs)
# fileConfigData = None
# for i in reversed(range(len(fileConfigMCs))):
# if 'isData' in fileConfigMCs[i] and fileConfigMCs[i]['isData']:
# fileConfigData = fileConfigMCs.pop(i)
# DataMCStack(fileConfigData,fileConfigMCs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX)
########################################################
########################################################
########################################################
histConfigs = [
{
'name': "primTrkdEdxs",
'title': "All",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr,
'color': root.kBlack,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "x < 10 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs < 10.)",
'color': root.kBlue-7,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "10 cm < x < 20 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs > 10. && primTrkXs < 20.)",
'color': root.kRed-4,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "20 cm < x < 30 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs > 20. && primTrkXs < 30.)",
'color': root.kGreen,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "30 cm < x < 40 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs > 30. && primTrkXs < 40.)",
'color': root.kMagenta-4,
#'normalize': True,
'logy': logy,
},
]
# plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Cosmics_dEdxForX")
########################################################
########################################################
########################################################
histConfigs = [
# {
# 'name': "primTrkdEdxVRange",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxVRangeFidCut",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "trackYFrontVtrackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'binning': [40,0,40,40,-20,20],
# 'var': "trackYFront:trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxVwire",
# 'xtitle': "Primary Track Hit Wire Number",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [240,0,240,100,0,10],
# 'var': "primTrkdEdxs:primTrkTrueWires",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkStartThetaVPhi",
# 'xtitle': "Primary TPC Track #phi [deg]",
# 'ytitle': "Primary TPC Track #theta [deg]",
# 'binning': [90,-180,180,90,0,180],
# 'var': "primTrkStartTheta*180/pi:primTrkStartPhi*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkStartThetaYVprimTrkStartPhiZX",
# 'xtitle': "Primary TPC Track #phi_{zx} [deg]",
# 'ytitle': "Primary TPC Track #theta_{y} [deg]",
# 'binning': [90,-180,180,90,0,180],
# 'var': "acos(sin(primTrkStartTheta)*sin(primTrkStartPhi))*180/pi:atan2(sin(primTrkStartTheta)*cos(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "primTrkStartThetaXVprimTrkStartPhiZY",
# 'xtitle': "Primary TPC Track #phi_{zy} [deg]",
# 'ytitle': "Primary TPC Track #theta_{x} [deg]",
# 'binning': [90,-180,180,90,0,180],
# 'var': "acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))*180/pi:atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': False,
# },
{
'name': "primTrkdEdxsVx",
'xtitle': "Hit x [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-5,55,100,0,5],
'var': "primTrkdEdxs:primTrkXs",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVy",
'xtitle': "Hit y [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,-25,25,100,0,5],
'var': "primTrkdEdxs:primTrkYs",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVz",
'xtitle': "Hit z [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [100,-5,95,100,0,5],
'var': "primTrkdEdxs:primTrkZs",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVprimTrkPitches",
'xtitle': "Hit Pitch [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
#'binning': [30,0,6,10000,0,50],
'binning': [[0.4,0.6,1.,2.,5,20],getLinBins(10000,0,50)],
'var': "primTrkdEdxs:primTrkPitches",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
# {
# 'name': "primTrkdEdxsV1OprimTrkPitches",
# 'xtitle': "(Hit Pitch)^{-1} [cm^{-1}]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [50,0,5,10000,0,50],
# 'var': "primTrkdEdxs:1./primTrkPitches",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVrun",
# 'xtitle': "Run Number",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [200,8000,10000,500,0,50],
# 'var': "primTrkdEdxs:runNumber",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVyFromCenter",
# 'xtitle': "Hit |y| [cm]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [40,0,25,10000,0,50],
# 'var': "primTrkdEdxs:fabs(primTrkYs)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': True,
# },
# {
# 'name': "primTrkdEdxsVzFromCenter",
# 'xtitle': "Hit |z-45| [cm]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [40,0,50,10000,0,50],
# 'var': "primTrkdEdxs:fabs(primTrkZs-45.)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logz': True,
# },
{
'name': "primTrkdEdxsVtrueStartTheta",
'xtitle': "True Start #theta [deg]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [30,0,180,100,0,5],
'var': "primTrkdEdxs:trueStartTheta*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxsVtrueStartCosTheta",
'xtitle': "True Start cos(#theta)",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,1,100,0,5],
'var': "primTrkdEdxs:cos(trueStartTheta)",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxsVtrueStartPhi",
'xtitle': "True Start #phi",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [30,-180,180,100,0,5],
'var': "primTrkdEdxs:trueStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxsVtrueStartThetaX",
'xtitle': "True Start #theta_{x} [deg]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,0,180,100,0,5],
'var': "primTrkdEdxs:acos(sin(trueStartTheta)*cos(trueStartPhi))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartCosThetaX",
'xtitle': "True Start cos(#theta_{x})",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,1,100,0,5],
'var': "primTrkdEdxs:sin(trueStartTheta)*cos(trueStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartThetaX_zoom",
'xtitle': "True Start #theta_{x} [deg]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,0,180,100,0,5],
'var': "primTrkdEdxs:acos(sin(trueStartTheta)*cos(trueStartPhi))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartCosThetaX_zoom",
'xtitle': "True Start cos(#theta_{x})",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,1,100,0,5],
'var': "primTrkdEdxs:sin(trueStartTheta)*cos(trueStartPhi)",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartPhiZY",
'xtitle': "True Start #phi_{zy}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-180,180,10000,0,50],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartPhiZY_zoom",
'xtitle': "True Start #phi_{zy}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-180,180,100,0,5],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartPhiZY_zoom_onlyCentral",
'xtitle': "True Start #phi_{zy}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-180,180,100,0,5],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': "(iBestMatch >= 0)"+"*(primTrkXs > 10. && primTrkXs < 38. && primTrkYs > -10. && primTrkYs < 10. && primTrkZs > 10. && primTrkZs < 80.)",
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartPhiZY_zoom_onlyNearVertex",
'xtitle': "True Start #phi_{zy}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-180,180,100,0,5],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': "(iBestMatch >= 0)"+"*(sqrt(pow(primTrkXs - trueStartX,2)+pow(primTrkYs - trueStartY,2)+pow(primTrkZs - trueStartZ,2))<3.)",
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartPhiZY_zoom_noCuts",
'xtitle': "True Start #phi_{zy}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-180,180,100,0,5],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': "(iBestMatch >= 0)",
#'normalize': True,
'logz': False,
},
{
'name': "primTrkdEdxsVtrueStartPhiZY_zoom_logy",
'xtitle': "True Start #phi_{zy}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [60,-180,180,100,0,5],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVtrueStartThetaY",
'xtitle': "True Start #theta_{y} [deg]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [30,0,180,100,0,5],
'var': "primTrkdEdxs:acos(sin(trueStartTheta)*sin(trueStartPhi))*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxsVtrueStartCosThetaY",
'xtitle': "True Start cos(#theta_{y})",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,1,100,0,5],
'var': "primTrkdEdxs:sin(trueStartTheta)*sin(trueStartPhi)",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxsVtrueStartPhiZX",
'xtitle': "True Start #phi_{zx}",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [30,-180,180,100,0,5],
'var': "primTrkdEdxs:atan2(sin(trueStartTheta)*cos(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "trueStartThetaVtrueStartPhi",
'xtitle': "True Start #phi [deg]",
'ytitle': "True Start #theta [deg]",
'binning': [90,-180,180,90,0,180],
'var': "trueStartTheta*180/pi:trueStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': False,
},
{
'name': "trueStartCosThetaVtrueStartPhi",
'xtitle': "True Start #phi [deg]",
'ytitle': "True Start cos(#theta)",
'binning': [90,-180,180,100,0,1],
'var': "cos(trueStartTheta):trueStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': False,
},
{
'name': "trueStartThetaYVtrueStartPhiZX",
'xtitle': "True Start #phi_{zx} [deg]",
'ytitle': "True Start #theta_{y} [deg]",
'binning': [90,-180,180,90,0,180],
'var': "acos(sin(trueStartTheta)*sin(trueStartPhi))*180/pi:atan2(sin(trueStartTheta)*cos(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': False,
},
{
'name': "trueStartCosThetaYVtrueStartPhiZX",
'xtitle': "True Start #phi_{zx} [deg]",
'ytitle': "True Start cos(#theta_{y})",
'binning': [90,-180,180,100,0,1],
'var': "sin(trueStartTheta)*sin(trueStartPhi):atan2(sin(trueStartTheta)*cos(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': False,
},
{
'name': "trueStartThetaXVtrueStartPhiZY",
'xtitle': "True Start #phi_{zy} [deg]",
'ytitle': "True Start #theta_{x} [deg]",
'binning': [90,-180,180,90,0,180],
'var': "acos(sin(trueStartTheta)*cos(trueStartPhi))*180/pi:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "trueStartCosThetaXVtrueStartPhiZY",
'xtitle': "True Start #phi_{zy} [deg]",
'ytitle': "True Start cos(#theta_{x})",
'binning': [90,-180,180,100,0,1],
'var': "sin(trueStartTheta)*cos(trueStartPhi):atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
# {
# 'name': "trueStartCosThetaXVtrueStartPhiZY_primTrkPitchesGt2",
# 'xtitle': "True Start #phi_{zy} [deg]",
# 'ytitle': "True Start cos(#theta_{x})",
# 'binning': [90,-180,180,100,0,1],
# 'var': "sin(trueStartTheta)*cos(trueStartPhi):atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
# 'cuts': weightStr+"*(primTrkPitches > 2.)",
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "trueStartCosThetaXVtrueStartPhiZY_primTrkPitchesGt5",
# 'xtitle': "True Start #phi_{zy} [deg]",
# 'ytitle': "True Start cos(#theta_{x})",
# 'binning': [90,-180,180,100,0,1],
# 'var': "sin(trueStartTheta)*cos(trueStartPhi):atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
# 'cuts': weightStr+"*(primTrkPitches > 5.)",
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "trueStartCosThetaXVtrueStartPhiZY_primTrkPitchesGt10",
# 'xtitle': "True Start #phi_{zy} [deg]",
# 'ytitle': "True Start cos(#theta_{x})",
# 'binning': [90,-180,180,100,0,1],
# 'var': "sin(trueStartTheta)*cos(trueStartPhi):atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
# 'cuts': weightStr+"*(primTrkPitches > 10.)",
# #'normalize': True,
# 'logz': False,
# },
# {
# 'name': "hitYVhitX",
# 'xtitle': "Hit x [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [60,-5,55,60,-30,30],
# 'var': "primTrkYs:primTrkXs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitX_cosmicon",
# 'xtitle': "Hit x [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [60,-5,55,60,-30,30],
# 'var': "primTrkYs:primTrkXs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_cosmicon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ_cosmicon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ_NotCosmicon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': "1"+"*(isMC || !((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_cosmic",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 11) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_beamon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 4) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_pickytrack",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || nWCTracks > 0)",
# #'normalize': True,
# #'logz': True,
# },
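    # Pitch-angle closure checks: the reconstructed |cos(theta_pitch)| is
    # wire spacing over track pitch (0.4 cm / primTrkPitches), while the
    # true value dots the MC start direction into a wire direction built
    # from the fixed pi/3 wire angle used in the expressions below.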
{
'name': "primTrkCosThetaPitchVtrueCosThetaPitch",
'xtitle': "True |cos(#theta_{pitch})|",
        'ytitle': "Reconstructed |cos(#theta_{pitch})|",
'binning': [30,0,1,30,0,1],
'var': "0.4/primTrkPitches:fabs(cos(trueStartTheta)*cos(pi/3.)+sin(trueStartTheta)*sin(pi/3.)*cos(trueStartPhi-0.5*pi))",
'cuts': "(iBestMatch >= 0)*(Iteration$ == 0)",
#'normalize': True,
'logy': False,
},
{
'name': "primTrkCosThetaPitchVtrueCosThetaPitch_dottingThings",
'xtitle': "True |cos(#theta_{pitch})|",
        'ytitle': "Reconstructed |cos(#theta_{pitch})|",
'binning': [30,0,1,30,0,1],
'var': "0.4/primTrkPitches:fabs(cos(trueStartTheta)*cos(pi/3.)+sin(trueStartTheta)*sin(pi/3.)*sin(trueStartPhi)*sin(0.5*pi))",
'cuts': "(iBestMatch >= 0)*(Iteration$ == 0)",
#'normalize': True,
'logy': False,
},
{
'name': "primTrkThetaPitchVtrueThetaPitch_dottingThings",
        'xtitle': "True #theta_{pitch} [deg]",
        'ytitle': "Reconstructed #theta_{pitch} [deg]",
'binning': [30,0,90,30,0,90],
'var': "acos(0.4/primTrkPitches)*180/pi:acos(fabs(cos(trueStartTheta)*cos(pi/3.)+sin(trueStartTheta)*sin(pi/3.)*sin(trueStartPhi)*sin(0.5*pi)))*180/pi",
'cuts': "(iBestMatch >= 0)*(Iteration$ == 0)",
#'normalize': True,
'logy': False,
},
{
'name': "primTrkCosThetaPitchVtrueCosThetaPitchZY",
'xtitle': "True |cos(#theta_{z/y-plane pitch})|",
        'ytitle': "Reconstructed |cos(#theta_{pitch})|",
'binning': [30,0,1,30,0,1],
'var': "0.4/primTrkPitches:fabs(cos(atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))-pi/3.))",
'cuts': "(iBestMatch >= 0)*(Iteration$ == 0)",
#'normalize': True,
'logy': False,
},
{
'name': "primTrkThetaPitchVtrueThetaPitchZY",
        'xtitle': "True #theta_{z/y-plane pitch} [deg]",
        'ytitle': "Reconstructed #theta_{pitch} [deg]",
'binning': [30,0,180,30,0,180],
'var': "acos(0.4/primTrkPitches)*180/pi:fabs(atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))-pi/3.)*180/pi",
'cuts': "(iBestMatch >= 0)*(Iteration$ == 0)",
#'normalize': True,
'logy': False,
},
{
'name': "primTrkStartPhiVtrueStartPhi",
'xtitle': "True Start #phi_{xy} [deg]",
'ytitle': "TPC Track Start #phi_{xy} [deg]",
'binning': [180,-180,180,180,-180,180],
'var': "primTrkStartPhi*180/pi:trueStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartThetaVtrueStartTheta",
'xtitle': "True Start #theta_{z} [deg]",
'ytitle': "TPC Track Start #theta_{z} [deg]",
'binning': [180,0,180,180,0,180],
'var': "primTrkStartTheta*180/pi:trueStartTheta*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartPhiZYVtrueStartPhiZY",
'xtitle': "True Start #phi_{zy} [deg]",
'ytitle': "TPC Track Start #phi_{zy} [deg]",
'binning': [180,-180,180,180,-180,180],
'var': "atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))*180/pi:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartThetaXVtrueStartThetaX",
'xtitle': "True Start #theta_{x} [deg]",
'ytitle': "TPC Track Start #theta_{x} [deg]",
'binning': [180,0,180,180,0,180],
'var': "acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))*180/pi:acos(sin(trueStartTheta)*cos(trueStartPhi))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartTanPhiZYVtrueStartTanPhiZY",
'xtitle': "True Start |tan(#phi_{zy})|",
'ytitle': "TPC Track Start |tan(#phi_{zy})|",
'binning': [100,0,20,100,0,20],
'var': "fabs(tan(primTrkStartTheta)*sin(primTrkStartPhi)):fabs(tan(trueStartTheta)*sin(trueStartPhi))",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartCosPhiZYVtrueStartCosPhiZY",
'xtitle': "True Start |cos(#phi_{zy})|",
'ytitle': "TPC Track Start |cos(#phi_{zy})|",
'binning': [100,0,1,100,0,1],
'var': "fabs(cos(atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta)))):fabs(cos(atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))))",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartSinPhiZYVtrueStartSinPhiZY",
'xtitle': "True Start |sin(#phi_{zy})|",
'ytitle': "TPC Track Start |sin(#phi_{zy})|",
'binning': [100,0,1,100,0,1],
'var': "fabs(sin(atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta)))):fabs(sin(atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))))",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "primTrkStartCosThetaXVtrueStartCosThetaX",
'xtitle': "True Start |cos(#theta_{x})|",
'ytitle': "TPC Track Start |cos(#theta_{x})|",
'binning': [100,0,1,100,0,1],
        'var': "fabs(sin(primTrkStartTheta)*cos(primTrkStartPhi)):fabs(sin(trueStartTheta)*cos(trueStartPhi))",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "deltaRecoTruePhiZYVtrueStartPhiZY",
'xtitle': "True Start #phi_{zy} [deg]",
'ytitle': "Reco - True #Delta #phi_{zy} [deg]",
'binning': [60,-180,180,90,-180,180],
'var': "(atan2(sin(primTrkStartTheta)*sin(primTrkStartPhi),cos(primTrkStartTheta))-atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta)))*180/pi:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "deltaRecoTrueThetaXVtrueStartCosThetaX",
'xtitle': "True Start |cos(#theta_{x})|",
'ytitle': "Reco - True #Delta #theta_{x} [deg]",
'binning': [50,0,1,90,-180,180],
'var': "(acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))-acos(sin(trueStartTheta)*cos(trueStartPhi)))*180/pi:fabs(sin(trueStartTheta)*cos(trueStartPhi))",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
{
'name': "deltaRecoTrueThetaXVtrueStartPhiZY",
'xtitle': "True Start #phi_{zy} [deg]",
'ytitle': "Reco - True #Delta #theta_{x} [deg]",
        'binning': [60,-180,180,90,-180,180],
'var': "(acos(sin(primTrkStartTheta)*cos(primTrkStartPhi))-acos(sin(trueStartTheta)*cos(trueStartPhi)))*180/pi:atan2(sin(trueStartTheta)*sin(trueStartPhi),cos(trueStartTheta))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logz': False,
},
]
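    # Draw each comparison above once per input file, then write every
    # histogram out (renamed as <var>_<dataset>) to unifiso_hists.root.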
hists = plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="UnifIso_")
outfile = root.TFile("unifiso_hists.root","recreate")
outfile.cd()
for var in hists:
for ds in hists[var]:
newname = var+"_"+ds
hist = hists[var][ds]
hist.SetName(newname)
hist.Print()
hist.Write()
outfile.Close()
######################################################################################
######################################################################################
######################################################################################
######################################################################################
histConfigs = [
{
'name': "primTrkdEdxs",
"title": "Reco",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [400,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkTruedEdxs",
"title": "MC Truth",
'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [400,0,50],
'var': "primTrkTruedEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primMCdEdxs",
"title": "MC Trajectory",
'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [400,0,50],
'var': "primMCdEdxs",
'cuts': weightStr+"*(primMCXs>2. && primMCXs < 47. && primMCYs > -23. && primMCYs < 23. && primMCZs > 0. && primMCZs < 90 && primMClastXs>2. && primMClastXs < 47. && primMClastYs > -23. && primMClastYs < 23. && primMClastZs > 0. && primMClastZs < 90 )",
#'normalize': True,
'logy': logy,
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
# plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="CosmicsTrue_dEdx")
histConfigs = [
{
'name': "primTrkdEdxs_zoom",
"title": "Reco",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,10],
'var': "primTrkdEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkTruedEdxs_zoom",
"title": "MC Truth",
'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,10],
'var': "primTrkTruedEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primMCdEdxs_zoom",
"title": "MC Trajectory",
'xtitle': "Primary TPC Track True dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,10],
'var': "primMCdEdxs",
'cuts': weightStr+"*(primMCXs>2. && primMCXs < 47. && primMCYs > -23. && primMCYs < 23. && primMCZs > 0. && primMCZs < 90 && primMClastXs>2. && primMClastXs < 47. && primMClastYs > -23. && primMClastYs < 23. && primMClastZs > 0. && primMClastZs < 90 )",
#'normalize': True,
'logy': logy,
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
# plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="UnifIso_dEdxZoom")
histConfigs = [
{
'name': "primTrkdQdxs_zoom",
'title': "Reco [10.8*ADC/cm]",
'xtitle': "Primary TPC Track dQ/dx",# [10*ADC/cm]",
'ytitle': "Events / bin",
'binning': [200,0,1e5],
'var': "10.8*primTrkdQdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkTruedQdxs_zoom",
'title': "MC Truth [e^{-}/cm]",
'xtitle': "Primary TPC Track True dQ/dx",# [e^{-}/cm]",
'ytitle': "Events / bin",
'binning': [200,0,1e5],
'var': "primTrkTruedQdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
# plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="CosmicsTrue_dQdxZoom")
histConfigs = [
{
"title": "All",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized Events",
'binning': [200,0,4],
'var': "primTrkdEdxs",
'cuts': "1",
'normalize': True,
'logy': False,
},
{
"title": "Res Ranges Correct",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized Events",
'binning': [200,0,4],
'var': "primTrkdEdxs",
'cuts': "(primTrkResRanges[0] > 1.)",
'normalize': True,
'logy': False,
},
{
"title": "Res Ranges Correct & 1st 50 hits",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized Events",
'binning': [200,0,4],
'var': "primTrkdEdxs",
'cuts': "(primTrkResRanges[0] > 1.)*(Iteration$ < 50)",
'normalize': True,
'logy': False,
},
{
"title": "Res Ranges Correct & 1st 10 hits",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized Events",
'binning': [200,0,4],
'var': "primTrkdEdxs",
'cuts': "(primTrkResRanges[0] > 1.)*(Iteration$ < 10)",
'normalize': True,
'logy': False,
},
{
"title": "Res Ranges Correct & 1st 5 hits",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Normalized Events",
'binning': [200,0,4],
'var': "primTrkdEdxs",
'cuts': "(primTrkResRanges[0] > 1.)*(Iteration$ < 5)",
'normalize': True,
'logy': False,
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
# plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="UnifIso_dEdxZoomHits")
histConfigs = [
{
'title': "True No-Cuts",
'xtitle': "|cos(#theta_{pitch})|",
'ytitle': "Normalized events / bin",
'binning': [30,0,1],
'var': "fabs(cos(trueStartTheta)*cos(-pi/3.)+sin(trueStartTheta)*sin(-pi/3.)*cos(trueStartPhi-0.5*pi))",
'cuts': "",
#'normalize': True,
'logy': False,
},
{
'title': "True w/ Reco Track",
'xtitle': "|cos(#theta_{pitch})|",
'ytitle': "Normalized events / bin",
'binning': [30,0,1],
'var': "fabs(cos(trueStartTheta)*cos(-pi/3.)+sin(trueStartTheta)*sin(-pi/3.)*cos(trueStartPhi-0.5*pi))",
'cuts': "(iBestMatch >= 0)",
#'normalize': True,
'logy': False,
},
{
'title': "Reconstructed 1st Hit",
'xtitle': "|cos(#theta_{pitch})|",
'ytitle': "Normalized events / bin",
'binning': [30,0,1],
'var': "0.4/primTrkPitches",
'cuts': "(Iteration$ == 0)",
#'normalize': True,
'logy': False,
},
]
for i in range(len(histConfigs)):
histConfigs[i]["color"] = COLORLIST[i]
plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="UnifIso_CosThetaPitch")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,910 | jhugon/lariatPionAbs | refs/heads/master | /plotHalo.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
cuts += "*(isMC || !((triggerBits >> 10) & 1))" # Not COSMICON trigger
cuts += "*(isMC || (nWCTracks ==0 && nTOFs ==0))"
#cuts += "*( iBestMatch >= 0)" # primary Track found
weightStr = "1"+cuts
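    # Together these cuts keep events with no COSMICON trigger and no
    # wire-chamber track or TOF object: a beam-halo-dominated sample.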
nData = 127306.0
logy = True
c = root.TCanvas()
NMAX=10000000
#NMAX=100
fileConfigs = [
{
#'fn': "/lariat/app/users/jhugon/lariatsoft_v06_15_00/srcs/lariatsoft/JobConfigurations/CosmicAna_Pos_RunII.root",
'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/cosmicAna/lariat_data_Lovely1_Pos_RunII_elanag_v02_v05/anahist.root",
'name': "RunIIPos",
'title': "Run II Pos. Polarity",
'caption': "Run II Pos. Polarity",
'color': root.kBlack,
'isData': True,
},
{
'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/cosmicAna/lariat_PiAbsAndChEx_cosmics_v1/anahist.root",
'name': "CosmicMC",
'title': "Cosmic MC",
'caption': "Cosmic MC",
'color': root.kRed-4,
'isData': False,
'scaleFactor': nData/8807.,
},
{
'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/cosmicAna/lariat_PiAbsAndChEx_halo_v1/anahist.root",
'name': "HaloMC",
'title': "Halo MC",
'caption': "Halo MC",
'color': root.kBlue+7,
'isData': False,
'scaleFactor': nData/3075.,
},
]
histConfigs = [
# {
# 'name': "trackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,0,50],
# 'var': "trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackYFront",
# 'xtitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,-50,50],
# 'var': "trackYFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackMatchLowestZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [40,0,20],
# 'var': "trackMatchLowestZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "nTOFs",
# 'xtitle': "Number of TOF Objects",
# 'ytitle': "Events / bin",
# 'binning': [11,0,10],
# 'var': "nTOFs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartX",
# 'xtitle': "TPC Track Start X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackStartX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartY",
# 'xtitle': "TPC Track Start Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackStartY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,110],
# 'var': "trackStartZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndX",
# 'xtitle': "TPC Track End X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackEndX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndY",
# 'xtitle': "TPC Track End Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackEndY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackEndZ",
# 'xtitle': "TPC Track End Z [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,110],
# 'var': "trackEndZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
{
'name': "trackLength",
'xtitle': "TPC Track Length [cm]",
'ytitle': "Tracks / bin",
'binning': [100,-10,100],
'var': "trackLength",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "trackCaloKin",
# 'xtitle': "TPC Calo Estimate of KE [MeV]",
# 'ytitle': "Tracks / bin",
# 'binning': [50,0,2500],
# 'var': "trackCaloKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkLength",
'xtitle': "Primary TPC Track Length [cm]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "primTrkLength",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
{
'name': "primTrkStartTheta",
'xtitle': "Primary TPC Track #theta [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "primTrkStartTheta*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkStartPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "primTrkStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs_zoom",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,10],
'var': "primTrkdEdxs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "primTrkdEdxsFidCut",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,50],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*primTrkInFids",
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkResRanges",
'xtitle': "Primary TPC Track Residual Range [cm]",
'ytitle': "Events / bin",
'binning': [200,0,100],
'var': "primTrkResRanges",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "primTrkEndKin",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkEndKinFid",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKinFid",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trueEndProcess",
# 'xtitle': "trueEndProcess",
# 'ytitle': "Events / bin",
# 'binning': [17,0,17],
# 'var': "trueEndProcess",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
]
# plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Halo_")
# fileConfigMCs = copy.deepcopy(fileConfigs)
# fileConfigData = None
# for i in reversed(range(len(fileConfigMCs))):
# if 'isData' in fileConfigMCs[i] and fileConfigMCs[i]['isData']:
# fileConfigData = fileConfigMCs.pop(i)
# DataMCStack(fileConfigData,fileConfigMCs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX)
########################################################
########################################################
########################################################
histConfigs = [
{
'name': "primTrkdEdxs",
'title': "All",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr,
'color': root.kBlack,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "x < 10 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs < 10.)",
'color': root.kBlue-7,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "10 cm < x < 20 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs > 10. && primTrkXs < 20.)",
'color': root.kRed-4,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "20 cm < x < 30 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs > 20. && primTrkXs < 30.)",
'color': root.kGreen,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxs",
'title': "30 cm < x < 40 cm",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Events / bin",
'binning': [200,0,50],
'var': "primTrkdEdxs",
'cuts': weightStr + "*(primTrkXs > 30. && primTrkXs < 40.)",
'color': root.kMagenta-4,
#'normalize': True,
'logy': logy,
},
]
# plotManyHistsOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Halo_dEdxForX")
########################################################
########################################################
########################################################
histConfigs = [
# {
# 'name': "primTrkdEdxVRange",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxVRangeFidCut",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "trackYFrontVtrackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'binning': [40,0,40,40,-20,20],
# 'var': "trackYFront:trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
{
'name': "primTrkStartThetaVPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Primary TPC Track #theta [deg]",
'binning': [90,-180,180,90,0,180],
'var': "primTrkStartTheta*180/pi:primTrkStartPhi*180/pi",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "primTrkdEdxsVx",
'xtitle': "Hit x [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,0,50,10000,0,50],
'var': "primTrkdEdxs:primTrkXs",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVy",
'xtitle': "Hit y [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [20,-25,25,10000,0,50],
'var': "primTrkdEdxs:primTrkYs",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVz",
'xtitle': "Hit z [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,-5,95,10000,0,50],
'var': "primTrkdEdxs:primTrkZs",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVrun",
'xtitle': "Run Number",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
        'binning': [200,8000,10000,500,0,50],
'var': "primTrkdEdxs:runNumber",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVyFromCenter",
'xtitle': "Hit |y| [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [40,0,25,10000,0,50],
'var': "primTrkdEdxs:fabs(primTrkYs)",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
{
'name': "primTrkdEdxsVzFromCenter",
'xtitle': "Hit |z-45| [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [40,0,50,10000,0,50],
'var': "primTrkdEdxs:fabs(primTrkZs-45.)",
'cuts': weightStr,
#'normalize': True,
'logz': True,
},
# {
# 'name': "hitYVhitX",
# 'xtitle': "Hit x [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [60,-5,55,60,-30,30],
# 'var': "primTrkYs:primTrkXs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitX_cosmicon",
# 'xtitle': "Hit x [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [60,-5,55,60,-30,30],
# 'var': "primTrkYs:primTrkXs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_cosmicon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ_cosmicon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitXVhitZ_NotCosmicon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit x [cm]",
# 'binning': [120,-10,110,60,-5,55],
# 'var': "primTrkXs:primTrkZs",
# 'cuts': "1"+"*(isMC || !((triggerBits >> 10) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_cosmic",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 11) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_beamon",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || ((triggerBits >> 4) & 1))",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "hitYVhitZ_pickytrack",
# 'xtitle': "Hit z [cm]",
# 'ytitle': "Hit y [cm]",
# 'binning': [120,-10,110,60,-30,30],
# 'var': "primTrkYs:primTrkZs",
# 'cuts': "1"+"*(isMC || nWCTracks > 0)",
# #'normalize': True,
# #'logz': True,
# },
]
hists = plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Halo_")
outfile = root.TFile("halo_hists.root","recreate")
outfile.cd()
for var in hists:
for ds in hists[var]:
newname = var+"_"+ds
hist = hists[var][ds]
hist.SetName(newname)
hist.Print()
hist.Write()
outfile.Close()
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,911 | jhugon/lariatPionAbs | refs/heads/master | /plotTriggers.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
#cuts += "*( pWC > 100 && pWC < 1100 && (isMC || (firstTOF > 0 && firstTOF < 25)))" # pions
# # #cuts += "*( pWC > 450 && pWC < 1100 && (isMC || (firstTOF > 28 && firstTOF < 55)))" # protons
#cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
#cuts += "*( iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
# matching debug
#cuts += "*(sqrt(pow(xWC-23.75,2)+pow(yWC-0.2,2)) < 11.93)" # wc track in flange
#cuts += "*(sqrt(pow(trackXFront-23.75,2)+pow(trackYFront-0.2,2)) < 11.93)" # TPC track in flange
#cuts += "*(trackMatchLowestZ < 2.)" # matching
#cuts += "*(fabs(trackMatchDeltaY) < 5.)" # matching
#cuts += "*((!isMC && (trackMatchDeltaX < 6. && trackMatchDeltaX > -4.)) || (isMC && (fabs(trackMatchDeltaX) < 5.)))" # matching
#cuts += "*(trackMatchDeltaAngle*180/pi < 10.)" # matching
###
###
secTrkCuts = "*(trackStartDistToPrimTrkEnd < 2.)"
#weightStr = "pzWeight"+cuts
weightStr = "1"+cuts
nData = 30860.0
logy = True
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
fileConfigs = [
{
'fn': "/lariat/app/users/jhugon/lariatsoft_v06_15_00/srcs/lariatsoft/JobConfigurations/CosmicAnalyzer.root",
'name': "RunI_Pos",
'title': "Run I Pos. Polarity",
'caption': "Run I Pos. Polarity",
'color': root.kBlack,
'isData': True,
},
]
histConfigs = [
{
'name': "nTracks",
'xtitle': "Number of TPC Tracks / Event",
'ytitle': "Events / bin",
'binning': [31,0,30],
'var': "nTracks",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "nWCTracks",
'xtitle': "Number of WC Tracks",
'ytitle': "Events / bin",
'binning': [11,0,10],
'var': "nWCTracks",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "nTOFs",
'xtitle': "Number of TOF Objects",
'ytitle': "Events / bin",
'binning': [11,0,10],
'var': "nTOFs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
]
cutList = [
"",
#"*(triggerCOSMICON)",
#"*(triggerCOSMIC)",
#"*(triggerBEAMON)",
#"*(triggerUSTOF || triggerDSTOF)",
#"*(triggerUSTOF && triggerDSTOF)",
#"*(triggerWCCOINC3OF4)",
#"*(triggerMICHEL)",
"*((triggerBits >> 10) & 1)",
"*((triggerBits >> 11) & 1)",
"*((triggerBits >> 4) & 1)",
"*(((triggerBits >> 5) & 1) || ((triggerBits >> 6) & 1))",
"*(((triggerBits >> 5) & 1) && ((triggerBits >> 6) & 1))",
"*((triggerBits >> 0) & 1)*((triggerBits >> 1) & 1)*((triggerBits >> 2) & 1)*((triggerBits >> 3) & 1)",
"*((triggerBits >> 13) & 1)",
"*((triggerBits >> 14) & 1)",
]
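    # Titles below name the trigger-bit cuts above, in order: bit 10 is
    # COSMICON, 11 COSMIC, 4 BEAMON, 5/6 the US/DS TOF paddles, 0-3 the
    # four wire chambers, 13 MICHEL and 14 LARSCINT.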
titles = [
"All",
"COSMICON",
"COSMIC",
"BEAMON",
"USTOF || DSTOF",
"USTOF && DSTOF",
"All WC",
"MICHEL",
"LARSCINT",
]
    colors = [root.kBlack,root.kBlue-7, root.kRed-4, root.kGreen, root.kMagenta-4, root.kOrange-3,root.kGray+1,root.kYellow,root.kCyan+1]
for histConfig in histConfigs:
name = histConfig["name"]
hcs = []
for cut,title,color in zip(cutList,titles,colors[:len(cutList)]):
hc = copy.deepcopy(histConfig)
hc["cuts"] = histConfig["cuts"]+cut
hc["title"] = title
hc["color"] = color
hcs.append(hc)
plotManyHistsOnePlot(fileConfigs,hcs,c,"cosmicanalyzer/tree",nMax=NMAX,outPrefix="Triggers_"+name+"_")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,934 | KjEndurance/Cipher-GUI | refs/heads/main | /cipherGUI.py | import CaesarCipher as cp
from tkinter import Tk, Frame, Label, Text, Button, Menu
class caesarCipherGUI(Frame):
def __init__(self, parent):
Frame.__init__(self, parent)
self.parent = parent
self.constructGUI()
def constructGUI(self):
self.parent.title("Encryption Software")
self.parent.geometry("700x700+100+100")
encryptLabel = Label(self.parent, text="Message to Encrypt")
encryptLabel.place(x=10, y=10)
decryptLabel = Label(self.parent, text="Message to Decrypt")
decryptLabel.place(x=570, y=10)
#Encrypt/Decrypt Text Boxes
self.encryptText = Text(self.parent, width=40, height=20, wrap='word')
self.encryptText.place(x=10, y=40)
self.resultText = Text(self.parent, width=40, height=20, wrap='word')
self.resultText.place(x=360, y=40)
#Encrypt/Decrypt Buttons
self.encryptButton = Button(self.parent, text='Encrypt Message', command=self.encryptPressed)
self.encryptButton.place(x=80, y=400)
self.decryptButton = Button(self.parent, text='Decrypt Message', command=self.decryptPressed)
self.decryptButton.place(x=500, y=400)
#Keypad
self.button1 = Button(self.parent, text='1')
self.button1.config(command= lambda: self.numberPressed(self.button1.cget('text')))
self.button1.place(x=325, y=530)
self.button2 = Button(self.parent, text='2')
self.button2.config(command= lambda: self.numberPressed(self.button2.cget('text')))
self.button2.place(x=350, y=530)
self.button3 = Button(self.parent, text='3')
self.button3.config(command= lambda: self.numberPressed(self.button3.cget('text')))
self.button3.place(x=375, y=530)
self.button4 = Button(self.parent, text='4')
self.button4.config(command= lambda: self.numberPressed(self.button4.cget('text')))
self.button4.place(x=325, y=565)
self.button5 = Button(self.parent, text='5')
self.button5.config(command= lambda: self.numberPressed(self.button5.cget('text')))
self.button5.place(x=350, y=565)
self.button6 = Button(self.parent, text='6')
self.button6.config(command= lambda: self.numberPressed(self.button6.cget('text')))
self.button6.place(x=375, y=565)
self.button7 = Button(self.parent, text='7')
self.button7.config(command= lambda: self.numberPressed(self.button7.cget('text')))
self.button7.place(x=325, y=600)
self.button8 = Button(self.parent, text='8')
self.button8.config(command= lambda: self.numberPressed(self.button8.cget('text')))
self.button8.place(x=350, y=600)
self.button9 = Button(self.parent, text='9')
self.button9.config(command= lambda: self.numberPressed(self.button9.cget('text')))
self.button9.place(x=375, y=600)
self.button0 = Button(self.parent, text='0')
self.button0.config(command= lambda: self.numberPressed(self.button0.cget('text')))
self.button0.place(x=350, y=635)
self.numpadBackspace = Button(self.parent, text='<--', command=self.backspacePressed)
self.numpadBackspace.place(x=300, y=635)
self.numpadClear = Button(self.parent, text='CLR', command=self.clearPressed)
self.numpadClear.place(x=380, y=635)
self.numpadDisplay = Text(self.parent, width=12, height=1, state='disabled')
self.numpadDisplay.place(x=310, y=490)
numpadLabel = Label(self.parent, text='Enter key for Encryption and Decryption')
numpadLabel.place(x=260, y=460)
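    # The callbacks below read the numeric key from the (normally
    # disabled) numpad display and pass it, together with the text-box
    # contents, to the CaesarCipher module.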
def encryptPressed(self):
PIN = self.numpadDisplay.get('1.0', 'end-1c')
if len(PIN) > 0:
message = self.encryptText.get('1.0', 'end-1c')
encrypted = cp.encrypt(message, int(PIN))
self.resultText.delete('1.0', 'end')
self.resultText.insert('1.0', encrypted)
def decryptPressed(self):
PIN = self.numpadDisplay.get('1.0', 'end-1c')
if len(PIN) > 0:
message = self.resultText.get('1.0', 'end-1c')
decrypted = cp.decrypt(message, int(PIN))
self.encryptText.delete('1.0', 'end')
self.encryptText.insert('1.0', decrypted)
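    # Keypad handlers: the display is re-enabled only while being edited,
    # so the key can be changed solely through the on-screen buttons.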
def numberPressed(self, num):
PIN = self.numpadDisplay.get('1.0', 'end-1c')
if len(PIN) < 12:
self.numpadDisplay.config(state='normal')
self.numpadDisplay.insert('end', num)
print(int(num))
self.numpadDisplay.config(state='disabled')
def backspacePressed(self):
PIN = self.numpadDisplay.get('1.0', 'end-1c')
if len(PIN) > 0:
self.numpadDisplay.config(state='normal')
self.numpadDisplay.delete('1.0', 'end-1c')
self.numpadDisplay.insert('1.0', PIN[:-1])
self.numpadDisplay.config(state='disabled')
def clearPressed(self):
self.numpadDisplay.config(state='normal')
self.numpadDisplay.delete('1.0', 'end-1c')
self.numpadDisplay.config(state='disabled')
root = Tk()
GUI = caesarCipherGUI(root)
root.mainloop() | {"/cipherGUI.py": ["/CaesarCipher.py"]} |
76,935 | KjEndurance/Cipher-GUI | refs/heads/main | /CaesarCipher.py | def encrypt(message, s):
result = ''
s = s % 100
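    # Shift every character by the key (mod 100) plus a fixed 500 code
    # points, pushing the ciphertext outside printable ASCII; decrypt()
    # below subtracts the same offset.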
for i in range(len(message)):
current = message[i]
num = ord(current) + s + 500
print(num)
result += chr(num)
print(result)
return result
def decrypt(message, s):
result = ''
s = s % 100
for i in range(len(message)):
current = message[i]
result += chr(ord(current) - s - 500)
print(result)
return result
| {"/cipherGUI.py": ["/CaesarCipher.py"]} |
76,937 | Emmano97/django-ambassador | refs/heads/master | /common/urls.py | from django.urls import path
from .views import (
RegisterAPIView,
LoginAPIView,
UserAPIView,
LogoutAPIView,
ProfileInfoAPIView,
ProfilePasswordAPIView,
)
urlpatterns = [
path("register", RegisterAPIView.as_view()),
path("login", LoginAPIView.as_view()),
path("user", UserAPIView.as_view()),
path("logout", LogoutAPIView.as_view()),
path("user/info", ProfileInfoAPIView.as_view()),
path("user/password", ProfilePasswordAPIView.as_view()),
] | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,938 | Emmano97/django-ambassador | refs/heads/master | /core/migrations/0007_auto_20210925_1632.py | # Generated by Django 3.1.7 on 2021-09-25 16:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0006_orderitem_ambassador_revenue'),
]
operations = [
migrations.AlterField(
model_name='order',
name='updated_at',
field=models.DateTimeField(auto_now=True, null=True),
),
migrations.AlterField(
model_name='orderitem',
name='updated_at',
field=models.DateTimeField(auto_now=True, null=True),
),
]
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,939 | Emmano97/django-ambassador | refs/heads/master | /core/management/commands/populate_orders.py | from random import randrange, randint
from django.core.management import BaseCommand
from faker import Faker
from faker.providers import profile
from core.models import Order, User, OrderItem
class Command(BaseCommand):
def handle(self, *args, **options):
fake = Faker('en_US')
fake.add_provider(profile)
users_count = User.objects.all().count()
for _ in range(3):
order = Order.objects.create(
user_id=randint(1, users_count),
code='code',
ambassador_email=fake.email(),
first_name=fake.first_name(),
last_name=fake.last_name(),
email=fake.email(),
complete=True
)
for _ in range(randrange(1, 5)):
price = randrange(10, 100)
quantity = randrange(1, 5)
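                # Split each item's revenue 90/10 between the admin and
                # the referring ambassador, matching the checkout logic.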
OrderItem.objects.create(
order_id=order.id,
product_title=fake.name(),
price=price,
quantity=quantity,
admin_revenue=.9 * price * quantity,
ambassador_revenue=.1 * price * quantity
) | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,940 | Emmano97/django-ambassador | refs/heads/master | /checkout/serializers.py | from rest_framework import serializers
from core.models import Product, Link, User
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = [
'id',
'first_name',
'last_name',
'email',
'password',
'is_ambassador',
'revenue',
]
extra_kwargs = {
'password': {'write_only': True}
}
class ProductSerializer(serializers.ModelSerializer):
class Meta:
model = Product
fields = '__all__'
class LinkSerializer(serializers.ModelSerializer):
products = ProductSerializer(many=True)
user = UserSerializer()
class Meta:
model = Link
fields = '__all__'
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,941 | Emmano97/django-ambassador | refs/heads/master | /core/management/commands/populate_products.py | from random import randrange
from django.core.management import BaseCommand
from faker import Faker
from faker.providers import profile
from core.models import Product
class Command(BaseCommand):
def handle(self, *args, **options):
fake = Faker('en_US')
fake.add_provider(profile)
for i in range(30):
product = Product.objects.create(
title=fake.name(),
description=fake.text(100),
image=fake.image_url(),
price=randrange(10, 1000)
)
product.save() | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,942 | Emmano97/django-ambassador | refs/heads/master | /ambassador/urls.py | from django.urls import path, include
from .views import (
ProductFrontendAPIView,
ProductBackendAPIView,
LinkAPIView,
StatsAPIView,
RankingsAPIView,
)
urlpatterns = [
path("", include('common.urls')),
path("products/frontend", ProductFrontendAPIView.as_view()),
path("products/backend", ProductBackendAPIView.as_view()),
path("links", LinkAPIView.as_view()),
path("stats", StatsAPIView.as_view()),
path("rankings", RankingsAPIView.as_view()),
]
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,943 | Emmano97/django-ambassador | refs/heads/master | /core/management/commands/populate_ambassadors.py | from django.core.management import BaseCommand
from faker import Faker
from faker.providers import profile
from core.models import User
class Command(BaseCommand):
def handle(self, *args, **options):
fake = Faker('en_US')
fake.add_provider(profile)
for i in range(30):
user = User.objects.create(
first_name=fake.first_name(),
last_name=fake.last_name(),
email=fake.email(),
password='',
is_ambassador=True
)
user.set_password("password")
user.save() | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,944 | Emmano97/django-ambassador | refs/heads/master | /core/migrations/0006_orderitem_ambassador_revenue.py | # Generated by Django 3.1.7 on 2021-07-18 06:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0005_auto_20210718_0453'),
]
operations = [
migrations.AddField(
model_name='orderitem',
name='ambassador_revenue',
field=models.DecimalField(decimal_places=2, default=0, max_digits=10),
preserve_default=False,
),
]
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,945 | Emmano97/django-ambassador | refs/heads/master | /core/management/commands/update_rankings.py | from django.core.management import BaseCommand
from django_redis import get_redis_connection
from core.models import User
class Command(BaseCommand):
def handle(self, *args, **options):
connexion = get_redis_connection("default")
ambassadors = User.objects.filter(is_ambassador=True)
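        # Store each ambassador's revenue as the score in the "rankings"
        # Redis sorted set; the ambassador rankings API reads it back in
        # score order.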
for ambassador in ambassadors:
connexion.zadd("rankings", {ambassador.name: float(ambassador.revenue)})
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,946 | Emmano97/django-ambassador | refs/heads/master | /app/urls.py | from django.contrib import admin
from django.urls import path, include, re_path
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
schema_view = get_schema_view(
openapi.Info(
title="AMBASSADORS API",
default_version="v1",
        description="Welcome to the ambassadors api",
terms_of_service="https://www.datadevpro.com",
contact=openapi.Contact(email="emmanoedorh@gmail.com"),
license=openapi.License(name="Awesome IP"),
),
public=True,
permission_classes=[permissions.AllowAny]
)
urlpatterns = [
re_path(r'^doc(?P<format>\.json|\.yaml)$',
schema_view.without_ui(cache_timeout=0), name='schema-json'), #<-- Here
path('api/doc/', schema_view.with_ui('swagger', cache_timeout=0),
name='schema-swagger-ui'), #<-- Here
path('api/redoc/', schema_view.with_ui('redoc', cache_timeout=0),
name='schema-redoc'),
path('admin/', admin.site.urls),
path('api/admin/', include("administrator.urls")),
path('api/ambassador/', include("ambassador.urls")),
path('api/checkout/', include("checkout.urls")),
]
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,947 | Emmano97/django-ambassador | refs/heads/master | /core/admin.py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .models import User
class SuperUser(UserAdmin):
ordering = ['id']
admin.site.register(User, SuperUser)
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,948 | Emmano97/django-ambassador | refs/heads/master | /administrator/urls.py | from django.urls import path, include
from administrator.views import (
AmbassadorAPIView,
ProductGenericAPIView,
OrderAPIView,
LinkAPIView,
)
urlpatterns = [
path("", include('common.urls')),
path("ambassadors", AmbassadorAPIView.as_view()),
path("products/<str:pk>", ProductGenericAPIView.as_view()),
path("products", ProductGenericAPIView.as_view()),
path("user/<str:pk>/links", LinkAPIView.as_view()),
path("orders", OrderAPIView.as_view()),
] | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,949 | Emmano97/django-ambassador | refs/heads/master | /administrator/views.py | from django.core.cache import cache
from rest_framework import generics, mixins
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from administrator.serializers import ProductSerializer, LinkSerializer, OrderSerializer
from common.authentication import JWTAuthentication
from common.serializers import UserSerializer
from core.models import User, Product, Link, Order
class AmbassadorAPIView(APIView):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
def get(self, _):
ambassadors = User.objects.filter(is_ambassador=True)
serializer = UserSerializer(ambassadors, many=True)
return Response(serializer.data)
class ProductGenericAPIView(
generics.GenericAPIView,
mixins.RetrieveModelMixin,
mixins.ListModelMixin,
mixins.CreateModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
serializer_class = ProductSerializer
queryset = Product.objects.all()
def get(self, request, pk=None):
if pk:
return self.retrieve(request, pk)
return self.list(request)
def post(self, request, pk=None):
response = self.create(request)
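        # Creating a product invalidates every cached frontend page plus
        # the backend product list so stale entries are never served.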
for key in cache.keys("*"):
if "products_frontend" in key:
cache.delete(key)
cache.delete("products_backend")
return response
def put(self, request, pk=None):
response = self.partial_update(request, pk)
for key in cache.keys("*"):
if "products_frontend" in key:
cache.delete(key)
cache.delete("products_backend")
return response
    def delete(self, request, pk=None):
        response = self.destroy(request, pk)
        for key in cache.keys("*"):
            if "products_frontend" in key:
                cache.delete(key)
        cache.delete("products_backend")
        return response
class LinkAPIView(APIView):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request, pk=None):
links = Link.objects.filter(user_id=pk)
serializer = LinkSerializer(links, many=True)
return Response(serializer.data)
class OrderAPIView(APIView):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request):
orders = Order.objects.filter(complete=True)
serializer = OrderSerializer(orders, many=True)
return Response(serializer.data)
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,950 | Emmano97/django-ambassador | refs/heads/master | /ambassador/views.py | import math
import random
import string
import time
from django.core.cache import cache
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from django_redis import get_redis_connection
from common.authentication import JWTAuthentication
from core.models import Product, Link, Order, User
from .serializers import ProductSerializer, LinkSerializer
class ProductFrontendAPIView(APIView):
@method_decorator(cache_page(60 * 60 * 2, key_prefix="products_frontend"))
def get(self, _):
time.sleep(2)
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
class ProductBackendAPIView(APIView):
def get(self, request):
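        # The full product list is cached for 30 minutes; the search,
        # sort and pagination below all run in Python on the cached list.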
products = cache.get("products_backend")
if not products:
time.sleep(2)
products = list(Product.objects.all())
cache.set("products_backend", products, timeout=60 * 30)
total = len(products)
s = request.query_params.get("s", "")
if s:
products = list([
product for product in products
if (s.lower() in product.title.lower()) or (s.lower() in product.description.lower())
])
sort = request.query_params.get("sort", "")
if sort == "asc":
products.sort(key=lambda p: p.price)
elif sort == "desc":
products.sort(key=lambda p: p.price, reverse=True)
per_page = 6
page = int(request.query_params.get("page", 1))
start = (page - 1) * per_page
end = page * per_page
data = ProductSerializer(products[start:end], many=True).data
return Response({
"data": data,
"meta": {
"total": total,
"page": page,
"last_page": math.ceil(total / per_page)
}
})
class LinkAPIView(APIView):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
def post(self, request):
user = request.user
serializer = LinkSerializer(data={
'user': user.id,
'code': ''.join(random.choices(string.ascii_lowercase + string.digits, k=6)),
'products': request.data["products"]
})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data)
class StatsAPIView(APIView):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request):
user = request.user
links = Link.objects.filter(user_id=user.id)
return Response([self.format(link) for link in links])
def format(self, link):
orders = Order.objects.filter(code=link.code, complete=True)
return {
'code': link.code,
'count': len(orders),
'revenue': sum(order.ambassador_revenue for order in orders)
}
class RankingsAPIView(APIView):
authentication_classes = [JWTAuthentication]
permission_classes = [IsAuthenticated]
def get(self, request):
ambassadors = User.objects.filter(is_ambassador=True)
redis_connexion = get_redis_connection("default")
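        # Read the "rankings" sorted set (filled by the update_rankings
        # management command), highest score first, and return a
        # name -> revenue mapping.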
rankings = redis_connexion.zrevrangebyscore("rankings", min=0, max=1000, withscores=True)
return Response({
rank[0].decode("utf"): rank[1] for rank in rankings
})
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,951 | Emmano97/django-ambassador | refs/heads/master | /core/migrations/0005_auto_20210718_0453.py | # Generated by Django 3.1.7 on 2021-07-18 04:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0004_order_orderitem'),
]
operations = [
migrations.AlterField(
model_name='link',
name='code',
field=models.CharField(max_length=255, unique=True),
),
]
| {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,952 | Emmano97/django-ambassador | refs/heads/master | /checkout/views.py | import decimal
from django.shortcuts import render
from django.db import transaction
from django.conf import settings
from django.core.mail import send_mail
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import exceptions
from core.models import Link, Order, Product, OrderItem
from .serializers import LinkSerializer
import stripe
class LinkAPIView(APIView):
def get(self, _, code=""):
link = Link.objects.filter(code=code).first()
serializer = LinkSerializer(link)
return Response(serializer.data)
class OrderAPIView(APIView):
@transaction.atomic
def post(self, request):
data = request.data
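        # Resolve the ambassador's link from the posted code, create the
        # order and its items with a 10%/90% ambassador/admin revenue
        # split, then open a Stripe Checkout session for payment.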
        link = Link.objects.filter(code=data['code']).first()
if not link:
raise exceptions.APIException("Invalid code")
try:
order = Order()
order.code = link.code
order.user_id = link.user_id
order.ambassador_email = link.user.email
order.first_name = data["first_name"]
order.last_name = data["last_name"]
order.email = data["email"]
order.address = data["address"]
order.country = data["country"]
order.city = data["city"]
order.zip = data["zip"]
with transaction.atomic():
order.save()
line_items = []
for item in data['products']:
                product = Product.objects.get(pk=item["product_id"])
quantity = decimal.Decimal(item['quantity'])
order_item = OrderItem()
order_item.order = order
order_item.product_title = product.title
order_item.price = product.price
order_item.quantity = quantity
order_item.ambassador_revenue = decimal.Decimal(.1) * product.price * quantity
order_item.admin_revenue = decimal.Decimal(.9) * product.price * quantity
with transaction.atomic():
order_item.save()
line_items.append({
'name': product.title,
"description": product.description,
"images": [product.image],
'amount': int(100 * product.price),
'currency': "usd",
                    'quantity': int(quantity),
})
stripe_api_key = getattr(settings, "STRIPE_API_KEY", None)
if not stripe_api_key:
raise exceptions.APIException("Can't proceed to the payment")
stripe.api_key = stripe_api_key
source = stripe.checkout.Session.create(
                success_url="http://localhost:5000/success?source={CHECKOUT_SESSION_ID}",
                cancel_url="http://localhost:5000/error",
payment_method_types=['card'],
line_items=line_items
)
order.transaction_id = source['id']
order.save()
return Response(source)
except Exception:
            transaction.set_rollback(True)
return Response({
"message": "Error occurred"
})
class OrderConfirmAPIView(APIView):
def post(self, request):
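        # Stripe redirects back with the checkout session id; mark the
        # matching order complete and email both the admin and the
        # ambassador their revenue.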
order = Order.objects.filter(transaction_id=request.data['source']).first()
if not order:
raise exceptions.APIException("Order not found")
order.complete = True
order.save()
send_mail(
subject="An order has been completed",
message=f"Order #{order.id} with a total of $ {order.admin_revenue} has been completed",
from_email="from@gmail.com",
recipient_list=["admin@gmail.com"]
)
send_mail(
subject="An order has been completed",
message=f"You earned $ {order.ambassador_revenue} from the link {order.code}",
from_email="from@gmail.com",
recipient_list=[order.ambassador_email]
)
return Response({
"message": "success"
}) | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,953 | Emmano97/django-ambassador | refs/heads/master | /checkout/urls.py | from django.urls import path
from .views import LinkAPIView, OrderAPIView, OrderConfirmAPIView
urlpatterns = [
path("links/<str:code>", LinkAPIView.as_view()),
path("orders/", OrderAPIView.as_view()),
path("orders/confirm", OrderConfirmAPIView.as_view())
] | {"/ambassador/urls.py": ["/ambassador/views.py"], "/administrator/urls.py": ["/administrator/views.py"], "/checkout/views.py": ["/checkout/serializers.py"], "/checkout/urls.py": ["/checkout/views.py"]} |
76,955 | piiswrong/fedlearner | refs/heads/master | /test/data_join/test_data_portal_master.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
import time
import unittest
import logging
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from google.protobuf import text_format
import tensorflow_io
from tensorflow.compat.v1 import gfile
from fnmatch import fnmatch
import grpc
from google.protobuf import text_format, empty_pb2
from fedlearner.data_join import data_join_master, common
from fedlearner.common import common_pb2 as common_pb
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.common import data_portal_service_pb2_grpc as dp_grpc
from fedlearner.common.db_client import DBClient
from fedlearner.proxy.channel import make_insecure_channel, ChannelType
from fedlearner.data_join.data_portal_master import DataPortalMasterService
class DataPortalMaster(unittest.TestCase):
def test_api(self):
logging.getLogger().setLevel(logging.DEBUG)
kvstore_type = 'etcd'
db_base_dir = 'dp_test'
os.environ['ETCD_BASE_DIR'] = db_base_dir
data_portal_name = 'test_data_source'
kvstore = DBClient(kvstore_type, True)
kvstore.delete_prefix(db_base_dir)
portal_input_base_dir='./portal_upload_dir'
portal_output_base_dir='./portal_output_dir'
raw_data_publish_dir = 'raw_data_publish_dir'
portal_manifest = dp_pb.DataPortalManifest(
name=data_portal_name,
data_portal_type=dp_pb.DataPortalType.Streaming,
output_partition_num=4,
input_file_wildcard="*.done",
input_base_dir=portal_input_base_dir,
output_base_dir=portal_output_base_dir,
raw_data_publish_dir=raw_data_publish_dir,
processing_job_id=-1,
next_job_id=0
)
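# How input_file_wildcard behaves with fnmatch: '*' also matches '/', so
# '1001/7.done' matches '*.done', while '100.xx' and '1001/_SUCCESS' do
# not and are excluded from the job.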
kvstore.set_data(common.portal_kvstore_base_dir(data_portal_name),
text_format.MessageToString(portal_manifest))
if gfile.Exists(portal_input_base_dir):
gfile.DeleteRecursively(portal_input_base_dir)
gfile.MakeDirs(portal_input_base_dir)
all_fnames = ['1001/{}.done'.format(i) for i in range(100)]
all_fnames.append('{}.xx'.format(100))
all_fnames.append('1001/_SUCCESS')
for fname in all_fnames:
fpath = os.path.join(portal_input_base_dir, fname)
gfile.MakeDirs(os.path.dirname(fpath))
with gfile.Open(fpath, "w") as f:
f.write('xxx')
portal_master_addr = 'localhost:4061'
portal_options = dp_pb.DataPotraMasterlOptions(
use_mock_etcd=True,
long_running=False,
check_success_tag=True,
)
data_portal_master = DataPortalMasterService(
int(portal_master_addr.split(':')[1]),
data_portal_name, kvstore_type,
portal_options
)
data_portal_master.start()
channel = make_insecure_channel(portal_master_addr, ChannelType.INTERNAL)
portal_master_cli = dp_grpc.DataPortalMasterServiceStub(channel)
recv_manifest = portal_master_cli.GetDataPortalManifest(empty_pb2.Empty())
self.assertEqual(recv_manifest.name, portal_manifest.name)
self.assertEqual(recv_manifest.data_portal_type, portal_manifest.data_portal_type)
self.assertEqual(recv_manifest.output_partition_num, portal_manifest.output_partition_num)
self.assertEqual(recv_manifest.input_file_wildcard, portal_manifest.input_file_wildcard)
self.assertEqual(recv_manifest.input_base_dir, portal_manifest.input_base_dir)
self.assertEqual(recv_manifest.output_base_dir, portal_manifest.output_base_dir)
self.assertEqual(recv_manifest.raw_data_publish_dir, portal_manifest.raw_data_publish_dir)
self.assertEqual(recv_manifest.next_job_id, 1)
self.assertEqual(recv_manifest.processing_job_id, 0)
self._check_portal_job(kvstore, all_fnames, portal_manifest, 0)
mapped_partition = set()
task_0 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=0))
task_0_1 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=0))
self.assertEqual(task_0, task_0_1)
self.assertTrue(task_0.HasField('map_task'))
mapped_partition.add(task_0.map_task.partition_id)
self._check_map_task(task_0.map_task, all_fnames,
task_0.map_task.partition_id,
portal_manifest)
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=0, partition_id=task_0.map_task.partition_id,
part_state=dp_pb.PartState.kIdMap)
)
task_1 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=0))
self.assertTrue(task_1.HasField('map_task'))
mapped_partition.add(task_1.map_task.partition_id)
self._check_map_task(task_1.map_task, all_fnames,
task_1.map_task.partition_id,
portal_manifest)
task_2 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=1))
self.assertTrue(task_2.HasField('map_task'))
mapped_partition.add(task_2.map_task.partition_id)
self._check_map_task(task_2.map_task, all_fnames,
task_2.map_task.partition_id,
portal_manifest)
task_3 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=2))
self.assertTrue(task_3.HasField('map_task'))
mapped_partition.add(task_3.map_task.partition_id)
self._check_map_task(task_3.map_task, all_fnames,
task_3.map_task.partition_id,
portal_manifest)
self.assertEqual(len(mapped_partition), portal_manifest.output_partition_num)
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=0, partition_id=task_1.map_task.partition_id,
part_state=dp_pb.PartState.kIdMap)
)
pending_1 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=4))
self.assertTrue(pending_1.HasField('pending'))
pending_2 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=3))
self.assertTrue(pending_2.HasField('pending'))
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=1, partition_id=task_2.map_task.partition_id,
part_state=dp_pb.PartState.kIdMap)
)
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=2, partition_id=task_3.map_task.partition_id,
part_state=dp_pb.PartState.kIdMap)
)
reduce_partition = set()
task_4 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=0))
task_4_1 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=0))
self.assertEqual(task_4, task_4_1)
self.assertTrue(task_4.HasField('reduce_task'))
reduce_partition.add(task_4.reduce_task.partition_id)
self._check_reduce_task(task_4.reduce_task,
task_4.reduce_task.partition_id,
portal_manifest)
task_5 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=1))
self.assertTrue(task_5.HasField('reduce_task'))
reduce_partition.add(task_5.reduce_task.partition_id)
self._check_reduce_task(task_5.reduce_task,
task_5.reduce_task.partition_id,
portal_manifest)
task_6 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=2))
self.assertTrue(task_6.HasField('reduce_task'))
reduce_partition.add(task_6.reduce_task.partition_id)
self._check_reduce_task(task_6.reduce_task,
task_6.reduce_task.partition_id,
portal_manifest)
task_7 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=3))
self.assertTrue(task_7.HasField('reduce_task'))
reduce_partition.add(task_7.reduce_task.partition_id)
self.assertEqual(len(reduce_partition), 4)
self._check_reduce_task(task_7.reduce_task,
task_7.reduce_task.partition_id,
portal_manifest)
task_8 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=5))
self.assertTrue(task_8.HasField('pending'))
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=0, partition_id=task_4.reduce_task.partition_id,
part_state=dp_pb.PartState.kEventTimeReduce)
)
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=1, partition_id=task_5.reduce_task.partition_id,
part_state=dp_pb.PartState.kEventTimeReduce)
)
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=2, partition_id=task_6.reduce_task.partition_id,
part_state=dp_pb.PartState.kEventTimeReduce)
)
portal_master_cli.FinishTask(dp_pb.FinishTaskRequest(
rank_id=3, partition_id=task_7.reduce_task.partition_id,
part_state=dp_pb.PartState.kEventTimeReduce)
)
task_9 = portal_master_cli.RequestNewTask(dp_pb.NewTaskRequest(rank_id=5))
self.assertTrue(task_9.HasField('finished'))
data_portal_master.stop()
gfile.DeleteRecursively(portal_input_base_dir)
def _check_portal_job(self, kvstore, fnames, portal_manifest, job_id):
kvstore_key = common.portal_job_kvstore_key(portal_manifest.name, job_id)
data = kvstore.get_data(kvstore_key)
self.assertIsNotNone(data)
portal_job = text_format.Parse(data, dp_pb.DataPortalJob())
self.assertEqual(job_id, portal_job.job_id)
self.assertFalse(portal_job.finished)
fnames.sort()
fpaths = [os.path.join(portal_manifest.input_base_dir, f) for f in fnames
if fnmatch(f, portal_manifest.input_file_wildcard)]
self.assertEqual(len(fpaths), len(portal_job.fpaths))
for index, fpath in enumerate(fpaths):
self.assertEqual(fpath, portal_job.fpaths[index])
def _check_map_task(self, map_task, fnames, partition_id, portal_manifest):
self.assertEqual(map_task.output_partition_num, portal_manifest.output_partition_num)
fnames.sort()
fpaths = [os.path.join(portal_manifest.input_base_dir, f) for f in fnames
if (fnmatch(f, portal_manifest.input_file_wildcard) and
hash(os.path.join(portal_manifest.input_base_dir, f)) %
map_task.output_partition_num == partition_id)]
self.assertEqual(len(fpaths), len(map_task.fpaths))
for index, fpath in enumerate(fpaths):
self.assertEqual(fpath, map_task.fpaths[index])
self.assertEqual(map_task.output_base_dir,
common.portal_map_output_dir(portal_manifest.output_base_dir, 0))
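# Partition assignment sketch (mirrors the filter above): a file path
# fpath goes to partition hash(fpath) % output_partition_num, so with 4
# partitions each map task sees roughly a quarter of the 100 .done files.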
def _check_reduce_task(self, reduce_task, partition_id, portal_manifest):
self.assertEqual(reduce_task.partition_id, partition_id)
self.assertEqual(reduce_task.map_base_dir,
common.portal_map_output_dir(portal_manifest.output_base_dir, 0))
self.assertEqual(reduce_task.reduce_base_dir,
common.portal_reduce_output_dir(portal_manifest.output_base_dir, 0))
if __name__ == '__main__':
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,956 | piiswrong/fedlearner | refs/heads/master | /test/trainer/disabled_data_block_loader.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import unittest
import tensorflow.compat.v1 as tf
import numpy as np
import fedlearner.trainer as bft
class TestDataBlockLoader(unittest.TestCase):
def test_data_block_loader(self):
bridge_l = bft.bridge.Bridge('leader', 50051, 'localhost:50052')
bridge_f = bft.bridge.Bridge('follower', 50052, 'localhost:50051')
path_l = os.path.join(os.path.dirname(__file__), 'data/leader')
path_f = os.path.join(os.path.dirname(__file__), 'data/follower')
tm_l = bft.trainer_master_client.LocalTrainerMasterClient(
'leader', path_l)
tm_f = bft.trainer_master_client.LocalTrainerMasterClient(
'follower', path_f)
dataset_l = bft.data.DataBlockLoader(256, 'leader', bridge_l, tm_l)
dataset_f = bft.data.DataBlockLoader(256, 'follower', bridge_f, tm_f)
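# The two loaders are paired over the two bridges: the leader listens on
# 50051 and dials 50052, the follower the reverse; 256 is the batch size
# handed to DataBlockLoader on each side.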
bridge_l.connect()
bridge_f.connect()
g_l = tf.Graph()
with g_l.as_default():
record_l = dataset_l.make_batch_iterator().get_next()
g_f = tf.Graph()
with g_f.as_default():
record_f = dataset_f.make_batch_iterator().get_next()
with tf.Session(graph=g_l) as sess_l:
try:
while True:
sess_l.run(record_l)
except tf.errors.OutOfRangeError:
pass
sess_l.close()
with tf.Session(graph=g_f) as sess_f:
try:
while True:
sess_f.run(record_f)
except tf.errors.OutOfRangeError:
pass
sess_f.close()
bridge_f.terminate()
bridge_l.terminate()
if __name__ == '__main__':
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,957 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/utils/k8s_client.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
from http import HTTPStatus
import enum
import requests
from kubernetes import client, config
from kubernetes.client.exceptions import ApiException
FEDLEARNER_CUSTOM_GROUP = 'fedlearner.k8s.io'
FEDLEARNER_CUSTOM_VERSION = 'v1alpha1'
class CrdKind(enum.Enum):
FLAPP = 'flapps'
SPARK_APPLICATION = 'sparkapplications'
class K8sClient(object):
def __init__(self, config_path=None):
if config_path is None:
config.load_incluster_config()
else:
config.load_kube_config(config_path)
self._core = client.CoreV1Api()
self._networking = client.NetworkingV1beta1Api()
self._app = client.AppsV1Api()
self._custom_object = client.CustomObjectsApi()
self._client = client.ApiClient()
self._api_server_url = 'http://{}:{}'.format(
os.environ.get('FL_API_SERVER_HOST', 'fedlearner-apiserver'),
os.environ.get('FL_API_SERVER_PORT', 8101))
def close(self):
self._core.api_client.close()
self._networking.api_client.close()
def _raise_runtime_error(self, exception: ApiException):
raise RuntimeError('[{}] {}'.format(exception.status,
exception.reason))
def create_or_update_secret(self,
data,
metadata,
secret_type,
name,
namespace='default'):
"""Create secret. If existed, then replace"""
request = client.V1Secret(api_version='v1',
data=data,
kind='Secret',
metadata=metadata,
type=secret_type)
try:
self._core.read_namespaced_secret(name, namespace)
# If the secret already exists, then we use patch to replace it.
# We don't use replace method because it requires `resourceVersion`.
self._core.patch_namespaced_secret(name, namespace, request)
return
except ApiException as e:
# 404 is expected if the secret does not exist
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
try:
self._core.create_namespaced_secret(namespace, request)
except ApiException as e:
self._raise_runtime_error(e)
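# Minimal usage sketch (hypothetical names and values; V1ObjectMeta is
# the real kubernetes client type, the rest is made up):
#   k8s = K8sClient(config_path='/path/to/kubeconfig')
#   k8s.create_or_update_secret(
#       data={'token': 'czNjcjN0'},   # values must be base64 strings
#       metadata=client.V1ObjectMeta(name='demo-secret'),
#       secret_type='Opaque',
#       name='demo-secret')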
def delete_secret(self, name, namespace='default'):
try:
self._core.delete_namespaced_secret(name, namespace)
except ApiException as e:
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
def get_secret(self, name, namespace='default'):
try:
return self._core.read_namespaced_secret(name, namespace)
except ApiException as e:
self._raise_runtime_error(e)
def create_or_update_service(self,
metadata,
spec,
name,
namespace='default'):
"""Create secret. If existed, then replace"""
request = client.V1Service(api_version='v1',
kind='Service',
metadata=metadata,
spec=spec)
try:
self._core.read_namespaced_service(name, namespace)
# If the service already exists, then we use patch to replace it.
# We don't use replace method because it requires `resourceVersion`.
self._core.patch_namespaced_service(name, namespace, request)
return
except ApiException as e:
# 404 is expected if the service does not exist
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
try:
self._core.create_namespaced_service(namespace, request)
except ApiException as e:
self._raise_runtime_error(e)
def delete_service(self, name, namespace='default'):
try:
self._core.delete_namespaced_service(name, namespace)
except ApiException as e:
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
def get_service(self, name, namespace='default'):
try:
return self._core.read_namespaced_service(name, namespace)
except ApiException as e:
self._raise_runtime_error(e)
def create_or_update_ingress(self,
metadata,
spec,
name,
namespace='default'):
request = client.NetworkingV1beta1Ingress(
api_version='networking.k8s.io/v1beta1',
kind='Ingress',
metadata=metadata,
spec=spec)
try:
self._networking.read_namespaced_ingress(name, namespace)
# If the ingress already exists, then we use patch to replace it.
# We don't use replace method because it requires `resourceVersion`.
self._networking.patch_namespaced_ingress(name, namespace, request)
return
except ApiException as e:
# 404 is expected if the ingress does not exist
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
try:
self._networking.create_namespaced_ingress(namespace, request)
except ApiException as e:
self._raise_runtime_error(e)
def delete_ingress(self, name, namespace='default'):
try:
self._networking.delete_namespaced_ingress(name, namespace)
except ApiException as e:
self._raise_runtime_error(e)
def get_ingress(self, name, namespace='default'):
try:
return self._networking.read_namespaced_ingress(name, namespace)
except ApiException as e:
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
def create_or_update_deployment(self,
metadata,
spec,
name,
namespace='default'):
request = client.V1Deployment(api_version='apps/v1',
kind='Deployment',
metadata=metadata,
spec=spec)
try:
self._app.read_namespaced_deployment(name, namespace)
# If the deployment already exists, then we use patch to replace it.
# We don't use replace method because it requires `resourceVersion`.
self._app.patch_namespaced_deployment(name, namespace, request)
return
except ApiException as e:
# 404 is expected if the deployment does not exist
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
try:
self._app.create_namespaced_deployment(namespace, request)
except ApiException as e:
self._raise_runtime_error(e)
def delete_deployment(self, name, namespace='default'):
try:
self._app.delete_namespaced_deployment(name, namespace)
except ApiException as e:
if e.status != HTTPStatus.NOT_FOUND:
self._raise_runtime_error(e)
def get_deployment(self, name, namespace='default'):
try:
return self._app.read_namespaced_deployment(name, namespace)
except ApiException as e:
self._raise_runtime_error(e)
def get_custom_object(self, crd_kind: CrdKind,
custom_object_name: str, namespace='default'):
response = requests.get(
'{api_server_url}/namespaces/{namespace}/fedlearner/'
'v1alpha1/{crd_kind}/{name}'.format(
api_server_url=self._api_server_url,
namespace=namespace,
crd_kind=crd_kind.value,
name=custom_object_name))
if response.status_code == HTTPStatus.NOT_FOUND:
return None
if response.status_code != HTTPStatus.OK:
raise RuntimeError('{}:{}'.format(response.status_code,
response.content))
return response.json()
def delete_custom_object(self, crd_kind: CrdKind,
custom_object_name: str, namespace='default'):
response = requests.delete(
'{api_server_url}/namespaces/{namespace}/fedlearner/'
'v1alpha1/{crd_kind}/{name}'.format(
api_server_url=self._api_server_url,
namespace=namespace,
crd_kind=crd_kind.value,
name=custom_object_name))
if response.status_code not in [HTTPStatus.OK, HTTPStatus.NOT_FOUND]:
raise RuntimeError('{}:{}'.format(response.status_code,
response.content))
return response.json()
def create_or_replace_custom_object(self, crd_kind: CrdKind, json_object,
namespace='default'):
custom_object_name = json_object['metadata']['name']
response = requests.get(
'{api_server_url}/namespaces/{namespace}/fedlearner/'
'v1alpha1/{crd_kind}/{name}'.format(
api_server_url=self._api_server_url,
namespace=namespace,
crd_kind=crd_kind.value,
name=custom_object_name))
if response.status_code == HTTPStatus.OK:
# If exist, replace
self.delete_custom_object(crd_kind, custom_object_name, namespace)
elif response.status_code != HTTPStatus.NOT_FOUND:
raise RuntimeError('{}:{}'.format(response.status_code,
response.content))
response = requests.post(
'{api_server_url}/namespaces/{namespace}/fedlearner/'
'v1alpha1/{crd_kind}'.format(
api_server_url=self._api_server_url,
namespace=namespace,
crd_kind=crd_kind.value),
json=json_object)
if response.status_code != HTTPStatus.CREATED:
raise RuntimeError('{}:{}'.format(response.status_code,
response.content))
return response.json()
def list_resource_of_custom_object(self, crd_kind: CrdKind,
custom_object_name: str,
resource_type: str, namespace='default'):
response = requests.get(
'{api_server_url}/namespaces/{namespace}/fedlearner/v1alpha1/'
'{plural}/{name}/{resource_type}'.format(
api_server_url=self._api_server_url,
namespace=namespace,
plural=crd_kind.value,
name=custom_object_name,
resource_type=resource_type))
if response.status_code == HTTPStatus.NOT_FOUND:
return None
if response.status_code != HTTPStatus.OK:
raise RuntimeError('{}:{}'.format(response.status_code,
response.content))
return response.json()
def get_webshell_session(self, flapp_name: str,
container_name: str, namespace='default'):
response = requests.get(
'{api_server_url}/namespaces/{namespace}/pods/{custom_object_name}/'
'shell/{container_name}'.format(
api_server_url=self._api_server_url,
namespace=namespace,
custom_object_name=flapp_name,
container_name=container_name))
if response.status_code != HTTPStatus.OK:
raise RuntimeError('{}:{}'.format(response.status_code,
response.content))
return response.json()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,958 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/auth/apis.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=cyclic-import
from http import HTTPStatus
from flask import request
from flask_restful import Resource, reqparse
from flask_jwt_extended import jwt_required, create_access_token
from fedlearner_webconsole.db import db
from fedlearner_webconsole.auth.models import User
from fedlearner_webconsole.exceptions import (
NotFoundException, InvalidArgumentException,
ResourceConflictException, UnauthorizedException)
class SigninApi(Resource):
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('username', required=True, help='username is empty')
parser.add_argument('password', required=True, help='password is empty')
data = parser.parse_args()
username = data['username']
password = data['password']
user = User.query.filter_by(username=username).first()
if user is None:
raise NotFoundException()
if not user.verify_password(password):
raise UnauthorizedException('Invalid password')
token = create_access_token(identity=username)
return {'id': user.id, 'access_token': token}, HTTPStatus.OK
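# Example exchange for this endpoint (illustrative values only):
#   POST /auth/signin  {"username": "ada", "password": "pw"}
#   -> 200 {"id": 1, "access_token": "<JWT>"}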
class UsersApi(Resource):
@jwt_required()
def get(self):
return {'data': [row.to_dict() for row in User.query.all()]}
@jwt_required()
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('username', required=True, help='username is empty')
parser.add_argument('password', required=True, help='password is empty')
data = parser.parse_args()
username = data['username']
password = data['password']
if User.query.filter_by(username=username).first() is not None:
raise ResourceConflictException(
'user {} already exists'.format(username))
user = User(username=username)
user.set_password(password)
db.session.add(user)
db.session.commit()
return {'id': user.id, 'username': user.username}, HTTPStatus.CREATED
class UserApi(Resource):
def _find_user(self, user_id):
user = User.query.filter_by(id=user_id).first()
if user is None:
raise NotFoundException()
return user
@jwt_required()
def get(self, user_id):
user = self._find_user(user_id)
return user.to_dict(), HTTPStatus.OK
@jwt_required()
def put(self, user_id):
user = self._find_user(user_id)
data = request.get_json()
new_password = data.pop('new_password', None)
if new_password:
old_password = data.pop('old_password', None)
if data:
details = {}
for key in data.keys():
details[key] = 'Invalid field'
raise InvalidArgumentException(details=details)
if new_password:
if not user.verify_password(old_password):
raise UnauthorizedException(message='Wrong old password')
user.set_password(new_password)
db.session.commit()
return {'username': user.username}, HTTPStatus.OK
@jwt_required()
def delete(self, user_id):
user = self._find_user(user_id)
db.session.delete(user)
db.session.commit()
return {'username': user.username}, HTTPStatus.OK
def initialize_auth_apis(api):
api.add_resource(SigninApi, '/auth/signin')
api.add_resource(UsersApi, '/auth/users')
api.add_resource(UserApi, '/auth/users/<int:user_id>')
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,959 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/test/fedlearner_webconsole/job/yaml_formatter_test.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import unittest
import tarfile
import base64
from io import BytesIO
from fedlearner_webconsole.job.yaml_formatter import format_yaml, code_dict_encode
class YamlFormatterTest(unittest.TestCase):
def test_format_with_phs(self):
project = {
'variables[0]':
{'storage_root_dir': 'root_dir'}
}
workflow = {
'jobs': {
'raw_data_job': {'name': 'raw_data123'}
}
}
yaml = format_yaml("""
{
"name": "OUTPUT_BASE_DIR",
"value": "${project.variables[0].storage_root_dir}/raw_data/${workflow.jobs.raw_data_job.name}"
}
""", project=project, workflow=workflow)
self.assertEqual(yaml, """
{
"name": "OUTPUT_BASE_DIR",
"value": "root_dir/raw_data/raw_data123"
}
""")
self.assertEqual(format_yaml('$project.variables[0].storage_root_dir',
project=project),
project['variables[0]']['storage_root_dir'])
def test_format_with_no_ph(self):
self.assertEqual(format_yaml('{a: 123, b: 234}'),
'{a: 123, b: 234}')
def test_format_yaml_unknown_ph(self):
x = {
'y': 123
}
with self.assertRaises(RuntimeError) as cm:
format_yaml('$x.y is $i.j.k', x=x)
self.assertEqual(str(cm.exception), 'Unknown placeholder: i.j.k')
with self.assertRaises(RuntimeError) as cm:
format_yaml('$x.y is ${i.j}', x=x)
self.assertEqual(str(cm.exception), 'Unknown placeholder: i.j')
def test_encode_code(self):
test_data = {'test/a.py': 'awefawefawefawefwaef',
'test1/b.py': 'asdfasd',
'c.py': '',
'test/d.py': 'asdf'}
code_base64 = code_dict_encode(test_data)
code_dict = {}
if code_base64.startswith('base64://'):
tar_binary = BytesIO(base64.b64decode(code_base64[9:]))
with tarfile.open(fileobj=tar_binary) as tar:
for file in tar.getmembers():
code_dict[file.name] = str(tar.extractfile(file).read(),
encoding='utf-8')
self.assertEqual(code_dict, test_data)
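# Round-trip summary of what this test exercises: code_dict_encode packs
# a {path: source} dict into a tar archive, base64-encodes it, and
# prefixes the string with 'base64://'; the loop above reverses that to
# recover the original dict.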
if __name__ == '__main__':
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,960 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/rpc/client.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=broad-except
import logging
import grpc
from fedlearner_webconsole.proto import (
service_pb2, service_pb2_grpc, common_pb2
)
def _build_channel(url, authority):
"""A helper function to build gRPC channel for easy testing."""
return grpc.insecure_channel(
target=url,
# options defined at
# https://github.com/grpc/grpc/blob/master/include/grpc/impl/codegen/grpc_types.h
options=[('grpc.default_authority', authority)]
)
class RpcClient(object):
def __init__(self, project_config, receiver_config):
self._project = project_config
self._receiver = receiver_config
self._auth_info = service_pb2.ProjAuthInfo(
project_name=self._project.name,
target_domain=self._receiver.domain_name,
auth_token=self._project.token)
egress_url = 'fedlearner-stack-ingress-nginx-controller.default'\
'.svc.cluster.local:80'
for variable in self._project.variables:
if variable.name == 'EGRESS_URL':
egress_url = variable.value
break
self._client = service_pb2_grpc.WebConsoleV2ServiceStub(_build_channel(
egress_url,
self._receiver.grpc_spec.authority
))
def _get_metadata(self):
metadata = []
x_host_prefix = 'fedlearner-webconsole-v2'
for variable in self._project.variables:
if variable.name == 'X_HOST':
x_host_prefix = variable.value
break
metadata.append(('x-host', '{}.{}'.format(x_host_prefix,
self._receiver.domain_name)))
for key, value in self._receiver.grpc_spec.extra_headers.items():
metadata.append((key, value))
# metadata is a tuple of tuples
return tuple(metadata)
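# Shape of the metadata returned above, with hypothetical values:
#   (('x-host', 'fedlearner-webconsole-v2.example.com'),
#    ('extra-header-key', 'extra-header-value'))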
def check_connection(self):
msg = service_pb2.CheckConnectionRequest(
auth_info=self._auth_info)
try:
response = self._client.CheckConnection(
request=msg, metadata=self._get_metadata())
if response.status.code != common_pb2.STATUS_SUCCESS:
logging.debug('check_connection request error: %s',
response.status.msg)
return response.status
except Exception as e:
logging.error('check_connection request error: %s',
repr(e))
return common_pb2.Status(
code=common_pb2.STATUS_UNKNOWN_ERROR,
msg=repr(e))
def update_workflow_state(self, name, state, target_state,
transaction_state, uuid, forked_from_uuid):
msg = service_pb2.UpdateWorkflowStateRequest(
auth_info=self._auth_info,
workflow_name=name,
state=state.value,
target_state=target_state.value,
transaction_state=transaction_state.value,
uuid=uuid,
forked_from_uuid=forked_from_uuid
)
try:
response = self._client.UpdateWorkflowState(
request=msg, metadata=self._get_metadata())
if response.status.code != common_pb2.STATUS_SUCCESS:
logging.error(
'update_workflow_state request error: %s',
response.status.msg)
return response
except Exception as e:
logging.error('workflow %s update_workflow_state request error: %s'
, name, repr(e))
return service_pb2.UpdateWorkflowStateResponse(
status=common_pb2.Status(
code=common_pb2.STATUS_UNKNOWN_ERROR,
msg=repr(e)))
def get_workflow(self, name):
msg = service_pb2.GetWorkflowRequest(
auth_info=self._auth_info,
workflow_name=name)
try:
response = self._client.GetWorkflow(
request=msg, metadata=self._get_metadata())
if response.status.code != common_pb2.STATUS_SUCCESS:
logging.error(
'workflow %s get_workflow request error: %s',
name,
response.status.msg)
return response
except Exception as e:
logging.error('workflow %s get_workflow request error: %s',
name,
repr(e))
return service_pb2.GetWorkflowResponse(
status=common_pb2.Status(
code=common_pb2.STATUS_UNKNOWN_ERROR,
msg=repr(e)))
def update_workflow(self, name, config):
msg = service_pb2.UpdateWorkflowRequest(
auth_info=self._auth_info,
workflow_name=name,
config=config)
try:
response = self._client.UpdateWorkflow(
request=msg, metadata=self._get_metadata())
if response.status.code != common_pb2.STATUS_SUCCESS:
logging.error(
'update_workflow request error: %s',
response.status.msg)
return response
except Exception as e:
logging.error('update_workflow request error: %s', repr(e))
return service_pb2.UpdateWorkflowResponse(
status=common_pb2.Status(
code=common_pb2.STATUS_UNKNOWN_ERROR,
msg=repr(e)))
def get_job_metrics(self, job_name):
msg = service_pb2.GetJobMetricsRequest(
auth_info=self._auth_info,
job_name=job_name)
try:
response = self._client.GetJobMetrics(
request=msg, metadata=self._get_metadata())
if response.status.code != common_pb2.STATUS_SUCCESS:
logging.error(
'get_job_metrics request error: %s',
response.status.msg)
return response
except Exception as e:
logging.error('get_job_metrics request error: %s', repr(e))
return service_pb2.GetJobMetricsResponse(
status=common_pb2.Status(
code=common_pb2.STATUS_UNKNOWN_ERROR,
msg=repr(e)))
def get_job_events(self, job_name, start_time, max_lines):
# NOTE: assumes the service defines GetJobEvents types mirroring
# GetJobMetrics; the original reused GetJobMetricsRequest here, which
# cannot carry start_time/max_lines.
msg = service_pb2.GetJobEventsRequest(
auth_info=self._auth_info,
job_name=job_name,
start_time=start_time,
max_lines=max_lines)
try:
response = self._client.GetJobEvents(
request=msg, metadata=self._get_metadata())
if response.status.code != common_pb2.STATUS_SUCCESS:
logging.error(
'get_job_events request error: %s',
response.status.msg)
return response
except Exception as e:
logging.error('get_job_events request error: %s', repr(e))
return service_pb2.GetJobEventsResponse(
status=common_pb2.Status(
code=common_pb2.STATUS_UNKNOWN_ERROR,
msg=repr(e)))
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,961 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/k8s_client.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import threading
import os
from fedlearner_webconsole.utils.k8s_client import K8sClient
from fedlearner_webconsole.utils.fake_k8s_client import FakeK8sClient
_k8s_client = None
# Module-level lock: creating a fresh Lock() per call would not serialize
# concurrent initializations.
_k8s_client_lock = threading.Lock()
def get_client():
# pylint: disable=global-statement
global _k8s_client
if _k8s_client is None:
with _k8s_client_lock:
# Thread-safe singleton
if _k8s_client is None:
if os.environ.get('FLASK_ENV') == 'production':
_k8s_client = K8sClient()
else:
_k8s_client = FakeK8sClient()
return _k8s_client
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,962 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer/bridge.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=protected-access
import time
import logging
import os
import threading
import collections
from concurrent import futures
import grpc
import google.protobuf.any_pb2
import tensorflow.compat.v1 as tf
from fedlearner.common import common_pb2 as common_pb
from fedlearner.common import trainer_worker_service_pb2 as tws_pb
from fedlearner.common import trainer_worker_service_pb2_grpc as tws_grpc
from fedlearner.proxy.channel import make_insecure_channel, ChannelType
from fedlearner.common import metrics
def make_ready_client(channel, stop_event=None):
channel_ready = grpc.channel_ready_future(channel)
wait_secs = 0.5
start_time = time.time()
while (stop_event is None) or (not stop_event.is_set()):
try:
channel_ready.result(timeout=wait_secs)
break
except grpc.FutureTimeoutError:
logging.warning(
'Channel has not been ready for %.2f seconds',
time.time()-start_time)
if wait_secs < 5.0:
wait_secs *= 1.2
except Exception as e: # pylint: disable=broad-except
logging.warning('Waiting channel ready: %s', repr(e))
return tws_grpc.TrainerWorkerServiceStub(channel)
class _MessageQueue(object):
def __init__(self, window_size=100):
super(_MessageQueue, self).__init__()
self._window_size = window_size
self._condition = threading.Condition()
self._queue = collections.deque()
self._next = 0
def size(self):
with self._condition:
return len(self._queue)
def confirm(self, next_seq_num):
with self._condition:
while self._queue and self._queue[0].seq_num < next_seq_num:
self._queue.popleft()
if self._next > 0:
self._next -= 1
self._condition.notifyAll()
def resend(self, seq_num):
with self._condition:
while self._next > 0 and \
(self._next >= len(self._queue) or \
self._queue[self._next].seq_num > seq_num):
self._next -= 1
if self._queue:
logging.warning(
'Message with seq_num=%d missing. Resending from %d',
seq_num, self._queue[self._next].seq_num)
self._condition.notifyAll()
def put(self, msg):
with self._condition:
while len(self._queue) >= self._window_size:
self._condition.wait()
self._queue.append(msg)
self._condition.notifyAll()
def get(self, event):
with self._condition:
while self._next == len(self._queue):
if not self._condition.wait(10.0) and self._queue:
logging.warning(
'Timeout waiting for confirmation. Resending from %d',
self._queue[0].seq_num)
self._next = 0
if event.is_set():
raise StopIteration
if event.is_set():
raise StopIteration
assert self._next < len(self._queue)
msg = self._queue[self._next]
self._next += 1
return msg
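# Sliding-window semantics of _MessageQueue, as used by the streaming
# client below: put() blocks once window_size unacked messages are
# queued, get() hands out the next unsent message, confirm(n) drops
# everything acknowledged below seq_num n, and resend(n) rewinds the
# cursor so transmission restarts from seq_num n after a broken stream.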
class Bridge(object):
class TrainerWorkerServicer(tws_grpc.TrainerWorkerServiceServicer):
def __init__(self, bridge):
super(Bridge.TrainerWorkerServicer, self).__init__()
self._bridge = bridge
def Transmit(self, request, context):
return self._bridge._transmit_handler(request)
def StreamTransmit(self, request_iterator, context):
for request in request_iterator:
yield self._bridge._transmit_handler(request)
def LoadDataBlock(self, request, context):
return self._bridge._data_block_handler(request)
def Connect(self, request, context):
return self._bridge._connect_handler(request)
def Heartbeat(self, request, context):
return self._bridge._heartbeat_handler(request)
def Terminate(self, request, context):
return self._bridge._terminate_handler(request)
def __init__(self,
role,
listen_port,
remote_address,
app_id=None,
rank=0,
streaming_mode=True,
compression=grpc.Compression.NoCompression,
iter_timeout=1800):
self._role = role
self._listen_port = listen_port
self._remote_address = remote_address
if app_id is None:
app_id = 'test_trainer'
self._app_id = app_id
self._rank = rank
self._streaming_mode = streaming_mode
self._compression = compression
self._iter_timeout = iter_timeout
self._prefetch_handlers = []
self._data_block_handler_fn = None
# Connection related
self._connected = False
self._connected_at = 0
self._terminated = False
self._terminated_at = 0
self._peer_terminated = False
self._identifier = '%s-%s-%d-%d' % (
app_id, role, rank, int(time.time())) # Ensure unique per run
self._peer_identifier = ''
# data transmit
self._condition = threading.Condition()
self._iter_started_at = 0
self._current_iter_id = None
self._next_iter_id = 0
self._peer_next_iter_id = 0
self._received_data = {}
# grpc client
self._transmit_send_lock = threading.Lock()
self._client_lock = threading.Lock()
self._grpc_options = [
('grpc.max_send_message_length', 2**31-1),
('grpc.max_receive_message_length', 2**31-1)
]
self._channel = make_insecure_channel(
remote_address, ChannelType.REMOTE,
options=self._grpc_options, compression=self._compression)
self._client = tws_grpc.TrainerWorkerServiceStub(self._channel)
self._next_send_seq_num = 0
self._transmit_queue = _MessageQueue()
self._client_daemon = None
self._client_daemon_shutdown_fn = None
# server
self._transmit_receive_lock = threading.Lock()
self._next_receive_seq_num = 0
self._server = grpc.server(
futures.ThreadPoolExecutor(max_workers=10),
options=self._grpc_options,
compression=self._compression)
tws_grpc.add_TrainerWorkerServiceServicer_to_server(
Bridge.TrainerWorkerServicer(self), self._server)
self._server.add_insecure_port('[::]:%d' % listen_port)
def __del__(self):
self.terminate()
@property
def role(self):
return self._role
@property
def connected_at(self):
if self._connected:
return self._connected_at
return None
@property
def terminated_at(self):
if self._terminated:
return self._terminated_at
return None
def _rpc_with_retry(self, sender, err_log):
while True:
with self._client_lock:
try:
return sender()
except Exception as e: # pylint: disable=broad-except
logging.warning(
"%s: %s. Retry in 1s...", err_log, repr(e))
metrics.emit_counter('reconnect_counter', 1)
self._channel.close()
time.sleep(1)
self._channel = make_insecure_channel(
self._remote_address, ChannelType.REMOTE,
options=self._grpc_options,
compression=self._compression)
self._client = make_ready_client(self._channel)
self._check_remote_heartbeat(self._client)
def _client_daemon_fn(self):
stop_event = threading.Event()
generator = None
channel = make_insecure_channel(
self._remote_address, ChannelType.REMOTE,
options=self._grpc_options, compression=self._compression)
client = make_ready_client(channel, stop_event)
def shutdown_fn():
while self._transmit_queue.size():
logging.debug(
"Waiting for message queue's being cleaned. "
"Queue size: %d", self._transmit_queue.size())
time.sleep(1)
stop_event.set()
if generator is not None:
generator.cancel()
self._client_daemon_shutdown_fn = shutdown_fn
while not stop_event.is_set():
try:
event = threading.Event()
def iterator():
while True:
item = self._transmit_queue.get(event)
logging.debug("Streaming send message seq_num=%d",
item.seq_num)
yield item
generator = client.StreamTransmit(iterator())
for response in generator:
if response.status.code == common_pb.STATUS_SUCCESS:
self._transmit_queue.confirm(response.next_seq_num)
logging.debug("Message with seq_num=%d is "
"confirmed", response.next_seq_num-1)
elif response.status.code == \
common_pb.STATUS_MESSAGE_DUPLICATED:
self._transmit_queue.confirm(response.next_seq_num)
logging.debug("Resent Message with seq_num=%d is "
"confirmed", response.next_seq_num-1)
elif response.status.code == \
common_pb.STATUS_MESSAGE_MISSING:
self._transmit_queue.resend(response.next_seq_num)
else:
raise RuntimeError("Trainsmit failed with %d" %
response.status.code)
except Exception as e: # pylint: disable=broad-except
if not stop_event.is_set():
logging.warning("Bridge streaming broken: %s.", repr(e))
metrics.emit_counter('reconnect_counter', 1)
finally:
generator.cancel()
channel.close()
event.set()
time.sleep(1)
self._transmit_queue.resend(-1)
channel = make_insecure_channel(
self._remote_address, ChannelType.REMOTE,
options=self._grpc_options, compression=self._compression)
client = make_ready_client(channel, stop_event)
self._check_remote_heartbeat(client)
def _transmit(self, msg):
assert self._connected, "Cannot transmit before connect"
metrics.emit_counter('send_counter', 1)
with self._transmit_send_lock:
msg.seq_num = self._next_send_seq_num
self._next_send_seq_num += 1
if self._streaming_mode:
self._transmit_queue.put(msg)
return
def sender():
rsp = self._client.Transmit(msg)
assert rsp.status.code == common_pb.STATUS_SUCCESS, \
"Transmit error with code %d."%rsp.status.code
self._rpc_with_retry(sender, "Bridge transmit failed")
def _transmit_handler(self, request):
assert self._connected, "Cannot transmit before connect"
metrics.emit_counter('receive_counter', 1)
with self._transmit_receive_lock:
logging.debug("Received message seq_num=%d."
" Wanted seq_num=%d.",
request.seq_num, self._next_receive_seq_num)
if request.seq_num > self._next_receive_seq_num:
return tws_pb.TrainerWorkerResponse(
status=common_pb.Status(
code=common_pb.STATUS_MESSAGE_MISSING),
next_seq_num=self._next_receive_seq_num)
if request.seq_num < self._next_receive_seq_num:
return tws_pb.TrainerWorkerResponse(
status=common_pb.Status(
code=common_pb.STATUS_MESSAGE_DUPLICATED),
next_seq_num=self._next_receive_seq_num)
# request.seq_num == self._next_receive_seq_num
self._next_receive_seq_num += 1
if request.HasField('start'):
with self._condition:
self._received_data[request.start.iter_id] = {}
elif request.HasField('commit'):
self._peer_next_iter_id = request.commit.iter_id + 1
elif request.HasField('data'):
with self._condition:
assert request.data.iter_id in self._received_data
self._received_data[
request.data.iter_id][
request.data.name] = request.data
self._condition.notifyAll()
elif request.HasField('prefetch'):
for func in self._prefetch_handlers:
func(request.prefetch)
else:
return tws_pb.TrainerWorkerResponse(
status=common_pb.Status(
code=common_pb.STATUS_INVALID_REQUEST),
next_seq_num=self._next_receive_seq_num)
return tws_pb.TrainerWorkerResponse(
next_seq_num=self._next_receive_seq_num)
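# Worked example of the sequencing above: if the receiver expects
# seq_num=5 and gets 7, it answers STATUS_MESSAGE_MISSING with
# next_seq_num=5 and the sender rewinds (resend); if it gets 3, it
# answers STATUS_MESSAGE_DUPLICATED and the sender drops the acked
# message (confirm).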
def _data_block_handler(self, request):
assert self._connected, "Cannot load data before connect"
if not self._data_block_handler_fn:
raise RuntimeError("Received DataBlockMessage but" \
" no handler registered")
metrics.emit_counter('load_data_block_counter', 1)
if self._data_block_handler_fn(request):
logging.info('Succeeded to load data block %s',
request.block_id)
return common_pb.Status(code=common_pb.STATUS_SUCCESS)
metrics.emit_counter('load_data_block_fail_counter', 1)
logging.info('Failed to load data block %s', request.block_id)
return common_pb.Status(code=common_pb.STATUS_INVALID_DATA_BLOCK)
def _connect_handler(self, request):
assert request.app_id == self._app_id, \
"Connection failed. Application id mismatch: %s vs %s"%(
request.app_id, self._app_id)
assert request.worker_rank == self._rank, \
"Connection failed. Rank mismatch: %s vs %s"%(
request.worker_rank, self._rank)
assert len(request.identifier) > 0, \
"Connection failed. An identifier should be offered!"
with self._condition:
if self._connected:
# If this is a duplicated request from the peer, just ignore it.
# If a new connect request from peer, suicide.
if request.identifier != self._peer_identifier:
logging.error('Suicide as peer %s has restarted!',
request.identifier)
os._exit(138) # Tell Scheduler to restart myself
else:
self._peer_identifier = request.identifier
self._connected = True
self._connected_at = max(self._connected_at, int(time.time()))
self._condition.notifyAll()
return tws_pb.ConnectResponse(
app_id=self._app_id, worker_rank=self._rank,
timestamp=self._connected_at)
def _heartbeat_handler(self, request):
return tws_pb.HeartbeatResponse(app_id=self._app_id,
worker_rank=self._rank,
current_iter_id=self._current_iter_id)
def _terminate_handler(self, request):
with self._condition:
self._peer_terminated = True
self._terminated_at = max(self._terminated_at, int(time.time()))
self._condition.notifyAll()
return tws_pb.TerminateResponse(
timestamp=self._terminated_at)
def _check_remote_heartbeat(self, client):
try:
rsp = client.Heartbeat(tws_pb.HeartbeatRequest())
logging.debug("Heartbeat success: %s:%d at iteration %s.",
rsp.app_id, rsp.worker_rank, rsp.current_iter_id)
return True
except Exception as e: # pylint: disable=broad-except
logging.warning("Heartbeat request failed: %s", repr(e))
return False
def _check_iter_timeout(self):
if self._iter_timeout <= 0:
return
with self._condition:
if not self._current_iter_id:
return
duration = time.time() - self._iter_started_at
if duration > self._iter_timeout:
msg = 'Suicide as iter run timeout, duration: {}.' \
' maybe blocked in some point.'.format(duration)
logging.fatal(msg)
os._exit(138)
def _supervise_fn(self):
check_handlers = []
if self._iter_timeout > 0:
logging.info('enable supervised iteration timeout: %f',
self._iter_timeout)
check_handlers.append(self._check_iter_timeout)
if len(check_handlers) == 0:
return
while True:
with self._condition:
if self._terminated:
return
for handler in check_handlers:
handler()
time.sleep(10)
def connect(self):
if self._connected:
logging.warning("Bridge already connected!")
return
self._server.start()
# Get ACK from peer
msg = tws_pb.ConnectRequest(app_id=self._app_id,
worker_rank=self._rank,
identifier=self._identifier)
resp = self._rpc_with_retry(
lambda: self._client.Connect(msg),
"Bridge failed to connect")
logging.debug('Has connected to peer.')
# Ensure REQ from peer
with self._condition:
self._connected_at = max(self._connected_at, resp.timestamp)
while not self._connected:
self._condition.wait()
logging.debug('Connected from peer.')
if self._streaming_mode:
logging.debug('enter streaming_mode.')
self._client_daemon = threading.Thread(
target=self._client_daemon_fn, daemon=True)
self._client_daemon.start()
supervise_thread = threading.Thread(
target=self._supervise_fn, daemon=True)
supervise_thread.start()
logging.debug('finish connect.')
def terminate(self, forced=False):
with self._condition:
if not self._connected or self._terminated:
return
self._terminated = True
try:
if self._client_daemon is not None:
self._client_daemon_shutdown_fn()
self._client_daemon.join()
except Exception as e: # pylint: disable=broad-except
logging.warning(
'Error during streaming shutdown: %s', repr(e))
# Get ACK from peer
resp = self._rpc_with_retry(
lambda: self._client.Terminate(tws_pb.TerminateRequest()),
"Failed to send terminate message")
logging.debug('Waiting for peer to terminate.')
# Ensure REQ from peer
with self._condition:
self._terminated_at = max(self._terminated_at, resp.timestamp)
while not self._peer_terminated:
self._condition.wait()
self._server.stop(None)
logging.debug("Bridge connection terminated")
@property
def current_iter_id(self):
return self._current_iter_id
def new_iter_id(self):
iter_id = self._next_iter_id
self._next_iter_id += 1
return iter_id
def start(self, iter_id):
assert self._current_iter_id is None, "Last iter not finished"
with self._condition:
self._iter_started_at = time.time()
self._current_iter_id = iter_id
msg = tws_pb.TrainerWorkerMessage(start=tws_pb.StartMessage(
iter_id=iter_id))
self._transmit(msg)
logging.debug("Starting iter %d", iter_id)
def commit(self):
assert self._current_iter_id is not None, "Not started yet"
with self._condition:
last_iter_id = self._current_iter_id
self._current_iter_id = None
if last_iter_id in self._received_data:
del self._received_data[last_iter_id]
msg = tws_pb.TrainerWorkerMessage(commit=tws_pb.CommitMessage(
iter_id=last_iter_id))
self._transmit(msg)
logging.debug("iter %d committed", last_iter_id)
def register_data_block_handler(self, func):
assert self._data_block_handler_fn is None, \
"DataBlock handler already registered"
self._data_block_handler_fn = func
def load_data_block(self, count, block_id):
msg = tws_pb.LoadDataBlockRequest(count=count, block_id=block_id)
logging.debug("sending DataBlock with id %s", block_id)
stat = self._rpc_with_retry(
lambda: self._client.LoadDataBlock(msg),
"Failed to send load data block request")
if stat.code == common_pb.STATUS_SUCCESS:
            logging.info('Remote peer successfully loaded data block %s',
                         block_id)
            return True
        logging.info('Remote peer failed to load data block %s. code: %d',
                     block_id, stat.code)
return False
def register_prefetch_handler(self, func):
self._prefetch_handlers.append(func)
def prefetch(self, iter_id, sample_ids):
msg = tws_pb.TrainerWorkerMessage(prefetch=tws_pb.PrefetchMessage(
iter_id=iter_id, sample_ids=sample_ids))
self._transmit(msg)
def send_proto(self, iter_id, name, proto):
any_proto = google.protobuf.any_pb2.Any()
any_proto.Pack(proto)
msg = tws_pb.TrainerWorkerMessage(data=tws_pb.DataMessage(
iter_id=iter_id, name=name, any_data=any_proto))
self._transmit(msg)
logging.debug('Data: send protobuf %s for iter %d. seq_num=%d.',
name, iter_id, msg.seq_num)
def send(self, iter_id, name, x):
msg = tws_pb.TrainerWorkerMessage(data=tws_pb.DataMessage(
iter_id=iter_id, name=name, tensor=tf.make_tensor_proto(x)))
self._transmit(msg)
logging.debug('Data: send %s for iter %d. seq_num=%d.',
name, iter_id, msg.seq_num)
def send_op(self, name, x):
def func(x):
assert self._current_iter_id is not None, "Bridge not started"
self.send(self._current_iter_id, name, x.numpy())
out = tf.py_function(func=func, inp=[x], Tout=[], name='send_' + name)
return out
def _receive(self, iter_id, name):
logging.debug(
'Data: Waiting to receive %s for iter %d.', name, iter_id)
start_time = time.time()
with self._condition:
while (iter_id not in self._received_data) \
or (name not in self._received_data[iter_id]):
if self._peer_next_iter_id > iter_id:
msg = 'Peer committed without sending %s. ' \
'Please check model code'%name
logging.fatal(msg)
raise RuntimeError(msg)
if not self._condition.wait(10):
logging.warning(
'Data: Still waiting to receive %s for iter %d...',
name, iter_id)
data = self._received_data[iter_id][name]
duration = time.time() - start_time
metrics.emit_timer('receive_timer', duration)
logging.debug(
'Data: received %s for iter %d after %f sec.',
name, iter_id, duration)
return data
def receive_proto(self, iter_id, name):
return self._receive(iter_id, name).any_data
def receive(self, iter_id, name):
return tf.make_ndarray(self._receive(iter_id, name).tensor)
def receive_op(self, name, dtype):
def func():
assert self._current_iter_id is not None, "Bridge not started"
x = self.receive(self._current_iter_id, name)
return tf.convert_to_tensor(x, dtype=dtype)
return tf.py_function(func=func, inp=[], Tout=[dtype])[0]
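# A minimal pairing sketch (illustrative, not part of this module): with two
# connected bridges, a send_op on one side must be matched by a receive_op
# under the same name on the other side within the same iteration, e.g.
#
#   leader graph:    bridge.send_op('act1', embedding)
#   follower graph:  bridge.receive_op('act1', dtype=tf.float32)
#
# Both wrap tf.py_function, so they execute eagerly inside the session; the
# receiving side blocks in _receive() until the peer's DataMessage for the
# current iter_id arrives.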
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,963 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer/patch.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=protected-access
import time
from tensorflow.python.client import session
from tensorflow.python.framework import meta_graph, ops
from tensorflow.python.framework.versions import VERSION
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management, session_manager
from tensorflow.python.training.basic_session_run_hooks \
import CheckpointSaverHook
assert VERSION.startswith("1.15."), "Monkey patch is only valid for TF 1.15."
def new_restore_checkpoint(self,
master,
saver=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None,
wait_for_checkpoint=False,
max_wait_secs=7200,
config=None):
"""Creates a `Session`, and tries to restore a checkpoint if needed.
Args:
master: `String` representation of the TensorFlow master to use.
saver: A `Saver` object used to restore a model.
checkpoint_dir: Path to the checkpoint files. The latest checkpoint
in the dir will be used to restore.
checkpoint_filename_with_path: Full file name path to the checkpoint
file.
wait_for_checkpoint: Whether to wait for checkpoint to become
available.
max_wait_secs: Maximum time to wait for checkpoints to become
available.
config: Optional `ConfigProto` proto used to configure the session.
Returns:
A pair (sess, is_restored) where 'is_restored' is `True` if the
session could be restored, `False` otherwise.
Raises:
ValueError: If both checkpoint_dir and checkpoint_filename_with_path
are set.
"""
self._target = master
sess = session.Session(self._target, graph=self._graph, config=config)
if checkpoint_dir and checkpoint_filename_with_path:
raise ValueError("Can not provide both checkpoint_dir and "
"checkpoint_filename_with_path.")
    # If variables & resources on the PS have been initialized, do not recover.
is_ready_for_local_init, _ = self._model_ready_for_local_init(sess)
if is_ready_for_local_init:
return sess, True
# If either saver or checkpoint_* is not specified, cannot restore. Just
# return.
if not saver or not (checkpoint_dir or checkpoint_filename_with_path):
return sess, False
if checkpoint_filename_with_path:
saver.restore(sess, checkpoint_filename_with_path)
return sess, True
# Waits up until max_wait_secs for checkpoint to become available.
wait_time = 0
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
while not ckpt or not ckpt.model_checkpoint_path:
if wait_for_checkpoint and wait_time < max_wait_secs:
logging.info("Waiting for checkpoint to be available.")
time.sleep(self._recovery_wait_secs)
wait_time += self._recovery_wait_secs
ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
else:
return sess, False
# Loads the checkpoint.
saver.restore(sess, ckpt.model_checkpoint_path)
saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
return sess, True
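# Monkey patch: compared with the stock TF 1.15 implementation, the version
# above appears to add only the early return when _model_ready_for_local_init
# reports that the PS variables are already initialized, letting a restarted
# worker rejoin a running cluster without re-reading the checkpoint.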
session_manager.SessionManager._restore_checkpoint = new_restore_checkpoint
old_CheckpointSaverHook_after_create_session = \
CheckpointSaverHook.after_create_session
def _new_CheckpointSaverHook_after_create_session(self, sess, coord):
global_step = sess.run(self._global_step_tensor)
try:
ckpt_tensor = sess.graph.get_tensor_by_name('data_checkpoint:0')
self.data_checkpoint = sess.run(ckpt_tensor)
    except KeyError:
logging.info("tensor data_checkpoint:0 doesn't exist")
    # We write the graph and saver_def at the first call of before_run.
    # We cannot do this in begin, since we let other hooks change the graph
    # and add variables in begin. The graph is finalized after all begin calls.
logging.info('Skip the writing of [graph.pbtxt]')
# training_util.write_graph(
# ops.get_default_graph().as_graph_def(add_shapes=True),
# self._checkpoint_dir, "graph.pbtxt")
saver_def = self._get_saver().saver_def if self._get_saver() else None
graph = ops.get_default_graph()
meta_graph_def = meta_graph.create_meta_graph_def(
graph_def=graph.as_graph_def(add_shapes=True), saver_def=saver_def)
self._summary_writer.add_graph(graph)
self._summary_writer.add_meta_graph(meta_graph_def)
# The checkpoint saved here is the state at step "global_step".
logging.info('Skip the writing of [checkpoint@%d]', global_step)
# self._save(sess, global_step)
self._timer.update_last_triggered_step(global_step)
CheckpointSaverHook.after_create_session = \
_new_CheckpointSaverHook_after_create_session
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,964 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer_master/data/data_block_queue.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
try:
import queue
except ImportError:
import Queue as queue
class DataBlockQueue(object):
'''
    A quick prototype implementation backed by the Python Queue.
    If the data size grows larger than local memory, replace it with Redis
    or another distributed store.
'''
def __init__(self, maxsize=0):
self._db_queue = queue.Queue(maxsize=maxsize)
def put(self, data_block):
self._db_queue.put(data_block)
def get(self):
return self._db_queue.get(block=True, timeout=5)
def empty(self):
return self._db_queue.empty()
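# Usage sketch (illustrative): a producer such as the leader trainer master
# fills the queue, and the gRPC service drains it when workers ask for blocks.
#
#   dbq = DataBlockQueue()
#   dbq.put(data_block)   # producer side
#   block = dbq.get()     # consumer side; raises queue.Empty after 5 seconds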
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,965 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/app.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=wrong-import-position, global-statement
import importlib
import logging
import os
import traceback
from http import HTTPStatus
from flask import Flask, jsonify
from flask_migrate import Migrate
from flask_restful import Api
from flask_jwt_extended import JWTManager
migrate = Migrate()
jwt = JWTManager()
from fedlearner_webconsole.auth.apis import initialize_auth_apis
from fedlearner_webconsole.project.apis import initialize_project_apis
from fedlearner_webconsole.workflow_template.apis \
import initialize_workflow_template_apis
from fedlearner_webconsole.workflow.apis import initialize_workflow_apis
from fedlearner_webconsole.dataset.apis import initialize_dataset_apis
from fedlearner_webconsole.job.apis import initialize_job_apis
from fedlearner_webconsole.setting.apis import initialize_setting_apis
from fedlearner_webconsole.rpc.server import rpc_server
from fedlearner_webconsole.db import db
from fedlearner_webconsole.exceptions import (
make_response, WebConsoleApiException, InvalidArgumentException,
NotFoundException)
from fedlearner_webconsole.scheduler.scheduler import scheduler
def _handle_bad_request(error):
"""Handles the bad request raised by reqparse"""
if not isinstance(error, WebConsoleApiException):
# error.data.message contains the details raised by reqparse
details = None
if error.data is not None:
details = error.data['message']
return make_response(InvalidArgumentException(details))
return error
def _handle_not_found(error):
"""Handles the not found exception raised by framework"""
if not isinstance(error, WebConsoleApiException):
return make_response(NotFoundException())
return error
def _handle_uncaught_exception(error):
"""A fallback catcher for all exceptions."""
logging.error('Uncaught exception %s, stack trace:\n %s', str(error),
traceback.format_exc())
response = jsonify(
code=500,
msg='Unknown error',
)
response.status_code = HTTPStatus.INTERNAL_SERVER_ERROR
return response
@jwt.unauthorized_loader
def _handle_unauthorized_request(reason):
response = jsonify(
code=HTTPStatus.UNAUTHORIZED,
msg=reason
)
return response, HTTPStatus.UNAUTHORIZED
@jwt.invalid_token_loader
def _handle_invalid_jwt_request(reason):
response = jsonify(
code=HTTPStatus.UNPROCESSABLE_ENTITY,
msg=reason
)
return response, HTTPStatus.UNPROCESSABLE_ENTITY
@jwt.expired_token_loader
def _handle_token_expired_request(expired_token):
response = jsonify(
code=HTTPStatus.UNAUTHORIZED,
msg='Token has expired'
)
return response, HTTPStatus.UNAUTHORIZED
def create_app(config):
before_hook_path = os.getenv(
'FEDLEARNER_WEBCONSOLE_BEFORE_APP_START')
if before_hook_path:
module_path, func_name = before_hook_path.split(':')
module = importlib.import_module(module_path)
# Dynamically run the function
getattr(module, func_name)()
app = Flask('fedlearner_webconsole')
app.config.from_object(config)
db.init_app(app)
migrate.init_app(app, db)
jwt.init_app(app)
# Error handlers
app.register_error_handler(400, _handle_bad_request)
app.register_error_handler(404, _handle_not_found)
app.register_error_handler(WebConsoleApiException, make_response)
app.register_error_handler(Exception, _handle_uncaught_exception)
api = Api(prefix='/api/v2')
initialize_auth_apis(api)
initialize_project_apis(api)
initialize_workflow_template_apis(api)
initialize_workflow_apis(api)
initialize_job_apis(api)
initialize_dataset_apis(api)
initialize_setting_apis(api)
    # A hack to make flask-restful use our customized error handlers
# Ref: https://github.com/flask-restful/flask-restful/issues/280
handle_exception = app.handle_exception
handle_user_exception = app.handle_user_exception
api.init_app(app)
app.handle_exception = handle_exception
app.handle_user_exception = handle_user_exception
if app.config.get('START_GRPC_SERVER', True):
rpc_server.stop()
rpc_server.start(app)
if app.config.get('START_SCHEDULER', True):
scheduler.stop()
scheduler.start(app)
return app
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,966 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/db.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
from enum import Enum
from datetime import datetime
from typing import List, Dict, Callable
from flask_sqlalchemy import SQLAlchemy
from google.protobuf.message import Message
from google.protobuf.json_format import MessageToDict
db = SQLAlchemy()
def to_dict_mixin(ignores: List[str] = None,
extras: Dict[str, Callable] = None):
if ignores is None:
ignores = []
if extras is None:
extras = {}
def decorator(cls):
"""A decorator to add a to_dict method to a sqlalchemy model class."""
def to_dict(self: db.Model):
"""A helper function to convert a sqlalchemy model to dict."""
dic = {}
# Puts all columns into the dict
for col in self.__table__.columns:
if col.key in ignores:
continue
dic[col.key] = getattr(self, col.key)
# Puts extra items specified by consumer
for extra_key, func in extras.items():
dic[extra_key] = func(self)
# Converts type
for key in dic:
value = dic[key]
if isinstance(value, datetime):
dic[key] = int(value.timestamp())
elif isinstance(value, Message):
dic[key] = MessageToDict(
value,
preserving_proto_field_name=True,
including_default_value_fields=True)
elif isinstance(value, Enum):
dic[key] = value.name
return dic
setattr(cls, 'to_dict', to_dict)
return cls
return decorator
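# Usage sketch (hypothetical model, for illustration only):
#
#   @to_dict_mixin(ignores=['secret'],
#                  extras={'n_jobs': lambda self: len(self.jobs)})
#   class Project(db.Model):
#       ...
#
# to_dict() then emits datetimes as unix timestamps, protobuf messages as
# dicts, and enums by name, as implemented above.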
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,967 | piiswrong/fedlearner | refs/heads/master | /fedlearner/common/common.py | import datetime
import logging
import os
import pytz
class Config(object):
    # Environment variables are strings; cast overrides to numbers.
    DATA_JOIN_METRICS_SAMPLE_RATE = \
        float(os.environ.get('DATA_JOIN_METRICS_SAMPLE_RATE', 0.3))
    RAW_DATA_METRICS_SAMPLE_RATE = \
        float(os.environ.get('RAW_DATA_METRICS_SAMPLE_RATE', 0.01))
    ES_BATCH_SIZE = int(os.environ.get('ES_BATCH_SIZE', 1000))
TZ = pytz.timezone(os.environ.get('TZ', 'UTC'))
ES_USERNAME = os.environ.get('ES_USERNAME', 'elastic')
ES_PASSWORD = os.environ.get('ES_PASSWORD', 'Fedlearner123')
# YYYY-MM-DD'T'hh:mm:ss.SSSSSSZ
_es_datetime_format = 'strict_date_optional_time'
# WARNING: MAPPINGS BELOW ARE COMPATIBILITY MEASURES AND SHOULD NOT BE MODIFIED.
RAW_DATA_MAPPINGS = {
"dynamic": True,
"dynamic_templates": [
{
"strings": {
"match_mapping_type": "string",
"mapping": {
"type": "keyword"
}
}
}
],
"properties": {
"tags": {
"properties": {
"partition": {
"type": "short"
},
"application_id": {
"ignore_above": 128,
"type": "keyword"
},
"event_time": {
"format": _es_datetime_format,
"type": "date"
},
"process_time": {
"format": _es_datetime_format,
"type": "date"
}
}
}
}
}
DATA_JOIN_MAPPINGS = {
"dynamic": True,
# for dynamically adding string fields, use keyword to reduce space
"dynamic_templates": [
{
"strings": {
"match_mapping_type": "string",
"mapping": {
"type": "keyword"
}
}
}
],
"properties": {
"tags": {
"properties": {
"partition": {
"type": "short"
},
"joined": {
"type": "byte"
},
"label": {
"ignore_above": 32,
"type": "keyword"
},
"type": {
"ignore_above": 32,
"type": "keyword"
},
"has_click_id": {
"type": "boolean"
},
"has_example_id": {
"type": "boolean"
},
"application_id": {
"ignore_above": 128,
"type": "keyword"
},
"process_time": {
"format": _es_datetime_format,
"type": "date"
},
"event_time": {
"format": _es_datetime_format,
"type": "date"
}
}
}
}
}
METRICS_MAPPINGS = {
"dynamic": True,
"dynamic_templates": [
{
"strings": {
"match_mapping_type": "string",
"mapping": {
"type": "keyword"
}
}
}
],
"properties": {
"name": {
"type": "keyword"
},
"value": {
"type": "float"
},
"tags": {
"properties": {
"partition": {
"type": "short"
},
"application_id": {
"ignore_above": 128,
"type": "keyword"
},
"data_source_name": {
"ignore_above": 128,
"type": "keyword"
},
"joiner_name": {
"ignore_above": 32,
"type": "keyword"
},
"role": {
"ignore_above": 32,
"type": "keyword"
},
"event_time": {
"type": "date",
"format": _es_datetime_format
},
"process_time": {
"format": _es_datetime_format,
"type": "date"
}
}
}
}
}
INDEX_NAME = {'metrics': 'metrics_v2',
'raw_data': 'raw_data',
'data_join': 'data_join'}
INDEX_TYPE = INDEX_NAME.keys()
INDEX_MAP = {'metrics': METRICS_MAPPINGS,
'raw_data': RAW_DATA_MAPPINGS,
'data_join': DATA_JOIN_MAPPINGS}
def get_es_template(index_type, es_version):
index_name = INDEX_NAME[index_type]
template = {
"index_patterns": ["{}-*".format(index_name), index_name],
"settings": {
"index": {
"codec": "best_compression",
"routing": {
"allocation": {
"total_shards_per_node": "1"
}
},
"refresh_interval": "60s",
"number_of_shards": "1",
"number_of_replicas": "1",
}
}
}
if es_version == 6:
template['mappings'] = {'_doc': INDEX_MAP[index_type]}
else:
template['mappings'] = INDEX_MAP[index_type]
return template
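# Example (illustrative): ES 6 needs the legacy '_doc' wrapper around the
# mappings, while newer versions accept them directly.
#
#   template = get_es_template('metrics', es_version=6)
#   template['index_patterns']      # ['metrics_v2-*', 'metrics_v2']
#   '_doc' in template['mappings']  # True on ES 6 only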
def convert_to_datetime(value, enable_tz=False):
"""
Args:
value: datetime object | bytes | str | int | float.
Value to be converted. Expected to be a numeric in the format of
yyyymmdd or yyyymmddhhnnss, or a datetime object.
enable_tz: bool. whether converts to UTC and contains timezone info
    Returns: datetime.datetime.
        Tries to convert a datetime str or numeric to a datetime object.
        1. Try to convert based on the length of the str.
        2. Try to convert assuming it is a timestamp.
        3. If it does not match any pattern, return the datetime for
           timestamp=0.
        If enable_tz is True, the timezone is set from the system TZ env
        (when not already set) and the result is normalized back to UTC.
"""
assert isinstance(value, (bytes, str, int, float))
if isinstance(value, bytes):
value = value.decode()
elif isinstance(value, (int, float)):
value = str(value)
# 1. try to parse datetime from value
try:
date_time = convert_time_string_to_datetime(value)
except ValueError: # Not fitting any of above patterns
# 2. try to convert assuming it is a timestamp
# not in the same `try` block b/c the length of some strings might
# be equal to 8 or 14 but it does not match any of the patterns
try:
date_time = datetime.datetime.fromtimestamp(float(value))
except ValueError: # might be a non-number str
# 3. default to 0
logging.warning('Unable to parse time %s to iso format, '
'defaults to 0.', value)
date_time = datetime.datetime.fromtimestamp(0)
if enable_tz:
date_time = set_timezone(date_time)
return date_time
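# Examples (illustrative):
#   convert_to_datetime('20210301')        # datetime(2021, 3, 1, 0, 0)
#   convert_to_datetime('20210301123000')  # datetime(2021, 3, 1, 12, 30)
#   convert_to_datetime(1614556800.0)      # parsed as a unix timestamp
#   convert_to_datetime('not-a-date')      # timestamp-0 datetime, with warning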
def set_timezone(date_time):
if date_time.tzinfo is None:
date_time = Config.TZ.localize(date_time)
date_time = pytz.utc.normalize(date_time)
return date_time
def convert_time_string_to_datetime(value):
if len(value) == 8:
date_time = datetime.datetime.strptime(value, '%Y%m%d')
elif len(value) == 14:
date_time = datetime.datetime.strptime(value, '%Y%m%d%H%M%S')
else:
raise ValueError
return date_time
def set_logger():
    # VERBOSITY comes from the environment as a string; cast for comparison.
    verbosity = int(os.environ.get('VERBOSITY', 1))
if verbosity == 0:
logging.getLogger().setLevel(logging.WARNING)
elif verbosity == 1:
logging.getLogger().setLevel(logging.INFO)
elif verbosity > 1:
logging.getLogger().setLevel(logging.DEBUG)
logging.basicConfig(format="%(asctime)s %(filename)s " \
"%(lineno)s %(levelname)s - %(message)s")
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,968 | piiswrong/fedlearner | refs/heads/master | /fedlearner/model/tree/loss.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import numpy as np
from scipy import special as sp_special
def _roc_auc_score(label, pred):
p = np.argsort(pred, kind='mergesort')[::-1]
label = label[p]
pred = pred[p]
unique = np.r_[np.where(np.diff(pred))[0], label.size-1]
tps = np.cumsum(label)[unique]
fps = np.cumsum(1 - label)[unique]
tpr = np.r_[0, tps] / tps[-1]
fpr = np.r_[0, fps] / fps[-1]
ks = (tpr-fpr).max()
auc = np.trapz(tpr, x=fpr)
return ks, auc
def _precision_recall_f1(label, y_pred):
tp = (label * y_pred).sum()
precision = tp / (y_pred.sum() + 1e-16)
recall = tp / (label.sum() + 1e-16)
f1 = 2 * precision * recall / (precision + recall + 1e-16)
return precision, recall, f1
class LogisticLoss(object):
def __init__(self):
pass
def predict(self, x):
return sp_special.expit(x)
def loss(self, x, pred, label):
return np.zeros_like(pred)
def gradient(self, x, pred, label):
return pred - label
def hessian(self, x, pred, label):
return np.maximum(pred * (1.0 - pred), 1e-16)
def metrics(self, pred, label):
y_pred = (pred > 0.5).astype(label.dtype)
precision, recall, f1 = _precision_recall_f1(label, y_pred)
ks, auc = _roc_auc_score(label, pred)
return {
'acc': np.isclose(y_pred, label).sum() / len(label),
'precision': precision,
'recall': recall,
'f1': f1,
'auc': auc,
'ks': ks,
}
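# Quick sanity check (illustrative):
#   loss = LogisticLoss()
#   pred = loss.predict(np.array([-2.0, 0.5, 3.0]))  # sigmoid into (0, 1)
#   loss.metrics(pred, np.array([0.0, 1.0, 1.0]))    # acc/precision/auc/ks...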
class MSELoss(object):
def __init__(self):
pass
def predict(self, x):
return x
def loss(self, x, pred, label):
return np.square(pred - label).mean() / 2.0
def gradient(self, x, pred, label):
return pred - label
def hessian(self, x, pred, label):
return np.ones_like(pred)
def metrics(self, pred, label):
mse = np.square(pred - label).mean()
msre = np.sqrt(mse)
fabs = np.abs(pred - label).mean()
return {
'mse': mse,
'msre': msre,
'abs': fabs,
}
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,969 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/job/yaml_formatter.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import json
import tarfile
from io import BytesIO
import base64
from string import Template
from flatten_dict import flatten
from fedlearner_webconsole.utils.system_envs import get_system_envs
class _YamlTemplate(Template):
delimiter = '$'
# Which placeholders in the template should be interpreted
idpattern = r'[a-zA-Z_\-\[0-9\]]+(\.[a-zA-Z_\-\[0-9\]]+)*'
def format_yaml(yaml, **kwargs):
"""Formats a yaml template.
Example usage:
format_yaml('{"abc": ${x.y}}', x={'y': 123})
output should be '{"abc": 123}'
"""
template = _YamlTemplate(yaml)
try:
return template.substitute(flatten(kwargs or {},
reducer='dot'))
except KeyError as e:
raise RuntimeError(
'Unknown placeholder: {}'.format(e.args[0])) from e
def _make_variables_dict(variables):
var_dict = {
var.name: (code_dict_encode(json.loads(var.value))
if var.value_type == 'CODE' else var.value)
for var in variables
}
return var_dict
def generate_job_run_yaml(job):
system_dict = {'basic_envs': get_system_envs()}
workflow = job.workflow.to_dict()
workflow['variables'] = _make_variables_dict(
job.workflow.get_config().variables)
workflow['jobs'] = {}
for j in job.workflow.get_jobs():
variables = _make_variables_dict(j.get_config().variables)
j_dic = j.to_dict()
j_dic['variables'] = variables
workflow['jobs'][j.get_config().name] = j_dic
project = job.project.to_dict()
project['variables'] = _make_variables_dict(
job.project.get_config().variables)
participants = project['config']['participants']
for index, participant in enumerate(participants):
project[f'participants[{index}]'] = {}
project[f'participants[{index}]']['egress_domain'] =\
participant['domain_name']
project[f'participants[{index}]']['egress_host'] = \
participant['grpc_spec']['authority']
yaml = format_yaml(job.yaml_template,
workflow=workflow,
project=project,
system=system_dict)
yaml = json.loads(yaml)
return yaml
def code_dict_encode(data_dict):
    # If data_dict is a dict, pack it into a gzipped tar archive and
    # represent it as a base64 string.
assert isinstance(data_dict, dict)
out = BytesIO()
with tarfile.open(fileobj=out, mode='w:gz') as tar:
for path in data_dict:
tarinfo = tarfile.TarInfo(path)
tarinfo.size = len(data_dict[path])
tar.addfile(tarinfo, BytesIO(
data_dict[path].encode('utf-8')))
result = str(base64.b64encode(out.getvalue()), encoding='utf-8')
return f'base64://{result}'
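# Decoding sketch for reference (assumes the consumer strips the 'base64://'
# prefix before un-taring):
#
#   encoded = code_dict_encode({'main.py': 'print(1)'})
#   raw = base64.b64decode(encoded[len('base64://'):])
#   with tarfile.open(fileobj=BytesIO(raw), mode='r:gz') as tar:
#       tar.getnames()  # ['main.py']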
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,970 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/workflow/apis.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=global-statement
# coding: utf-8
import logging
import json
from uuid import uuid4
from http import HTTPStatus
from flask_restful import Resource, reqparse, request
from google.protobuf.json_format import MessageToDict
from fedlearner_webconsole.workflow.models import (
Workflow, WorkflowState, TransactionState
)
from fedlearner_webconsole.job.yaml_formatter import generate_job_run_yaml
from fedlearner_webconsole.proto import common_pb2
from fedlearner_webconsole.workflow_template.apis import \
dict_to_workflow_definition
from fedlearner_webconsole.db import db
from fedlearner_webconsole.exceptions import (
NotFoundException, ResourceConflictException, InvalidArgumentException,
InternalException, NoAccessException)
from fedlearner_webconsole.scheduler.scheduler import scheduler
from fedlearner_webconsole.rpc.client import RpcClient
def _get_workflow(workflow_id):
result = Workflow.query.filter_by(id=workflow_id).first()
if result is None:
raise NotFoundException()
return result
class WorkflowsApi(Resource):
def get(self):
result = Workflow.query
if 'project' in request.args and request.args['project'] is not None:
project_id = request.args['project']
result = result.filter_by(project_id=project_id)
if 'keyword' in request.args and request.args['keyword'] is not None:
keyword = request.args['keyword']
result = result.filter(Workflow.name.like(
'%{}%'.format(keyword)))
if 'uuid' in request.args and request.args['uuid'] is not None:
uuid = request.args['uuid']
result = result.filter_by(uuid=uuid)
return {'data': [row.to_dict() for row in
result.order_by(
Workflow.created_at.desc()).all()]}, HTTPStatus.OK
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('name', required=True, help='name is empty')
parser.add_argument('project_id', type=int, required=True,
help='project_id is empty')
# TODO: should verify if the config is compatible with
# workflow template
parser.add_argument('config', type=dict, required=True,
help='config is empty')
parser.add_argument('forkable', type=bool, required=True,
help='forkable is empty')
parser.add_argument('forked_from', type=int, required=False,
help='fork from base workflow')
parser.add_argument('create_job_flags', type=list, required=False,
location='json',
help='flags in common.CreateJobFlag')
parser.add_argument('peer_create_job_flags', type=list,
required=False, location='json',
help='peer flags in common.CreateJobFlag')
parser.add_argument('fork_proposal_config', type=dict, required=False,
help='fork and edit peer config')
parser.add_argument('comment')
data = parser.parse_args()
name = data['name']
if Workflow.query.filter_by(name=name).first() is not None:
raise ResourceConflictException(
'Workflow {} already exists.'.format(name))
# form to proto buffer
template_proto = dict_to_workflow_definition(data['config'])
workflow = Workflow(name=name,
# 20 bytes
# a DNS-1035 label must start with an
# alphabetic character. substring uuid[:19] has
# no collision in 10 million draws
uuid=f'u{uuid4().hex[:19]}',
comment=data['comment'],
project_id=data['project_id'],
forkable=data['forkable'],
forked_from=data['forked_from'],
state=WorkflowState.NEW,
target_state=WorkflowState.READY,
transaction_state=TransactionState.READY)
workflow.set_create_job_flags(data['create_job_flags'])
if workflow.forked_from is not None:
fork_config = dict_to_workflow_definition(
data['fork_proposal_config'])
# TODO: more validations
if len(fork_config.job_definitions) != \
len(template_proto.job_definitions):
raise InvalidArgumentException(
'Forked workflow\'s template does not match base workflow')
workflow.set_fork_proposal_config(fork_config)
# TODO: check that federated jobs have
# same reuse policy on both sides
workflow.set_peer_create_job_flags(data['peer_create_job_flags'])
workflow.set_config(template_proto)
db.session.add(workflow)
db.session.commit()
logging.info('Inserted a workflow to db')
scheduler.wakeup(workflow.id)
return {'data': workflow.to_dict()}, HTTPStatus.CREATED
class WorkflowApi(Resource):
def get(self, workflow_id):
workflow = _get_workflow(workflow_id)
result = workflow.to_dict()
result['jobs'] = [job.to_dict() for job in workflow.get_jobs()]
result['owned_jobs'] = [job.to_dict() for job in workflow.owned_jobs]
result['config'] = None
if workflow.get_config() is not None:
result['config'] = MessageToDict(
workflow.get_config(),
preserving_proto_field_name=True,
including_default_value_fields=True)
return {'data': result}, HTTPStatus.OK
def put(self, workflow_id):
parser = reqparse.RequestParser()
parser.add_argument('config', type=dict, required=True,
help='config is empty')
parser.add_argument('forkable', type=bool, required=True,
help='forkable is empty')
parser.add_argument('create_job_flags', type=list, required=False,
location='json',
help='flags in common.CreateJobFlag')
parser.add_argument('comment')
data = parser.parse_args()
workflow = _get_workflow(workflow_id)
if workflow.config:
raise ResourceConflictException(
'Resetting workflow is not allowed')
workflow.comment = data['comment']
workflow.forkable = data['forkable']
workflow.set_config(dict_to_workflow_definition(data['config']))
workflow.set_create_job_flags(data['create_job_flags'])
workflow.update_target_state(WorkflowState.READY)
db.session.commit()
scheduler.wakeup(workflow_id)
logging.info('update workflow %d target_state to %s',
workflow.id, workflow.target_state)
return {'data': workflow.to_dict()}, HTTPStatus.OK
def patch(self, workflow_id):
parser = reqparse.RequestParser()
parser.add_argument('target_state', type=str, required=False,
default=None, help='target_state is empty')
parser.add_argument('state', type=str, required=False,
default=None, help='state is empty')
parser.add_argument('forkable', type=bool)
parser.add_argument('metric_is_public', type=bool)
parser.add_argument('config', type=dict, required=False,
default=None, help='updated config')
data = parser.parse_args()
workflow = _get_workflow(workflow_id)
forkable = data['forkable']
if forkable is not None:
workflow.forkable = forkable
db.session.flush()
metric_is_public = data['metric_is_public']
if metric_is_public is not None:
workflow.metric_is_public = metric_is_public
db.session.flush()
target_state = data['target_state']
if target_state:
try:
if WorkflowState[target_state] == WorkflowState.RUNNING:
for job in workflow.owned_jobs:
try:
generate_job_run_yaml(job)
                            # TODO: check if peer variables are valid
                        except RuntimeError as e:
                            raise ValueError(
                                f'Invalid variable when trying '
                                f'to format job {job.name}: {str(e)}')
workflow.update_target_state(WorkflowState[target_state])
db.session.flush()
logging.info('updated workflow %d target_state to %s',
workflow.id, workflow.target_state)
scheduler.wakeup(workflow.id)
except ValueError as e:
raise InvalidArgumentException(details=str(e)) from e
state = data['state']
if state:
try:
assert state == 'INVALID', \
'Can only set state to INVALID for invalidation'
workflow.invalidate()
db.session.flush()
logging.info('invalidate workflow %d', workflow.id)
except ValueError as e:
raise InvalidArgumentException(details=str(e)) from e
config = data['config']
if config:
try:
if workflow.target_state != WorkflowState.INVALID or \
workflow.state not in \
[WorkflowState.READY, WorkflowState.STOPPED]:
raise NoAccessException('Cannot edit running workflow')
config_proto = dict_to_workflow_definition(data['config'])
workflow.set_config(config_proto)
db.session.flush()
except ValueError as e:
raise InvalidArgumentException(details=str(e)) from e
db.session.commit()
return {'data': workflow.to_dict()}, HTTPStatus.OK
class PeerWorkflowsApi(Resource):
def get(self, workflow_id):
workflow = _get_workflow(workflow_id)
project_config = workflow.project.get_config()
peer_workflows = {}
for party in project_config.participants:
client = RpcClient(project_config, party)
# TODO(xiangyxuan): use uuid to identify the workflow
resp = client.get_workflow(workflow.name)
if resp.status.code != common_pb2.STATUS_SUCCESS:
raise InternalException(resp.status.msg)
peer_workflow = MessageToDict(
resp,
preserving_proto_field_name=True,
including_default_value_fields=True)
for job in peer_workflow['jobs']:
if 'pods' in job:
job['pods'] = json.loads(job['pods'])
peer_workflows[party.name] = peer_workflow
return {'data': peer_workflows}, HTTPStatus.OK
def patch(self, workflow_id):
parser = reqparse.RequestParser()
parser.add_argument('config', type=dict, required=True,
help='new config for peer')
data = parser.parse_args()
config_proto = dict_to_workflow_definition(data['config'])
workflow = _get_workflow(workflow_id)
project_config = workflow.project.get_config()
peer_workflows = {}
for party in project_config.participants:
client = RpcClient(project_config, party)
resp = client.update_workflow(
workflow.name, config_proto)
if resp.status.code != common_pb2.STATUS_SUCCESS:
raise InternalException(resp.status.msg)
peer_workflows[party.name] = MessageToDict(
resp,
preserving_proto_field_name=True,
including_default_value_fields=True)
return {'data': peer_workflows}, HTTPStatus.OK
def initialize_workflow_apis(api):
api.add_resource(WorkflowsApi, '/workflows')
api.add_resource(WorkflowApi, '/workflows/<int:workflow_id>')
api.add_resource(PeerWorkflowsApi,
'/workflows/<int:workflow_id>/peer_workflows')
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,971 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer/estimator.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=protected-access
import logging
import time
import tensorflow.compat.v1 as tf
from tensorflow.compat import as_str_any
from tensorflow.compat.v1.train import Optimizer
from tensorflow.compat.v1.estimator import ModeKeys
from tensorflow_estimator.python.estimator import model_fn as model_fn_lib
from fedlearner.common.summary_hook import SummaryHook
from fedlearner.trainer import patch # pylint: disable=unused-import
from fedlearner.common import metrics
SYNC_PATH = '/sync/'
DATA_CHECKPOINT_INIT_VALUE = "_init_value"
class DataCheckpointSaverListener(tf.estimator.CheckpointSaverListener):
def __init__(self, tm, appid):
self._trainer_master = tm
self._application_id = appid
def begin(self):
ckpt = tf.placeholder(tf.string, name="data_checkpoint_plhd")
var_tmp = tf.Variable(DATA_CHECKPOINT_INIT_VALUE, \
name="data_checkpoint")
self._ckpt_tensor = var_tmp.assign(ckpt)
def before_save(self, session, global_step_value):
logging.info('About to write a checkpoint at step %d', \
global_step_value)
data_checkpoint = self._trainer_master.get_data_block_checkpoint(
self._application_id)
        # If an empty data checkpoint is fetched (due to an exception or the
        # master not being ready), there is no need to save.
if len(data_checkpoint) == 0:
return
res = session.run(self._ckpt_tensor, {"data_checkpoint_plhd:0":
",".join(data_checkpoint)})
logging.info("data checkpoint saved result: %s", res)
class FLModel(object):
def __init__(self, role, bridge, example_ids, exporting=False):
self._role = role
self._bridge = bridge
self._example_ids = example_ids
self._exporting = exporting
self._train_ops = []
self._recvs = []
self._sends = []
self._outputs = []
@property
def train_ops(self):
return self._train_ops
@property
def sends(self):
return [(n, t) for n, t, _ in self._sends]
@property
def recvs(self):
return [(n, t) for n, t, _ in self._recvs]
def verify_example_ids(self):
tensor = tf.strings.to_hash_bucket_fast(self._example_ids, 2**31 - 1)
if self._role == 'leader':
self.send('_verify_example_ids', tensor)
else:
recv_tensor = self.recv('_verify_example_ids', tensor.dtype)
op = tf.assert_equal(tensor, recv_tensor)
self._train_ops.append(op)
def send(self, name, tensor, require_grad=False):
with tf.control_dependencies([self._example_ids]):
op = self._bridge.send_op(name, tensor)
self._train_ops.append(op)
self._sends.append((name, tensor, require_grad))
if require_grad:
return self.recv(name + '_grad', tensor.dtype)
return None
def recv(self, name, dtype=tf.float32, require_grad=False, shape=None):
with tf.control_dependencies([self._example_ids]):
tensor = self._bridge.receive_op(name, dtype)
if shape:
tensor = tf.ensure_shape(tensor, shape)
else:
logging.warning(
'Receiving tensor %s without checking shape. '
'Consider setting shape at model.recv(shape=(...)). '
'shape can have None dimensions '
'which matches to any length.', name)
self._train_ops.append(tensor)
self._recvs.append((name, tensor, require_grad))
return tensor
def minimize(self,
optimizer,
loss,
global_step=None,
var_list=None,
gate_gradients=Optimizer.GATE_OP,
aggregation_method=None,
colocate_gradients_with_ops=False,
name=None,
grad_loss=None):
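        # Tensors received with require_grad=True are prepended to var_list so
        # that compute_gradients also yields their gradients; those leading
        # gradients are sent back to the peer as '<name>_grad' below, and only
        # the remaining (local) gradients are applied.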
recv_grads = [i for i in self._recvs if i[2]]
if var_list is None:
var_list = \
tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES) + \
tf.get_collection(tf.GraphKeys.TRAINABLE_RESOURCE_VARIABLES)
var_list = [v for _, v, _ in recv_grads] + var_list
grads_and_vars = optimizer.compute_gradients(
loss,
var_list=var_list,
gate_gradients=gate_gradients,
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops,
grad_loss=grad_loss)
send_grads = grads_and_vars[:len(recv_grads)]
for (n, _, _), (grad, _) in zip(recv_grads, send_grads):
if grad is not None:
self.send(n + '_grad', grad)
if grads_and_vars[len(recv_grads):]:
train_op = optimizer.apply_gradients(
grads_and_vars[len(recv_grads):],
global_step=global_step,
name=name)
else:
train_op = tf.no_op()
return train_op
def _append_summary_hook(self, training_hooks):
if not training_hooks:
training_hooks = []
summary_hook = SummaryHook.get_hook()
if summary_hook:
training_hooks.append(summary_hook)
return training_hooks
def make_spec(self,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metric_ops=None,
training_chief_hooks=None,
training_hooks=None,
evaluation_hooks=None,
prediction_hooks=None):
if isinstance(predictions, tf.Tensor):
predictions = {'output': predictions}
if mode == ModeKeys.TRAIN:
train_op = tf.group([train_op] + self._train_ops)
training_hooks = self._append_summary_hook(training_hooks)
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
training_chief_hooks=training_chief_hooks,
training_hooks=training_hooks,
evaluation_hooks=evaluation_hooks,
prediction_hooks=prediction_hooks)
class FLEstimator(object):
def __init__(self,
model_fn,
bridge,
trainer_master,
role,
worker_rank=0,
application_id=None,
cluster_spec=None):
self._model_fn = model_fn
self._bridge = bridge
self._trainer_master = trainer_master
self._role = role
self._worker_rank = worker_rank
self._cluster_spec = cluster_spec
self._application_id = application_id
def _get_features_and_labels_from_input_fn(self, input_fn, mode):
dataset = input_fn(self._bridge, self._trainer_master)
features, labels = dataset.make_one_shot_iterator().get_next()
return features, labels
def _get_model_spec(self, features, labels, mode):
model = FLModel(self._role, self._bridge,
features.get('example_id', None),
exporting=(mode == ModeKeys.PREDICT))
spec = self._model_fn(model, features, labels, mode)
return spec, model
def _restore_datablock(self, blk_ids):
# only chief worker restores from checkpoint.
if self._worker_rank != 0 or blk_ids is None:
return True
block_id_str = as_str_any(blk_ids)
block_ids = []
if block_id_str != DATA_CHECKPOINT_INIT_VALUE:
block_ids = block_id_str.split(",")
logging.info("restore: %s", block_id_str)
return self._trainer_master.restore_data_block_checkpoint(
self._application_id, block_ids)
def train(self,
input_fn,
checkpoint_path=None,
save_checkpoint_steps=None,
save_checkpoint_secs=None):
config = tf.ConfigProto()
        # Presumably this meant to size both thread pools rather than setting
        # inter_op twice.
        config.intra_op_parallelism_threads = 16
        config.inter_op_parallelism_threads = 16
config.experimental.share_session_state_in_clusterspec_propagation \
= True
if self._cluster_spec is not None:
device_fn = tf.train.replica_device_setter(
worker_device="/job:worker/task:%d" % self._worker_rank,
merge_devices=True,
cluster=self._cluster_spec)
local_address = self._cluster_spec.job_tasks('worker')[
self._worker_rank]
config.rpc_options.compression_algorithm = 'gzip'
config.rpc_options.cache_rpc_response = True
server = tf.train.Server(tf.train.ClusterSpec(
{'local': {
0: local_address
}}),
job_name='local',
task_index=0,
config=config)
config.cluster_def.CopyFrom(self._cluster_spec.as_cluster_def())
target = "grpc://" + local_address
else:
device_fn = None
target = None
with tf.Graph().as_default() as g:
with tf.device(device_fn):
features, labels = self._get_features_and_labels_from_input_fn(
input_fn, ModeKeys.TRAIN)
spec, _ = self._get_model_spec(features, labels, ModeKeys.TRAIN)
# Explicitly add a Saver
if not tf.get_collection(tf.GraphKeys.SAVERS):
saver = tf.train.Saver(
sharded=True,
defer_build=True,
save_relative_paths=True) # Must set for portability
tf.add_to_collection(tf.GraphKeys.SAVERS, saver)
listener = DataCheckpointSaverListener(self._trainer_master,
self._application_id)
saver_hook = tf.estimator.CheckpointSaverHook(
checkpoint_path, save_secs=save_checkpoint_secs,
save_steps=save_checkpoint_steps, listeners=[listener])
self._bridge.connect()
try:
with tf.train.MonitoredTrainingSession(
master=target,
config=config,
is_chief=(self._worker_rank == 0),
chief_only_hooks=[saver_hook],
checkpoint_dir=checkpoint_path,
save_checkpoint_steps=None,
save_checkpoint_secs=None,
hooks=spec.training_hooks) as sess:
iter_id = 0
data_checkpoint_value = None
if hasattr(saver_hook, "data_checkpoint"):
data_checkpoint_value = saver_hook.data_checkpoint
if not self._restore_datablock(data_checkpoint_value):
raise ValueError("Restore data checkpoint error")
while not sess.should_stop():
self._bridge.start(iter_id)
logging.debug('after bridge start.')
start_time = time.time()
sess.run(spec.train_op, feed_dict={})
end_time = time.time()
metrics.emit_timer(
name="iter_timer",
value=end_time-start_time,
tags={})
logging.debug('after session run.')
self._bridge.commit()
logging.debug('after bridge commit.')
iter_id += 1
finally:
self._bridge.terminate()
return self
def evaluate(self,
input_fn,
checkpoint_path=None):
if not tf.train.latest_checkpoint(checkpoint_path):
raise ValueError(
"Could not find trained model at %s" % checkpoint_path)
with tf.Graph().as_default():
features, labels = self._get_features_and_labels_from_input_fn(
input_fn, ModeKeys.EVAL)
spec, model = self._get_model_spec(features, labels, ModeKeys.EVAL)
# Track the average loss in default
eval_metric_ops = spec.eval_metric_ops or {}
if model_fn_lib.LOSS_METRIC_KEY not in eval_metric_ops:
loss_metric = tf.metrics.mean(spec.loss)
eval_metric_ops[model_fn_lib.LOSS_METRIC_KEY] = loss_metric
# Create the real eval op
update_ops, eval_dict = _extract_metric_update_ops(eval_metric_ops)
update_ops.extend(model._train_ops)
eval_op = tf.group(*update_ops)
# Also track the global step
if tf.GraphKeys.GLOBAL_STEP in eval_dict:
raise ValueError(
'Metric with name `global_step` is not allowed, because '
'Estimator already defines a default metric with the '
'same name.')
eval_dict[tf.GraphKeys.GLOBAL_STEP] = \
tf.train.get_or_create_global_step()
# Prepare the session creator.
scaffold = tf.train.Scaffold()
session_creator = tf.train.ChiefSessionCreator(
scaffold=scaffold,
checkpoint_dir=checkpoint_path)
# Prepare hooks
all_hooks = list(spec.evaluation_hooks or [])
final_ops_hook = tf.train.FinalOpsHook(eval_dict)
all_hooks.append(final_ops_hook)
# Evaluate over dataset
self._bridge.connect()
try:
with tf.train.MonitoredSession(
session_creator=session_creator, hooks=all_hooks) as sess:
if not self._restore_datablock(DATA_CHECKPOINT_INIT_VALUE):
raise ValueError("Restore data checkpoint error")
iter_id = 0
while not sess.should_stop():
self._bridge.start(iter_id)
logging.debug('after bridge start.')
start_time = time.time()
sess.run(eval_op)
end_time = time.time()
metrics.emit_timer(
name="iter_timer",
value=end_time-start_time,
tags={})
logging.debug('after session run.')
self._bridge.commit()
logging.debug('after bridge commit.')
iter_id += 1
finally:
self._bridge.terminate()
# Print result
logging.info('Metrics for iteration %d: %s',
iter_id, _dict_to_str(final_ops_hook.final_ops_values))
return final_ops_hook.final_ops_values
def export_saved_model(self,
export_dir_base,
serving_input_receiver_fn,
checkpoint_path=None):
with tf.Graph().as_default():
receiver = serving_input_receiver_fn()
spec, model = self._get_model_spec(receiver.features, None,
ModeKeys.PREDICT)
assert not model.sends, "Exported model cannot send"
assert not model.recvs, "Exported model cannot receive"
with tf.Session() as sess:
saver_for_restore = tf.train.Saver(sharded=True)
saver_for_restore.restore(
sess, tf.train.latest_checkpoint(checkpoint_path))
tf.saved_model.simple_save(sess, export_dir_base,
receiver.receiver_tensors,
spec.predictions, None)
return export_dir_base
def _extract_metric_update_ops(eval_dict):
"""Separate update operations from metric value operations."""
update_ops = []
value_ops = {}
# Sort metrics lexicographically so graph is identical every time.
for name in sorted(eval_dict.keys()):
metric_tensor, update_op = eval_dict[name]
value_ops[name] = metric_tensor
update_ops.append(update_op)
return update_ops, value_ops
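# Hedged example of the mapping above (names are illustrative):
# given eval_dict = {'auc': (auc_value, auc_update_op)},
# the function returns ([auc_update_op], {'auc': auc_value}).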
def _dict_to_str(dictionary):
"""Get a `str` representation of a `dict`.
Args:
dictionary: The `dict` to be represented as `str`.
Returns:
A `str` representing the `dictionary`.
"""
return ', '.join('%s = %s' % (k, v)
for k, v in sorted(dictionary.items())
if not isinstance(v, bytes))
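# For example, _dict_to_str({'loss': 0.1, 'global_step': 42}) yields
# "global_step = 42, loss = 0.1": keys are sorted and bytes-valued
# entries are skipped.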
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,972 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/test/fedlearner_webconsole/db_test.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import unittest
from datetime import datetime
from fedlearner_webconsole.db import db, to_dict_mixin
from fedlearner_webconsole.proto import common_pb2
from testing.common import create_test_db
@to_dict_mixin(ignores=['token', 'grpc_spec'], extras={
'extra_key': (lambda model: model.get_grpc_spec())
})
class _TestModel(db.Model):
__tablename__ = 'test_table'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(255))
token = db.Column('token_string', db.String(64), index=True,
key='token')
created_at = db.Column(db.DateTime(timezone=True))
grpc_spec = db.Column(db.Text())
def set_grpc_spec(self, proto):
self.grpc_spec = proto.SerializeToString()
def get_grpc_spec(self):
proto = common_pb2.GrpcSpec()
proto.ParseFromString(self.grpc_spec)
return proto
class DbTest(unittest.TestCase):
def setUp(self):
self._db = create_test_db()
self._db.create_all()
def tearDown(self):
self._db.session.remove()
self._db.drop_all()
def test_to_dict_decorator(self):
# 2020/12/17 13:58:59 UTC+8
created_at_ts = 1608184739
test_model = _TestModel(
name='test-model',
token='test-token',
created_at=datetime.fromtimestamp(created_at_ts)
)
test_grpc_spec = common_pb2.GrpcSpec(authority='test-authority')
test_model.set_grpc_spec(test_grpc_spec)
self._db.session.add(test_model)
self._db.session.commit()
models = _TestModel.query.all()
self.assertEqual(len(models), 1)
self.assertDictEqual(models[0].to_dict(), {
'id': 1,
'name': 'test-model',
'created_at': created_at_ts,
'extra_key': {
'authority': 'test-authority',
'extra_headers': {},
}
})
if __name__ == '__main__':
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,973 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/envs.py | import os
import pytz
class Envs(object):
SUPPORT_HDFS = bool(os.environ.get('SUPPORT_HDFS'))
TZ = pytz.timezone(os.environ.get('TZ', 'UTC'))
HDFS_SERVER = os.environ.get('HDFS_SERVER', None)
ES_HOST = os.environ.get('ES_HOST', 'fedlearner-stack-elasticsearch-client')
ES_PORT = os.environ.get('ES_PORT', 9200)
ES_USERNAME = os.environ.get('ES_USERNAME', 'elastic')
ES_PASSWORD = os.environ.get('ES_PASSWORD', 'Fedlearner123')
KIBANA_SERVICE_HOST_PORT = os.environ.get(
'KIBANA_SERVICE_HOST_PORT', 'http://fedlearner-stack-kibana:443'
)
KIBANA_INGRESS_HOST = os.environ.get('KIBANA_INGRESS_HOST', 'localhost')
KIBANA_INGRESS_PORT = os.environ.get('KIBANA_INGRESS_PORT', '5601')
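# Hedged usage sketch (the entrypoint name is a placeholder): overriding
# via the environment, e.g.
#   TZ=Asia/Shanghai ES_PORT=9201 python run_app.py
# makes Envs.TZ the Asia/Shanghai timezone and Envs.ES_PORT the string
# '9201' -- values read from os.environ are always strings, while
# untouched defaults such as ES_PORT keep their Python type (int).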
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,974 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer_master/leader_tm.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
from concurrent import futures
import threading
import argparse
import os
import random
import grpc
from fedlearner.common import trainer_master_service_pb2 as tm_pb
from fedlearner.common import trainer_master_service_pb2_grpc as tm_grpc
from fedlearner.common import common_pb2 as common_pb
from fedlearner.data_join.data_block_visitor import DataBlockVisitor
from fedlearner.trainer_master.data.data_block_queue import DataBlockQueue
from .trainer_master_service import TrainerMasterServer
kvstore_type = os.environ.get('KVSTORE_TYPE', 'etcd')
class LeaderTrainerMaster(object):
def __init__(self, application_id, data_source,
start_time, end_time, online_training,
shuffle_data_block, epoch_num):
self._application_id = application_id
self._online_training = online_training
self._checkpoint_mutex = threading.Lock()
self._allocated_data_blockids = None
self._status_mutex = threading.Lock()
self._status = tm_pb.MasterStatus.CREATED
kvstore_use_mock = os.environ.get('KVSTORE_USE_MOCK', "off") == "on"
self._data_block_queue = DataBlockQueue()
self._data_block_visitor = DataBlockVisitor(
data_source, kvstore_type, kvstore_use_mock)
self._start_time = start_time
self._end_time = end_time
self._epoch_num = epoch_num
self._shuffle_data_block = shuffle_data_block
self._visited_data_blocks = set()
self._lock = threading.Lock()
if online_training:
assert self._epoch_num == 1 and not self._shuffle_data_block, \
"epoch_num must be 1 and shuffle_data_block must be False " \
"online_training is set"
assert self._epoch_num >= 1, \
"epoch_num {} must >= 1".format(self._epoch_num)
def run(self, listen_port):
self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
tm_grpc.add_TrainerMasterServiceServicer_to_server(
TrainerMasterServer(self._data_block_response,
self._get_checkpoint_fn,
self._restore_checkpoint_fn), self._server)
self._server.add_insecure_port('[::]:%d' % listen_port)
self._server.start()
logging.info('Trainer Master Server start on port[%d].', listen_port)
self._transfer_status(tm_pb.MasterStatus.CREATED,
tm_pb.MasterStatus.INITIALING)
self._server.wait_for_termination()
def _transfer_status(self, frm, to, callback_fn=lambda *args: True):
with self._status_mutex:
if self._status == frm:
self._status = to
return callback_fn()
logging.warning("%s invalid status transfer, from %d to %d, "
"while status is %d", self.__class__.__name__,
frm, to, self._status)
self._status = tm_pb.MasterStatus.ERROR
return False
def _check_status(self, callback_fn):
with self._status_mutex:
return callback_fn(self._status)
raise ValueError("unreachable")
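# Status lifecycle, derived from the _transfer_status calls in this file:
# CREATED -> INITIALING (run) -> RUNNING (restore data checkpoint)
# -> FINISHED (all data blocks allocated); any out-of-order transfer
# moves the master into ERROR.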
def _get_checkpoint_fn(self, request):
assert request.application_id == self._application_id, \
"Application id not matched"
response = tm_pb.GetDataBlockCheckpointResponse()
ckpt_not_ready_fn = lambda status: status not in \
(tm_pb.MasterStatus.RUNNING, tm_pb.MasterStatus.FINISHED)
if self._check_status(ckpt_not_ready_fn):
response.status.code = common_pb.STATUS_WAIT_FOR_SYNCING_CHECKPOINT
response.status.error_message = \
"master is not ready for querying daya checkpoint"
return response
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = 'success'
response.block_ids.extend(list(self._allocated_data_blockids))
return response
def _restore_checkpoint_fn(self, request):
assert request.application_id == self._application_id,\
"Application id not matched: %s vs %s"%(
request.application_id, self._application_id)
response = tm_pb.RestoreDataBlockCheckpointResponse()
no_need_restore_fn = lambda status: status in (\
tm_pb.MasterStatus.RUNNING,\
tm_pb.MasterStatus.FINISHED,\
tm_pb.MasterStatus.ERROR)
if self._check_status(no_need_restore_fn):
logging.info("No need to restore %s", self.__class__.__name__)
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = "success"
return response
# To avoid a race, load data after filling the data checkpoint and
# before transferring state to RUNNING
with self._checkpoint_mutex:
self._allocated_data_blockids = set(request.block_ids)
self._load_data()
trans_ok = self._transfer_status(tm_pb.MasterStatus.INITIALING,
tm_pb.MasterStatus.RUNNING)
if not trans_ok:
response.status.code = common_pb.STATUS_WAIT_FOR_SYNCING_CHECKPOINT
response.status.error_message = \
"must sync data checkpoint before alloc"
return response
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = "success"
return response
def _get_checkpoint(self):
return self._allocated_data_blockids
def _data_block_response(self, request):
response = tm_pb.DataBlockResponse()
def status_check_fn(status):
response = tm_pb.DataBlockResponse()
if status in (tm_pb.MasterStatus.FINISHED, \
tm_pb.MasterStatus.ERROR):
response.status.code = common_pb.STATUS_DATA_FINISHED
response.status.error_message = 'datablock finished'
return response
if status != tm_pb.MasterStatus.RUNNING:
response.status.code = \
common_pb.STATUS_WAIT_FOR_SYNCING_CHECKPOINT
response.status.error_message = \
"must sync data checkpoint before alloc"
return response
# only if status is RUNNING
return True
ready = self._check_status(status_check_fn)
if ready is not True:
return ready
data_block = self._alloc_data_block(block_id=request.block_id)
if data_block:
logging.debug("%s allocated worker_%d with block id %s",
self.__class__.__name__,
request.worker_rank,
data_block.block_id)
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = 'success'
response.data_block_info.data_path = \
str(data_block.data_block_fpath)
response.data_block_info.meta_path = ''
response.data_block_info.block_id = str(data_block.block_id)
elif self._online_training:
logging.debug("%s allocated worker_%d with empty data block. "\
"wait for new data block since online traning",
self.__class__.__name__, request.worker_rank)
response.status.code = common_pb.STATUS_NO_MORE_DATA
response.status.error_message = 'please wait for datablock ready'
else:
logging.debug("%s allocated worker_%d with empty data block. "\
"exit running since since batch traning",
self.__class__.__name__, request.worker_rank)
response.status.code = common_pb.STATUS_DATA_FINISHED
response.status.error_message = 'datablock finished'
if response.status.code == common_pb.STATUS_DATA_FINISHED:
self._transfer_status(tm_pb.MasterStatus.RUNNING,
tm_pb.MasterStatus.FINISHED)
return response
def _load_data(self):
checkpoint = self._get_checkpoint()
# pylint: disable=line-too-long
logging.info("load_data, checkpoint: %s", checkpoint)
data_block_reps = [
dbr for dbr in self._data_block_visitor.LoadDataBlockRepByTimeFrame(
self._start_time, self._end_time).values()
if dbr.block_id not in checkpoint and
dbr.block_id not in self._visited_data_blocks]
self._visited_data_blocks.update([i.block_id for i in data_block_reps])
if self._online_training:
data_block_reps.sort(key=lambda x: x.data_block_index)
else:
data_block_reps.sort(key=lambda x: x.start_time)
for rnd in range(self._epoch_num):
if self._shuffle_data_block:
random.shuffle(data_block_reps)
for dbr in data_block_reps:
logging.debug('epoch round-%d: add data block id %s path %s',
rnd, dbr.block_id, dbr.data_block_fpath)
self._data_block_queue.put(dbr)
def _alloc_data_block(self, block_id=None):
# block_id is unused in leader role
with self._lock:
if self._data_block_queue.empty() and self._online_training:
logging.info("Load data when queue empty and online training")
self._load_data()
if self._data_block_queue.empty():
logging.info("Allocate when data_block_queue is empty")
return None
data_blocks_resp = self._data_block_queue.get()
with self._checkpoint_mutex:
self._allocated_data_blockids.add(data_blocks_resp.block_id)
return data_blocks_resp
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
parser = argparse.ArgumentParser('leader trainer master cmd.')
parser.add_argument('-p', '--port', type=int, default=50001,
help='Listen port of leader trainer master')
parser.add_argument('-app_id', '--application_id',
required=True, help='application_id')
parser.add_argument('-data_source', '--data_source',
required=False, help='training example data source')
parser.add_argument('-start_date', '--start_date',
default=None, help='training data start date')
parser.add_argument('-end_date', '--end_date',
default=None, help='training data end date')
parser.add_argument('--online_training', action='store_true',
help='the train master run for online training')
parser.add_argument('--shuffle_data_block', action='store_true',
help='shuffle the data block or not')
parser.add_argument('--epoch_num', type=int, default=1,
help='number of epoch for training, not '\
'support in online training')
FLAGS = parser.parse_args()
start_date = int(FLAGS.start_date) if FLAGS.start_date else None
end_date = int(FLAGS.end_date) if FLAGS.end_date else None
leader_tm = LeaderTrainerMaster(FLAGS.application_id, FLAGS.data_source,
start_date, end_date,
FLAGS.online_training,
FLAGS.shuffle_data_block,
FLAGS.epoch_num)
leader_tm.run(listen_port=FLAGS.port)
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,975 | piiswrong/fedlearner | refs/heads/master | /test/trainer/disabled_train_master.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
import unittest
from fedlearner.trainer_master.data.data_block import DataBlock
from fedlearner.trainer_master.data.data_block_queue import DataBlockQueue
from fedlearner.trainer_master.data.data_block_set import DataBlockSet
from fedlearner.trainer_master.data.data_source_reader import DataSourceReader
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
class TestDataBlockAlloc(unittest.TestCase):
def test_trainer_master(self):
db1 = DataBlock('1', 'data_path1', 'meta_path1')
db2 = DataBlock('2', 'data_path2', 'meta_path2')
db_queue = DataBlockQueue()
db_queue.put(db1)
db_queue.put(db2)
self.assertEqual(db_queue.get(), db1)
self.assertEqual(db_queue.get(), db2)
def test_data_block_set(self):
db1 = DataBlock('1', 'data_path1', 'meta_path1')
db2 = DataBlock('2', 'data_path2', 'meta_path2')
db_set = DataBlockSet()
db_set.add(db1)
db_set.add(db2)
self.assertIsNone(db_set.get('3'))
self.assertEqual(db_set.get('1'), db1)
self.assertIsNone(db_set.get('1'))
self.assertEqual(db_set.get('2'), db2)
self.assertIsNone(db_set.get('2'))
def test_data_block(self):
db1 = DataBlock('1', 'data_path1', None)
self.assertRaises(Exception, db1.validate)
db2 = DataBlock('2', 'data_path2', 'meta_path2')
self.assertTrue(db2.validate())
def test_data_block_reader(self):
ds_reader = DataSourceReader(data_source='data_source',
start_date='2019-01-02',
end_date='2019-10-02')
self.assertIsNotNone(ds_reader.read_all())
if __name__ == '__main__':
unittest.main() | {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,976 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer_master/trainer_master_service.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import sys
import traceback
from fedlearner.common import trainer_master_service_pb2 as tm_pb
from fedlearner.common import trainer_master_service_pb2_grpc as tm_grpc
from fedlearner.common import common_pb2 as common_pb
class TrainerMasterServer(tm_grpc.TrainerMasterServiceServicer):
def __init__(self, receiver_fn, get_checkpoint_fn, restore_fn):
super(TrainerMasterServer, self).__init__()
self._receiver_fn = receiver_fn
self._get_checkpoint_fn = get_checkpoint_fn
self._restore_checkpoint_fn = restore_fn
def GetDataBlockCheckpoint(self, request, context):
response = tm_pb.GetDataBlockCheckpointResponse()
try:
response = self._get_checkpoint_fn(request)
except Exception: # pylint: disable=broad-except
response.status.code = common_pb.STATUS_UNKNOWN_ERROR
err_str = ''.join(traceback.format_exception(*sys.exc_info()))
response.status.error_message = err_str
return response
def RestoreDataBlockCheckpoint(self, request, context):
response = tm_pb.RestoreDataBlockCheckpointResponse()
try:
response = self._restore_checkpoint_fn(request)
except Exception: # pylint: disable=broad-except
response.status.code = common_pb.STATUS_UNKNOWN_ERROR
err_str = ''.join(traceback.format_exception(*sys.exc_info()))
response.status.error_message = err_str
return response
def RequestDataBlock(self, request, context):
response = tm_pb.DataBlockResponse()
try:
response = self._receiver_fn(request)
except Exception: # pylint: disable=broad-except
response.status.code = common_pb.STATUS_UNKNOWN_ERROR
err_str = ''.join(traceback.format_exception(*sys.exc_info()))
response.status.error_message = err_str
return response
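# Hedged client-side sketch (the stub and request names are inferred from
# the generated gRPC module; the address is a placeholder):
#
# channel = grpc.insecure_channel('localhost:50001')
# stub = tm_grpc.TrainerMasterServiceStub(channel)
# response = stub.RequestDataBlock(tm_pb.DataBlockRequest(worker_rank=0))
# if response.status.code != common_pb.STATUS_SUCCESS:
#     logging.warning(response.status.error_message)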
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,977 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer_master/data/data_block_set.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
class DataBlockSet(object):
'''
To prototype quickly, this uses a plain Python dict.
If the data size is larger than local memory,
replace it with Redis or another distributed store.
'''
def __init__(self, maxsize=0):
self._db_set = {}
def add(self, data_block):
if data_block.block_id:
self._db_set[data_block.block_id] = data_block
def get(self, block_id):
logging.debug("search %s and result: %r", block_id,
block_id in self._db_set)
return self._db_set.pop(block_id, None)
def __str__(self):
ret = ""
for item in self._db_set:
ret += str(item) + ","
return ret
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,978 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/scheduler/scheduler.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=broad-except
import os
import threading
import logging
import traceback
from fedlearner_webconsole.job.yaml_formatter import generate_job_run_yaml
from fedlearner_webconsole.db import db
from fedlearner_webconsole.dataset.import_handler import ImportHandler
from fedlearner_webconsole.workflow.models import Workflow, WorkflowState
from fedlearner_webconsole.job.models import Job, JobState, JobDependency
from fedlearner_webconsole.scheduler.transaction import TransactionManager
from fedlearner_webconsole.k8s_client import get_client
from fedlearner_webconsole.utils.k8s_client import CrdKind
class Scheduler(object):
def __init__(self):
self._condition = threading.Condition(threading.RLock())
self._running = False
self._terminate = False
self._thread = None
self._pending_workflows = []
self._pending_jobs = []
self._app = None
self._import_handler = ImportHandler()
def start(self, app, force=False):
if self._running:
if not force:
raise RuntimeError("Scheduler is already started")
self.stop()
self._app = app
with self._condition:
self._running = True
self._terminate = False
self._thread = threading.Thread(target=self._routine)
self._thread.daemon = True
self._thread.start()
self._import_handler.init(app)
logging.info('Scheduler started')
def stop(self):
if not self._running:
return
with self._condition:
self._terminate = True
self._condition.notify_all()
logging.info('Scheduler stopping')
self._thread.join()
self._running = False
logging.info('Scheduler stopped')
def wakeup(self, workflow_ids=None,
job_ids=None,
data_batch_ids=None):
with self._condition:
if workflow_ids:
if isinstance(workflow_ids, int):
workflow_ids = [workflow_ids]
self._pending_workflows.extend(workflow_ids)
if job_ids:
if isinstance(job_ids, int):
job_ids = [job_ids]
self._pending_jobs.extend(job_ids)
if data_batch_ids:
self._import_handler.schedule_to_handle(data_batch_ids)
self._condition.notify_all()
def _routine(self):
self._app.app_context().push()
interval = int(os.environ.get(
'FEDLEARNER_WEBCONSOLE_POLLING_INTERVAL', 60))
while True:
with self._condition:
notified = self._condition.wait(interval)
if self._terminate:
return
if notified:
workflow_ids = self._pending_workflows
self._pending_workflows = []
self._poll_workflows(workflow_ids)
job_ids = self._pending_jobs
self._pending_jobs = []
job_ids.extend([
jid for jid, in db.session.query(Job.id) \
.filter(Job.state == JobState.WAITING) \
.filter(Job.workflow_id.in_(workflow_ids))])
self._poll_jobs(job_ids)
self._import_handler.handle(pull=False)
continue
workflows = db.session.query(Workflow.id).filter(
Workflow.target_state != WorkflowState.INVALID).all()
self._poll_workflows([wid for wid, in workflows])
jobs = db.session.query(Job.id).filter(
Job.state == JobState.WAITING).all()
self._poll_jobs([jid for jid, in jobs])
self._import_handler.handle(pull=True)
def _poll_workflows(self, workflow_ids):
logging.info('Scheduler polling %d workflows...', len(workflow_ids))
for workflow_id in workflow_ids:
try:
self._schedule_workflow(workflow_id)
except Exception as e:
logging.warning(
"Error while scheduling workflow %d:\n%s",
workflow_id, traceback.format_exc())
def _poll_jobs(self, job_ids):
logging.info('Scheduler polling %d jobs...', len(job_ids))
for job_id in job_ids:
try:
self._schedule_job(job_id)
except Exception as e:
logging.warning(
"Error while scheduling job %d:\n%s",
job_id, traceback.format_exc())
def _schedule_workflow(self, workflow_id):
logging.debug('Scheduling workflow %d', workflow_id)
tm = TransactionManager(workflow_id)
return tm.process()
def _schedule_job(self, job_id):
job = Job.query.get(job_id)
assert job is not None, 'Job %d not found'%job_id
if job.state != JobState.WAITING:
return job.state
deps = JobDependency.query.filter(
JobDependency.dst_job_id == job.id).all()
for dep in deps:
src_job = Job.query.get(dep.src_job_id)
assert src_job is not None, 'Job %d not found'%dep.src_job_id
if not src_job.is_complete():
return job.state
k8s_client = get_client()
yaml = generate_job_run_yaml(job)
try:
k8s_client.create_or_replace_custom_object(
CrdKind.FLAPP, yaml, job.project.get_namespace())
except RuntimeError as e:
logging.error('Starting job %d failed with runtime error: %s',
job_id, e.args)
return job.state
job.start()
db.session.commit()
return job.state
scheduler = Scheduler()
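# Hedged usage sketch (the Flask `app` object is a placeholder):
#
# scheduler.start(app)
# scheduler.wakeup(workflow_ids=[1])  # poll workflow 1 on next wakeup
# ...
# scheduler.stop()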
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,979 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/setting/apis.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=raise-missing-from
from flask_restful import Resource, reqparse
from fedlearner_webconsole.k8s_client import get_client
class SettingsApi(Resource):
def get(self):
res = {}
k8s_client = get_client()
deploy = k8s_client.get_deployment(
'fedlearner-web-console-v2')
res['webconsole_image'] = deploy.spec.template.spec.containers[0].image
return {'data': res}
def patch(self):
parser = reqparse.RequestParser()
parser.add_argument('webconsole_image', type=str, required=False,
default=None, help='image for webconsole')
data = parser.parse_args()
if data['webconsole_image']:
k8s_client = get_client()
deploy = k8s_client.get_deployment(
'fedlearner-web-console-v2')
deploy.spec.template.spec.containers[0].image = \
data['webconsole_image']
k8s_client.create_or_update_deployment(
deploy.metadata, deploy.spec, 'fedlearner-web-console-v2')
return {'data': {}}
def initialize_setting_apis(api):
api.add_resource(SettingsApi, '/settings')
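# Hedged HTTP sketch for the resource above (host, route prefix and image
# tag are placeholders; the prefix depends on how `api` is mounted):
#   GET   /settings
#         -> {"data": {"webconsole_image": "<current image>"}}
#   PATCH /settings  {"webconsole_image": "fedlearner/webconsole:v2.x"}
#         -> updates the deployment image and returns {"data": {}}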
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,980 | piiswrong/fedlearner | refs/heads/master | /fedlearner/data_join/data_portal_worker.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
import time
import os
from functools import cmp_to_key
import gc
import grpc
import tensorflow_io # pylint: disable=unused-import
from tensorflow.compat.v1 import gfile
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.common import data_join_service_pb2 as dj_pb
from fedlearner.common import data_portal_service_pb2_grpc as dp_grpc
from fedlearner.proxy.channel import make_insecure_channel, ChannelType
from fedlearner.data_join.raw_data_partitioner import RawDataPartitioner
from fedlearner.data_join import common
from fedlearner.data_join.sort_run_merger import SortRunMerger
class RawDataSortPartitioner(RawDataPartitioner):
class OutputFileSortWriter(RawDataPartitioner.OutputFileWriter):
def __init__(self, options, partition_id, process_index):
super(RawDataSortPartitioner.OutputFileSortWriter, self).__init__(
options, partition_id, process_index
)
self._buffer = []
def append_item(self, index, item):
self._buffer.append(item)
if self._begin_index is None:
self._begin_index = index
self._end_index = index
def finish(self):
meta = None
if len(self._buffer) > 0:
writer = self._get_output_writer()
self._sort_buffer()
for item in self._buffer:
writer.write_item(item)
writer.close()
meta = RawDataPartitioner.FileMeta(
self._options.partitioner_rank_id,
self._process_index,
self._begin_index,
self._end_index
)
fpath = os.path.join(self._options.output_dir,
common.partition_repr(self._partition_id),
meta.encode_meta_to_fname())
gfile.Rename(self.get_tmp_fpath(), fpath, True)
self._buffer = []
self._begin_index = None
self._end_index = None
return meta
def _sort_buffer(self):
self._buffer = sorted(self._buffer, key=cmp_to_key(self.item_cmp))
@staticmethod
def item_cmp(a, b):
if a.event_time < b.event_time:
return -1
if a.event_time > b.event_time:
return 1
if a.example_id < b.example_id:
return -1
if a.example_id > b.example_id:
return 1
return 0
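# Resulting order is lexicographic on (event_time, example_id): an item
# with a smaller event_time sorts first, and ties on event_time fall
# back to example_id.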
def _get_file_writer(self, partition_id):
if len(self._flying_writers) == 0:
self._flying_writers = \
[RawDataSortPartitioner.OutputFileSortWriter(
self._options, pid, self._dumped_process_index+1)
for pid in range(self._options.output_partition_num)]
assert partition_id < len(self._flying_writers)
return self._flying_writers[partition_id]
class DataPortalWorker(object):
def __init__(self, options, master_addr, rank_id,
kvstore_type, use_mock_etcd=False):
master_channel = make_insecure_channel(
master_addr, ChannelType.INTERNAL,
options=[('grpc.max_send_message_length', 2**31-1),
('grpc.max_receive_message_length', 2**31-1)]
)
self._kvstore_type = kvstore_type
self._rank_id = rank_id
self._options = options
self._use_mock_etcd = use_mock_etcd
self._master_client = dp_grpc.DataPortalMasterServiceStub(
master_channel)
def request_new_task(self):
request = dp_pb.NewTaskRequest()
request.rank_id = self._rank_id
while True:
try:
return self._master_client.RequestNewTask(request)
except grpc.RpcError as e:
logging.warning("Request new task failed, sleep 2 seconds"\
" and retry. %s", e)
time.sleep(2)
def finish_task(self, partition_id, part_state):
request = dp_pb.FinishTaskRequest()
request.rank_id = self._rank_id
request.partition_id = partition_id
request.part_state = part_state
while True:
try:
self._master_client.FinishTask(request)
return
except grpc.RpcError as e:
logging.warning("Failed to finish request, sleep 2 seconds" \
" and retry. %s", e)
time.sleep(2)
def start(self):
logging.info("Start DataPortal Worker, rank_id:%s", self._rank_id)
logging.info("kvstore type:%s", self._kvstore_type)
self.run()
def _make_partitioner_options(self, task):
return dj_pb.RawDataPartitionerOptions(
partitioner_name="{}-rank_{}".format(task.task_name,
self._rank_id),
input_file_paths=task.fpaths,
output_dir=task.output_base_dir,
output_partition_num=task.output_partition_num,
partitioner_rank_id=task.partition_id,
batch_processor_options=self._options.batch_processor_options,
raw_data_options=self._options.raw_data_options,
writer_options=self._options.writer_options,
memory_limit_ratio=self._options.memory_limit_ratio
)
def _make_merger_options(self, task):
return dj_pb.SortRunMergerOptions(
merger_name="{}-rank_{}".format(task.task_name,
self._rank_id),
reader_options=dj_pb.RawDataOptions(
raw_data_iter=self._options.writer_options.output_writer,
compressed_type=self._options.writer_options.compressed_type,
read_ahead_size=self._options.merger_read_ahead_size,
read_batch_size=self._options.merger_read_batch_size
),
writer_options=self._options.writer_options,
output_file_dir=task.reduce_base_dir,
partition_id=task.partition_id,
)
def _run_map_task(self, task):
partition_options = self._make_partitioner_options(task)
data_partitioner = None
type_repr = ''
if task.data_portal_type == dp_pb.DataPortalType.Streaming:
data_partitioner = RawDataSortPartitioner(
partition_options, task.part_field, self._kvstore_type,
self._use_mock_etcd
)
type_repr = 'streaming'
else:
assert task.data_portal_type == dp_pb.DataPortalType.PSI
data_partitioner = RawDataPartitioner(
partition_options, task.part_field, self._kvstore_type,
self._use_mock_etcd
)
type_repr = 'psi'
logging.info("Partitioner rank_id-[%d] start run task %s of type %s "\
"for partition %d, input %d files", self._rank_id,
partition_options.partitioner_name, type_repr,
partition_options.partitioner_rank_id,
len(partition_options.input_file_paths))
data_partitioner.start_process()
data_partitioner.wait_for_finished()
logging.info("Partitioner rank_id-[%d] finish run partition task %s "\
"for partition %d.", self._rank_id,
partition_options.partitioner_name,
partition_options.partitioner_rank_id)
del data_partitioner
gc.collect()
def _run_reduce_task(self, task):
merger_options = self._make_merger_options(task)
sort_run_merger = SortRunMerger(merger_options, self._merger_comparator)
input_dir = os.path.join(task.map_base_dir,
common.partition_repr(task.partition_id))
input_fpaths = [os.path.join(input_dir, f) for f in
gfile.ListDirectory(input_dir)
if f.endswith(common.RawDataFileSuffix)]
logging.info("Merger rank_id-[%d] start run task %s for partition "\
"%d. input_dir %s, with %d files",
self._rank_id, merger_options.merger_name,
task.partition_id, task.map_base_dir, len(input_fpaths))
sort_run_merger.merge_sort_runs(input_fpaths)
logging.info("Merger rank_id-[%d] finish task %s for "\
"partition %d", self._rank_id,
merger_options.merger_name, task.partition_id)
del sort_run_merger
gc.collect()
@staticmethod
def _merger_comparator(a, b):
if a.event_time != b.event_time:
return a.event_time < b.event_time
return a.example_id < b.example_id
def run(self):
while True:
response = self.request_new_task()
if response.HasField("finished"):
logging.info("Receive finished response from Master.")
return
if response.HasField("map_task"):
task = response.map_task
logging.info("Receive map task partition_id:%d, paths:%s",
task.partition_id, task.fpaths)
self._run_map_task(task)
self.finish_task(task.partition_id, dp_pb.PartState.kIdMap)
continue
if response.HasField("reduce_task"):
task = response.reduce_task
logging.info("Receive reduce task, partition_id:%d, input"\
" dir %s", task.partition_id, task.map_base_dir)
self._run_reduce_task(task)
self.finish_task(task.partition_id,
dp_pb.PartState.kEventTimeReduce)
continue
if response.HasField("pending"):
logging.warning("Receive pending response.")
else:
logging.warning("The response from master is invalid.")
time.sleep(2)
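# Task protocol implemented by run() above, summarized:
#   map_task    -> partition the raw data (sorted for Streaming, plain
#                  for PSI), then FinishTask(part_state=kIdMap)
#   reduce_task -> merge the sort runs of one partition, then
#                  FinishTask(part_state=kEventTimeReduce)
#   pending     -> sleep 2 seconds and ask the master again
#   finished    -> exit the worker loop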
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,981 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer/parameter_server.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# pylint: disable=unused-import
import argparse
import tensorflow.compat.v1 as tf
from fedlearner.trainer import operator
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='FedLearner Parameter Server.')
parser.add_argument('address', type=str,
help='Listen address of the parameter server, ' \
'with format [IP]:[PORT]')
args = parser.parse_args()
config = tf.ConfigProto()
config.rpc_options.compression_algorithm = 'gzip'
config.rpc_options.cache_rpc_response = True
cluster_spec = tf.train.ClusterSpec({'local': {0: args.address}})
server = tf.train.Server(cluster_spec,
job_name='local',
task_index=0,
config=config)
server.join()
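# Hedged invocation sketch (module path follows the repo layout; the
# listen address is a placeholder):
#   python -m fedlearner.trainer.parameter_server 0.0.0.0:40001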
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,982 | piiswrong/fedlearner | refs/heads/master | /fedlearner/data_join/cmd/data_portal_master_service.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import argparse
from google.protobuf import text_format
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.common.db_client import DBClient
from fedlearner.common.common import set_logger
from fedlearner.data_join import common
from fedlearner.data_join.data_portal_master import DataPortalMasterService
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='DataPortalMasterService cmd.')
parser.add_argument('--kvstore_type', type=str,
default='etcd', help='the type of kvstore')
parser.add_argument('--listen_port', '-p', type=int, default=4032,
help='Listen port of data join master')
parser.add_argument('--data_portal_name', type=str,
default='test_data_source',
help='the name of data source')
parser.add_argument('--data_portal_type', type=str,
default='Streaming', choices=['PSI', 'Streaming'],
help='the type of data portal type')
parser.add_argument('--output_partition_num', type=int, required=True,
help='the output partition number of data portal')
parser.add_argument('--input_file_wildcard', type=str, default='',
help='the wildcard filter for input file')
parser.add_argument('--input_base_dir', type=str, required=True,
help='the base dir of input directory')
parser.add_argument('--output_base_dir', type=str, required=True,
help='the base dir of output directory')
parser.add_argument('--raw_data_publish_dir', type=str, required=True,
help='the raw data publish dir in mysql')
parser.add_argument('--long_running', action='store_true',
help='make the data portal long running')
parser.add_argument('--check_success_tag', action='store_true',
help='Check that a _SUCCESS file exists before '
'processing files in a subfolder')
args = parser.parse_args()
set_logger()
use_mock_etcd = (args.kvstore_type == 'mock')
kvstore = DBClient(args.kvstore_type, use_mock_etcd)
kvstore_key = common.portal_kvstore_base_dir(args.data_portal_name)
if kvstore.get_data(kvstore_key) is None:
portal_manifest = dp_pb.DataPortalManifest(
name=args.data_portal_name,
data_portal_type=(dp_pb.DataPortalType.PSI if
args.data_portal_type == 'PSI' else
dp_pb.DataPortalType.Streaming),
output_partition_num=args.output_partition_num,
input_file_wildcard=args.input_file_wildcard,
input_base_dir=args.input_base_dir,
output_base_dir=args.output_base_dir,
raw_data_publish_dir=args.raw_data_publish_dir,
processing_job_id=-1
)
kvstore.set_data(kvstore_key, text_format.\
MessageToString(portal_manifest))
options = dp_pb.DataPotraMasterlOptions(
use_mock_etcd=use_mock_etcd,
long_running=args.long_running,
check_success_tag=args.check_success_tag)
portal_master_srv = DataPortalMasterService(args.listen_port,
args.data_portal_name,
args.kvstore_type,
options)
portal_master_srv.run()
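# Illustrative invocation (names and paths below are placeholder values):
#   python -m fedlearner.data_join.cmd.data_portal_master_service \
#       --data_portal_name=my_portal --output_partition_num=4 \
#       --input_base_dir=/data/raw --output_base_dir=/data/portal \
#       --raw_data_publish_dir=portal_publish_dir --long_running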
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,983 | piiswrong/fedlearner | refs/heads/master | /fedlearner/data_join/data_portal_master.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
from concurrent import futures
import grpc
from google.protobuf import empty_pb2
from fedlearner.common import common_pb2 as common_pb
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.common import data_portal_service_pb2_grpc as dp_grpc
from fedlearner.common.db_client import DBClient
from fedlearner.data_join.data_portal_job_manager import DataPortalJobManager
from fedlearner.data_join.routine_worker import RoutineWorker
class DataPortalMaster(dp_grpc.DataPortalMasterServiceServicer):
def __init__(self, portal_name, kvstore, portal_options):
super(DataPortalMaster, self).__init__()
self._portal_name = portal_name
self._kvstore = kvstore
self._portal_options = portal_options
self._data_portal_job_manager = DataPortalJobManager(
self._kvstore, self._portal_name,
self._portal_options.long_running,
self._portal_options.check_success_tag,
)
self._bg_worker = None
def GetDataPortalManifest(self, request, context):
return self._data_portal_job_manager.get_portal_manifest()
def RequestNewTask(self, request, context):
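# Task allocation protocol: return a MapTask or ReduceTask while work
# remains, 'pending' when nothing is allocatable right now, and
# 'finished' once the whole job is done.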
response = dp_pb.NewTaskResponse()
finished, task = \
self._data_portal_job_manager.alloc_task(request.rank_id)
if task is not None:
if isinstance(task, dp_pb.MapTask):
response.map_task.MergeFrom(task)
else:
assert isinstance(task, dp_pb.ReduceTask)
response.reduce_task.MergeFrom(task)
elif not finished:
response.pending.MergeFrom(empty_pb2.Empty())
else:
response.finished.MergeFrom(empty_pb2.Empty())
return response
def FinishTask(self, request, context):
self._data_portal_job_manager.finish_task(request.rank_id,
request.partition_id,
request.part_state)
return common_pb.Status()
def start(self):
self._bg_worker = RoutineWorker(
'portal_master_bg_worker',
self._data_portal_job_manager.backgroup_task,
lambda: True, 30
)
self._bg_worker.start_routine()
def stop(self):
if self._bg_worker is not None:
self._bg_worker.stop_routine()
self._bg_worker = None
class DataPortalMasterService(object):
def __init__(self, listen_port, portal_name,
kvstore_type, portal_options):
self._portal_name = portal_name
self._listen_port = listen_port
self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
kvstore = DBClient(kvstore_type, portal_options.use_mock_etcd)
self._data_portal_master = DataPortalMaster(portal_name, kvstore,
portal_options)
dp_grpc.add_DataPortalMasterServiceServicer_to_server(
self._data_portal_master, self._server
)
self._server.add_insecure_port('[::]:%d'%listen_port)
self._server_started = False
def start(self):
if not self._server_started:
self._server.start()
self._data_portal_master.start()
self._server_started = True
logging.warning("DataPortalMasterService name as %s start " \
"on port[%d]:",
self._portal_name, self._listen_port)
def stop(self):
if self._server_started:
self._data_portal_master.stop()
self._server.stop(None)
self._server_started = False
logging.warning("DataPortalMasterService name as %s"\
"stopped ", self._portal_name)
def run(self):
self.start()
self._server.wait_for_termination()
self.stop()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,984 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/testing/common.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import json
import logging
import unittest
import secrets
from http import HTTPStatus
import multiprocessing as mp
from flask import Flask
from flask_testing import TestCase
from fedlearner_webconsole.app import create_app
from fedlearner_webconsole.db import db
from fedlearner_webconsole.auth.models import User
class BaseTestCase(TestCase):
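# Flask-Testing base class: builds an app on an in-memory sqlite DB,
# seeds a default user 'ada' and signs in before each test case runs.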
class Config(object):
SQLALCHEMY_DATABASE_URI = 'sqlite://'
SQLALCHEMY_TRACK_MODIFICATIONS = False
JWT_SECRET_KEY = secrets.token_urlsafe(64)
PROPAGATE_EXCEPTIONS = True
LOGGING_LEVEL = logging.DEBUG
TESTING = True
ENV = 'development'
GRPC_LISTEN_PORT = 1990
def create_app(self):
app = create_app(self.__class__.Config)
app.app_context().push()
return app
def setUp(self):
db.create_all()
user = User(username='ada')
user.set_password('ada')
db.session.add(user)
db.session.commit()
self.signin_helper()
def tearDown(self):
self.signout_helper()
db.session.remove()
db.drop_all()
def get_response_data(self, response):
return json.loads(response.data).get('data')
def signin_helper(self, username='ada', password='ada'):
resp = self.client.post(
'/api/v2/auth/signin',
data=json.dumps({
'username': username,
'password': password
}),
content_type='application/json')
self.assertEqual(resp.status_code, HTTPStatus.OK)
self.assertTrue('access_token' in resp.json)
self.assertTrue(len(resp.json.get('access_token')) > 1)
self._token = resp.json.get('access_token')
return self._token
def signout_helper(self):
self._token = None
def _get_headers(self, use_auth=True):
headers = {}
if use_auth and self._token:
headers['Authorization'] = f'Bearer {self._token}'
return headers
def get_helper(self, url, use_auth=True):
return self.client.get(
url, headers=self._get_headers(use_auth))
def post_helper(self, url, data, use_auth=True):
return self.client.post(
url,
data=json.dumps(data),
content_type='application/json',
headers=self._get_headers(use_auth))
def put_helper(self, url, data, use_auth=True):
return self.client.put(
url,
data=json.dumps(data),
content_type='application/json',
headers=self._get_headers(use_auth))
def patch_helper(self, url, data, use_auth=True):
return self.client.patch(
url,
data=json.dumps(data),
content_type='application/json',
headers=self._get_headers(use_auth))
def delete_helper(self, url, use_auth=True):
return self.client.delete(url,
headers=self._get_headers(use_auth))
def setup_project(self, role, peer_port):
if role == 'leader':
peer_role = 'follower'
else:
peer_role = 'leader'
name = 'test-project'
config = {
'participants': [
{
'name': f'party_{peer_role}',
'url': f'127.0.0.1:{peer_port}',
'domain_name': f'fl-{peer_role}.com'
}
],
'variables': [
{
'name': 'EGRESS_URL',
'value': f'127.0.0.1:{peer_port}'
}
]
}
create_response = self.post_helper(
'/api/v2/projects',
data={
'name': name,
'config': config,
})
self.assertEqual(create_response.status_code, HTTPStatus.OK)
return json.loads(create_response.data).get('data')
class TestAppProcess(mp.get_context('spawn').Process):
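# Runs a single test method in a spawned subprocess so multi-party tests
# can host independent apps; join() feeds the queue that new_tearDown
# blocks on, letting the parent control when the child tears down.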
def __init__(self, test_class, method, config=None):
super(TestAppProcess, self).__init__()
self._test_class = test_class
self._method = method
self._app_config = config
self._queue = mp.get_context('spawn').Queue()
def run(self):
for h in logging.getLogger().handlers[:]:
logging.getLogger().removeHandler(h)
h.close()
logging.basicConfig(
level=logging.DEBUG,
format="SPAWN:%(filename)s %(lineno)s %(levelname)s - %(message)s")
if self._app_config:
self._test_class.Config = self._app_config
test = self._test_class(self._method)
old_tearDown = test.tearDown
def new_tearDown(*args, **kwargs):
self._queue.get()
old_tearDown(*args, **kwargs)
test.tearDown = new_tearDown
suite = unittest.TestSuite([test])
res = suite.run(unittest.TestResult())
if res.errors:
for method, err in res.errors:
print('======================================================================')
print('ERROR:', method)
print('----------------------------------------------------------------------')
print(err)
print('----------------------------------------------------------------------')
if res.failures:
for method, fail in res.failures:
print('======================================================================')
print('FAIL:', method)
print('----------------------------------------------------------------------')
print(fail)
print('----------------------------------------------------------------------')
assert res.wasSuccessful()
def join(self):
self._queue.put(None)
ret = super(TestAppProcess, self).join()
assert self.exitcode == 0, "Subprocess failed!"
return ret
def create_test_db():
"""Creates test db for testing non flask-must units."""
app = Flask('fedlearner_webconsole_test')
app.config['TESTING'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
# this does the binding
app.app_context().push()
return db
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,985 | piiswrong/fedlearner | refs/heads/master | /fedlearner/data_join/cmd/data_portal_worker_cli.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import argparse
from fedlearner.common import data_join_service_pb2 as dj_pb
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.common.common import set_logger
from fedlearner.data_join.data_portal_worker import DataPortalWorker
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='DataPortalWorker cmd.')
parser.add_argument("--rank_id", type=int,
help="the rank id of this worker")
parser.add_argument("--master_addr", type=str,
help="the addr of data portal master")
parser.add_argument("--kvstore_type", type=str,
default='etcd', help='the type of kvstore')
parser.add_argument("--use_mock_etcd", action="store_true",
help='use a mock kvstore for testing')
parser.add_argument("--merger_read_ahead_size", type=int, default=128<<10,
help="the read ahead size for merger")
parser.add_argument("--merger_read_batch_size", type=int, default=32,
help="the read batch size for merger")
parser.add_argument("--input_data_file_iter", type=str, default="TF_RECORD",
choices=['TF_RECORD', 'CSV_DICT'],
help="the type for input data iterator")
parser.add_argument("--compressed_type", type=str, default='',
choices=['', 'ZLIB', 'GZIP'],
help='the compressed type of input data file')
parser.add_argument('--read_ahead_size', type=int, default=1<<20,
help='the read ahead size for raw data')
parser.add_argument('--read_batch_size', type=int, default=128,
help='the read batch size for tf record iter')
parser.add_argument('--output_builder', type=str, default='TF_RECORD',
choices=['TF_RECORD', 'CSV_DICT'],
help='the builder for output file')
parser.add_argument('--builder_compressed_type', type=str, default='',
choices=['', 'ZLIB', 'GZIP'],
help='the compressed type of output file')
parser.add_argument("--batch_size", type=int, default=1024,
help="the batch size for raw data reader")
parser.add_argument('--memory_limit_ratio', type=int, default=70,
choices=range(40, 81),
help='the ratio(*100) of memory used for map&reduce')
parser.add_argument('--optional_fields', type=str, default='',
help='optional stat fields used in joiner, separated '
'by comma between fields, e.g. "label,rit". '
'Each field will be stripped.')
args = parser.parse_args()
set_logger()
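# Eager execution lets the TF_RECORD iterators read records directly,
# without building a graph and session first.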
if args.input_data_file_iter == 'TF_RECORD' or \
args.output_builder == 'TF_RECORD':
import tensorflow
tensorflow.compat.v1.enable_eager_execution()
optional_fields = list(
field for field in map(str.strip, args.optional_fields.split(','))
if field != ''
)
portal_worker_options = dp_pb.DataPortalWorkerOptions(
raw_data_options=dj_pb.RawDataOptions(
raw_data_iter=args.input_data_file_iter,
compressed_type=args.compressed_type,
read_ahead_size=args.read_ahead_size,
read_batch_size=args.read_batch_size,
optional_fields=optional_fields
),
writer_options=dj_pb.WriterOptions(
output_writer=args.output_builder,
compressed_type=args.builder_compressed_type
),
batch_processor_options=dj_pb.BatchProcessorOptions(
batch_size=args.batch_size,
max_flying_item=-1
),
merger_read_ahead_size=args.merger_read_ahead_size,
merger_read_batch_size=args.merger_read_batch_size,
memory_limit_ratio=args.memory_limit_ratio/100
)
data_portal_worker = DataPortalWorker(
portal_worker_options, args.master_addr,
args.rank_id, args.kvstore_type,
(args.kvstore_type == 'mock')
)
data_portal_worker.start()
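# Illustrative invocation (addresses and ids below are placeholder values):
#   python -m fedlearner.data_join.cmd.data_portal_worker_cli \
#       --rank_id=0 --master_addr=localhost:4032 --kvstore_type=etcd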
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,986 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/job/models.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
import enum
import json
from sqlalchemy.sql import func
from sqlalchemy.sql.schema import Index
from fedlearner_webconsole.db import db, to_dict_mixin
from fedlearner_webconsole.k8s_client import get_client
from fedlearner_webconsole.utils.k8s_client import CrdKind
from fedlearner_webconsole.proto.workflow_definition_pb2 import JobDefinition
class JobState(enum.Enum):
INVALID = 0
STOPPED = 1
WAITING = 2
STARTED = 3
# must be consistent with JobType in proto
class JobType(enum.Enum):
UNSPECIFIED = 0
RAW_DATA = 1
DATA_JOIN = 2
PSI_DATA_JOIN = 3
NN_MODEL_TRANINING = 4
TREE_MODEL_TRAINING = 5
NN_MODEL_EVALUATION = 6
TREE_MODEL_EVALUATION = 7
def merge(x, y):
"""Given two dictionaries, merge them into a new dict as a shallow copy."""
z = x.copy()
z.update(y)
return z
@to_dict_mixin(
extras={
'state': (lambda job: job.get_state_for_frontend()),
'pods': (lambda job: job.get_pods_for_frontend()),
'config': (lambda job: job.get_config()),
'complete_at': (lambda job: job.get_complete_at())
})
class Job(db.Model):
__tablename__ = 'job_v2'
__table_args__ = (Index('idx_workflow_id', 'workflow_id'), {
'comment': 'webconsole job',
'mysql_engine': 'innodb',
'mysql_charset': 'utf8mb4',
})
id = db.Column(db.Integer,
primary_key=True,
autoincrement=True,
comment='id')
name = db.Column(db.String(255), unique=True, comment='name')
job_type = db.Column(db.Enum(JobType, native_enum=False),
nullable=False,
comment='job type')
state = db.Column(db.Enum(JobState, native_enum=False),
nullable=False,
default=JobState.INVALID,
comment='state')
yaml_template = db.Column(db.Text(), comment='yaml_template')
config = db.Column(db.LargeBinary(), comment='config')
is_disabled = db.Column(db.Boolean(), default=False, comment='is_disabled')
workflow_id = db.Column(db.Integer, nullable=False, comment='workflow id')
project_id = db.Column(db.Integer, nullable=False, comment='project id')
flapp_snapshot = db.Column(db.Text(), comment='flapp snapshot')
pods_snapshot = db.Column(db.Text(), comment='pods snapshot')
created_at = db.Column(db.DateTime(timezone=True),
server_default=func.now(),
comment='created at')
updated_at = db.Column(db.DateTime(timezone=True),
server_default=func.now(),
onupdate=func.now(),
comment='updated at')
deleted_at = db.Column(db.DateTime(timezone=True), comment='deleted at')
project = db.relationship('Project',
primaryjoin='Project.id == '
'foreign(Job.project_id)')
workflow = db.relationship('Workflow',
primaryjoin='Workflow.id == '
'foreign(Job.workflow_id)')
_k8s_client = get_client()
def get_config(self):
if self.config is not None:
proto = JobDefinition()
proto.ParseFromString(self.config)
return proto
return None
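# The snapshots below persist the FLApp and pod state so a stopped job
# can still be inspected after its k8s custom resource is deleted.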
def _set_snapshot_flapp(self):
flapp = self._k8s_client.get_custom_object(
CrdKind.FLAPP, self.name, self.project.get_namespace())
self.flapp_snapshot = json.dumps(flapp)
def _set_snapshot_pods(self):
pods = self._k8s_client.list_resource_of_custom_object(
CrdKind.FLAPP, self.name, 'pods', self.project.get_namespace())
self.pods_snapshot = json.dumps(pods)
def get_pods(self):
if self.state == JobState.STARTED:
try:
pods = self._k8s_client.list_resource_of_custom_object(
CrdKind.FLAPP, self.name, 'pods',
self.project.get_namespace())
return pods['pods']
except RuntimeError as e:
logging.error('Get %d pods error msg: %s', self.id, e.args)
return None
if self.pods_snapshot is not None:
return json.loads(self.pods_snapshot)['pods']
return None
def get_flapp(self):
if self.state == JobState.STARTED:
try:
flapp = self._k8s_client.get_custom_object(
CrdKind.FLAPP, self.name, self.project.get_namespace())
return flapp['flapp']
except RuntimeError as e:
logging.error('Get %d flapp error msg: %s', self.id, str(e))
return None
if self.flapp_snapshot is not None:
return json.loads(self.flapp_snapshot)['flapp']
return None
def get_pods_for_frontend(self, filter_private_info=False):
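# Merges two sources: pod names from the FLApp replica status and the
# pod objects themselves, deduplicated by pod name at the end.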
result = []
flapp = self.get_flapp()
if flapp is None:
return result
if 'status' in flapp \
and 'flReplicaStatus' in flapp['status']:
replicas = flapp['status']['flReplicaStatus']
if replicas:
for pod_type in replicas:
for state in ['failed', 'succeeded']:
for pod in replicas[pod_type][state]:
result.append({
'name': pod,
'pod_type': pod_type,
'status': 'Flapp_{}'.format(state),
'message': '',
})
# msg from pods
pods = self.get_pods()
if pods is None:
return result
pods = pods['items']
for pod in pods:
status = pod['status']['phase'].lower()
msgs = []
if 'containerStatuses' in pod['status']:
state = pod['status']['containerStatuses'][0]['state']
for key, detail in state.items():
if filter_private_info:
if 'reason' in detail:
msgs.append(key + ':' + detail['reason'])
elif 'message' in detail:
msgs.append(key + ':' + detail['message'])
for cond in pod['status']['conditions']:
if filter_private_info:
if 'reason' in cond:
msgs.append(cond['type'] + ':' + cond['reason'])
elif 'message' in cond:
msgs.append(cond['type'] + ':' + cond['message'])
result.append({
'name': pod['metadata']['name'],
'pod_type': pod['metadata']['labels']['fl-replica-type'],
'status': status,
'message': ', '.join(msgs)
})
# deduplication pods both in pods and flapp
result = list({pod['name']: pod for pod in result}.values())
return result
def get_state_for_frontend(self):
if self.state == JobState.STARTED:
if self.is_complete():
return 'COMPLETED'
if self.is_failed():
return 'FAILED'
return 'RUNNING'
if self.state == JobState.STOPPED:
if self.get_flapp() is None:
return 'NEW'
return self.state.name
def is_failed(self):
flapp = self.get_flapp()
if flapp is None \
or 'status' not in flapp \
or 'appState' not in flapp['status']:
return False
return flapp['status']['appState'] in [
'FLStateFailed', 'FLStateShutDown'
]
def is_complete(self):
flapp = self.get_flapp()
if flapp is None \
or 'status' not in flapp \
or 'appState' not in flapp['status']:
return False
return flapp['status']['appState'] == 'FLStateComplete'
def get_complete_at(self):
flapp = self.get_flapp()
if flapp is None \
or 'status' not in flapp \
or 'complete_at' not in flapp['status']:
return None
return flapp['status']['complete_at']
def stop(self):
if self.state == JobState.STARTED:
self._set_snapshot_flapp()
self._set_snapshot_pods()
self._k8s_client.delete_custom_object(CrdKind.FLAPP, self.name,
self.project.get_namespace())
self.state = JobState.STOPPED
def schedule(self):
assert self.state == JobState.STOPPED
self.pods_snapshot = None
self.flapp_snapshot = None
self.state = JobState.WAITING
def start(self):
self.state = JobState.STARTED
def set_yaml_template(self, yaml_template):
self.yaml_template = yaml_template
class JobDependency(db.Model):
__tablename__ = 'job_dependency_v2'
__table_args__ = (Index('idx_src_job_id', 'src_job_id'),
Index('idx_dst_job_id', 'dst_job_id'), {
'comment': 'record job dependencies',
'mysql_engine': 'innodb',
'mysql_charset': 'utf8mb4',
})
id = db.Column(db.Integer,
primary_key=True,
autoincrement=True,
comment='id')
src_job_id = db.Column(db.Integer, comment='src job id')
dst_job_id = db.Column(db.Integer, comment='dst job id')
dep_index = db.Column(db.Integer, comment='dep index')
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,987 | piiswrong/fedlearner | refs/heads/master | /fedlearner/common/mysql_client.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""MySQL client."""
import os
import logging
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.ext.automap import automap_base
class MySQLClient(object):
def __init__(self, database, addr, user, password, base_dir):
self._unix_socket = os.environ.get('DB_SOCKET_PATH', None)
self._database = database
self._addr = addr
self._user = user
self._password = password
if self._unix_socket:
self._addr = ''
self._password = ''
self._base_dir = base_dir
if self._base_dir[0] != '/':
self._base_dir = '/' + self._base_dir
self._create_engine_inner()
def get_data(self, key):
with self.closing(self._engine) as sess:
try:
table = self._datasource_meta
value = sess.query(table).filter(table.kv_key ==
self._generate_key(key)).one().kv_value
if isinstance(value, str):
return value.encode()
return value
except NoResultFound:
return None
except Exception as e: # pylint: disable=broad-except
logging.error('failed to get data. msg[%s]', e)
sess.rollback()
return None
def set_data(self, key, data):
with self.closing(self._engine) as sess:
try:
table = self._datasource_meta
context = sess.query(table).filter(table.kv_key ==
self._generate_key(key)).first()
if context:
context.kv_value = data
sess.commit()
else:
context = self._datasource_meta(
kv_key=self._generate_key(key),
kv_value=data)
sess.add(context)
sess.commit()
return True
except Exception as e: # pylint: disable=broad-except
logging.error('failed to set data. msg[%s]', e)
sess.rollback()
return False
def delete(self, key):
with self.closing(self._engine) as sess:
try:
table = self._datasource_meta
for context in sess.query(table).filter(table.kv_key ==
self._generate_key(key)):
sess.delete(context)
sess.commit()
return True
except Exception as e: # pylint: disable=broad-except
logging.error('failed to delete. msg[%s]', e)
sess.rollback()
return False
def delete_prefix(self, key):
with self.closing(self._engine) as sess:
try:
table = self._datasource_meta
for context in sess.query(table).filter(table.kv_key.\
like(self._generate_key(key) + '%')):
sess.delete(context)
sess.commit()
return True
except Exception as e: # pylint: disable=broad-except
logging.error('failed to delete prefix. msg[%s]', e)
sess.rollback()
return False
def cas(self, key, old_data, new_data):
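# Compare-and-swap: old_data=None means create the key; otherwise the
# value is replaced only if the stored value still equals old_data.
# Returns False on a failed comparison or any DB error.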
with self.closing(self._engine) as sess:
try:
table = self._datasource_meta
flag = True
if old_data is None:
context = self._datasource_meta(
kv_key=self._generate_key(key),
kv_value=new_data)
sess.add(context)
sess.commit()
else:
context = sess.query(table).filter(table.kv_key ==\
self._generate_key(key)).one()
if context.kv_value != old_data:
flag = False
return flag
context.kv_value = new_data
sess.commit()
return flag
except Exception as e: # pylint: disable=broad-except
logging.error('failed to cas. msg[%s]', e)
sess.rollback()
return False
def get_prefix_kvs(self, prefix, ignor_prefix=False):
kvs = []
path = self._generate_key(prefix)
with self.closing(self._engine) as sess:
try:
table = self._datasource_meta
for context in sess.query(table).filter(table.kv_key.\
like(path + '%')).order_by(table.kv_key):
if ignor_prefix and context.kv_key == path:
continue
nkey = self._normalize_output_key(context.kv_key,
self._base_dir)
if isinstance(nkey, str):
nkey = nkey.encode()
value = context.kv_value
if isinstance(value, str):
value = value.encode()
kvs.append((nkey, value))
return kvs
except Exception as e: # pylint: disable=broad-except
logging.error('failed to get prefix kvs. msg[%s]', e)
sess.rollback()
return None
def _generate_key(self, key):
nkey = '/'.join([self._base_dir, self._normalize_input_key(key)])
return nkey
@staticmethod
def _normalize_input_key(key):
skip_cnt = 0
while key[skip_cnt] == '.' or key[skip_cnt] == '/':
skip_cnt += 1
if skip_cnt > 0:
return key[skip_cnt:]
return key
@staticmethod
def _normalize_output_key(key, base_dir):
if isinstance(key, str):
assert key.startswith(base_dir)
else:
assert key.decode().startswith(base_dir)
return key[len(base_dir)+1:]
def _create_engine_inner(self):
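# Reflects the existing `datasource_meta` table via SQLAlchemy automap
# rather than declaring a mapped class by hand.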
try:
conn_string_pattern = 'mysql+mysqldb://{user}:{passwd}@{addr}'\
'/{db_name}'
conn_string = conn_string_pattern.format(
user=self._user, passwd=self._password,
addr=self._addr, db_name=self._database)
if self._unix_socket:
sub = '?unix_socket={}'.format(self._unix_socket)
conn_string = conn_string + sub
self._engine = create_engine(conn_string, echo=False,
pool_recycle=180)
Base = automap_base()
Base.prepare(self._engine, reflect=True)
self._datasource_meta = Base.classes.datasource_meta
except Exception as e:
raise ValueError('create mysql engine failed; [{}]'.\
format(e))
@staticmethod
@contextmanager
def closing(engine):
try:
session = scoped_session(sessionmaker(bind=engine, autoflush=\
False))()
yield session
except Exception as e:
raise ValueError('Failed to create sql session, error '
'message: {}'.format(e))
finally:
session.close()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,988 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/test/fedlearner_webconsole/utils/file_manager_test.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
import shutil
import stat
import tempfile
import unittest
from pathlib import Path
from unittest import mock
from unittest.mock import patch, MagicMock
from pyarrow import fs
from fedlearner_webconsole.utils.file_manager import (DefaultFileManager,
HdfsFileManager,
FileManager, File)
class DefaultFileManagerTest(unittest.TestCase):
_F1_SIZE = 3
_F2_SIZE = 4
_S1_SIZE = 55
_F1_MTIME = 1613982390
_F2_MTIME = 1613982391
_S1_MTIME = 1613982392
def _get_file_stat(self, orig_os_stat, path):
faked = list(orig_os_stat(path))
if path == self._get_temp_path('f1.txt') or \
path == self._get_temp_path('subdir/f1.txt'):
faked[stat.ST_SIZE] = self._F1_SIZE
faked[stat.ST_MTIME] = self._F1_MTIME
return os.stat_result(faked)
elif path == self._get_temp_path('f2.txt') or \
path == self._get_temp_path('f3.txt'):
faked[stat.ST_SIZE] = self._F2_SIZE
faked[stat.ST_MTIME] = self._F2_MTIME
return os.stat_result(faked)
elif path == self._get_temp_path('subdir/s1.txt'):
faked[stat.ST_SIZE] = self._S1_SIZE
faked[stat.ST_MTIME] = self._S1_MTIME
return os.stat_result(faked)
else:
return orig_os_stat(path)
def setUp(self):
# Create a temporary directory
self._test_dir = tempfile.mkdtemp()
subdir = Path(self._test_dir).joinpath('subdir')
subdir.mkdir(exist_ok=True)
Path(self._test_dir).joinpath('f1.txt').write_text('xxx')
Path(self._test_dir).joinpath('f2.txt').write_text('xxx')
subdir.joinpath('s1.txt').write_text('xxx')
# Mocks os.stat
self._orig_os_stat = os.stat
def fake_stat(path, *args, **kwargs):
return self._get_file_stat(self._orig_os_stat, path)
os.stat = fake_stat
self._fm = DefaultFileManager()
def tearDown(self):
os.stat = self._orig_os_stat
# Remove the directory after the test
shutil.rmtree(self._test_dir)
def _get_temp_path(self, file_path: str = None) -> str:
return str(Path(self._test_dir, file_path or '').absolute())
def test_can_handle(self):
self.assertTrue(self._fm.can_handle('/data/abc'))
self.assertFalse(self._fm.can_handle('data'))
def test_ls(self):
# List file
self.assertEqual(self._fm.ls(self._get_temp_path('f1.txt')), [
File(path=self._get_temp_path('f1.txt'),
size=self._F1_SIZE,
mtime=self._F1_MTIME)
])
# List folder
self.assertEqual(
sorted(self._fm.ls(self._get_temp_path()),
key=lambda file: file.path),
sorted([
File(path=self._get_temp_path('f1.txt'),
size=self._F1_SIZE,
mtime=self._F1_MTIME),
File(path=self._get_temp_path('f2.txt'),
size=self._F2_SIZE,
mtime=self._F2_MTIME)
],
key=lambda file: file.path))
# List folder recursively
self.assertEqual(
sorted(self._fm.ls(self._get_temp_path(), recursive=True),
key=lambda file: file.path),
sorted([
File(path=self._get_temp_path('f1.txt'),
size=self._F1_SIZE,
mtime=self._F1_MTIME),
File(path=self._get_temp_path('f2.txt'),
size=self._F2_SIZE,
mtime=self._F2_MTIME),
File(path=self._get_temp_path('subdir/s1.txt'),
size=self._S1_SIZE,
mtime=self._S1_MTIME),
],
key=lambda file: file.path))
def test_move(self):
# Moves to another folder
self._fm.move(self._get_temp_path('f1.txt'),
self._get_temp_path('subdir/'))
self.assertEqual(
sorted(self._fm.ls(self._get_temp_path('subdir')),
key=lambda file: file.path),
sorted([
File(path=self._get_temp_path('subdir/s1.txt'),
size=self._S1_SIZE,
mtime=self._S1_MTIME),
File(path=self._get_temp_path('subdir/f1.txt'),
size=self._F1_SIZE,
mtime=self._F1_MTIME),
],
key=lambda file: file.path))
# Renames
self._fm.move(self._get_temp_path('f2.txt'),
self._get_temp_path('f3.txt'))
self.assertEqual(self._fm.ls(self._get_temp_path('f2.txt')), [])
self.assertEqual(self._fm.ls(self._get_temp_path('f3.txt')), [
File(path=self._get_temp_path('f3.txt'),
size=self._F2_SIZE,
mtime=self._F2_MTIME)
])
def test_remove(self):
self._fm.remove(self._get_temp_path('f1.txt'))
self._fm.remove(self._get_temp_path('subdir'))
self.assertEqual(self._fm.ls(self._get_temp_path(), recursive=True), [
File(path=self._get_temp_path('f2.txt'),
size=self._F2_SIZE,
mtime=self._F2_MTIME)
])
def test_copy(self):
self._fm.copy(self._get_temp_path('f1.txt'),
self._get_temp_path('subdir'))
self.assertEqual(self._fm.ls(self._get_temp_path('f1.txt')), [
File(path=self._get_temp_path('f1.txt'),
size=self._F1_SIZE,
mtime=self._F1_MTIME)
])
self.assertEqual(self._fm.ls(self._get_temp_path('subdir/f1.txt')), [
File(path=self._get_temp_path('subdir/f1.txt'),
size=self._F1_SIZE,
mtime=self._F1_MTIME)
])
def test_mkdir(self):
self._fm.mkdir(os.path.join(self._get_temp_path(), 'subdir2'))
self.assertTrue(os.path.isdir(self._get_temp_path('subdir2')))
class HdfsFileManagerTest(unittest.TestCase):
def setUp(self):
self._envs_patcher = patch(
'fedlearner_webconsole.envs.Envs.HDFS_SERVER',
'hdfs://haruna/'
)
self._envs_patcher.start()
self._mock_client = MagicMock()
self._mock_client_generator = MagicMock()
self._mock_client_generator.from_uri.return_value = (self._mock_client,
'/')
self._client_patcher = patch(
'fedlearner_webconsole.utils.file_manager.FileSystem',
self._mock_client_generator)
self._client_patcher.start()
self._fm = HdfsFileManager()
def tearDown(self):
self._envs_patcher.stop()
self._client_patcher.stop()
def test_can_handle(self):
self.assertFalse(self._fm.can_handle('/data/abc'))
self.assertTrue(self._fm.can_handle('hdfs://abc'))
def test_ls(self):
mock_ls = MagicMock()
self._mock_client.get_file_info = mock_ls
mock_ls.return_value = [
fs.FileInfo(type=fs.FileType.File,
path='/data/abc',
size=1024,
mtime_ns=1367317325346000000),
fs.FileInfo(type=fs.FileType.Directory,
path='/data',
size=1024,
mtime_ns=1367317325346000000),
]
self.assertEqual(
self._fm.ls('hdfs:///data', recursive=True),
[File(path='hdfs:///data/abc', size=1024, mtime=1367317325)])
mock_ls.assert_called_once()
@staticmethod
def _yield_files(files):
for file in files:
yield file
def test_move(self):
mock_rename = MagicMock()
self._mock_client.move = mock_rename
mock_rename.return_value = self._yield_files(['/data/123'])
self.assertTrue(self._fm.move('hdfs:///data/abc', 'hdfs:///data/123'))
mock_rename.assert_called_once_with('/data/abc', '/data/123')
mock_rename.return_value = self._yield_files([])
self.assertFalse(self._fm.move('hdfs:///data/abc', 'hdfs:///data/123'))
def test_remove_file(self):
mock_get_file_info = MagicMock()
self._mock_client.get_file_info = mock_get_file_info
mock_get_file_info.return_value = fs.FileInfo(type=fs.FileType.File,
path='/data/123',
size=1024)
self.assertTrue(self._fm.remove('hdfs:///data/123'))
self._mock_client.delete_file.assert_called_once_with('/data/123')
self._mock_client.delete_dir.assert_not_called()
def test_remove_dir(self):
mock_get_file_info = MagicMock()
self._mock_client.get_file_info = mock_get_file_info
mock_get_file_info.return_value = fs.FileInfo(type=fs.FileType.Directory,
path='/data/123',
size=1024)
self.assertTrue(self._fm.remove('hdfs:///data/123'))
self._mock_client.delete_file.assert_not_called()
self._mock_client.delete_dir.assert_called_once_with('/data/123')
@patch('tensorflow.io.gfile.copy')
def test_copy(self, mock_copy):
self.assertTrue(self._fm.copy('hdfs:///source', 'hdfs:///dest'))
mock_copy.assert_called_once_with('hdfs:///source', 'hdfs:///dest')
def test_mkdir(self):
mock_mkdir = MagicMock()
self._mock_client.create_dir = mock_mkdir
self.assertTrue(self._fm.mkdir('hdfs:///data'))
mock_mkdir.assert_called_once_with('/data')
class FileManagerTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.environ[
'CUSTOMIZED_FILE_MANAGER'] = 'testing.fake_file_manager:FakeFileManager'
@classmethod
def tearDownClass(cls):
del os.environ['CUSTOMIZED_FILE_MANAGER']
def setUp(self):
self._fm = FileManager()
def test_can_handle(self):
self.assertTrue(self._fm.can_handle('fake://123'))
# Falls back to default manager
self.assertTrue(self._fm.can_handle('/data/123'))
self.assertFalse(self._fm.can_handle('hdfs:///123'))
def test_ls(self):
self.assertEqual(self._fm.ls('fake://data'), [{
'path': 'fake://data/f1.txt',
'size': 0
}])
def test_move(self):
self.assertTrue(self._fm.move('fake://move/123', 'fake://move/234'))
self.assertFalse(
self._fm.move('fake://do_not_move/123', 'fake://move/234'))
# No file manager can handle this
self.assertRaises(RuntimeError,
lambda: self._fm.move('hdfs://123', 'fake://abc'))
def test_remove(self):
self.assertTrue(self._fm.remove('fake://remove/123'))
self.assertFalse(self._fm.remove('fake://do_not_remove/123'))
# No file manager can handle this
self.assertRaises(RuntimeError, lambda: self._fm.remove('hdfs://123'))
def test_copy(self):
self.assertTrue(self._fm.copy('fake://copy/123', 'fake://copy/234'))
self.assertFalse(
self._fm.copy('fake://do_not_copy/123', 'fake://copy/234'))
# No file manager can handle this
self.assertRaises(RuntimeError,
lambda: self._fm.copy('hdfs://123', 'fake://abc'))
def test_mkdir(self):
self.assertTrue(self._fm.mkdir('fake://mkdir/123'))
self.assertFalse(self._fm.mkdir('fake://do_not_mkdir/123'))
# No file manager can handle this
self.assertRaises(RuntimeError, lambda: self._fm.mkdir('hdfs:///123'))
if __name__ == '__main__':
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,989 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer/trainer_worker.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import argparse
import json
import logging
import tensorflow.compat.v1 as tf
from fedlearner.common import common as fcc
from fedlearner.common import metrics
from fedlearner.common.summary_hook import SummaryHook
from fedlearner.trainer.bridge import Bridge
from fedlearner.trainer.estimator import FLEstimator
from fedlearner.trainer.sparse_estimator import SparseFLEstimator
from fedlearner.trainer.trainer_master_client import LocalTrainerMasterClient
from fedlearner.trainer.trainer_master_client import TrainerMasterClient
class StepMetricsHook(tf.estimator.SessionRunHook):
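# Fetches the watched tensors on every run and emits them to the metrics
# store every `every_n_iter` steps; an 'event_time' entry, if present,
# is attached as a tag instead of a metric value.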
def __init__(self, tensor_dict=None, every_n_iter=5):
if tensor_dict is None:
tensor_dict = {}
self._tensor_dict = tensor_dict
self._every_n_iter = every_n_iter
self._iter = 0
def before_run(self, run_context):
return tf.estimator.SessionRunArgs(self._tensor_dict)
def after_run(self, run_context, run_value):
self._iter += 1
if self._iter % self._every_n_iter == 0:
result = run_value.results
tags = {}
if 'event_time' in result:
event_time = result.pop('event_time')
tags['event_time'] = fcc.convert_to_datetime(
event_time.decode(), True
).isoformat(timespec='microseconds')
for name, value in result.items():
metrics.emit_store(name=name, value=value, tags=tags)
class StepLossAucMetricsHook(StepMetricsHook):
def __init__(self, loss_tensor, auc_tensor, every_n_iter=5,
event_time_tensor=None):
tensor_dict = {"loss": loss_tensor,
"auc": auc_tensor}
if event_time_tensor is not None:
tensor_dict["event_time"] = event_time_tensor
super(StepLossAucMetricsHook, self).__init__(tensor_dict, every_n_iter)
def create_argument_parser():
parser = argparse.ArgumentParser(description='FedLearner Trainer.')
parser.add_argument('--local-addr', type=str,
help='Listen address of the local bridge, ' \
'in [IP]:[PORT] format')
parser.add_argument('--peer-addr', type=str,
help='Address of peer\'s bridge, ' \
'in [IP]:[PORT] format')
parser.add_argument('--cluster-spec', type=str,
help='ClusterSpec description for master/ps/worker, '\
'in json format')
parser.add_argument('--worker-rank',
type=int,
default=0,
help='the rank of this worker.')
parser.add_argument('--ps-addrs', type=str, default=None,
help='Comma-separated list of parameter server ' \
'addresses in [IP]:[PORT] format. ' \
'value for this argument must be identical ' \
'for all workers.')
parser.add_argument('--data-source', type=str, default=None,
help='path to data source for distributed file system ' \
'training. Ignored when --master-addr is set.')
parser.add_argument('--data-path', type=str, default=None,
help='path to data block files for non-distributed ' \
'training. Ignored when --master-addr is set.')
parser.add_argument('--application-id', type=str, default=None,
help='application id for distributed ' \
'training.')
parser.add_argument('--start-time', type=str, default=None,
help='start time of the data source for ' \
'training. Ignored when --master-addr is set.')
parser.add_argument('--end-time', type=str, default=None,
help='end time of the data source for ' \
'training. Ignored when --master-addr is set.')
parser.add_argument('--master-addr', type=str, default=None,
help='Address of trainer master, ' \
'in [IP]:[PORT] format. ' \
'Use local master for testing if set to None.')
parser.add_argument('--tf-addr', type=str, default=None,
help='Address of tensorflow server, ' \
'in localhost:[PORT] format')
parser.add_argument('--export-path',
type=str,
default=None,
help='Path to save exported models.')
parser.add_argument('--checkpoint-path',
type=str,
default=None,
help='Path to save and load model checkpoints.')
parser.add_argument('--save-checkpoint-steps',
type=int,
default=None,
help='Number of steps between checkpoints.')
parser.add_argument('--sparse-estimator',
type=bool,
default=False,
help='Whether using sparse estimator.')
parser.add_argument('--mode',
type=str,
default='train',
help='Train or eval.')
parser.add_argument('--epoch_num',
type=int,
default=1,
help='number of epoch for training')
parser.add_argument('--save-checkpoint-secs',
type=int,
default=None,
help='Number of secs between checkpoints.')
parser.add_argument('--summary-path',
type=str,
default=None,
help='Path to save summary files used by tensorboard.')
parser.add_argument('--summary-save-steps',
type=int,
default=None,
help='Number of steps to save summary files.')
parser.add_argument('--verbosity',
type=int,
default=1,
help='Logging level.')
return parser
def train(role, args, input_fn, model_fn, serving_input_receiver_fn):
logging.basicConfig(
format="%(asctime)-15s [%(filename)s:%(lineno)d] " \
"%(levelname)s : %(message)s")
if args.verbosity == 0:
logging.getLogger().setLevel(logging.WARNING)
elif args.verbosity == 1:
logging.getLogger().setLevel(logging.INFO)
elif args.verbosity > 1:
logging.getLogger().setLevel(logging.DEBUG)
if args.application_id:
bridge = Bridge(role, int(args.local_addr.split(':')[1]),
args.peer_addr, args.application_id, args.worker_rank)
else:
bridge = Bridge(role, int(args.local_addr.split(':')[1]),
args.peer_addr)
if args.data_path:
trainer_master = LocalTrainerMasterClient(role,
args.data_path,
epoch_num=args.epoch_num)
if args.ps_addrs is not None:
ps_addrs = args.ps_addrs.split(",")
cluster_spec = tf.train.ClusterSpec({
'ps': ps_addrs,
'worker': {
args.worker_rank: args.tf_addr
}
})
else:
cluster_spec = None
elif args.cluster_spec:
cluster_spec = json.loads(args.cluster_spec)
assert 'clusterSpec' in cluster_spec, \
"cluster_spec do not meet legal format"
assert 'Master' in cluster_spec['clusterSpec'],\
"cluster_spec must include Master"
assert isinstance(cluster_spec['clusterSpec']['Master'], list), \
"Master must be list"
assert 'Worker' in cluster_spec['clusterSpec'],\
"cluster_spec must include Worker"
assert isinstance(cluster_spec['clusterSpec']['Worker'], list), \
"Worker must be list"
trainer_master = TrainerMasterClient(
cluster_spec['clusterSpec']['Master'][0], role, args.worker_rank)
cluster_spec = tf.train.ClusterSpec({
'ps':
cluster_spec['clusterSpec']['PS'],
'worker': {
args.worker_rank: args.tf_addr
}
})
elif args.master_addr:
assert args.tf_addr is not None, \
"--tf-addr must be set when master_addr is set."
trainer_master = TrainerMasterClient(args.master_addr, role,
args.worker_rank)
ps_addrs = args.ps_addrs.split(",")
cluster_spec = tf.train.ClusterSpec({
'ps': ps_addrs,
'worker': {
args.worker_rank: args.tf_addr
}
})
elif args.data_source:
if args.start_time is None or args.end_time is None:
raise ValueError(
"data source must be set with start-date and end-date")
trainer_master = LocalTrainerMasterClient(role,
args.data_source,
start_time=args.start_time,
end_time=args.end_time,
epoch_num=args.epoch_num)
cluster_spec = None
else:
raise ValueError("Either --master-addr or --data-path must be set")
if args.summary_path:
SummaryHook.summary_path = args.summary_path
SummaryHook.worker_rank = args.worker_rank
SummaryHook.role = role
if args.summary_save_steps:
SummaryHook.save_steps = args.summary_save_steps
if args.sparse_estimator:
estimator = SparseFLEstimator(model_fn,
bridge,
trainer_master,
role,
worker_rank=args.worker_rank,
application_id=args.application_id,
cluster_spec=cluster_spec)
else:
estimator = FLEstimator(model_fn,
bridge,
trainer_master,
role,
worker_rank=args.worker_rank,
application_id=args.application_id,
cluster_spec=cluster_spec)
run_mode = args.mode.lower()
if run_mode == 'train':
estimator.train(input_fn,
checkpoint_path=args.checkpoint_path,
save_checkpoint_steps=args.save_checkpoint_steps,
save_checkpoint_secs=args.save_checkpoint_secs)
if args.export_path and args.worker_rank == 0:
export_path = '%s/%d' % (args.export_path, bridge.terminated_at)
estimator.export_saved_model(export_path,
serving_input_receiver_fn,
checkpoint_path=args.checkpoint_path)
fsuccess = tf.io.gfile.GFile('%s/_SUCCESS' % export_path, 'w')
fsuccess.write('%d' % bridge.terminated_at)
fsuccess.close()
elif run_mode == 'eval':
estimator.evaluate(input_fn, checkpoint_path=args.checkpoint_path)
else:
raise ValueError('Allowed values are: --mode=train|eval')
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,990 | piiswrong/fedlearner | refs/heads/master | /fedlearner/trainer_master/follower_tm.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import argparse
import logging
import os
from concurrent import futures
import grpc
from fedlearner.data_join.data_block_visitor import DataBlockVisitor
from fedlearner.common import trainer_master_service_pb2_grpc as tm_grpc
from fedlearner.common import trainer_master_service_pb2 as tm_pb
from fedlearner.common import common_pb2 as common_pb
from .trainer_master_service import TrainerMasterServer
kvstore_type = os.environ.get('KVSTORE_TYPE', 'etcd')
class FollowerTrainerMaster(object):
def __init__(self, application_id, data_source, online_training):
self._application_id = application_id
self._online_training = online_training
kvstore_use_mock = os.environ.get('KVSTORE_USE_MOCK', "off") == "on"
self._data_block_visitor = DataBlockVisitor(
data_source, kvstore_type, kvstore_use_mock)
def run(self, listen_port):
self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
tm_grpc.add_TrainerMasterServiceServicer_to_server(
TrainerMasterServer(self._data_block_response,
self._get_checkpoint_fn,
self._restore_checkpoint_fn), self._server)
self._server.add_insecure_port('[::]:%d' % listen_port)
self._server.start()
logging.info('Trainer Master Server started on port [%d].', listen_port)
self._server.wait_for_termination()
def _get_checkpoint_fn(self, request):
response = tm_pb.GetDataBlockCheckpointResponse()
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = 'success'
logging.info("Follower _get_checkpoint_fn, do nothing")
return response
def _restore_checkpoint_fn(self, request):
response = tm_pb.RestoreDataBlockCheckpointResponse()
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = "success"
logging.info("Follower _restore_checkpoint_fn, do nothing")
return response
def _alloc_data_block(self, block_id=None):
logging.info("FollowerTrainerMaster is getting block %s", block_id)
if not block_id:
raise Exception('follower trainer master needs a block_id to alloc.')
return self._data_block_visitor.LoadDataBlockRepByBlockId(block_id)
def _data_block_response(self, request):
response = tm_pb.DataBlockResponse()
data_block = self._alloc_data_block(block_id=request.block_id)
if data_block:
logging.info("%s allocated worker_%d with block id %s",
self.__class__.__name__,
request.worker_rank,
data_block.block_id)
response.status.code = common_pb.STATUS_SUCCESS
response.status.error_message = 'success'
response.data_block_info.data_path = \
str(data_block.data_block_fpath)
response.data_block_info.meta_path = ''
response.data_block_info.block_id = str(data_block.block_id)
elif self._online_training:
logging.info("%s allocated worker_%d with empty data block. "\
"wait for new data block since online traning",
self.__class__.__name__, request.worker_rank)
response.status.code = common_pb.STATUS_NO_MORE_DATA
response.status.error_message = 'please wait for datablock ready'
else:
logging.info("%s allocated worker_%d with empty data block. "\
"exit running since since batch traning",
self.__class__.__name__, request.worker_rank)
response.status.code = common_pb.STATUS_DATA_FINISHED
response.status.error_message = 'datablock finished'
return response
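# A minimal illustrative sketch (not part of the original module) of how a
# consumer could interpret the three status codes produced by
# _data_block_response above. `fetch_data_block` is hypothetical, and a real
# worker talks to the master through the gRPC TrainerMasterClient rather
# than calling the master object directly.
def fetch_data_block(master, request, retry_secs=5):
    """Polls until a block is allocated or batch data is finished."""
    import time
    while True:
        response = master._data_block_response(request)  # pylint: disable=protected-access
        code = response.status.code
        if code == common_pb.STATUS_SUCCESS:
            return response.data_block_info
        if code == common_pb.STATUS_DATA_FINISHED:
            return None  # batch training: all blocks consumed
        # STATUS_NO_MORE_DATA: online training, new blocks may still arrive
        time.sleep(retry_secs)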
if __name__ == '__main__':
logging.getLogger().setLevel(logging.DEBUG)
parser = argparse.ArgumentParser('follower trainer master cmd.')
parser.add_argument('-p',
'--port',
type=int,
default=50002,
help='Listen port of follower trainer master')
parser.add_argument('-app_id',
'--application_id',
required=True,
help='application_id')
parser.add_argument('-data_source',
'--data_source',
required=True,
help='training example data source')
# FIXME: deprecated
parser.add_argument('-start_date',
'--start_date',
default=None,
help='training data start date')
# FIXME: deprecated
parser.add_argument('-end_date',
'--end_date',
default=None,
help='training data end date')
parser.add_argument('--online_training',
action='store_true',
help='the train master run for online training')
FLAGS = parser.parse_args()
start_date = int(FLAGS.start_date) if FLAGS.start_date else None
end_date = int(FLAGS.end_date) if FLAGS.end_date else None
follower_tm = FollowerTrainerMaster(
FLAGS.application_id, FLAGS.data_source,
FLAGS.online_training)
follower_tm.run(listen_port=FLAGS.port)
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,991 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/fedlearner_webconsole/utils/file_manager.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import importlib
import logging
import os
import shutil
from collections import namedtuple
from pathlib import Path
from typing import List
from pyarrow import fs
from pyarrow.fs import FileSystem
from tensorflow.io import gfile
import tensorflow_io # pylint: disable=unused-import
from fedlearner_webconsole.envs import Envs
# path: absolute path of the file
# size: file size in bytes
# mtime: time of last modification, unix timestamp in seconds.
File = namedtuple('File', ['path', 'size', 'mtime'])
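# Example (illustrative values): File(path='/data/part-0000', size=1024, mtime=1612345678)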
class FileManagerBase(object):
"""A base interface for file manager, please implement this interface
if you have specific logic to handle files, for example, HDFS with ACL."""
def can_handle(self, path: str) -> bool:
"""If the manager can handle such file."""
raise NotImplementedError()
def ls(self, path: str, recursive=False) -> List[str]:
"""Lists files under a path."""
raise NotImplementedError()
def move(self, source: str, destination: str) -> bool:
"""Moves a file from source to destination, if destination
is a folder then move into that folder."""
raise NotImplementedError()
def remove(self, path: str) -> bool:
"""Removes files under a path."""
raise NotImplementedError()
def copy(self, source: str, destination: str) -> bool:
"""Copies a file from source to destination, if destination
is a folder then move into that folder."""
raise NotImplementedError()
def mkdir(self, path: str) -> bool:
"""Creates a directory. If already exists, return False"""
raise NotImplementedError()
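# A minimal illustrative subclass of the interface above, assuming an
# in-memory 'fake://' scheme. This class is hypothetical and only shows how
# a customized manager would plug in; see FileManager below for how such a
# class gets registered via CUSTOMIZED_FILE_MANAGER.
class FakeFileManager(FileManagerBase):
    """Keeps a {path: content} dict instead of touching real storage."""

    def __init__(self):
        self._files = {}

    def can_handle(self, path: str) -> bool:
        return path.startswith('fake://')

    def ls(self, path: str, recursive=False) -> List[File]:
        return [File(path=p, size=len(c), mtime=0)
                for p, c in self._files.items()
                if p == path or p.startswith(path.rstrip('/') + '/')]

    def move(self, source: str, destination: str) -> bool:
        return self.copy(source, destination) and self.remove(source)

    def remove(self, path: str) -> bool:
        return self._files.pop(path, None) is not None

    def copy(self, source: str, destination: str) -> bool:
        if source in self._files:
            self._files[destination] = self._files[source]
            return True
        return False

    def mkdir(self, path: str) -> bool:
        return True  # directories are implicit in the fake scheme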
class DefaultFileManager(FileManagerBase):
"""Default file manager for native file system or NFS."""
def can_handle(self, path):
return path.startswith('/')
def ls(self, path: str, recursive=False) -> List[File]:
def _get_file_stats(path: str):
stat = os.stat(path)
return File(path=path, size=stat.st_size, mtime=int(stat.st_mtime))
if not Path(path).exists():
return []
# If it is a file
if Path(path).is_file():
return [_get_file_stats(path)]
files = []
if recursive:
for root, _, res in os.walk(path):
for file in res:
if Path(os.path.join(root, file)).is_file():
files.append(_get_file_stats(os.path.join(root, file)))
else:
for file in os.listdir(path):
if Path(os.path.join(path, file)).is_file():
files.append(_get_file_stats(os.path.join(path, file)))
# Files only
return files
def move(self, source: str, destination: str) -> bool:
try:
shutil.move(source, destination)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during move %s', e)
return False
def remove(self, path: str) -> bool:
try:
if os.path.isfile(path):
os.remove(path)
return True
if os.path.isdir(path):
shutil.rmtree(path)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during remove %s', str(e))
return False
def copy(self, source: str, destination: str) -> bool:
try:
shutil.copy(source, destination)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during copy %s', e)
return False
def mkdir(self, path: str) -> bool:
try:
os.makedirs(path, exist_ok=True)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during create %s', e)
return False
class HdfsFileManager(FileManagerBase):
"""A wrapper of snakebite client."""
def can_handle(self, path):
return path.startswith('hdfs://')
def __init__(self):
self._client, _ = FileSystem.from_uri(Envs.HDFS_SERVER)
def _unwrap_path(self, path):
if path.startswith('hdfs://'):
return path[7:]
return path
def _wrap_path(self, path):
if not path.startswith('hdfs://'):
return f'hdfs://{path}'
return path
def ls(self, path: str, recursive=False) -> List[File]:
path = self._unwrap_path(path)
files = []
try:
for file in self._client.get_file_info(
fs.FileSelector(path, recursive=recursive)):
if file.type == fs.FileType.File:
files.append(
File(
path=self._wrap_path(file.path),
size=file.size,
# ns to second
mtime=int(file.mtime_ns / 1e9)))
except RuntimeError as error:
    # Hack: some client iterators leak PEP 479's
    # 'generator raised StopIteration' RuntimeError; ignore it.
    if str(error) == 'generator raised StopIteration':
        pass
else:
raise
return files
def move(self, source: str, destination: str) -> bool:
    source = self._unwrap_path(source)
    destination = self._unwrap_path(destination)
    # pyarrow's FileSystem.move returns None; no exception means success.
    self._client.move(source, destination)
    return True
def remove(self, path: str) -> bool:
path = self._unwrap_path(path)
try:
if self._client.get_file_info(path).is_file:
self._client.delete_file(path)
else:
self._client.delete_dir(path)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during remove %s', str(e))
return False
def copy(self, source: str, destination: str) -> bool:
try:
gfile.copy(source, destination)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during copy %s', e)
return False
def mkdir(self, path: str) -> bool:
path = self._unwrap_path(path)
self._client.create_dir(path)
return True
class GFileFileManager(FileManagerBase):
"""Gfile file manager for all FS supported by TF."""
def can_handle(self, path):
# TODO: List tf support
if path.startswith('fake://'):
return False
if not Envs.SUPPORT_HDFS and path.startswith('hdfs://'):
return False
return True
def ls(self, path: str, recursive=False) -> List[File]:
def _get_file_stats(path: str):
stat = gfile.stat(path)
return File(path=path,
size=stat.length,
mtime=int(stat.mtime_nsec/1e9))
if not gfile.exists(path):
return []
# If it is a file
if not gfile.isdir(path):
return [_get_file_stats(path)]
files = []
if recursive:
for root, _, res in gfile.walk(path):
for file in res:
if not gfile.isdir(os.path.join(root, file)):
files.append(
_get_file_stats(os.path.join(root, file)))
else:
for file in gfile.listdir(path):
if not gfile.isdir(os.path.join(path, file)):
files.append(
_get_file_stats(os.path.join(path, file)))
# Files only
return files
def move(self, source: str, destination: str) -> bool:
try:
self.copy(source, destination)
self.remove(source)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during move %s', e)
return False
def remove(self, path: str) -> bool:
    try:
        if not gfile.isdir(path):
            # Use gfile so remote schemes (e.g. hdfs://) work as well.
            gfile.remove(path)
        else:
            gfile.rmtree(path)
        return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during remove %s', str(e))
return False
def copy(self, source: str, destination: str) -> bool:
try:
gfile.copy(source, destination)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during copy %s', e)
return False
def mkdir(self, path: str) -> bool:
try:
gfile.makedirs(path)
return True
except Exception as e: # pylint: disable=broad-except
logging.error('Error during create %s', e)
return False
class FileManager(FileManagerBase):
"""A centralized manager to handle files.
Please extend `FileManagerBase` and put the class path into
`CUSTOMIZED_FILE_MANAGER`. For example,
'fedlearner_webconsole.utils.file_manager:HdfsFileManager'"""
def __init__(self):
self._file_managers = []
cfm_path = os.environ.get('CUSTOMIZED_FILE_MANAGER')
if cfm_path:
module_path, class_name = cfm_path.split(':')
module = importlib.import_module(module_path)
# Dynamically construct a file manager
customized_file_manager = getattr(module, class_name)
self._file_managers.append(customized_file_manager())
if Envs.HDFS_SERVER:
self._file_managers.append(HdfsFileManager())
self._file_managers.append(DefaultFileManager())
self._file_managers.append(GFileFileManager())
def can_handle(self, path):
for fm in self._file_managers:
if fm.can_handle(path):
return True
return False
def ls(self, path: str, recursive=False) -> List[File]:
for fm in self._file_managers:
if fm.can_handle(path):
return fm.ls(path, recursive=recursive)
raise RuntimeError('ls is not supported')
def move(self, source: str, destination: str) -> bool:
logging.info('Moving files from [%s] to [%s]', source, destination)
for fm in self._file_managers:
if fm.can_handle(source) and fm.can_handle(destination):
return fm.move(source, destination)
raise RuntimeError('move is not supported')
def remove(self, path: str) -> bool:
logging.info('Removing file [%s]', path)
for fm in self._file_managers:
if fm.can_handle(path):
return fm.remove(path)
raise RuntimeError('remove is not supported')
def copy(self, source: str, destination: str) -> bool:
logging.info('Copying file from [%s] to [%s]', source, destination)
for fm in self._file_managers:
if fm.can_handle(source) and fm.can_handle(destination):
return fm.copy(source, destination)
raise RuntimeError('copy is not supported')
def mkdir(self, path: str) -> bool:
logging.info('Create directory [%s]', path)
for fm in self._file_managers:
if fm.can_handle(path):
return fm.mkdir(path)
raise RuntimeError('mkdir is not supported')
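# A minimal usage sketch of the manager above. The customized class path is
# hypothetical ('my_project.file_managers:FakeFileManager'); the
# CUSTOMIZED_FILE_MANAGER hook and the fallback chain come from
# FileManager.__init__.
def _file_manager_demo():
    # Optional: register a custom manager before constructing FileManager.
    # os.environ['CUSTOMIZED_FILE_MANAGER'] = \
    #     'my_project.file_managers:FakeFileManager'
    fm = FileManager()
    fm.mkdir('/tmp/fedlearner_demo')  # '/...' paths hit DefaultFileManager
    for f in fm.ls('/tmp/fedlearner_demo', recursive=True):
        logging.info('%s %d bytes mtime=%d', f.path, f.size, f.mtime)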
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,992 | piiswrong/fedlearner | refs/heads/master | /test/trainer/test_nn_online_training.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import unittest
import fedlearner
import test_nn_trainer
import numpy as np
import threading
import random
import os
import time
import logging
from multiprocessing import Process
import tensorflow.compat.v1 as tf
from tensorflow.compat.v1 import gfile
from queue import PriorityQueue
import enum
from tensorflow.core.example.feature_pb2 import FloatList, Features, Feature, \
Int64List, BytesList
from tensorflow.core.example.example_pb2 import Example
from fedlearner.data_join import (
data_block_manager, common,
data_block_visitor, raw_data_manifest_manager
)
from fedlearner.common import (
db_client, common_pb2 as common_pb,
data_join_service_pb2 as dj_pb,
trainer_master_service_pb2 as tm_pb
)
from fedlearner.data_join.data_block_manager import DataBlockBuilder
from fedlearner.data_join.raw_data_iter_impl.tf_record_iter import TfExampleItem
from fedlearner.trainer_master.leader_tm import LeaderTrainerMaster
from fedlearner.trainer_master.follower_tm import FollowerTrainerMaster
class TestDataSource(object):
def __init__(self, base_path, name, role, partition_num=1,
start_time=0, end_time=100000):
if role == 'leader':
role = 0
elif role == 'follower':
role = 1
else:
raise ValueError("Unknown role %s"%role)
data_source = common_pb.DataSource()
data_source.data_source_meta.name = name
data_source.data_source_meta.partition_num = partition_num
data_source.data_source_meta.start_time = start_time
data_source.data_source_meta.end_time = end_time
data_source.output_base_dir = "{}/{}_{}/data_source/".format(
base_path, data_source.data_source_meta.name, role)
data_source.role = role
if gfile.Exists(data_source.output_base_dir):
gfile.DeleteRecursively(data_source.output_base_dir)
self._data_source = data_source
self._kv_store = db_client.DBClient("etcd", True)
common.commit_data_source(self._kv_store, self._data_source)
self._dbms = []
for i in range(partition_num):
manifest_manager = raw_data_manifest_manager.RawDataManifestManager(
self._kv_store, self._data_source)
manifest_manager._finish_partition('join_example_rep',
dj_pb.JoinExampleState.UnJoined, dj_pb.JoinExampleState.Joined,
-1, i)
self._dbms.append(
data_block_manager.DataBlockManager(self._data_source, i))
def add_data_block(self, partition_id, x, y):
dbm = self._dbms[partition_id]
builder = DataBlockBuilder(
common.data_source_data_block_dir(self._data_source),
self._data_source.data_source_meta.name, partition_id,
dbm.get_dumped_data_block_count(),
dj_pb.WriterOptions(output_writer="TF_RECORD"), None)
builder.set_data_block_manager(dbm)
for i in range(x.shape[0]):
feat = {}
exam_id = '{}'.format(i).encode()
feat['example_id'] = Feature(
bytes_list=BytesList(value=[exam_id]))
feat['event_time'] = Feature(
int64_list = Int64List(value=[i])
)
feat['x'] = Feature(float_list=FloatList(value=list(x[i])))
if y is not None:
feat['y'] = Feature(int64_list=Int64List(value=[y[i]]))
example = Example(features=Features(feature=feat))
builder.append_item(TfExampleItem(example.SerializeToString()), i, 0)
return builder.finish_data_block()
class TestOnlineTraining(unittest.TestCase):
def test_online_training(self):
leader_ds = TestDataSource('./output', 'test_ds', 'leader')
leader_ds.add_data_block(0, np.zeros((100, 10)), np.zeros((100,), dtype=np.int32))
leader_tm = fedlearner.trainer_master.leader_tm.LeaderTrainerMaster(
'leader_test', 'test_ds', None, None, True, False, 1)
leader_thread = threading.Thread(target=leader_tm.run, args=(50051,))
leader_thread.daemon = True
leader_thread.start()
follower_ds = TestDataSource('./output', 'test_ds', 'follower')
follower_ds.add_data_block(0, np.zeros((100, 10)), np.zeros((100,), dtype=np.int32))
follower_tm = fedlearner.trainer_master.follower_tm.FollowerTrainerMaster(
'follower_test', 'test_ds', True)
follower_thread = threading.Thread(target=follower_tm.run, args=(50052,))
follower_thread.daemon = True
follower_thread.start()
leader_tmc = fedlearner.trainer.trainer_master_client.TrainerMasterClient(
'localhost:50051', 'leader', 0)
leader_tmc.restore_data_block_checkpoint('leader_test', [])
block1 = leader_tmc.request_data_block().block_id
self.assertEqual(block1, 'test_ds.partition_0000.00000000.0-99')
leader_ds.add_data_block(0, np.zeros((100, 10)), np.zeros((100,), dtype=np.int32))
block2 = leader_tmc.request_data_block().block_id
self.assertEqual(block2, 'test_ds.partition_0000.00000001.0-99')
follower_tmc = fedlearner.trainer.trainer_master_client.TrainerMasterClient(
'localhost:50052', 'follower', 0)
follower_tmc.restore_data_block_checkpoint('follower_test', [])
self.assertEqual(block1, follower_tmc.request_data_block(block1).block_id)
follower_ds.add_data_block(0, np.zeros((100, 10)), np.zeros((100,), dtype=np.int32))
self.assertEqual(block2, follower_tmc.request_data_block(block2).block_id)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,993 | piiswrong/fedlearner | refs/heads/master | /fedlearner/data_join/data_portal_job_manager.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import threading
import logging
from os import path
from fnmatch import fnmatch
from google.protobuf import text_format
import tensorflow_io # pylint: disable=unused-import
from tensorflow.compat.v1 import gfile
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.data_join import common
from fedlearner.data_join.raw_data_publisher import RawDataPublisher
from fedlearner.data_join.sort_run_merger import MergedSortRunMeta
class DataPortalJobManager(object):
def __init__(self, kvstore, portal_name, long_running, check_success_tag):
self._lock = threading.Lock()
self._kvstore = kvstore
self._portal_name = portal_name
self._check_success_tag = check_success_tag
self._portal_manifest = None
self._processing_job = None
self._sync_portal_manifest()
self._sync_processing_job()
self._publisher = \
RawDataPublisher(kvstore,
self._portal_manifest.raw_data_publish_dir)
self._long_running = long_running
assert self._portal_manifest is not None
self._processed_fpath = set()
for job_id in range(0, self._portal_manifest.next_job_id):
job = self._sync_portal_job(job_id)
assert job is not None and job.job_id == job_id
for fpath in job.fpaths:
self._processed_fpath.add(fpath)
self._job_part_map = {}
if self._portal_manifest.processing_job_id >= 0:
self._check_processing_job_finished()
if self._portal_manifest.processing_job_id < 0:
self._launch_new_portal_job()
def get_portal_manifest(self):
with self._lock:
return self._sync_portal_manifest()
def alloc_task(self, rank_id):
with self._lock:
self._sync_processing_job()
if self._processing_job is not None:
partition_id = self._try_to_alloc_part(rank_id,
dp_pb.PartState.kInit,
dp_pb.PartState.kIdMap)
if partition_id is not None:
return False, self._create_map_task(rank_id, partition_id)
if self._all_job_part_mapped() and \
(self._portal_manifest.data_portal_type ==
dp_pb.DataPortalType.Streaming):
partition_id = self._try_to_alloc_part(
rank_id,
dp_pb.PartState.kIdMapped,
dp_pb.PartState.kEventTimeReduce
)
if partition_id is not None:
return False, self._create_reduce_task(rank_id,
partition_id)
return (not self._long_running and
self._all_job_part_finished()), None
return not self._long_running, None
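# Note on the PartState progression driven by alloc_task above and
# finish_task below (derived from this class, not from external docs):
#   kInit -> kIdMap -> kIdMapped                       (map phase, all jobs)
#   kIdMapped -> kEventTimeReduce -> kEventTimeReduced (Streaming jobs only)
# PSI jobs are already finished at kIdMapped; see _is_job_part_finished.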
def finish_task(self, rank_id, partition_id, part_state):
with self._lock:
processing_job = self._sync_processing_job()
if processing_job is None:
return
job_id = self._processing_job.job_id
job_part = self._sync_job_part(job_id, partition_id)
if job_part.rank_id == rank_id and \
job_part.part_state == part_state:
if job_part.part_state == dp_pb.PartState.kIdMap:
self._finish_job_part(job_id, partition_id,
dp_pb.PartState.kIdMap,
dp_pb.PartState.kIdMapped)
logging.info("Data portal worker-%d finish map task "\
"for partition %d of job %d",
rank_id, partition_id, job_id)
elif job_part.part_state == dp_pb.PartState.kEventTimeReduce:
self._finish_job_part(job_id, partition_id,
dp_pb.PartState.kEventTimeReduce,
dp_pb.PartState.kEventTimeReduced)
logging.info("Data portal worker-%d finish reduce task "\
"for partition %d of job %d",
rank_id, partition_id, job_id)
self._check_processing_job_finished()
def backgroup_task(self):
with self._lock:
if self._sync_processing_job() is not None:
self._check_processing_job_finished()
if self._sync_processing_job() is None and self._long_running:
self._launch_new_portal_job()
def _all_job_part_mapped(self):
processing_job = self._sync_processing_job()
assert processing_job is not None
job_id = processing_job.job_id
for partition_id in range(self._output_partition_num):
job_part = self._sync_job_part(job_id, partition_id)
if job_part.part_state <= dp_pb.PartState.kIdMap:
return False
return True
def _all_job_part_finished(self):
processing_job = self._sync_processing_job()
assert processing_job is not None
job_id = self._processing_job.job_id
for partition_id in range(self._output_partition_num):
job_part = self._sync_job_part(job_id, partition_id)
if not self._is_job_part_finished(job_part):
return False
return True
def _finish_job_part(self, job_id, partition_id, src_state, target_state):
job_part = self._sync_job_part(job_id, partition_id)
assert job_part is not None and job_part.part_state == src_state
new_job_part = dp_pb.PortalJobPart()
new_job_part.MergeFrom(job_part)
new_job_part.part_state = target_state
new_job_part.rank_id = -1
self._update_job_part(new_job_part)
def _create_map_task(self, rank_id, partition_id):
assert self._processing_job is not None
job = self._processing_job
map_fpaths = []
for fpath in job.fpaths:
if hash(fpath) % self._output_partition_num == partition_id:
map_fpaths.append(fpath)
task_name = '{}-dp_portal_job_{:08}-part-{:04}-map'.format(
self._portal_manifest.name, job.job_id, partition_id
)
logging.info("Data portal worker-%d is allocated map task %s for "\
"partition %d of job %d. the map task has %d files"\
"-----------------\n", rank_id, task_name,
partition_id, job.job_id, len(map_fpaths))
for seq, fpath in enumerate(map_fpaths):
logging.info("%d. %s", seq, fpath)
logging.info("---------------------------------\n")
manifest = self._sync_portal_manifest()
return dp_pb.MapTask(task_name=task_name,
fpaths=map_fpaths,
output_base_dir=self._map_output_dir(job.job_id),
output_partition_num=self._output_partition_num,
partition_id=partition_id,
part_field=self._get_part_field(),
data_portal_type=manifest.data_portal_type)
def _get_part_field(self):
portal_manifest = self._sync_portal_manifest()
if portal_manifest.data_portal_type == dp_pb.DataPortalType.PSI:
    return 'raw_id'
assert portal_manifest.data_portal_type == \
    dp_pb.DataPortalType.Streaming
return 'example_id'
def _create_reduce_task(self, rank_id, partition_id):
assert self._processing_job is not None
job = self._processing_job
job_id = job.job_id
task_name = '{}-dp_portal_job_{:08}-part-{:04}-reduce'.format(
self._portal_manifest.name, job_id, partition_id
)
logging.info("Data portal worker-%d is allocated reduce task %s for "\
"partition %d of job %d. the reduce base dir %s"\
"-----------------\n", rank_id, task_name,
partition_id, job_id, self._reduce_output_dir(job_id))
return dp_pb.ReduceTask(task_name=task_name,
map_base_dir=self._map_output_dir(job_id),
reduce_base_dir=self._reduce_output_dir(job_id),
partition_id=partition_id)
def _try_to_alloc_part(self, rank_id, src_state, target_state):
alloc_partition_id = None
processing_job = self._sync_processing_job()
assert processing_job is not None
job_id = self._processing_job.job_id
for partition_id in range(self._output_partition_num):
part_job = self._sync_job_part(job_id, partition_id)
if part_job.part_state == src_state and \
alloc_partition_id is None:
alloc_partition_id = partition_id
if part_job.part_state == target_state and \
part_job.rank_id == rank_id:
alloc_partition_id = partition_id
break
if alloc_partition_id is None:
return None
part_job = self._job_part_map[alloc_partition_id]
if part_job.part_state == src_state:
new_job_part = dp_pb.PortalJobPart(job_id=job_id,
rank_id=rank_id,
partition_id=alloc_partition_id,
part_state=target_state)
self._update_job_part(new_job_part)
return alloc_partition_id
def _sync_portal_job(self, job_id):
kvstore_key = common.portal_job_kvstore_key(self._portal_name, job_id)
data = self._kvstore.get_data(kvstore_key)
if data is not None:
return text_format.Parse(data, dp_pb.DataPortalJob())
return None
def _sync_processing_job(self):
assert self._sync_portal_manifest() is not None
if self._portal_manifest.processing_job_id < 0:
self._processing_job = None
elif self._processing_job is None or \
(self._processing_job.job_id !=
self._portal_manifest.processing_job_id):
job_id = self._portal_manifest.processing_job_id
self._processing_job = self._sync_portal_job(job_id)
assert self._processing_job is not None
return self._processing_job
def _update_processing_job(self, job):
self._processing_job = None
kvstore_key = common.portal_job_kvstore_key(self._portal_name,
job.job_id)
self._kvstore.set_data(kvstore_key, text_format.MessageToString(job))
self._processing_job = job
def _sync_portal_manifest(self):
if self._portal_manifest is None:
kvstore_key = common.portal_kvstore_base_dir(self._portal_name)
data = self._kvstore.get_data(kvstore_key)
if data is not None:
self._portal_manifest = \
text_format.Parse(data, dp_pb.DataPortalManifest())
return self._portal_manifest
def _update_portal_manifest(self, new_portal_manifest):
self._portal_manifest = None
kvstore_key = common.portal_kvstore_base_dir(self._portal_name)
data = text_format.MessageToString(new_portal_manifest)
self._kvstore.set_data(kvstore_key, data)
self._portal_manifest = new_portal_manifest
def _launch_new_portal_job(self):
assert self._sync_processing_job() is None
rest_fpaths = self._list_input_dir()
if len(rest_fpaths) == 0:
logging.info("no file left for portal")
return
rest_fpaths.sort()
portal_manifest = self._sync_portal_manifest()
new_job = dp_pb.DataPortalJob(job_id=portal_manifest.next_job_id,
                              finished=False,
                              fpaths=rest_fpaths)
self._update_processing_job(new_job)
new_portal_manifest = dp_pb.DataPortalManifest()
new_portal_manifest.MergeFrom(portal_manifest)
new_portal_manifest.next_job_id += 1
new_portal_manifest.processing_job_id = new_job.job_id
self._update_portal_manifest(new_portal_manifest)
for partition_id in range(self._output_partition_num):
self._sync_job_part(new_job.job_id, partition_id)
logging.info("Data Portal job %d has lanuched. %d files will be"\
"processed\n------------\n",
new_job.job_id, len(new_job.fpaths))
for seq, fpath in enumerate(new_job.fpaths):
logging.info("%d. %s", seq, fpath)
logging.info("---------------------------------\n")
def _list_input_dir(self):
all_inputs = []
wildcard = self._portal_manifest.input_file_wildcard
dirs = [self._portal_manifest.input_base_dir]
num_dirs = 0
num_files = 0
num_target_files = 0
while len(dirs) > 0:
fdir = dirs[0]
dirs = dirs[1:]
# filter out directories starting with '_' (e.g. _tmp)
# TODO: format the inputs' directory name
if fdir.startswith('_'):
continue
fnames = gfile.ListDirectory(fdir)
for fname in fnames:
fpath = path.join(fdir, fname)
# OSS does not retain folder structure.
# For example, if we have file oss://test/1001/a.txt
# list(oss://test) returns 1001/a.txt instead of 1001
basename = path.basename(fpath)
# filter out entries starting with '_' (e.g. _tmp, _SUCCESS)
# TODO: format the inputs' directory name
if basename.startswith('_'):
continue
if gfile.IsDirectory(fpath):
dirs.append(fpath)
num_dirs += 1
continue
num_files += 1
if len(wildcard) == 0 or fnmatch(basename, wildcard):
num_target_files += 1
if self._check_success_tag:
has_succ = gfile.Exists(
path.join(path.dirname(fpath), '_SUCCESS'))
if not has_succ:
logging.warning(
'File %s skipped because _SUCCESS file is '
'missing under %s',
fpath, fdir)
continue
all_inputs.append(fpath)
rest_fpaths = []
for fpath in all_inputs:
if fpath not in self._processed_fpath:
rest_fpaths.append(fpath)
logging.info(
'Listing %s: found %d dirs, %d files, %d files matching wildcard, '
'%d files with success tag, %d new files to process',
self._portal_manifest.input_base_dir, num_dirs, num_files,
num_target_files, len(all_inputs), len(rest_fpaths))
return rest_fpaths
def _sync_job_part(self, job_id, partition_id):
if partition_id not in self._job_part_map or \
self._job_part_map[partition_id] is None or \
self._job_part_map[partition_id].job_id != job_id:
kvstore_key = common.portal_job_part_kvstore_key(self._portal_name,
job_id, partition_id)
data = self._kvstore.get_data(kvstore_key)
if data is None:
self._job_part_map[partition_id] = dp_pb.PortalJobPart(
job_id=job_id, rank_id=-1,
partition_id=partition_id
)
else:
self._job_part_map[partition_id] = \
text_format.Parse(data, dp_pb.PortalJobPart())
return self._job_part_map[partition_id]
def _update_job_part(self, job_part):
partition_id = job_part.partition_id
if partition_id not in self._job_part_map or \
self._job_part_map[partition_id] != job_part:
self._job_part_map[partition_id] = None
kvstore_key = common.portal_job_part_kvstore_key(self._portal_name,
job_part.job_id,
partition_id)
data = text_format.MessageToString(job_part)
self._kvstore.set_data(kvstore_key, data)
self._job_part_map[partition_id] = job_part
def _check_processing_job_finished(self):
if not self._all_job_part_finished():
return False
processing_job = self._sync_processing_job()
if not processing_job.finished:
finished_job = dp_pb.DataPortalJob()
finished_job.MergeFrom(self._processing_job)
finished_job.finished = True
self._update_processing_job(finished_job)
for fpath in processing_job.fpaths:
self._processed_fpath.add(fpath)
self._processing_job = None
self._job_part_map = {}
portal_manifest = self._sync_portal_manifest()
if portal_manifest.processing_job_id >= 0:
    self._publish_raw_data(portal_manifest.processing_job_id)
new_portal_manifest = dp_pb.DataPortalManifest()
new_portal_manifest.MergeFrom(self._sync_portal_manifest())
new_portal_manifest.processing_job_id = -1
self._update_portal_manifest(new_portal_manifest)
if processing_job is not None:
logging.info("Data Portal job %d has finished. Processed %d "\
"following fpaths\n------------\n",
processing_job.job_id, len(processing_job.fpaths))
for seq, fpath in enumerate(processing_job.fpaths):
logging.info("%d. %s", seq, fpath)
logging.info("---------------------------------\n")
return True
@property
def _output_partition_num(self):
return self._portal_manifest.output_partition_num
def _is_job_part_finished(self, job_part):
assert self._portal_manifest is not None
if self._portal_manifest.data_portal_type == dp_pb.DataPortalType.PSI:
return job_part.part_state == dp_pb.PartState.kIdMapped
return job_part.part_state == dp_pb.PartState.kEventTimeReduced
def _map_output_dir(self, job_id):
return common.portal_map_output_dir(
self._portal_manifest.output_base_dir, job_id
)
def _reduce_output_dir(self, job_id):
return common.portal_reduce_output_dir(
self._portal_manifest.output_base_dir, job_id
)
def _publish_raw_data(self, job_id):
portal_manifest = self._sync_portal_manifest()
output_dir = None
if portal_manifest.data_portal_type == dp_pb.DataPortalType.PSI:
output_dir = common.portal_map_output_dir(
portal_manifest.output_base_dir, job_id
)
else:
output_dir = common.portal_reduce_output_dir(
portal_manifest.output_base_dir, job_id
)
for partition_id in range(self._output_partition_num):
dpath = path.join(output_dir, common.partition_repr(partition_id))
fnames = []
if gfile.Exists(dpath) and gfile.IsDirectory(dpath):
fnames = [f for f in gfile.ListDirectory(dpath)
if f.endswith(common.RawDataFileSuffix)]
publish_fpaths = []
if portal_manifest.data_portal_type == dp_pb.DataPortalType.PSI:
publish_fpaths = self._publish_psi_raw_data(partition_id,
dpath, fnames)
else:
publish_fpaths = self._publish_streaming_raw_data(partition_id,
dpath, fnames)
logging.info("Data Portal Master publish %d file for partition "\
"%d of streaming job %d\n----------\n",
len(publish_fpaths), partition_id, job_id)
for seq, fpath in enumerate(publish_fpaths):
logging.info("%d. %s", seq, fpath)
logging.info("------------------------------------------\n")
def _publish_streaming_raw_data(self, partition_id, dpath, fnames):
metas = [MergedSortRunMeta.decode_sort_run_meta_from_fname(fname)
for fname in fnames]
metas.sort()
fpaths = [path.join(dpath, meta.encode_merged_sort_run_fname())
for meta in metas]
self._publisher.publish_raw_data(partition_id, fpaths)
return fpaths
def _publish_psi_raw_data(self, partition_id, dpath, fnames):
fpaths = [path.join(dpath, fname) for fname in fnames]
self._publisher.publish_raw_data(partition_id, fpaths)
self._publisher.finish_raw_data(partition_id)
return fpaths
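# A minimal illustrative sketch (not part of the original module) of how a
# portal worker could drive the manager above. `drive_portal_jobs`,
# `run_map` and `run_reduce` are hypothetical; the (finished, task) contract
# and the PartState values come from alloc_task/finish_task.
def drive_portal_jobs(job_manager, rank_id, run_map, run_reduce):
    import time
    while True:
        finished, task = job_manager.alloc_task(rank_id)
        if task is None:
            if finished:
                return  # batch mode: every partition reached its end state
            time.sleep(5)  # nothing allocatable right now; poll again
            continue
        if isinstance(task, dp_pb.MapTask):
            run_map(task)
            job_manager.finish_task(rank_id, task.partition_id,
                                    dp_pb.PartState.kIdMap)
        else:
            run_reduce(task)
            job_manager.finish_task(rank_id, task.partition_id,
                                    dp_pb.PartState.kEventTimeReduce)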
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,994 | piiswrong/fedlearner | refs/heads/master | /test/data_join/test_data_portal_worker.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import os
import random
import unittest
import logging
import tensorflow_io
from tensorflow.compat.v1 import gfile
import tensorflow.compat.v1 as tf
tf.enable_eager_execution()
from cityhash import CityHash32
from fedlearner.common import data_join_service_pb2 as dj_pb
from fedlearner.common import data_portal_service_pb2 as dp_pb
from fedlearner.data_join.data_portal_worker import DataPortalWorker
from fedlearner.data_join.raw_data_iter_impl.tf_record_iter import TfExampleItem
from fedlearner.data_join import common
class TestDataPortalWorker(unittest.TestCase):
def _get_input_fpath(self, partition_id):
return "{}/raw_data_partition_{}".format(self._input_dir, partition_id)
def _generate_one_partition(self, partition_id, example_id, num_examples):
fpath = self._get_input_fpath(partition_id)
with tf.io.TFRecordWriter(fpath) as writer:
for i in range(num_examples):
example_id += random.randint(1, 5)
event_time = 150000000 + random.randint(10000000, 20000000)
feat = {}
label = random.choice([1, 0])
if random.random() < 0.8:
feat['label'] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[label]))
feat['example_id'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[str(example_id).encode('utf-8')]))
feat['raw_id'] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[str(example_id).encode('utf-8')]))
feat['event_time'] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[event_time]))
example = tf.train.Example(features=tf.train.Features(feature=feat))
writer.write(example.SerializeToString())
return example_id
def _generate_input_data(self):
self._partition_item_num = 1 << 16
self._clean_up()
gfile.MakeDirs(self._input_dir)
success_flag_fpath = "{}/_SUCCESS".format(self._input_dir)
example_id = 1000001
for partition_id in range(self._input_partition_num):
example_id = self._generate_one_partition(partition_id, example_id, self._partition_item_num)
with gfile.GFile(success_flag_fpath, 'w') as fh:
fh.write('')
def _make_portal_worker(self):
portal_worker_options = dp_pb.DataPortalWorkerOptions(
raw_data_options=dj_pb.RawDataOptions(
raw_data_iter="TF_RECORD",
read_ahead_size=1<<20,
read_batch_size=128,
optional_fields=['label']
),
writer_options=dj_pb.WriterOptions(
output_writer="TF_RECORD"
),
batch_processor_options=dj_pb.BatchProcessorOptions(
batch_size=128,
max_flying_item=300000
),
merger_read_ahead_size=1000000,
merger_read_batch_size=128
)
os.environ['ETCD_BASE_DIR'] = "portal_worker_0"
self._portal_worker = DataPortalWorker(portal_worker_options,
"localhost:5005", 0,
"etcd", True)
def _clean_up(self):
if gfile.Exists(self._input_dir):
gfile.DeleteRecursively(self._input_dir)
if gfile.Exists(self._partition_output_dir):
gfile.DeleteRecursively(self._partition_output_dir)
if gfile.Exists(self._merge_output_dir):
gfile.DeleteRecursively(self._merge_output_dir)
def _prepare_test(self):
self._input_dir = './portal_worker_input'
self._partition_output_dir = './portal_worker_partition_output'
self._merge_output_dir = './portal_worker_merge_output'
self._input_partition_num = 4
self._output_partition_num = 2
self._generate_input_data()
self._make_portal_worker()
def _check_partitioner(self, map_task):
output_partitions = gfile.ListDirectory(map_task.output_base_dir)
output_partitions = [x for x in output_partitions if "SUCCESS" not in x]
self.assertEqual(len(output_partitions), map_task.output_partition_num)
partition_dirs = ["{}/{}".format(map_task.output_base_dir, x) \
for x in output_partitions]
total_cnt = 0
for partition in output_partitions:
dpath = "{}/{}".format(map_task.output_base_dir, partition)
partition_id = partition.split("_")[-1]
partition_id = int(partition_id)
segments = gfile.ListDirectory(dpath)
for segment in segments:
fpath = "{}/{}".format(dpath, segment)
event_time = 0
for record in tf.python_io.tf_record_iterator(fpath):
tf_item = TfExampleItem(record)
self.assertTrue(tf_item.event_time >= event_time, "{}, {}".format(tf_item.event_time, event_time))
event_time = tf_item.event_time ## assert order
self.assertEqual(partition_id, CityHash32(tf_item.raw_id) \
% map_task.output_partition_num)
total_cnt += 1
self.assertEqual(total_cnt, self._partition_item_num * self._input_partition_num)
def _check_merge(self, reduce_task):
dpath = os.path.join(self._merge_output_dir, \
common.partition_repr(reduce_task.partition_id))
fpaths = gfile.ListDirectory(dpath)
fpaths = sorted(fpaths)
event_time = 0
total_cnt = 0
for fpath in fpaths:
fpath = os.path.join(dpath, fpath)
logging.info("check merge path:{}".format(fpath))
for record in tf.python_io.tf_record_iterator(fpath):
tf_item = TfExampleItem(record)
self.assertTrue(tf_item.event_time >= event_time)
event_time = tf_item.event_time
total_cnt += 1
return total_cnt
def test_portal_worker(self):
self._prepare_test()
map_task = dp_pb.MapTask()
map_task.output_base_dir = self._partition_output_dir
map_task.output_partition_num = self._output_partition_num
map_task.partition_id = 0
map_task.task_name = 'map_part_{}'.format(map_task.partition_id)
map_task.part_field = 'example_id'
map_task.data_portal_type = dp_pb.DataPortalType.Streaming
for partition_id in range(self._input_partition_num):
map_task.fpaths.append(self._get_input_fpath(partition_id))
# partitioner
task = dp_pb.NewTaskResponse()
task.map_task.CopyFrom(map_task)
self._portal_worker._run_map_task(task.map_task)
self._check_partitioner(task.map_task)
# merge
total_cnt = 0
for partition_id in range(self._output_partition_num):
reduce_task = dp_pb.ReduceTask()
reduce_task.map_base_dir = self._partition_output_dir
reduce_task.reduce_base_dir = self._merge_output_dir
reduce_task.partition_id = partition_id
reduce_task.task_name = 'reduce_part_{}'.format(partition_id)
self._portal_worker._run_reduce_task(reduce_task)
total_cnt += self._check_merge(reduce_task)
self.assertEqual(total_cnt, self._partition_item_num * self._input_partition_num)
self._clean_up()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
logging.basicConfig(format="%(asctime)s %(filename)s "\
"%(lineno)s %(levelname)s - %(message)s")
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
76,995 | piiswrong/fedlearner | refs/heads/master | /web_console_v2/api/test/auth_test.py | # Copyright 2020 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import json
import unittest
from http import HTTPStatus
from testing.common import BaseTestCase
class AuthApiTest(BaseTestCase):
def test_auth(self):
self.signout_helper()
resp = self.get_helper('/api/v2/auth/users')
self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED)
resp = self.client.post(
'/api/v2/auth/signin',
data=json.dumps({
'username': 'ada',
'password': 'wrongpassword'
}),
content_type='application/json')
self.assertEqual(resp.status_code, HTTPStatus.UNAUTHORIZED)
self.signin_helper()
resp = self.get_helper('/api/v2/auth/users')
self.assertEqual(resp.status_code, HTTPStatus.OK)
self.assertEqual(len(resp.json.get('data')), 1)
self.assertEqual(resp.json.get('data')[0]['username'], 'ada')
resp = self.post_helper(
'/api/v2/auth/users',
data={
'username': 'ada',
'password': 'ada'
})
self.assertEqual(resp.status_code, HTTPStatus.CONFLICT)
resp = self.post_helper(
'/api/v2/auth/users',
data={
'username': 'ada1',
'password': 'ada1'
})
self.assertEqual(resp.status_code, HTTPStatus.CREATED)
self.signin_helper('ada1', 'ada1')
resp = self.get_helper('/api/v2/auth/users')
self.assertEqual(resp.status_code, HTTPStatus.OK)
self.assertEqual(len(resp.json.get('data')), 2)
self.assertEqual(resp.json.get('data')[1]['username'], 'ada1')
user_id = resp.json.get('data')[1]['id']
resp = self.put_helper(
'/api/v2/auth/users/10',
data={})
self.assertEqual(resp.status_code, HTTPStatus.NOT_FOUND)
resp = self.put_helper(
'/api/v2/auth/users/%d'%user_id,
data={
'wrongfield': 'ada1',
})
self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
resp = self.put_helper(
'/api/v2/auth/users/%d'%user_id,
data={
'old_password': 'ada1',
'new_password': 'ada2',
})
self.assertEqual(resp.status_code, HTTPStatus.OK)
self.signin_helper('ada1', 'ada2')
self.delete_helper('/api/v2/auth/users/%d'%user_id)
resp = self.get_helper('/api/v2/auth/users')
self.assertEqual(resp.status_code, HTTPStatus.OK)
self.assertEqual(len(resp.json.get('data')), 1)
if __name__ == '__main__':
unittest.main()
| {"/test/data_join/test_data_portal_master.py": ["/fedlearner/data_join/data_portal_master.py"], "/fedlearner/trainer_master/leader_tm.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/disabled_train_master.py": ["/fedlearner/trainer_master/data/data_block_queue.py", "/fedlearner/trainer_master/data/data_block_set.py"], "/fedlearner/data_join/cmd/data_portal_master_service.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_master.py"], "/fedlearner/data_join/data_portal_master.py": ["/fedlearner/data_join/data_portal_job_manager.py"], "/fedlearner/data_join/cmd/data_portal_worker_cli.py": ["/fedlearner/common/common.py", "/fedlearner/data_join/data_portal_worker.py"], "/fedlearner/trainer/trainer_worker.py": ["/fedlearner/trainer/bridge.py", "/fedlearner/trainer/estimator.py"], "/fedlearner/trainer_master/follower_tm.py": ["/fedlearner/trainer_master/trainer_master_service.py"], "/test/trainer/test_nn_online_training.py": ["/fedlearner/trainer_master/leader_tm.py", "/fedlearner/trainer_master/follower_tm.py"], "/test/data_join/test_data_portal_worker.py": ["/fedlearner/data_join/data_portal_worker.py"]} |
77,022 | sublee/korean | refs/heads/master | /korean/morphology/substantive.py | # -*- coding: utf-8 -*-
"""
korean.morphology.substantive
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
from collections import deque
import itertools
import re
from .morpheme import Morpheme
from ..hangul import is_hangul
__all__ = ['Substantive', 'Noun', 'NumberWord', 'Loanword']
class Substantive(Morpheme):
"""A class for Korean substantive that is called "체언" in Korean."""
def __format__(self, spec):
""":class:`Substantive`'s custom formatter appends the correct particle
after the substantive string using particle format spec such as
``{0:은}`` or ``{1:로}``:
>>> format(Noun('엄마'), '을')
'엄마를'
>>> '{0:은} {1:로}'.format(Noun('아들'), Noun('마을'))
'아들은 마을로'
>>> '{0:은} {1:로}'.format(Noun('아들'), Noun('산'))
'아들은 산으로'
"""
from .particle import Particle
from . import merge
separated_spec = spec.split(':')
if separated_spec[0] and is_hangul(separated_spec[0][0]):
text = merge(self, Particle(separated_spec.pop(0)))
else:
text = unicode(self)
try:
spec = separated_spec[0]
except IndexError:
spec = ''
return format(text, spec)
class Noun(Substantive):
"""A class for Korean noun that is called "명사" in Korean."""
READING_PATTERN = re.compile(r'(?P<other>[^0-9]+)?(?P<number>[0-9]+)?')
def read(self):
"""Reads a noun as Korean. The result will be Hangul.
>>> Noun('레벨42').read()
'레벨사십이'
"""
rv = []
for match in self.READING_PATTERN.finditer(unicode(self)):
if match.group('other'):
rv.append(match.group('other'))
if match.group('number'):
rv.append(NumberWord(int(match.group('number'))).read())
return ''.join(rv)
class NumberWord(Substantive):
"""A class for Korean number word that is called "수사" in Korean."""
__numbers__ = {}
__digits__ = {}
__unary_operations__ = {}
def __init__(self, number):
self.number = number
def read(self):
"""Reads number as Korean.
>>> NumberWord(1234567890).read()
'십이억삼천사백오십육만칠천팔백구십'
>>> NumberWord.read(10000)
'만'
>>> NumberWord.read(0)
'영'
"""
return ''.join(type(self).read_phases(self.number))
@classmethod
def read_phases(cls, number):
"""Reads number as Korean but seperates the result at each 10k.
>>> NumberWord.read_phases(1234567890)
('십이억', '삼천사백오십육만', '칠천팔백구십')
>>> NumberWord.read_phases(10000)
('만', '')
>>> NumberWord.read_phases(0)
('영',)
"""
phase = deque()
chunks = deque()
negative = number < 0
number = abs(number)
for digit in itertools.count():
unit = number % 10
number //= 10
if digit >= 4 and digit % 4 == 0:
# 만, 억, 조, ...
phase.appendleft(cls.__digits__[digit])
if unit:
if digit % 4 != 0:
# 십, 백, 천
phase.appendleft(cls.__digits__[digit % 4])
if unit != 1 or digit % 4 == 0:
# 일, 이, 삼, ...
phase.appendleft(cls.__numbers__[unit])
if not number or digit % 4 == 3:
if not number or digit < 4 or len(phase) > 1:
chunks.appendleft(''.join(phase))
else:
chunks.appendleft('')
phase.clear()
if not number:
break
# 일만, 일억 -> 만, 억
one = cls.__numbers__[1]
for place in cls.__digits__.values():
if chunks[0].startswith(one + place):
chunks[0] = chunks[0][len(one):]
break
if negative:
chunks.appendleft(cls.__unary_operations__['-'])
return tuple(chunks)
def basic(self):
return unicode(self.number)
def __format__(self, spec):
if ':' in spec:
number_spec, spec = spec.split(':', 1)
formatted_number = format(self.number, number_spec)
else:
formatted_number = None
try:
rv = super(NumberWord, self).__format__(spec)
except ValueError:
return format(self.number, spec)
if formatted_number is not None:
rv = formatted_number + rv[len(str(self.number)):]
return rv
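# Illustrative note on the combined spec handled above (derived from this
# method, not from external docs): the text before the first ':' is a plain
# number format spec and the remainder is forwarded to Substantive's
# particle-aware formatter, e.g. a call shaped like
# u'{0:,:은}'.format(NumberWord(1024)) comma-groups the digits before the
# chosen particle is appended.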
class Loanword(Substantive):
"""A class for loanword that is called "외래어" in Korean. This depends
on `Hangulize <http://packages.python.org/hangulize>`_ which automatically
transcribes a non-Korean word into Hangul.
.. versionadded:: 0.1.4
"""
def _import_hangulize(self):
try:
return self._hangulize
except AttributeError:
pass
try:
import hangulize
except ImportError:
raise ImportError('%s needs hangulize>=0.0.5' %
type(self).__name__)
self._hangulize = hangulize
return hangulize
def __init__(self, word, code=None, iso639=None, lang=None):
hangulize = self._import_hangulize()
self.lang = lang or hangulize.get_lang(code, iso639)
super(Loanword, self).__init__(word)
def read(self):
"""Transcribes into Hangul using `Hangulize
<http://packages.python.org/hangulize>`_.
>>> Loanword('Guido van Rossum', 'nld').read()
'히도 판로쉼'
>>> Loanword('საქართველო', 'kat').read()
'사카르트벨로'
>>> Loanword('Leonardo da Vinci', 'ita').read()
'레오나르도 다 빈치'
"""
hangulize = self._import_hangulize()
return hangulize.hangulize(self.basic(), lang=self.lang)
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,023 | sublee/korean | refs/heads/master | /setup.py | # -*- coding: utf-8 -*-
"""
Korean -- A library for Korean morphology
=========================================
Sometimes you need to localize your project to Korean. But common i18n
solutions such as gettext do not work well with non-Indo-European languages,
and Korean has many morphological differences. "korean", a Python module,
provides useful Korean morphological functions.
Do not use "을(를)" anymore
```````````````````````````
::
>>> from korean import Noun
>>> fmt = u'{subj:은} {obj:을} 먹었다.'
>>> print fmt.format(subj=Noun(u'나'), obj=Noun(u'밥'))
나는 밥을 먹었다.
>>> print fmt.format(subj=Noun(u'학생'), obj=Noun(u'돈까스'))
학생은 돈까스를 먹었다.
Links
`````
* `GitHub repository <http://github.com/sublee/korean>`_
* `development version
<http://github.com/sublee/korean/zipball/master#egg=korean-dev>`_
"""
from __future__ import with_statement
import re
from setuptools import find_packages, setup
from setuptools.command.test import test
import sys
# detect the current version
with open('korean/__init__.py') as f:
version = re.search(r'__version__\s*=\s*\'(.+?)\'', f.read()).group(1)
assert version
# use pytest instead
def run_tests(self):
pyc = re.compile(r'\.pyc|\$py\.class')
test_file = pyc.sub('.py', __import__(self.test_suite).__file__)
raise SystemExit(__import__('pytest').main([test_file]))
test.run_tests = run_tests
tests_require = ['pytest', 'jinja2']
if sys.version_info < (3,):
tests_require.extend(['hangulize', 'django'])
setup(
name='korean',
version=version,
license='BSD',
author='Heungsub Lee',
author_email=re.sub('((sub).)(.*)', r'\2@\1.\3', 'sublee'),
url='http://pythonhosted.org/korean',
description='A library for Korean morphology',
long_description=__doc__,
platforms='any',
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Natural Language :: Korean',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Localization',
'Topic :: Text Processing :: Linguistic',
],
install_requires=['setuptools', 'six'],
test_suite='koreantests',
tests_require=tests_require,
use_2to3=(sys.version_info >= (3,)),
)
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,024 | sublee/korean | refs/heads/master | /korean/ext/django/__init__.py | # -*- coding: utf-8 -*-
"""
korean.ext.django
~~~~~~~~~~~~~~~~~
A Django app offering templatetags and filters for korean.
.. versionadded:: 0.1.7
.. versionchanged:: 0.1.9
.. _Django: https://www.djangoproject.com/
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
default_app_config = 'korean.ext.django.apps.KoreanConfig'
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,025 | sublee/korean | refs/heads/master | /korean/morphology/__init__.py | # -*- coding: utf-8 -*-
"""
korean.morphology
~~~~~~~~~~~~~~~~~
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import sys
import types
from .. import hangul
__all__ = ['Morphology', 'Morpheme', 'Particle', 'Substantive', 'Noun',
'NumberWord', 'Loanword', 'pick_allomorph', 'merge',
'define_allomorph_picker']
class Morphology(object):
_registry = {}
@classmethod
def _register_morpheme(cls, morpheme_cls):
for attr in dir(morpheme_cls):
if not attr.startswith('$'):
continue
for keyword, func in getattr(morpheme_cls, attr):
keyword = (morpheme_cls,) + keyword
if keyword in cls._registry:
raise ValueError('Already defined rule')
try:
cls._registry[attr][keyword] = func
except KeyError:
cls._registry[attr] = {keyword: func}
@classmethod
def _make_decorator(cls, tmp_attr, keyword):
assert tmp_attr.startswith('$')
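# A note on the frame hack below: sys._getframe(2) reaches the class body
# that invoked the decorator factory, so rules accumulate in that class's
# namespace under tmp_attr until _register_morpheme collects them.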
frm = sys._getframe(2)
def decorator(func):
rule = (keyword, func)
try:
frm.f_locals[tmp_attr].append(rule)
except KeyError:
frm.f_locals[tmp_attr] = [rule]
return func
return decorator
@classmethod
def define_allomorph_picker(cls, prefix_of=None, suffix_of=None):
if not (prefix_of or suffix_of):
raise TypeError('prefix_of or suffix_of should be defined')
elif bool(prefix_of) == bool(suffix_of):
raise TypeError('Cannot specify prefix_of and suffix_of both')
keyword = (prefix_of, suffix_of)
return cls._make_decorator('$allomorph_pickers', keyword)
@classmethod
def pick_allomorph(cls, morpheme, prefix_of=None, suffix_of=None):
prefix_type = prefix_of and type(prefix_of)
suffix_type = suffix_of and type(suffix_of)
keyword = (type(morpheme), prefix_type, suffix_type)
func = cls._registry['$allomorph_pickers'][keyword]
bound_func = types.MethodType(func, morpheme)
return bound_func(prefix_of or suffix_of)
@classmethod
def merge(cls, prefix, suffix):
try:
prefix = cls.pick_allomorph(prefix, prefix_of=suffix)
except KeyError:
pass
try:
suffix = cls.pick_allomorph(suffix, suffix_of=prefix)
except KeyError:
pass
if hangul.is_final(suffix[0]):
prefix = prefix.read()
splitted = hangul.split_char(prefix[-1])
assert not splitted[2]
mid = hangul.join_char((splitted[0], splitted[1], suffix[0]))
return '{0}{1}{2}'.format(prefix[:-1], mid, suffix[1:])
else:
return '{0}{1}'.format(prefix, suffix)
pick_allomorph = Morphology.pick_allomorph
define_allomorph_picker = Morphology.define_allomorph_picker
merge = Morphology.merge
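# A usage sketch of these helpers (mirroring koreantests.py), assuming the
# allomorph pickers in .particle have been registered:
#   pick_allomorph(Particle('가'), suffix_of=Noun('받침'))  # -> '이'
#   merge(Noun('서버'), Particle('일랑'))                  # -> '서벌랑'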
#: Import submodules at the end, because they might need :class:`Morphology`.
from .morpheme import Morpheme
from .particle import Particle
from .substantive import Substantive, Noun, NumberWord, Loanword
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,026 | sublee/korean | refs/heads/master | /korean/morphology/particle.py | # -*- coding: utf-8 -*-
"""
korean.morphology.particle
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import unicodedata
from . import define_allomorph_picker
from .morpheme import Morpheme
from .substantive import Noun, NumberWord, Loanword
from .. import hangul
__all__ = ['Particle']
class Particle(Morpheme):
"""Particle (조사) is a postposition in Korean. Some particles have
different allomorphs such as 을/를, 이/가. These forms follow forward
syllable ends what phoneme; a vowel, a consonant, or a Rieul (ㄹ).
"""
def __init__(self, after_vowel, after_consonant=None, after_rieul=None):
if after_rieul:
forms = (after_vowel, after_consonant, after_rieul)
elif after_consonant:
forms = (after_vowel, after_consonant)
else:
forms = (after_vowel,)
super(Particle, self).__init__(*forms)
@classmethod
def get(cls, key):
try:
return super(Particle, cls).get(key)
except KeyError:
return cls.guess(key)
@classmethod
def guess(cls, key):
length_of_first = lambda x: len(x[0])
for other_key, particle in sorted(cls._registry.items(),
key=length_of_first):
if key.startswith(other_key):
suffix = key[len(other_key):]
return cls(*(form + suffix for form in particle.forms))
raise KeyError('There is no guessable particle')
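# A sketch of guessing (mirroring koreantests.py): guess('로서') matches
# the registered key '로' and extends each of its allomorphs with the
# remaining suffix, so '{0:로서}'.format(Noun('학생')) gives '학생으로서'.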
@property
def after_vowel(self):
return self.basic()
@property
def after_consonant(self):
try:
return self.forms[1]
except IndexError:
return self.basic()
@property
def after_rieul(self):
try:
return self.forms[2]
except IndexError:
return self.basic()
def naive(self):
rv = []
seen = set()
unique_forms = [form for form in self.forms
if form not in seen and seen.add(form) is None]
for forms in zip(unique_forms[:-1], unique_forms[1:]):
length = map(len, forms)
if len(set(length)) == 1:
# such as "를(을)", "을(를)", "(를)을", "(을)를"
rv.append('{0}({1})'.format(*forms))
rv.append('{1}({0})'.format(*forms))
rv.append('({0}){1}'.format(*forms))
rv.append('({1}){0}'.format(*forms))
else:
# such as "(으)로"
x = int(length[0] > length[1])
args = forms[1 - x].rstrip(forms[x]), forms[x]
rv.append('({0}){1}'.format(*args))
return tuple(rv)
def pick_allomorph_after_char(self, char):
final = hangul.get_final(char)
if not final:
return self.after_vowel
elif final == 'ㄹ':
return self.after_rieul
else:
return self.after_consonant
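# A sketch (mirroring koreantests.py): for Particle('로'), this returns
# '로' after '차' (ends in a vowel), '으로' after '썬' (ends in a plain
# consonant), and '로' again after '을' (ends in ㄹ).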
@define_allomorph_picker(suffix_of=Noun)
@define_allomorph_picker(suffix_of=NumberWord)
@define_allomorph_picker(suffix_of=Loanword)
def pick_allomorph_after_substantive(self, substantive):
reading = substantive.read()
for char in reversed(reading):
cat = unicodedata.category(char)
if cat[0] == 'P' or cat[0] == 'S':
# skip punctuations and symbols
continue
return self.pick_allomorph_after_char(char)
raise AssertionError()
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,027 | sublee/korean | refs/heads/master | /korean/hangul.py | # -*- coding: utf-8 -*-
"""
korean.hangul
~~~~~~~~~~~~~
Processing strings written in Hangul. All of the code here is based on
`hangul.py
<https://raw.github.com/sublee/hangulize/master/hangulize/hangul.py>`_ by
`Hye-Shik Chang <http://openlook.org/>`_, 2003.
:copyright: (c) 2012-2013 by Heungsub Lee and 2003 by Hye-Shik Chang
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
from six.moves import xrange
__all__ = ['char_offset', 'is_hangul', 'is_vowel', 'is_consonant',
'is_initial', 'is_final', 'get_initial', 'get_vowel', 'get_final',
'split_char', 'join_char']
def S(*sequences):
def to_tuple(sequence):
if not sequence:
return (sequence,)
return tuple(sequence)
return sum(map(to_tuple, sequences), ())
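# S flattens its arguments into one tuple of single characters; passing ''
# first (as FINALS does below) keeps an empty "no final consonant" slot at
# index 0, which the offset arithmetic in this module relies on.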
VOWELS = S('ㅏㅐㅑㅒㅓㅔㅕㅖㅗㅘㅙㅚㅛㅜㅝㅞㅟㅠㅡㅢㅣ')
CONSONANTS = S('ㄱㄲㄳㄴㄵㄶㄷㄸㄹㄺㄻㄼㄽㄾㄿㅀㅁㅂㅃㅄㅅㅆㅇㅈㅉㅊㅋㅌㅍㅎ')
INITIALS = S('ㄱㄲㄴㄷㄸㄹㅁㅂㅃㅅㅆㅇㅈㅉㅊㅋㅌㅍㅎ')
FINALS = S('', 'ㄱㄲㄳㄴㄵㄶㄷㄹㄺㄻㄼㄽㄾㄿㅀㅁㅂㅄㅅㅆㅇㅈㅊㅋㅌㅍㅎ')
LETTER_ELEMENTS = (INITIALS, VOWELS, FINALS)
HANGUL_RANGE = xrange(ord('가'), ord('힣') + 1)
FIRST_HANGUL = HANGUL_RANGE[0]
del S
def char_offset(char):
"""Returns Hangul character offset from "가"."""
if isinstance(char, int):
offset = char
else:
assert len(char) == 1
assert is_hangul(char)
offset = ord(char) - FIRST_HANGUL
assert offset < len(HANGUL_RANGE)
return offset
def is_hangul(char):
"""Checks if the given character is written in Hangul."""
return ord(char) in HANGUL_RANGE
def is_vowel(char):
"""Checks if the given character is a vowel of Hangul."""
return char in VOWELS
def is_consonant(char):
"""Checks if the given character is a consonant of Hangul."""
return char in CONSONANTS
def is_initial(char):
"""Checks if the given character is an initial consonant of Hangul."""
return char in INITIALS
def is_final(char):
"""Checks if the given character is a final consonant of Hangul. The final
consonants contain what a joined multiple consonant and empty character.
"""
return char in FINALS
def get_initial(char):
"""Returns an initial consonant from the given character."""
if is_initial(char):
return char
return INITIALS[int(char_offset(char) / (len(VOWELS) * len(FINALS)))]
def get_vowel(char):
"""Returns a vowel from the given character."""
if is_vowel(char):
return char
return VOWELS[int(char_offset(char) / len(FINALS)) % len(VOWELS)]
def get_final(char):
"""Returns a final consonant from the given character."""
if is_final(char):
return char
return FINALS[char_offset(char) % len(FINALS)]
def split_char(char):
"""Splits the given character to a tuple where the first item is the
initial consonant and the second the vowel and the third the final.
"""
code = char_offset(char)
return (get_initial(code), get_vowel(code), get_final(code))
def join_char(splitted):
"""Joins a tuple in the form ``(initial, vowel, final)`` to a Hangul
character.
"""
assert len(splitted) == len(LETTER_ELEMENTS)
if not (splitted[0] and splitted[1]):
return splitted[0] or splitted[1]
indexes = [tuple.index(*args) for args in zip(LETTER_ELEMENTS, splitted)]
offset = (indexes[0] * len(VOWELS) + indexes[1]) * len(FINALS) + indexes[2]
return unichr(FIRST_HANGUL + offset)
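# A round-trip sketch of the offset arithmetic above, where the syllable
# offset is (initial_index * 21 + vowel_index) * 28 + final_index:
#   split_char('한')              # -> ('ㅎ', 'ㅏ', 'ㄴ')
#   join_char(('ㅎ', 'ㅏ', 'ㄴ'))   # -> '한'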
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,028 | sublee/korean | refs/heads/master | /koreantests.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, with_statement
import contextlib
import sys
import textwrap
from pytest import deprecated_call, raises
from korean import *
@contextlib.contextmanager
def disable_imports(*names):
"""Stolen from Attest."""
import __builtin__
import_ = __builtin__.__import__
def __import__(name, *args, **kwargs):
if name in names:
raise ImportError('%r is disabled' % name)
return import_(name, *args, **kwargs)
__builtin__.__import__ = __import__
try:
yield
finally:
__builtin__.__import__ = import_
class TestParticle(object):
def test_allomorph(self):
# case clitics
assert Particle('가') is Particle('이')
assert Particle('를') is Particle('을')
assert Particle('로') is Particle('으로')
assert Particle('와') is Particle('과')
assert Particle('랑') is Particle('이랑')
# informational clitics
assert Particle('는') is Particle('은')
assert Particle('나') is Particle('이나')
def test_naive(self):
assert Particle('을').naive() == \
('를(을)', '을(를)', '(를)을', '(을)를')
assert Particle('로').naive() == ('(으)로',)
def test_pick_allomorph_with_noun(self):
pick_allomorph = morphology.pick_allomorph
P, N = Particle, Noun
assert pick_allomorph(P('가'), suffix_of=N('받침')) == '이'
assert pick_allomorph(P('가'), suffix_of=N('나비')) == '가'
assert pick_allomorph(P('로'), suffix_of=N('마을')) == '로'
assert pick_allomorph(P('로'), suffix_of=N('파이썬')) == '으로'
assert pick_allomorph(P('다'), suffix_of=N('파이썬')) == '이다'
assert pick_allomorph(P('일랑'), suffix_of=N('게임')) == '일랑'
assert pick_allomorph(P('일랑'), suffix_of=N('서버')) == 'ㄹ랑'
assert pick_allomorph(P('란'), suffix_of=N('자바')) == '란'
assert pick_allomorph(P('란'), suffix_of=N('파이썬')) == '이란'
def test_pick_allomorph_with_number_word(self):
pick_allomorph = morphology.pick_allomorph
P, Nw = Particle, NumberWord
assert pick_allomorph(P('가'), suffix_of=Nw(1)) == '이'
assert pick_allomorph(P('가'), suffix_of=Nw(2)) == '가'
assert pick_allomorph(P('일랑'), suffix_of=Nw(3)) == '일랑'
#assert pick_allomorph(P('일랑'), suffix_of=Nw(4)) == '일랑'
def test_pick_allomorph_with_loanword(self):
pick_allomorph = morphology.pick_allomorph
P, Lw = Particle, Loanword
assert pick_allomorph(P('가'), suffix_of=Lw('Emil', 'ron')) == '이'
def test_merge_with_noun(self):
merge = morphology.merge
P, N = Particle, Noun
assert merge(N('게임'), P('일랑')) == '게임일랑'
assert merge(N('서버'), P('일랑')) == '서벌랑'
class TestNoun(object):
def test_read(self):
assert Noun('주인공').read() == '주인공'
assert Noun('컴퓨터').read() == '컴퓨터'
assert Noun('한국어').read() == '한국어'
def test_read_with_number(self):
assert Noun('레벨 4').read() == '레벨 사'
assert Noun('레벨 50').read() == '레벨 오십'
assert Noun('64렙').read() == '육십사렙'
def test_null_format(self):
assert '{0}'.format(Noun('소년')) == '소년'
def test_unicode_format(self):
assert '{0:6}'.format(Noun('소년')) == '소년 '
assert '{0:^6}'.format(Noun('소녀')) == ' 소녀 '
assert '{0:>6}'.format(Noun('한국어')) == ' 한국어'
def test_particle_format(self):
assert '{0:는}'.format(Noun('소년')) == '소년은'
assert '{0:는}'.format(Noun('소녀')) == '소녀는'
assert '{0:을}'.format(Noun('한국어')) == '한국어를'
assert '{0:이}'.format(Noun('레벨 2')) == '레벨 2가'
def test_undefined_particle_format(self):
assert '{0:에게}'.format(Noun('소년')) == '소년에게'
def test_guessable_particle_format(self):
assert '{0:로서}'.format(Noun('학생')) == '학생으로서'
assert '{0:로써}'.format(Noun('컴퓨터')) == '컴퓨터로써'
assert '{0:로써}'.format(Noun('칼')) == '칼로써'
assert '{0:로써}'.format(Noun('음식')) == '음식으로써'
assert '{0:랑은}'.format(Noun('녀석')) == '녀석이랑은'
def test_combination_format(self):
with raises(ValueError):
'{0:을:를}'.format(Noun('한국어'))
assert '{0:는:5}'.format(Noun('소년')) == '소년은 '
assert '{0:는:^5}'.format(Noun('소녀')) == ' 소녀는 '
assert '{0:을:>5}'.format(Noun('한국어')) == ' 한국어를'
class TestNumberWord(object):
def test_read(self):
assert NumberWord(5).read() == '오'
assert NumberWord(32).read() == '삼십이'
assert NumberWord(42).read() == '사십이'
assert NumberWord(152400).read() == '십오만이천사백'
assert NumberWord(600000109).read() == '육억백구'
assert NumberWord(72009852).read() == '칠천이백만구천팔백오십이'
assert NumberWord(-8).read() == '마이너스팔'
assert NumberWord(10000).read() == '만'
assert NumberWord(110000).read() == '십일만'
assert NumberWord(113386).read() == '십일만삼천삼백팔십육'
def test_read_phases(self):
assert NumberWord.read_phases(32) == ('삼십이',)
assert NumberWord.read_phases(42) == ('사십이',)
assert NumberWord.read_phases(152400) == ('십오만', '이천사백')
assert NumberWord.read_phases(600000109) == ('육억', '', '백구')
assert NumberWord.read_phases(-8) == ('마이너스', '팔')
assert NumberWord.read_phases(10000) == ('만', '')
def test_null_format(self):
assert '{0}'.format(NumberWord(12)) == '12'
def test_number_format(self):
assert '{0:.1f}'.format(NumberWord(4)) == '4.0'
assert '{0:4d}'.format(NumberWord(4)) == ' 4'
def test_particle_format(self):
assert '레벨 {0:이}'.format(NumberWord(4)) == '레벨 4가'
assert '레벨 {0:이}'.format(NumberWord(3)) == '레벨 3이'
assert '레벨 {0:이}'.format(NumberWord(15)) == '레벨 15가'
def test_combination_format(self):
with raises(ValueError):
'{0:을:를}'.format(NumberWord(19891212))
if sys.version_info > (2, 7):
# Python 2.6 doesn't support PEP 378
assert '{0:,:을}'.format(NumberWord(19891212)) == '19,891,212를'
class TestLoanword(object):
def test_need_hangulize(self):
with disable_imports('hangulize'):
with raises(ImportError):
Loanword('štěstí', 'ces')
def test_read(self):
assert Loanword('italia', 'ita').read() == '이탈리아'
assert Loanword('gloria', 'ita').read() == '글로리아'
assert Loanword('Αλεξάνδρεια', 'ell').read() == '알렉산드리아'
def test_null_format(self):
assert '{0}'.format(Loanword('Вадзім Махнеў', 'bel')) == \
'Вадзім Махнеў'
def test_particle_format(self):
assert '{0:으로} 여행 가자'.format(Loanword('Italia', 'ita')) == \
'Italia로 여행 가자'
van_gogh = Loanword('Vincent Willem van Gogh', 'nld')
assert '이 작품은 {0:이} 그렸다.'.format(van_gogh) == \
'이 작품은 Vincent Willem van Gogh가 그렸다.'
class TestLocalization(object):
def test_template(self):
assert l10n.Template('{0:로}').format(123) == '123으로'
if sys.version_info < (3,):
assert l10n.Template('{0:로}').format(long(123)) == '123으로'
def test_proofreading(self):
assert l10n.proofread('사과은(는) 맛있다.') == '사과는 맛있다.'
assert l10n.proofread('집(으)로 가자.') == '집으로 가자.'
assert l10n.proofread('용사은(는) 검을(를) 획득했다.') == \
'용사는 검을 획득했다.'
assert l10n.proofread('마법서 "파이어 볼"을(를) 얻었습니다.') == \
'마법서 "파이어 볼"을 얻었습니다.'
assert l10n.proofread('가나다순에서 "쥐"은(는) "줘" 다음에 온다.') == \
'가나다순에서 "쥐"는 "줘" 다음에 온다.'
def test_meaningless_proofreading(self):
assert l10n.proofread('사과다.') == '사과다.'
assert l10n.proofread('집') == '집'
assert l10n.proofread('의 식 주') == '의 식 주'
assert l10n.proofread('the grammatical rules of a language') == \
'the grammatical rules of a language'
def test_unworkable_proofreading(self):
assert l10n.proofread('Korean를(을)') == 'Korean를(을)'
assert l10n.proofread('Korean을(를)') == 'Korean를(을)'
assert l10n.proofread('Korean(을)를') == 'Korean를(을)'
assert l10n.proofread('한국인 혹은 Korean(을)를') == '한국인 혹은 Korean를(을)'
def test_complex_proofreading(self):
assert l10n.proofread('말을(를)(를)') == '말을(를)'
def test_proofreading_lyrics(self):
assert textwrap.dedent(l10n.proofread('''
나의 영혼 물어다줄 평화시장 비둘기 위(으)로 떨어지는 투명한 소나기
다음날엔 햇빛 쏟아지길 바라며 참아왔던 고통이(가) 찢겨져 버린 가지
될 때까지 묵묵히 지켜만 보던 벙어리 몰아치는 회오리 속에 지친 모습이(가)
말해주는 가슴에 맺힌 응어리 여전히 가슴속에 쏟아지는 빛줄기
''')) == textwrap.dedent('''
나의 영혼 물어다줄 평화시장 비둘기 위로 떨어지는 투명한 소나기
다음날엔 햇빛 쏟아지길 바라며 참아왔던 고통이 찢겨져 버린 가지
될 때까지 묵묵히 지켜만 보던 벙어리 몰아치는 회오리 속에 지친 모습이
말해주는 가슴에 맺힌 응어리 여전히 가슴속에 쏟아지는 빛줄기
''')
assert textwrap.dedent(l10n.proofread('''
빨간 꽃 노란 꽃 꽃밭 가득 피어도 하얀 나비 꽃나비 담장 위에 날아도
따스한 봄바람이(가) 불고 또 불어도 미싱은(는) 잘도 도네 돌아가네
흰 구름 솜구름 탐스러운 애기 구름 짧은 셔츠 짧은치마 뜨거운 여름
소금 땀 피지 땀 흐르고 또 흘러도 미싱은(는) 잘도 도네 돌아가네
저 하늘엔 별들이(가) 밤새 빛나고
찬바람 소슬바람 산너머 부는 바람 간밤에 편지 한 장 적어 실어 보내고
낙엽은(는) 떨어지고 쌓이고 또 쌓여도 미싱은(는) 잘도 도네 돌아가네
흰눈이 온 세상에 소복소복 쌓이면 하얀 공장 하얀 불빛 새하얀 얼굴들
우리네 청춘이(가) 저물고 저물도록 미싱은(는) 잘도 도네 돌아가네
공장엔 작업등이(가) 밤새 비추고
빨간 꽃 노란 꽃 꽃밭 가득 피어도 하얀 나비 꽃나비 담장 위에 날아도
따스한 봄바람이(가) 불고 또 불어도 미싱은(는) 잘도 도네 돌아가네
''')) == textwrap.dedent('''
빨간 꽃 노란 꽃 꽃밭 가득 피어도 하얀 나비 꽃나비 담장 위에 날아도
따스한 봄바람이 불고 또 불어도 미싱은 잘도 도네 돌아가네
흰 구름 솜구름 탐스러운 애기 구름 짧은 셔츠 짧은치마 뜨거운 여름
소금 땀 피지 땀 흐르고 또 흘러도 미싱은 잘도 도네 돌아가네
저 하늘엔 별들이 밤새 빛나고
찬바람 소슬바람 산너머 부는 바람 간밤에 편지 한 장 적어 실어 보내고
낙엽은 떨어지고 쌓이고 또 쌓여도 미싱은 잘도 도네 돌아가네
흰눈이 온 세상에 소복소복 쌓이면 하얀 공장 하얀 불빛 새하얀 얼굴들
우리네 청춘이 저물고 저물도록 미싱은 잘도 도네 돌아가네
공장엔 작업등이 밤새 비추고
빨간 꽃 노란 꽃 꽃밭 가득 피어도 하얀 나비 꽃나비 담장 위에 날아도
따스한 봄바람이 불고 또 불어도 미싱은 잘도 도네 돌아가네
''')
assert textwrap.dedent(l10n.proofread('''
어둠에다크에서 죽음의데스(을)를 느끼며
서쪽에서 불어오는 바람의윈드을(를) 맞았다.
그것은(는) 운명의데스티니.
그(은)는 인생의 라이프를(을) 끝내기 위해 디엔드.
모든것을(를) 옭아매는 폭풍같은 스톰에서 벗어나기 위해
결국 자신 스스로(을)를 죽음에데스(으)로 몰아갔다.
후에 전설의 레전드로써 기억에 메모리- 기적에미라클
길이길이 가슴속의하트에 기억될 리멤버.
-끝에 Fin-
''')) == textwrap.dedent('''
어둠에다크에서 죽음의데스를 느끼며
서쪽에서 불어오는 바람의윈드를 맞았다.
그것은 운명의데스티니.
그는 인생의 라이프를 끝내기 위해 디엔드.
모든것을 옭아매는 폭풍같은 스톰에서 벗어나기 위해
결국 자신 스스로를 죽음에데스로 몰아갔다.
후에 전설의 레전드로써 기억에 메모리- 기적에미라클
길이길이 가슴속의하트에 기억될 리멤버.
-끝에 Fin-
''')
def test_parse(self):
assert l10n.proofread.parse('말을(를)(를)') == \
('말', Particle('를'), '(를)')
assert l10n.proofread.parse('용사은(는) 감를(을) 먹었다.') == \
('용사', Particle('은'), ' 감', Particle('을'), ' 먹었다.')
class TestExtensions(object):
def generate_translations(self):
# from io import BytesIO
# from babel.messages import Catalog, mofile, pofile
# from babel.support import Translations
# catalog = Catalog(locale='ko_KR')
# po = '''
# # ugettext
# msgid "I like a {0}."
# msgstr "나는 {0:을} 좋아합니다.'
# # ungettext
# msgid "Here is a {0}."
# msgid_plural "Here are {1} {0}."
# msgstr[0] "여기 {0:이} 있습니다."
# msgstr[1] "여기 {0:이} {1}개 있습니다."
# # ugettext
# msgid "I reached level {0}."
# msgstr "나는 레벨{0:이} 되었습니다.'
# '''
# catalog = pofile.read_po(BytesIO(po.encode('utf-8')))
# buf = BytesIO()
# mofile.write_mo(buf, catalog)
# buf.seek(0)
# return Translations(buf)
from io import BytesIO
import gettext
# .mo binary generated from the above .po string
buf = BytesIO(b'\xde\x12\x04\x95\x00\x00\x00\x00\x04\x00\x00\x00\x1c'
b'\x00\x00\x00<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\\\x00\x00\x00 \x00\x00\x00]\x00'
b'\x00\x00\r\x00\x00\x00~\x00\x00\x00\x14\x00\x00\x00'
b'\x8c\x00\x00\x00\\\x01\x00\x00\xa1\x00\x00\x00@\x00'
b'\x00\x00\xfe\x01\x00\x00\x1f\x00\x00\x00?\x02\x00\x00%'
b'\x00\x00\x00_\x02\x00\x00\x00Here is a {0}.\x00Here '
b'are {1} {0}.\x00I like a {0}.\x00I reached level {0}.'
b'\x00Project-Id-Version: PROJECT VERSION\nReport-Msgid-'
b'Bugs-To: EMAIL@ADDRESS\nPOT-Creation-Date: 2013-01-03 '
b'22:35+0900\nPO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n'
b'Last-Translator: FULL NAME <EMAIL@ADDRESS>\nLanguage-'
b'Team: LANGUAGE <LL@li.org>\nMIME-Version: 1.0\nContent'
b'-Type: text/plain; charset=utf-8\nContent-Transfer-'
b'Encoding: 8bit\nGenerated-By: Babel 0.9.6\n\x00\xec'
b'\x97\xac\xea\xb8\xb0 {0:\xec\x9d\xb4} \xec\x9e\x88\xec'
b'\x8a\xb5\xeb\x8b\x88\xeb\x8b\xa4.\x00\xec\x97\xac\xea'
b'\xb8\xb0 {0:\xec\x9d\xb4} {1}\xea\xb0\x9c \xec\x9e\x88'
b'\xec\x8a\xb5\xeb\x8b\x88\xeb\x8b\xa4.\x00\xeb\x82\x98'
b'\xeb\x8a\x94 {0:\xec\x9d\x84} \xec\xa2\x8b\xec\x95\x84'
b'\xed\x95\xa9\xeb\x8b\x88\xeb\x8b\xa4.\x00\xeb\x82\x98'
b'\xeb\x8a\x94 \xeb\xa0\x88\xeb\xb2\xa8{0:\xec\x9d\xb4} '
b'\xeb\x90\x98\xec\x97\x88\xec\x8a\xb5\xeb\x8b\x88\xeb'
b'\x8b\xa4.\x00')
return gettext.GNUTranslations(buf)
def gettext_functions(self, translations):
try:
gettext = translations.ugettext
except AttributeError:
# gettext.GNUTranslations on Python 3 has no ugettext
gettext = translations.gettext
ngettext = translations.ngettext
else:
ngettext = translations.ungettext
return (gettext, ngettext)
def test_patched_gettext(self):
from korean.ext.gettext import patch_gettext
t = patch_gettext(self.generate_translations())
_, ngettext = self.gettext_functions(t)
assert isinstance(_(''), l10n.Template)
assert _('I like a {0}.').format('바나나') == \
'나는 바나나를 좋아합니다.'
assert _('I reached level {0}.').format(4) == \
'나는 레벨4가 되었습니다.'
assert _('Undefined') == 'Undefined'
def gen_text(obj, n):
fmt = ngettext('Here is a {0}.', 'Here are {1} {0}.', n)
return fmt.format(obj, n)
assert gen_text('콩', 1) == '여기 콩이 있습니다.'
assert gen_text('사과', 2) == '여기 사과가 2개 있습니다.'
def test_deprecated_patch_gettext(self):
t = deprecated_call(l10n.patch_gettext, self.generate_translations())
_, ngettext = self.gettext_functions(t)
assert isinstance(_(''), l10n.Template)
def test_jinja2_ext(self):
from jinja2 import Environment
env = Environment(extensions=['korean.ext.jinja2.proofread'])
context = dict(name='용사', obj='검')
expectation = '용사는 검을 획득했다.'
assert 'proofread' in env.filters
templ1 = env.from_string('''
{{ (name ~ '은(는) ' ~ obj ~ '을(를) 획득했다.')|proofread }}
''')
assert templ1.render(**context).strip() == expectation
templ2 = env.from_string('''
{{ '%s은(는) %s을(를) 획득했다.'|format(name, obj)|proofread }}
''')
assert templ2.render(**context).strip() == expectation
templ3 = env.from_string('''
{% proofread %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
''')
assert templ3.render(**context).strip() == expectation
templ4 = env.from_string('''
{% proofread true %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
''')
assert templ4.render(**context).strip() == expectation
templ5 = env.from_string('''
{% proofread false %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
''')
assert templ5.render(**context).strip() != expectation
templ6 = env.from_string('''
{% proofread locale.startswith('ko') %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
''')
assert templ6.render(locale='ko_KR', **context).strip() == expectation
templ7 = env.from_string('''
{% autoproofread locale.startswith('ko') %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endautoproofread %}
''')
assert templ7.render(locale='ko_KR', **context).strip() == expectation
def test_deprecated_jinja2_ext_location(self):
from jinja2 import Environment
old_ext_name = 'korean.l10n.jinja2ext.proofread'
env = deprecated_call(Environment, extensions=[old_ext_name])
assert 'proofread' in env.filters
def test_django_ext(self):
from django.conf import settings
from django.template import Context, Template
settings.configure(INSTALLED_APPS=('korean.ext.django',))
context = Context({'name': '용사', 'obj': '검'})
expectation = '용사는 검을 획득했다.'
templ1 = Template('''
{% load korean %}
{{ '용사은(는) 검을(를) 획득했다.'|proofread }}
''')
assert templ1.render(Context()).strip() == expectation
templ2 = Template('''
{% load korean %}
{% proofread %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
''')
assert templ2.render(context).strip() == expectation
try:
__import__('hangulize')
except ImportError:
del TestParticle.test_pick_allomorph_with_loanword
del TestLoanword
try:
__import__('django')
except ImportError:
del TestExtensions.test_django_ext
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,029 | sublee/korean | refs/heads/master | /korean/ext/jinja2.py | # -*- coding: utf-8 -*-
"""
korean.ext.jinja2
~~~~~~~~~~~~~~~~~
Jinja2_ is one of the most used template engines for Python. This module
contains Jinja2 template engine extensions to make :mod:`korean` easy to
use.
.. versionadded:: 0.1.5
.. versionchanged:: 0.1.6
Moved from :mod:`korean.l10n.jinja2ext` to :mod:`korean.ext.jinja2`.
.. _Jinja2: http://jinja.pocoo.org/docs
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
from jinja2 import nodes
from jinja2.ext import Extension
from jinja2.utils import Markup
from .. import l10n
class ProofreadingExtension(Extension):
"""A Jinja2 extention which registers the ``proofread`` filter and the
``proofread`` block:
.. sourcecode:: jinja
<h1>ProofreadingExtension Usage</h1>
<h2>Single filter</h2>
{{ (name ~ '은(는) ' ~ obj ~ '을(를) 획득했다.')|proofread }}
<h2>Filter chaining</h2>
{{ '%s은(는) %s을(를) 획득했다.'|format(name, obj)|proofread }}
<h2><code>proofread</code> block</h2>
{% proofread %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
<h2>Conditional <code>proofread</code> block</h2>
{% proofread locale.startswith('ko') %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
The import name is ``korean.ext.jinja2.proofread``. Just add it to
your Jinja2 environment with the following code::
from jinja2 import Environment
jinja_env = Environment(extensions=['korean.ext.jinja2.proofread'])
.. versionadded:: 0.1.5
.. versionchanged:: 0.1.6
Added ``enabled`` argument to ``{% proofread %}``.
"""
tags = ['proofread', 'autoproofread']
def __init__(self, environment):
environment.filters['proofread'] = l10n.proofread
def _proofread(self, enabled, caller):
return l10n.proofread(caller()) if enabled else caller()
def parse(self, parser):
tag = parser.stream.current.value
lineno = next(parser.stream).lineno
if parser.stream.current.type == 'block_end':
args = [nodes.Const(True)]
else:
args = [parser.parse_expression()]
body = parser.parse_statements(['name:end%s' % tag], drop_needle=True)
call = self.call_method('_proofread', args)
return nodes.CallBlock(call, [], [], body, lineno=lineno)
# nicer import name
proofread = ProofreadingExtension
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,030 | sublee/korean | refs/heads/master | /korean/ext/django/apps.py | # -*- coding: utf-8 -*-
"""
korean.ext.django.apps
~~~~~~~~~~~~~~~~~~~~~~
A default AppConfig definition for Django 1.7+.
.. versionadded:: 0.1.9
.. _Django: https://www.djangoproject.com/
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
try:
from django.apps import AppConfig
except ImportError:
pass
else:
class KoreanConfig(AppConfig):
name = 'korean.ext.django'
label = 'korean'
def ready(self):
pass
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,031 | sublee/korean | refs/heads/master | /korean/morphology/morpheme.py | # -*- coding: utf-8 -*-
"""
korean.morphology.morpheme
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import sys
import six
from ..hangul import get_final, is_hangul
__all__ = ['Morpheme']
class MorphemeMetaclass(type):
def __new__(meta, name, bases, attrs):
from . import Morphology
cls = type.__new__(meta, name, bases, attrs)
cls._registry = {}
Morphology._register_morpheme(cls)
return cls
def __call__(cls, *forms):
if len(forms) == 1:
try:
return cls.get(forms[0])
except KeyError:
pass
return super(MorphemeMetaclass, cls).__call__(*forms)
@six.add_metaclass(MorphemeMetaclass)
class Morpheme(object):
"""This class presents a morpheme (형태소) or allomorph (이형태). It
can have one or more forms. The first form means the basic allomorph
(기본형).
:param forms: each forms of allomorph. the first form will be basic
allomorph.
"""
_registry = None
def __init__(self, *forms):
assert all([isinstance(form, six.text_type) for form in forms])
self.forms = forms
@classmethod
def get(cls, key):
"""Returns a pre-defined morpheme object by the given key."""
return cls._registry[key]
@classmethod
def register(cls, key, obj):
"""Registers a pre-defined morpheme object to the given key."""
cls._registry[key] = obj
def read(self):
"""Every morpheme class would implement this method. They should make a
morpheme to the valid Korean text with Hangul.
"""
return six.text_type(self)
def basic(self):
"""The basic form of allomorph."""
return self.forms[0]
def __unicode__(self):
return self.basic()
def __str__(self):
return six.text_type(self).encode('utf-8')
if sys.version_info >= (3,):
__str__ = __unicode__
del __unicode__
def __getitem__(self, i):
return six.text_type(self)[i]
def __getslice__(self, start, stop, step=None):
return six.text_type(self)[start:stop:step]
def __format__(self, suffix):
return '{0!s}{1}'.format(self, suffix)
def __repr__(self):
return '{0}({1!s})'.format(type(self).__name__, six.text_type(self))
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,032 | sublee/korean | refs/heads/master | /korean/ext/gettext.py | # -*- coding: utf-8 -*-
"""
korean.ext.gettext
~~~~~~~~~~~~~~~~~~
`Gettext <http://www.gnu.org/software/gettext>`_ is an internationalization
and localization system commonly used for writing multilingual programs on
Unix-like OS. This module contains utilities to integrate Korean and the
Gettext system. It also works well with Babel_.
.. _Babel: http://babel.edgewall.org/
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
from functools import partial
from ..l10n import Template
def patch_gettext(translations):
"""Patches Gettext translations object to wrap the result with
:class:`korean.l10n.Template`. Then the result can work with a particle
format spec.
For example, here's a Gettext catalog for ko_KR:
.. sourcecode:: pot
msgid "{0} appears."
msgstr "{0:이} 나타났다."
msgid "John"
msgstr "존"
msgid "Christina"
msgstr "크리스티나"
You can use a particle format spec in Gettext messages after translations
object is patched:
.. sourcecode:: pycon
>>> translations = patch_gettext(translations)
>>> _ = translations.ugettext
>>> _('{0} appears.').format(_('John'))
'존이 나타났다.'
>>> _('{0} appears.').format(_('Christina'))
'크리스티나가 나타났다.'
:param translations: the Gettext translations object to be patched, which
should refer to the catalog for ko_KR.
"""
methods_to_patch = ['gettext', 'ngettext']
if hasattr(translations, 'ugettext'):
methods_to_patch = ['u' + meth for meth in methods_to_patch]
for meth in methods_to_patch:
def patched(orig, *args, **kwargs):
return Template(orig(*args, **kwargs))
patched.__name__ = str(meth)
orig = getattr(translations, meth)
setattr(translations, meth, partial(patched, orig))
return translations
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,033 | sublee/korean | refs/heads/master | /korean/l10n/__init__.py | # -*- coding: utf-8 -*-
"""
korean.l10n
~~~~~~~~~~~
Helpers for localization to Korean.
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
from itertools import chain, product
import re
import six
import warnings
from ..morphology import Noun, NumberWord, Particle, pick_allomorph
__all__ = ['Proofreading', 'proofread', 'Template', 'patch_gettext']
class Proofreading(object):
"""A function-like class. These :meth:`__call__` replaces naive particles
to be correct. First, it finds naive particles such as "을(를)" or
"(으)로". Then it checks the forward character of the particle and replace
with a correct particle.
:param token_types: specific types to make as token.
"""
def __init__(self, token_types):
# TODO: support various token types
pass
def parse(self, text):
"""Tokenizes the given text with unicode text or :class:`Particle`.
:param text: the string that has been written with naive particles.
"""
tokens = []
naive_particles = []
particle_map = {}
for particle in set(Particle._registry.itervalues()):
for naive in particle.naive():
particle_map[naive] = particle
naive_particles.append(naive)
particle_pattern = '(%s)' % '|'.join(map(re.escape, naive_particles))
particle_pattern = re.compile(particle_pattern)
prev_span = [0, 0]
for match in particle_pattern.finditer(text):
span = match.span()
tokens.append(text[prev_span[1]:span[0]])
tokens.append(particle_map[match.group(1)])
prev_span = span
try:
tokens.append(text[span[1]:])
except UnboundLocalError:
tokens.append(text)
return tuple(tokens)
def __call__(self, text):
"""Do proofread. More information in :class:`Proofreading`.
:param text: the string that has been written with naive particles.
"""
buf = []
for token in self.parse(text):
if isinstance(token, Particle):
noun = Noun(buf[-1])
try:
token = pick_allomorph(token, suffix_of=noun)
except:
token = token.naive()[0]
buf.append(token)
return ''.join(buf)
#: Default :class:`Proofreading` object. It tokenizes ``unicode`` and
#: :class:`korean.Particle`. Use it like a function.
proofread = Proofreading([six.text_type, Particle])
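# A usage sketch (mirroring koreantests.py):
#   proofread('용사은(는) 검을(를) 획득했다.')  # -> '용사는 검을 획득했다.'
#   proofread('집(으)로 가자.')                 # -> '집으로 가자.'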
class Template(six.text_type):
"""The :class:`Template` object extends :class:`unicode` and overrides
:meth:`format` method. This can format particle format spec without
evincive :class:`Noun` or :class:`NumberWord` arguments.
Basically this example:
>>> import korean
>>> korean.l10n.Template('{0:을} 좋아합니다.').format('향수')
'향수를 좋아합니다.'
Is equivalent to the following:
>>> import korean
>>> '{0:을} 좋아합니다.'.format(korean.Noun('향수'))
'향수를 좋아합니다.'
"""
def format(self, *args, **kwargs):
args = list(args)
for seq, (key, val) in chain(product([args], enumerate(args)),
product([kwargs], kwargs.items())):
if isinstance(val, six.text_type):
seq[key] = Noun(val)
elif isinstance(val, (long, int)):
seq[key] = NumberWord(int(val))
return super(Template, self).format(*args, **kwargs)
def __repr__(self):
return '<%s %s>' % \
(type(self).__name__, super(Template, self).__repr__())
def patch_gettext(translations):
from ..ext.gettext import patch_gettext as original_patch_gettext
warnings.warn('\'korean.l10n.patch_gettext\' is now called '
'\'korean.ext.gettext.patch_gettext\'', DeprecationWarning)
return original_patch_gettext(translations)
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,034 | sublee/korean | refs/heads/master | /korean/__main__.py | # -*- coding: utf-8 -*-
"""
korean.__main__
~~~~~~~~~~~~~~~
Command-line tools.
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import contextlib
import sys
from baker import Baker
from . import l10n
baker = Baker()
@contextlib.contextmanager
def file_or_stdin(path):
f = open(path) if path is not None else sys.stdin
yield f
f.close()
@baker.command
def proofread(path=None, charset='utf-8'):
with file_or_stdin(path) as f:
for line in f.xreadlines():
print l10n.proofread(line.decode(charset)),
@baker.command
def validate(path=None, charset='utf-8'):
pass
if __name__ == '__main__':
baker.run()
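# A command-line sketch (hedged; assumes baker's default command parsing):
#   $ echo '집(으)로 가자.' | python -m korean proofread
#   집으로 가자.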
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,035 | sublee/korean | refs/heads/master | /korean/ext/__init__.py | # -*- coding: utf-8 -*-
"""
korean.ext
~~~~~~~~~~
This module provides a few extensions for other systems such as Jinja2_.
It currently contains the following submodules:
- :mod:`korean.ext.jinja2` -- extensions for the Jinja2 template engine
.. _Jinja2: http://jinja.pocoo.org/docs
.. versionadded:: 0.1.6
- :mod:`korean.ext.django.templatetags.korean` -- extensions for the Django template engine
.. _Django: https://www.djangoproject.com/
.. versionadded:: 0.1.7
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,036 | sublee/korean | refs/heads/master | /korean/ext/django/templatetags/korean.py | # -*- coding: utf-8 -*-
"""
korean.ext.django.templatetags.korean
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A module containing Django template tag and filter for korean.
.. versionadded:: 0.1.7
.. _Django: https://www.djangoproject.com/
:copyright: (c) 2012-2013 by Heungsub Lee, Hyunwoo Park
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
from django import template
from django.template.defaultfilters import stringfilter
from .... import l10n
register = template.Library()
class ProofReadNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
output = self.nodelist.render(context)
return l10n.proofread(output)
@register.tag(name='proofread')
def do_proofread(parser, token):
"""A Django tag for ``proofread``
.. sourcecode:: django
<h1>proofread tag Usage</h1>
{% load korean %}
{% proofread %}
{{ name }}은(는) {{ obj }}을(를) 획득했다.
{% endproofread %}
"""
nodelist = parser.parse(['endproofread'])
parser.delete_first_token()
return ProofReadNode(nodelist)
@register.filter
@stringfilter
def proofread(value):
"""A Django filter for ``proofread``
.. sourcecode:: django
<h1>proofread filter Usage</h1>
{% load korean %}
{{ 용사은(는) 검을(를) 획득했다.|proofread }}
"""
return l10n.proofread(value)
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,037 | sublee/korean | refs/heads/master | /korean/__init__.py | # -*- coding: utf-8 -*-
"""
korean
~~~~~~
A library for Korean morphology.
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import codecs
import sys
import six
from . import hangul, l10n, morphology
from .morphology import (Morpheme, Noun, NumberWord, Loanword, Particle,
Substantive)
__version__ = '0.1.9'
__all__ = ['hangul', 'l10n', 'morphology', 'Morpheme', 'Noun', 'NumberWord',
'Loanword', 'Particle', 'Substantive']
# Python 2's import does not seem to work with a unicode __all__.
# __future__.unicode_literals could cause a TypeError with "from __ import *".
if sys.version_info < (3,):
for mod in [globals(), hangul, l10n, morphology]:
if isinstance(mod, dict):
mod['__all__'] = map(str, mod['__all__'])
else:
mod.__all__ = map(str, mod.__all__)
def _load_data():
"""Loads allomorphic particles and number words from :file:`data.json`."""
import json
import os
path = os.path.join(os.path.dirname(__file__), 'data.json')
with codecs.open(path, 'r', encoding='utf-8') as f:
data = json.load(f)
# register allomorphic particles
for forms in six.itervalues(data['allomorphic_particles']):
particle = Particle(*forms)
for form in forms:
Particle.register(form, particle)
# register numbers and digits
for number, form in six.iteritems(data['numbers']):
NumberWord.__numbers__[int(number)] = form
for digit, form in six.iteritems(data['digits']):
NumberWord.__digits__[int(digit)] = form
for operation, form in six.iteritems(data['unary_operations']):
NumberWord.__unary_operations__[operation] = form
_load_data()
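# The loader above implies this shape for data.json (a sketch; the real
# file may hold more entries):
# {"allomorphic_particles": {"<key>": ["가", "이"], ...},
#  "numbers": {"1": "일", ...},
#  "digits": {"1": "십", "4": "만", ...},
#  "unary_operations": {"-": "마이너스"}}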
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,038 | sublee/korean | refs/heads/master | /korean/l10n/jinja2ext.py | # -*- coding: utf-8 -*-
"""
korean.l10n.jinja2ext
~~~~~~~~~~~~~~~~~~~~~
This module has been moved to :mod:`korean.ext.jinja2`.
.. versionadded:: 0.1.5
.. versionchanged:: 0.1.6
Moved to :mod:`korean.ext.jinja2`.
:copyright: (c) 2012-2013 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import warnings
from ..ext.jinja2 import ProofreadingExtension, proofread
warnings.warn('This module has been moved to %r' % proofread.__module__,
DeprecationWarning)
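# Migration sketch for downstream code (the replacement import path is taken
# from this module's own deprecation notice):
#
#   from korean.ext.jinja2 import ProofreadingExtension, proofread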
| {"/korean/morphology/substantive.py": ["/korean/morphology/morpheme.py", "/korean/hangul.py", "/korean/morphology/particle.py", "/korean/morphology/__init__.py"], "/korean/morphology/__init__.py": ["/korean/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/particle.py", "/korean/morphology/substantive.py"], "/korean/morphology/particle.py": ["/korean/morphology/__init__.py", "/korean/morphology/morpheme.py", "/korean/morphology/substantive.py", "/korean/__init__.py"], "/koreantests.py": ["/korean/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/jinja2.py": ["/korean/__init__.py"], "/korean/morphology/morpheme.py": ["/korean/hangul.py", "/korean/morphology/__init__.py"], "/korean/ext/gettext.py": ["/korean/l10n/__init__.py"], "/korean/l10n/__init__.py": ["/korean/morphology/__init__.py", "/korean/ext/gettext.py"], "/korean/ext/django/templatetags/korean.py": ["/korean/__init__.py"], "/korean/__init__.py": ["/korean/morphology/__init__.py"], "/korean/l10n/jinja2ext.py": ["/korean/ext/jinja2.py"]} |
77,048 | hyoin157/deploy | refs/heads/main | /run.py | # Setting up a simple basic server
from flask import Flask
app = Flask(__name__)
@app.route('/')
def home():
    return 'aws homepage'
# This only runs when run.py is the entry point.
# On a Linux server, wsgi.py is the entry point instead, so this block does not run there.
# In that case the server is started according to the rules configured with Fabric.
if __name__ == '__main__':
app.run(debug=True) | {"/wsgi.py": ["/run.py"]} |
77,049 | hyoin157/deploy | refs/heads/main | /wsgi.py | '''
The file name can be chosen freely.
The name "wsgi" signals that this platform supports the WSGI module for serving the web. (naming)
flask, django => rather than serving standalone, they are usually deployed behind a web server such as apache/nginx.
This file is designated as the entry point (the Python file that gets started) that the apache server looks at. (purpose)
'''
import sys
import os
# Current working directory
cur_dir = os.getcwd()
print(cur_dir)
# Redirect standard output to standard error.
sys.stdout = sys.stderr
# Configure sys.path
sys.path.insert(0, cur_dir)
# Import the application module for the server
from run import app as application | {"/wsgi.py": ["/run.py"]} |
77,050 | adhearn/sylph-py | refs/heads/master | /sylph/interpreter.py | import copy
class NoSuchVariableException(BaseException):
pass
class Environment:
def extend(self, var, value):
raise NotImplementedError
def lookup(self, var):
raise NotImplementedError
class DictEnvironment(Environment):
def __init__(self, initial_state=None):
if initial_state:
self.env = copy.copy(initial_state)
else:
self.env = {}
def extend(self, var, value):
copied_env = copy.copy(self.env)
copied_env[var] = value
return DictEnvironment(copied_env)
def lookup(self, var):
if var in self.env:
return self.env[var]
else:
raise NoSuchVariableException("No such variable: '{}' (env: {})".format(var, self.env))
class Interpreter:
BUILTIN_FNS = ["+", "-", "*"]
KEYWORDS = ["lambda"]
    def lookup(self, env, var):
        if var in env:
            return env[var]
        else:
            raise NoSuchVariableException
def eval_lambda(self, operands, env):
assert len(operands) == 2
params = operands[0]
body = operands[1]
assert type(params) == list
assert len(params) == 1
param = params[0]
def fn(arg):
extended_env = env.extend(param, arg)
return self.eval(body, extended_env)
return fn
def eval_keyword(self, keyword, operands, env):
if keyword == "lambda":
return self.eval_lambda(operands, env)
def eval_builtin(self, builtin):
if builtin == "+":
def sum(rand1, rand2):
return rand1 + rand2
return sum
elif builtin == "-":
def sub(rand1, rand2):
return rand1 - rand2
return sub
elif builtin == "*":
def product(rand1, rand2):
return rand1 * rand2
return product
def eval(self, expression, env=None):
if env is None:
env = DictEnvironment()
if type(expression) == list:
operator = expression[0]
operands = expression[1:]
if operator in self.KEYWORDS:
return self.eval_keyword(operator, operands, env)
else:
evaled_operands = [self.eval(operand, env) for operand in operands]
evaled_operator = self.eval(operator, env)
return evaled_operator(*evaled_operands)
elif type(expression) == int:
return expression
elif type(expression) == str:
if expression in self.BUILTIN_FNS:
return self.eval_builtin(expression)
else:
return env.lookup(expression)
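if __name__ == '__main__':
    # Illustrative usage (not part of the original module): evaluating a
    # curried two-argument addition with the Interpreter defined above,
    # mirroring the expression shapes exercised in sylph_tests.py.
    interp = Interpreter()
    expr = [[["lambda", ["x"], ["lambda", ["y"], ["+", "x", "y"]]], 23], 42]
    print(interp.eval(expr))  # prints 65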
| {"/sylph/__init__.py": ["/sylph/interpreter.py", "/sylph/sylph_tests.py"], "/sylph/sylph_tests.py": ["/sylph/__init__.py"]} |
77,051 | adhearn/sylph-py | refs/heads/master | /sylph/__init__.py | from .interpreter import Interpreter, DictEnvironment
from .sylph_tests import TestBasics, TestArithmetic, TestEnv, TestLambda | {"/sylph/__init__.py": ["/sylph/interpreter.py", "/sylph/sylph_tests.py"], "/sylph/sylph_tests.py": ["/sylph/__init__.py"]} |
77,052 | adhearn/sylph-py | refs/heads/master | /sylph/sylph_tests.py | import unittest
from sylph import Interpreter, DictEnvironment
class TestBasics(unittest.TestCase):
def test_primitives_numbers(self):
interp = Interpreter()
self.assertEqual(interp.eval(7), 7)
self.assertEqual(interp.eval(-42), -42)
class TestArithmetic(unittest.TestCase):
def test_addition(self):
interp = Interpreter()
simple = ["+", 23, 42]
self.assertEqual(interp.eval(simple), 65)
complex = ["+", ["+", 2, 3], ["+", 23, 42]]
self.assertEqual(interp.eval(complex), 70)
def test_subtraction(self):
interp = Interpreter()
simple = ["-", 23, 42]
self.assertEqual(interp.eval(simple), -19)
complex = ["-", ["-", 42, 23], ["-", 3, 2]]
self.assertEqual(interp.eval(complex), 18)
def test_multiplication(self):
interp = Interpreter()
simple = ["*", 2, 3]
self.assertEqual(interp.eval(simple), 6)
def test_various(self):
interp = Interpreter()
expr = ["*", ["+", 4, 5], ["-", 8, 5]]
self.assertEqual(interp.eval(expr), 27)
class TestEnv(unittest.TestCase):
def test_simple(self):
env = DictEnvironment()
env = env.extend("a", 23)
self.assertEqual(env.lookup("a"), 23)
class TestLambda(unittest.TestCase):
def test_simple(self):
interp = Interpreter()
expr = [["lambda", ["x"], "x"], 42]
val = interp.eval(expr)
self.assertEqual(val, 42)
def test_add1(self):
interp = Interpreter()
expr = [["lambda", ["x"], ["+", "x", 1]], 42]
val = interp.eval(expr)
self.assertEqual(val, 43)
def test_curried(self):
interp = Interpreter()
expr = [[["lambda", ["x"], ["lambda", ["y"], ["+", "x", "y"]]], 23], 42]
val = interp.eval(expr)
self.assertEqual(val, 65)
if __name__ == '__main__':
unittest.main()
| {"/sylph/__init__.py": ["/sylph/interpreter.py", "/sylph/sylph_tests.py"], "/sylph/sylph_tests.py": ["/sylph/__init__.py"]} |
77,053 | ioluwayo/Hackernews | refs/heads/master | /hackernews.py | import json, logging, sys, argparse, requests
from bs4 import BeautifulSoup
from rfc3986 import validators, uri_reference
from rfc3986.exceptions import ValidationError as UriValidationError
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.StreamHandler(sys.stdout))
URI_VALIDATOR = (
validators.Validator()
.require_presence_of("scheme", "host")
.check_validity_of(
"scheme", "userinfo", "host", "port", "path", "query", "fragment"
)
)
BASE_URL = "https://news.ycombinator.com/"
def is_valid_uri(uri):
try:
URI_VALIDATOR.validate(uri_reference(uri))
return True
except UriValidationError as exc:
LOGGER.debug(exc)
return False
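# Illustrative behaviour of the validator above (example values only; these
# calls are not exercised anywhere in this module):
#   is_valid_uri("https://example.com/a?b=c")  # True: scheme and host present
#   is_valid_uri("example.com")                # False: the scheme is missing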
def is_valid_string(string):
if not string or len(string) > 256:
return False
return True
def retrieve_title(storylink_tag):
title = storylink_tag.text
if is_valid_string(title):
return title
LOGGER.debug("title is invalid")
def retrieve_uri(storylink_tag):
uri = storylink_tag["href"]
if is_valid_uri(uri):
return uri
LOGGER.debug("uri is invalid")
def retrieve_rank(rank_tag):
rank = rank_tag.text
rank = rank.rstrip(".")
if rank.isdigit():
rank = int(rank)
if rank >= 0:
return rank
else:
LOGGER.debug("rank is less than 0..")
else:
LOGGER.debug("rank is not a number")
def retrieve_author(author_tag):
author = author_tag.text
if is_valid_string(author):
return author
LOGGER.debug("author is Invalid")
def retrieve_points(points_tag):
points = points_tag.text
points = points.split()[0]
if points.isdigit():
points = int(points)
if points >= 0:
return points
else:
LOGGER.debug("points is less than 0")
else:
LOGGER.debug("points is not a number")
def retrieve_comments(comments_tag):
comments = comments_tag.text
comments = comments.split()[0]
if comments.isdigit():
comments = int(comments)
if comments >= 0:
return comments
else:
LOGGER.debug("comments is less than 0")
elif (
comments == "discuss"
): # when there are no comments on a post, this value is "discuss"
comments = 0 # no comments implies 0
return comments
else:
LOGGER.debug("comments is not a number")
def retrieve_valid_posts(post_tags, max_posts):
valid_posts = []
count = 0
for post_tag in post_tags:
LOGGER.debug(f"Processing post ID: {post_tag['id']}")
storylink_tag = post_tag.find("a", {"class": "storylink"})
if not storylink_tag:
LOGGER.debug("Skipping post. Unable to find storylink tag")
continue
title = retrieve_title(storylink_tag)
if title is None:
LOGGER.debug("Skipping post")
continue
uri = retrieve_uri(storylink_tag)
if uri is None:
LOGGER.debug("Skipping post")
continue
rank_tag = post_tag.find("span", {"class": "rank"})
if not rank_tag:
LOGGER.debug("Skipping post. Unable to find rank tag")
continue
rank = retrieve_rank(rank_tag)
if rank is None:
LOGGER.debug("Skipping post")
continue
# only proceed to find subtext/sibling when prior validations are successful. avoid unnecessary work
subtext_tag = post_tag.next_sibling.find("td", {"class": "subtext"})
if not subtext_tag:
LOGGER.debug("Skipping post. Unable to find subtext_tag")
continue
author_tag = subtext_tag.find("a", {"class": "hnuser"})
if not author_tag:
LOGGER.debug("Skipping post. Unable to find author tag")
continue
author = retrieve_author(author_tag)
if author is None:
LOGGER.debug("Skipping post")
continue
points_tag = subtext_tag.find("span", {"class": "score"})
if not points_tag:
LOGGER.debug("Skipping post. Unable to find points tag")
continue
points = retrieve_points(points_tag)
if points is None:
LOGGER.debug("Skipping post")
continue
comments_tag = subtext_tag.find_all("a", recursive=False)[
2
] # find_all maintains order
if not comments_tag:
LOGGER.debug("Skipping post. Unable to find comment tag")
continue
comments = retrieve_comments(comments_tag)
if comments is None:
LOGGER.debug("Skipping post")
continue
# all data is valid. create post dict and append to list
valid_posts.append(
{
"title": title,
"uri": uri,
"author": author,
"points": points,
"comments": comments,
"rank": rank,
}
)
count += 1
if count == max_posts: # stop retrieving posts to avoid unnecessary work
break
return valid_posts
def scrape_posts(n):
posts = []
url = BASE_URL
page = 1
while len(posts) < n:
required = n - len(posts)
LOGGER.debug(f"scraping {url} for {required} posts")
try:
response = requests.get(url)
athings = BeautifulSoup(response.text, "html.parser").find_all(
"tr", {"class": "athing"},
            )
            if not athings:  # no more posts on this page; stop instead of looping forever
                break
            LOGGER.debug(athings)
posts.extend(retrieve_valid_posts(athings, required))
page += 1
url = BASE_URL + "news?p=" + str(page)
except requests.exceptions.RequestException as exc:
LOGGER.error(exc)
LOGGER.error("Exiting")
sys.exit(1)
return json.dumps(posts, indent=2)
def main():
def validate_post_input(val):
try:
val = int(val) # if not integer, will raise
if val < 0 or val > 100:
raise argparse.ArgumentTypeError(f"{val} is not in the range 0-100")
except ValueError:
raise argparse.ArgumentTypeError(f"invalid int value: '{val}'")
return val
parser = argparse.ArgumentParser(
description="This script scrapes https://news.ycombinator.com/ and prints to stdout the top posts. "
"The output is in json format. "
"Sample usage: hackernews --posts 10"
)
parser.add_argument(
"--posts",
type=validate_post_input,
metavar="n",
required=True,
help="The number of posts to display. Value must be in the range 0-100",
)
parser.add_argument(
"-v", "--verbose", help="sets logging level to debug", action="store_true"
)
args = parser.parse_args()
if args.verbose:
LOGGER.setLevel(logging.DEBUG)
posts = scrape_posts(args.posts)
print(posts)
if __name__ == "__main__":
main()
| {"/test_hackernews.py": ["/hackernews.py"]} |
77,054 | ioluwayo/Hackernews | refs/heads/master | /setup.py | from setuptools import find_packages, setup
setup(
name="hackernews",
description="Hackernews scraper",
version="1",
author="ibukun",
author_email="ioluwayo@gmail.com",
scripts=["hackernews.py"],
install_requires=[
dependency.strip() for dependency in open("requirements.txt").readlines()
],
packages=find_packages(),
entry_points={"console_scripts": ["hackernews=hackernews:main"]},
)
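# Typical install-and-run flow for this package (an assumed workflow, not taken
# from the repository; the script name comes from the console_scripts entry
# point above):
#   pip install .
#   hackernews --posts 10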
| {"/test_hackernews.py": ["/hackernews.py"]} |
77,055 | ioluwayo/Hackernews | refs/heads/master | /test_hackernews.py | import unittest
from unittest import mock
import json
import hackernews
class HackerNewsE2ETest(unittest.TestCase):
@mock.patch("requests.get")
def test_scrape_posts(self, mock_get):
"""
        This test verifies that the number of posts retrieved and the JSON payload are accurate.
"""
with open("test_data/hackernews.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(30)
self.assertEqual(
len(json.loads(actual_posts)), 30
) # verify that the number of posts retrieved is accurate
actual_posts = hackernews.scrape_posts(2)
expected_posts = [
{
"title": "Why are not some things darker when wet?",
"uri": "https://aryankashyap.com/why-are-some-things-darker-when-wet",
"author": "aryankashyap",
"points": 63,
"comments": 5,
"rank": 1,
},
{
"title": "Broot – A new way to see and navigate directory trees",
"uri": "https://dystroy.org/broot/",
"author": "gilad",
"points": 631,
"comments": 158,
"rank": 2,
},
]
self.assertEqual(json.loads(actual_posts), expected_posts)
@mock.patch("requests.get")
def test_skip_invalid_post(self, mock_get):
"""
If the url/author/rank/points/comment of a post are not valid, the post should not be included in the result.
This test verifies this.
See test_data/hackernews_bad_comment.html
"""
expected_posts = [
{
"title": "Broot – A new way to see and navigate directory trees",
"uri": "https://dystroy.org/broot/",
"author": "gilad",
"points": 631,
"comments": 158,
"rank": 2,
},
{
"author": "hellofunk",
"comments": 0,
"points": 8,
"rank": 3,
"title": "A simple C++11 Thread Pool implementation",
"uri": "https://github.com/progschj/ThreadPool",
},
]
with open("test_data/hackernews_bad_url.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(2)
self.assertEqual(json.loads(actual_posts), expected_posts)
with open("test_data/hackernews_bad_comment.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(2)
self.assertEqual(json.loads(actual_posts), expected_posts)
with open("test_data/hackernews_bad_author.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(2)
self.assertEqual(json.loads(actual_posts), expected_posts)
with open("test_data/hackernews_bad_rank.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(2)
self.assertEqual(json.loads(actual_posts), expected_posts)
with open("test_data/hackernews_bad_points.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(2)
self.assertEqual(json.loads(actual_posts), expected_posts)
# if all posts are valid as in test_data/hackernews.html then no post should be skipped
with open("test_data/hackernews.html") as test_data:
mock_get.return_value = mock.Mock(ok=True)
mock_get.return_value.text = test_data.read()
actual_posts = hackernews.scrape_posts(2)
self.assertNotEqual(json.loads(actual_posts), expected_posts)
expected_posts = [
{
"title": "Why are not some things darker when wet?",
"uri": "https://aryankashyap.com/why-are-some-things-darker-when-wet",
"author": "aryankashyap",
"points": 63,
"comments": 5,
"rank": 1,
},
{
"title": "Broot – A new way to see and navigate directory trees",
"uri": "https://dystroy.org/broot/",
"author": "gilad",
"points": 631,
"comments": 158,
"rank": 2,
},
]
self.assertEqual(json.loads(actual_posts), expected_posts)
if __name__ == "__main__":
unittest.main()
| {"/test_hackernews.py": ["/hackernews.py"]} |
77,068 | arnauldb/twitter-image-downloader | refs/heads/master | /twt_img.py | import argparse
import base64
import json
import os
import shutil
import sys
import dateutil.parser
from datetime import datetime
import requests
from exceptions import *
class Downloader:
def __init__(self, api_key, api_secret):
self.bearer_token = self.bearer(api_key, api_secret)
#print ('Bearer token is ' + self.bearer_token)
self.last_tweet = None
self.count = 0
def download_images(self, user, save_dest, size='large', limit=3200, rts=False):
'''Download and save images that user uploaded.
Args:
user: User ID.
save_dest: The directory where images will be saved.
size: Which size of images to download.
            limit: The maximum number of tweets to check.
            rts: Whether to include retweets or not.
'''
if not os.path.isdir(save_dest):
raise InvalidDownloadPathError()
num_tweets_checked = 0
tweets = self.get_tweets(user, self.last_tweet, limit, rts)
if not tweets:
print ("Got an empty list of tweets")
while len(tweets) > 0 and num_tweets_checked < limit:
for tweet in tweets:
# create a file name using the timestamp of the image
timestamp = dateutil.parser.parse(tweet['created_at']).timestamp()
timestamp = int(timestamp)
value = datetime.fromtimestamp(timestamp)
fname = value.strftime('%Y-%m-%d-%H-%M-%S')
# save the image
images = self.extract_image(tweet)
if images is not None:
counter = 0
for image in images:
if counter == 0:
self.save_image(image, save_dest, fname, size)
else:
self.save_image(image, save_dest, fname+'_'+str(counter), size)
                        counter += 1
num_tweets_checked += 1
self.last_tweet = tweet['id']
            tweets = self.get_tweets(user, self.last_tweet, limit, rts)  # keep the rts flag across pages
def bearer(self, key, secret):
'''Receive the bearer token and return it.
Args:
key: API key.
            secret: API secret.
'''
# setup
credential = base64.b64encode(bytes('{}:{}'.format(key, secret), 'utf-8')).decode()
url = 'https://api.twitter.com/oauth2/token'
headers = {
'Authorization': 'Basic {}'.format(credential),
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
}
payload = {'grant_type': 'client_credentials'}
# post the request
r = requests.post(url, headers=headers, params=payload)
# check the response
if r.status_code == 200:
return r.json()['access_token']
else:
raise BearerTokenNotFetchedError()
def get_tweets(self, user, start=None, count=200, rts=False):
'''Download user's tweets and return them as a list.
Args:
user: User ID.
            start: Tweet ID to start from (used as max_id for paging).
            count: The maximum number of tweets to fetch.
            rts: Whether to include retweets or not.
'''
# setup
bearer_token = self.bearer_token
url = 'https://api.twitter.com/1.1/statuses/user_timeline.json'
headers = {
'Authorization': 'Bearer {}'.format(bearer_token)
}
payload = {'screen_name': user, 'count': count, 'include_rts': rts, 'tweet_mode': 'extended'}
if start:
payload['max_id'] = start
# get the request
r = requests.get(url, headers=headers, params=payload)
# check the response
if r.status_code == 200:
tweets = r.json()
if len(tweets) == 1:
return []
else:
print('Got ' + str(len(tweets)) + ' tweets')
return tweets if not start else tweets[1:]
else:
print ('An error occurred with the request, status code was ' + str(r.status_code))
return []
def extract_image(self, tweet):
'''Return a list of url(s) which represents the image(s) embedded in tweet.
Args:
tweet: A dict object representing a tweet.
'''
if 'media' in tweet['entities']:
urls = [x['media_url'] for x in tweet['entities']['media']]
if 'extended_entities' in tweet:
extra = [x['media_url'] for x in tweet['extended_entities']['media']]
                urls = list(set(urls + extra))  # de-duplicate but keep an indexable list
return urls
else:
return None
def save_image(self, image, path, timestamp, size='large'):
'''Download and save an image to path.
Args:
image: The url of the image.
path: The directory where the image will be saved.
timestamp: The time that the image was uploaded.
It is used for naming the image.
size: Which size of images to download.
'''
if image:
# image's path with a new name
ext = os.path.splitext(image)[1]
save_dest = os.path.join(path, timestamp + ext)
# save the image in the specified directory (or don't)
if not (os.path.exists(save_dest)):
print('Saving ' + image)
r = requests.get(image + ':' + size, stream=True)
if r.status_code == 200:
with open(save_dest, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
self.count += 1
else:
            print('Skipping ' + image + ' because it was already downloaded')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Download all images uploaded by a twitter user you specify")
parser.add_argument('user_id', help='an ID of a twitter user')
parser.add_argument('dest', help='specify where to put images')
parser.add_argument('-c', '--confidentials', help='a json file containing a key and a secret')
parser.add_argument('-s', '--size', help='specify the size of images', default='large', choices=['large', 'medium', 'small', 'thumb', 'orig'])
parser.add_argument('-l', '--limit', type=int, help='the maximum number of tweets to check (most recent first)', default=3200)
parser.add_argument('--rts', help='save images contained in retweets', action="store_true")
args = parser.parse_args()
if args.confidentials:
with open(args.confidentials) as f:
confidentials = json.loads(f.read())
if 'api_key' not in confidentials or 'api_secret' not in confidentials:
raise ConfidentialsNotSuppliedError()
api_key = confidentials['api_key']
api_secret = confidentials['api_secret']
else:
raise ConfidentialsNotSuppliedError()
downloader = Downloader(api_key, api_secret)
downloader.download_images(args.user_id, args.dest, args.size, args.limit, args.rts)
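# Expected shape of the --confidentials JSON file (the key names come from the
# checks above; the values are placeholders):
#   {"api_key": "YOUR_API_KEY", "api_secret": "YOUR_API_SECRET"}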
| {"/test_app.py": ["/twt_img.py"]} |
77,069 | arnauldb/twitter-image-downloader | refs/heads/master | /test_app.py | import os
import time
import pytest
from twt_img import Downloader
from exceptions import *
api_key = os.environ['KEY']
api_secret = os.environ['SECRET']
downloader = Downloader(api_key, api_secret)
tweet = {
"entities": {
"media": [
{
"type": "photo",
"media_url": "http://pbs.twimg.com/media/foo.jpg",
"sizes": {
"medium": {
"resize": "fit",
"h": 823,
"w": 600
},
"thumb": {
"resize": "crop",
"h": 150,
"w": 150
},
"large": {
"resize": "fit",
"h": 1024,
"w": 746
},
"small": {
"resize": "fit",
"h": 466,
"w": 340
}
}
}
]
}
}
def test_invalid_confidentials_should_fail():
with pytest.raises(BearerTokenNotFetchedError):
invalid_downloader = Downloader('my api key', 'my api secret')
def test_get_tweets():
tweets = downloader.get_tweets('BarackObama', rts=True)
assert len(tweets) == 200
def test_image_properly_extracted():
assert downloader.extract_image(tweet)[0] == "http://pbs.twimg.com/media/foo.jpg"
def test_should_fail_if_no_images():
dummy_tweet = {'entities': []}
    assert downloader.extract_image(dummy_tweet) is None
def test_save_image(tmpdir):
now = str(int(time.time()))
downloader.save_image('http://pbs.twimg.com/media/CRd-x43VAAAV9k2.png', tmpdir, now)
image = os.listdir(tmpdir)
assert len(image) > 0
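# Running this module requires real Twitter API credentials in the environment,
# matching the os.environ lookups at the top of the file (the shell syntax
# below is an assumption):
#   KEY=... SECRET=... pytest test_app.py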
| {"/test_app.py": ["/twt_img.py"]} |
77,083 | yiskylee/dmref_analyzer | refs/heads/master | /dmref_analyzer/DataCleaner.py | import util
import pandas as pd
import sys
import re
def rename_param(input_dir, old_name, new_name):
param_pattern = "Parameters_(\d\d*).csv"
param_files = util.find_files_with_regex(input_dir, param_pattern)
for param_file in param_files:
param_df = pd.read_csv(param_file)
if old_name not in param_df.columns:
print "Old name does not exist in " + param_file + " continue..."
continue
else:
param_df.rename(columns={old_name: new_name}, inplace=True)
# sample_id = re.match(param_pattern, param_file.split('/')[-1]).group(1)
# if int(sample_id) == 528:
# param_df.to_csv("./528.csv", index=False)
param_df.to_csv(param_file, index=False)
def update_param_file(input_dir, new_param_rule_file):
param_pattern = "Parameters_(\d\d*).csv"
param_files = util.find_files_with_regex(input_dir, param_pattern)
new_params = pd.read_csv(new_param_rule_file)['name']
new_param_empty_df = pd.DataFrame(columns=new_params)
for param_file in param_files:
param_df = pd.read_csv(param_file)
sample_id = re.match(param_pattern, param_file.split('/')[-1]).group(1)
new_param_df = pd.merge(param_df, new_param_empty_df, how='outer')
# Merge does not preserve the column order
# Rearrange the column order to match that of new_param_empty_df
new_param_df = new_param_df[new_params]
new_param_df.to_csv(param_file, index=False)
if __name__ == "__main__":
path = "/home/xiangyu/Dropbox/DMREF/Database/"
new_param_rule_file = "/home/xiangyu/Dropbox/DMREF/parameter_rules_new.csv"
update_param_file(path, new_param_rule_file)
# rename_param(path, "twistOrientation", "twistOrientation") | {"/dmref_analyzer/__init__.py": ["/dmref_analyzer/DataMatrix.py"], "/dmref_analyzer/ModelSelection.py": ["/dmref_analyzer/PlotGenerator.py"]} |
77,084 | yiskylee/dmref_analyzer | refs/heads/master | /dmref_analyzer/__init__.py | from .DataMatrix import DataMatrix
| {"/dmref_analyzer/__init__.py": ["/dmref_analyzer/DataMatrix.py"], "/dmref_analyzer/ModelSelection.py": ["/dmref_analyzer/PlotGenerator.py"]} |
77,085 | yiskylee/dmref_analyzer | refs/heads/master | /dmref_analyzer/FileBrowser.py | import glob, os
import fnmatch
import re
import numpy as np
import util
import pandas as pd
import sys
class FileBrowser(object):
def __init__(self, root_dir=None, param_rule_file=None):
if root_dir is None:
self.root_dir = os.path.expanduser('~/Dropbox/DMREF/Database/')
else:
self.root_dir = root_dir
if param_rule_file is None:
param_rule_file = os.path.expanduser(
'~/Dropbox/DMREF/parameter_rules.csv')
self.param_rule = pd.read_csv(param_rule_file, index_col=1)
def show_file_with_ext(self, file_ext):
files_with_ext = [os.path.join(root, f)
for root, dirs, files in os.walk(self.root_dir)
for f in fnmatch.filter(files, '*.' + file_ext)]
for file in files_with_ext:
print file
def walk_dir(self):
for (root, dirs, filenames) in os.walk(self.root_dir):
print "ROOT: ", root
print "DIRS: ", dirs
print "FILES: ", filenames
print "================================================"
def show_param(self, sample_id):
param_file_pattern = "Parameters_" + str(sample_id) + "(_\d)*" + ".csv"
param_file = util.find_file_with_regex(self.root_dir, param_file_pattern, sample_id)
return pd.read_csv(param_file)
def show_sample_with_experiments(self, sample_rng=np.arange(1, 10000)):
all_experiment_types = map(str.strip, self.param_rule.ix['experimentType', 'options'].split(','))
df_columns = all_experiment_types
# Used to count number of fusion files as well
# + ['numFusionFiles'] + ['totalFusionTime']
        # Pre-allocate rows; the result usually contains fewer rows
file_summary_df = pd.DataFrame(index=sample_rng, columns=df_columns)
file_summary_df.index.name = 'sampleID'
for sample_id in sample_rng:
param_file_pattern = "Parameters_" + str(sample_id) + "(_\d)*" + ".csv"
# fusion_file_pattern = "F_" + str(sample_id) + "(_\d)*" + ".csv"
param_file = util.find_file_with_regex(self.root_dir, param_file_pattern, sample_id)
# fusion_file_paths = util.find_files_with_regex(input_dir, fusion_file_pattern)
if not param_file:
continue
# if fusion_file_paths:
# num_fusion_files = len(fusion_file_paths)
# total_fusion_time = util.gen_total_fusion_time(fusion_file_paths)
# file_summary_df.loc[row_num]["numFusionFiles"] = num_fusion_files
# file_summary_df.loc[row_num]["totalFusionTime"] = total_fusion_time
param_df = pd.read_csv(param_file)
experiment_types = map(str.strip, param_df['experimentType'])
entry = ['Yes' if exp in experiment_types else 'No' for exp in all_experiment_types]
entry_series = pd.Series(index=all_experiment_types, data=entry)
# Use loc or ix to append rows to existing data frames
# iloc doesn't work in this case
file_summary_df.loc[sample_id] = entry_series
# file_summary_df.loc[row_num]['sampleID'] = sample_id
file_summary_df.dropna(axis=0, how='all', inplace=True)
# file_summary_df = file_summary_df.ix[:, (file_summary_df != 0).any(axis=0)]
# file_summary_df.sort_values(by='sampleID', inplace=True)
# file_summary_df.set_index('sampleID', inplace=True)
return file_summary_df | {"/dmref_analyzer/__init__.py": ["/dmref_analyzer/DataMatrix.py"], "/dmref_analyzer/ModelSelection.py": ["/dmref_analyzer/PlotGenerator.py"]} |