index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
76,658 | lucashalbert/OOA | refs/heads/master | /classes/alu.py | '''
Filename: alu.py
Author: Lucas Halbert
Date: 4/22/15
Modified: 4/29/15
Description: Class declarations for different functions of the ALU (arithmetic logic unit)
'''
import sys
import re
import json
import hw
#NEEDS TO FLAG HAZARDS
'''
Class declarations for the ALU
'''
class ALU(object):
'''
This class is used to perform arithmetic operations on instructions
'''
    def __init__(self, data_reg, ALU_in):
        '''
        This constructor stores the register file and the decoded instruction
        (ALU_in), then executes the operation. If ALU_in is None, the ALU has
        nothing to do this cycle.
        '''
        # Read and Open dictionary file relative to root of project
        #self.inst_dict = json.loads(open("dictionaries/instruction_dictionary.py").read())
        # Initialize variables
        self.data_reg = data_reg
        self.ALU_in = ALU_in
        # Initialize/clear self.ALU_out before the next instruction's data
        self.ALU_out = []
        if self.ALU_in is None:
            return
        print("Self.ALU_in:", self.ALU_in)
        self.executeOperation()
        # Clear self.ALU_in for new incoming data
        self.ALU_in = []
    def executeOperation(self):
        '''
        This method dispatches to the appropriate operation handler based on
        the OP field of the instruction.
        '''
        if self.ALU_in is None:
            self.stall = True
            self.noop()
            return
        # A few handlers are renamed because their mnemonics collide with
        # Python keywords; every other mnemonic maps to the method of the
        # same name
        renamed = {"and": "and1", "or": "or1", "not": "not1"}
        op = self.ALU_in[0]
        handler = getattr(self, renamed.get(op, op), None)
        if handler is not None:
            handler()
def noop(self):
print("In the noop constructor")
self.ALU_out = self.ALU_in
def ld(self):
print("In the ld constructor")
# Pass mem operation to MEM stage
self.ALU_out = self.ALU_in
print(self.ALU_out)
def st(self):
print("In the st constructor")
# Pass mem operation to MEM stage
self.ALU_out = self.ALU_in
print(self.ALU_out)
def move(self):
print("In the move constructor")
# Pass mem operation to MEM stage
self.ALU_out = self.ALU_in
print(self.ALU_out)
def swap(self):
print("In the swap constructor")
# Pass mem operation to MEM stage
self.ALU_out = self.ALU_in
print(self.ALU_out)
    def add(self):
        print("In the add constructor")
        # Append instruction operation to ALU_out
        self.ALU_out.append(self.ALU_in[0])
        # Append the destination register's current value to ALU_out
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        # Perform the arithmetic and append the result
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) + int(self.data_reg[self.ALU_in[3]].read(), 2))
        print(self.ALU_out)
    def sub(self):
        print("In the sub constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        # Perform the arithmetic and append the result
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) - int(self.data_reg[self.ALU_in[3]].read(), 2))
        print(self.ALU_out)
    def mul(self):
        print("In the mul constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) * int(self.data_reg[self.ALU_in[3]].read(), 2))
        print(self.ALU_out)
    def div(self):
        print("In the div constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) / int(self.data_reg[self.ALU_in[3]].read(), 2))
        print(self.ALU_out)
    def addi(self):
        print("In the addi constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        # Immediate operand: ALU_in[3] is a literal value, not a register
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) + int(self.ALU_in[3]))
        print(self.ALU_out)
    def subi(self):
        print("In the subi constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) - int(self.ALU_in[3]))
        print(self.ALU_out)
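    # Hedged addition: executeOperation dispatches "muli" and "divi", but no
    # handlers for them appear in this file. The two methods below follow the
    # addi/subi pattern and are a sketch, not the author's confirmed intent.
    def muli(self):
        print("In the muli constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        # Immediate operand: ALU_in[3] is a literal value, not a register
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) * int(self.ALU_in[3]))
        print(self.ALU_out)
    def divi(self):
        print("In the divi constructor")
        self.ALU_out.append(self.ALU_in[0])
        self.ALU_out.append(int(self.data_reg[self.ALU_in[1]].read(), 2))
        self.ALU_out.append(int(self.data_reg[self.ALU_in[2]].read(), 2) / int(self.ALU_in[3]))
        print(self.ALU_out)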
    def and1(self):
        print("In the and constructor")
        # Append instruction operation to ALU_out
        self.ALU_out.append(self.ALU_in[0])
        # TODO: logical operand handling is not implemented yet
        print(self.ALU_out)
    def or1(self):
        print("In the or constructor")
        self.ALU_out.append(self.ALU_in[0])
        # TODO: logical operand handling is not implemented yet
        print(self.ALU_out)
    def not1(self):
        print("In the not constructor")
        self.ALU_out.append(self.ALU_in[0])
        # TODO: logical operand handling is not implemented yet
        print(self.ALU_out)
    def nand(self):
        print("In the nand constructor")
        self.ALU_out.append(self.ALU_in[0])
        # TODO: logical operand handling is not implemented yet
        print(self.ALU_out)
    def nor(self):
        print("In the nor constructor")
        self.ALU_out.append(self.ALU_in[0])
        # TODO: logical operand handling is not implemented yet
        print(self.ALU_out)
    def beq(self):
        print("In the beq constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def bne(self):
        print("In the bne constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def bez(self):
        print("In the bez constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def bnz(self):
        print("In the bnz constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def bgt(self):
        print("In the bgt constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def blt(self):
        print("In the blt constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def bge(self):
        print("In the bge constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
    def ble(self):
        print("In the ble constructor")
        # Pass the branch operation straight through to the MEM stage
        self.ALU_out = self.ALU_in
        print(self.ALU_out)
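# --- illustrative usage sketch (an assumption, not part of the original file) ---
# Minimal demonstration of how the ALU consumes a decoded instruction. The
# Register stub below stands in for hw.register, which stores values as
# binary strings and exposes read().
if __name__ == "__main__":
    class Register(object):
        def __init__(self, value=0):
            self.value = "{0:b}".format(value).rjust(32, '0')
        def read(self):
            return self.value
    regs = [Register(i * 10) for i in range(32)]
    # decoded form of "add,$1,$2,$3": [op, dest reg, src1 reg, src2 reg]
    alu = ALU(regs, ["add", 1, 2, 3])
    print(alu.ALU_out)  # -> ['add', 10, 50]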
| {"/pipeline.py": ["/hw.py", "/classes/decode.py", "/classes/alu.py"], "/assemblyfile2bin.py": ["/classes/encode.py"], "/classes/alu.py": ["/hw.py"], "/__init__.py": ["/hw.py", "/assemblyfile2bin.py", "/pipeline.py"], "/classes/decode.py": ["/hw.py"]} |
76,659 | lucashalbert/OOA | refs/heads/master | /classes/encode.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Filename: encode.py
Author: Lucas Halbert
Date: 4/16/15
Modified: 4/17/15
Description: Encodes instructions passed to it via assemblyfile2bin.py into their
binary representation and writes them out to the specified file.
'''
import sys
import re
import json
class INSTRUCTIONEncode(object):
'''
This class is used to encode instructions passed to it. The instruction dictionary
contains all specific bit mappings for each instruction operand.
'''
def __init__(self, instruction):
'''
This constructor initializes the instruction variable and splits it into its proper
fields based on the last character of the op portion. If an "i" is present, the
instruction is expecting field[3] to be an immediate value.
'''
print("Being encoding inst: ", instruction)
# Read and Open dictionary file relative to root of project
self.inst_dict = json.loads(open("dictionaries/instruction_dictionary.py").read())
# Swap keys and values of dictionary
self.inst_dict = dict(zip(self.inst_dict.values(),self.inst_dict.keys()))
# Initialize instruction
self.instruction = instruction
#print("Instruction:",self.instruction)
# Split instruction by "," and store in inst_fields
self.inst_fields = self.instruction.split(',')
#print("Instruction Fields:",self.inst_fields)
self.field_array = [None]*5
self.field_array[0] = self.encodeOpField()
self.op_type = self.get_op_type()
self.field_array[1] = self.encodeRegister(1)
print("Op type is ", self.op_type)
        # 1 is a memory operation
        if (self.op_type == 1):
            # 3rd field is the 17-bit index
            # 4th field is the memory address register
            self.encodeMemory()
# 2 is an immediate memory operation
elif (self.op_type == 2):
# 3rd field is immediate index value
# 4th field is memory address
self.field_array[2] = "00000"
self.field_array[3] = self.encodeMemoryImmediate()
# 3 is an arithmetic operation
elif (self.op_type == 3):
# 3rd field is first source register
# 4th field is second source register
self.field_array[2] = self.encodeRegister(2)
self.field_array[3] = self.encodeRegister(3)
# 4 is an immediate arithmetic operation
elif (self.op_type == 4):
# 3rd field is source register
# 4th field is immediate value
self.field_array[2] = self.encodeRegister(2)
self.field_array[3] = self.encodeImmediate(3)
# 5 is branch operation
elif (self.op_type == 5):
# 3rd field is source register
# 4th field is 2nd source register
self.field_array[2] = self.encodeRegister(2)
self.field_array[3] = self.encodeRegister(3)
print("0 = ", self.field_array[0])
print("1 = ", self.field_array[1])
print("2 = ", self.field_array[2])
print("3 = ", self.field_array[3])
print("Instruction encoded= ", self.constructByteCode())
#return self.constructByteCode()
    def encodeImmediate(self, index):
        # Immediate values are stored in 17 bits, so the maximum is 2**17 - 1 = 131071
        if int(self.inst_fields[index].split('#')[1]) > 131071:
            print("Immediate", self.inst_fields[index], "is too big")
            sys.exit(4)
        return "{0:b}".format(int(self.inst_fields[index].split('#')[1])).rjust(17, '0')
def encodeRegister(self, index):
if int(self.inst_fields[index].split('$')[1]) > 31:
print("Register", self.inst_fields[index],"does not exist")
sys.exit(4)
return "{0:b}".format(int(self.inst_fields[index].split('$')[1])).rjust(5, '0')
    def encodeMemory(self):
        # Split "index($reg)" into the index and the register number
        index = self.inst_fields[2].split("(")[0]
        reg = self.inst_fields[2].split("(")[1].split(")")[0].split("$")[1]
        # Convert both to binary: 17-bit index and 5-bit register, zero-padded MSB
        self.field_array[2] = "{0:b}".format(int(index)).rjust(17, '0')
        self.field_array[3] = "{0:b}".format(int(reg)).rjust(5, '0')
        return self.field_array[2] + self.field_array[3]
def encodeMemoryImmediate(self):
return "{0:b}".format(int(self.inst_fields[2].split('#')[1])).rjust(17, '0')
def encodeOpField(self):
'''
This method encodes the OP field of the instruction.
'''
# Extract Instruction Operator from field 0
self.inst_op = self.inst_fields[0]
# Check if the last character of the operator specifies an immediate value
if self.inst_op[len(self.inst_op)-1] == "i":
self.immediate = 1
else:
self.immediate = 0
#print("Immediate?:",self.immediate)
self.inst_op_bin = self.inst_dict[self.inst_fields[0]]
#print("Instruction OP:",self.inst_fields[0])
#print("Instruction OP Binary:",self.inst_op_bin)
return self.inst_op_bin
#encodeDestField()
    def get_op_type(self):
        self.inst_op = self.inst_fields[0]
        #########################
        #   Memory Operations   #
        #########################
        if self.inst_op in ("ld", "st", "move", "swap"):
            op_type = 1
        ###################################
        #   Immediate Memory Operations   #
        ###################################
        elif self.inst_op in ("ldi", "sti"):
            op_type = 2
        #############################
        #   Arithmetic Operations   #
        #############################
        elif self.inst_op in ("add", "sub", "mul", "div"):
            op_type = 3
        #######################################
        #   Immediate Arithmetic Operations   #
        #######################################
        elif self.inst_op in ("addi", "subi"):
            op_type = 4
        ##########################
        #   Logical Operations   #
        ##########################
        # (and/or/not/nand/nor are not classified yet)
        #########################
        #   Branch Operations   #
        #########################
        elif self.inst_op in ("beq", "bne", "bez", "bnz",
                              "bgt", "blt", "bge", "ble"):
            op_type = 5
        else:
            print("Unknown operation", self.inst_op)
            sys.exit(4)
        self.op_type = op_type
        return op_type
def encodeDestField(self):
'''
This method encodes the destination field of the instruction
'''
# Extract instruction destination from field 1
self.inst_dest = self.inst_fields[1]
# Error check to confirm that field 1 does not contain a register that does not exist(>31)
if int(self.inst_dest.split('$')[1]) > 31:
print("Register",self.inst_dest,"does not exist")
sys.exit(4)
# Convert instruction destination to binary and pad to 5 bits MSB
self.inst_dest_bin = "{0:b}".format(int(self.inst_fields[1].split('$')[1])).rjust(5, '0')
#print("Instruction Destination:",self.inst_dest)
#print("Instruction Dest Binary:",self.inst_dest_bin)
return self.inst_dest_bin
def encodeSource1Field(self):
'''
This method encodes the source1 field of the instruction
'''
# Extract instruction source1 from field 2
self.inst_source1 = self.inst_fields[2]
# Error check to confirm that field 2 does not contain a register that does not exist(>31)
if int(self.inst_source1.split('$')[1]) > 31:
print("Register",self.inst_source1,"does not exist")
sys.exit(4)
# Convert instruction source1 to binary and pad to 5 bits MSB
self.inst_source1_bin = "{0:b}".format(int(self.inst_fields[2].split('$')[1])).rjust(5, '0')
#print("Instruction Source1:",self.inst_source1)
#print("Instruction source1 Binary:",self.inst_source1_bin)
return self.inst_source1_bin
def encodeSource2Field(self):
'''
This method encodes the source2 field of the instruction
'''
# Extract instruction source1 from field 3
self.inst_source2 = self.inst_fields[3]
# Error check to confirm that field 3 does not contain a register that does not exist(>31)
if int(self.inst_source2.split('$')[1]) > 31:
print("Register",self.inst_source2,"does not exist")
sys.exit(4)
# Convert instruction source2 to binary and pad to 5 bits MSB
self.inst_source2_bin = "{0:b}".format(int(self.inst_fields[3].split('$')[1])).rjust(5, '0')
#print("Instruction Source2:",self.inst_source1)
#print("Instruction source2 Binary:",self.inst_source2_bin)
return self.inst_source2_bin
def encodeImmediateValue(self):
'''
This method encodes the immediate value field of the instruction when self.immediate == 1
'''
# Extract instruction immediate value from field 3
self.inst_immediate = self.inst_fields[3]
# Error check to confirm that field 3 does not contain an immediate value larger than 131071 (17 bits)
if int(self.inst_immediate.split('#')[1]) > 131071 :
print("Values greater than 131071 cannot be entered")
sys.exit(5)
# Convert instruction immediate value to binary and pad to 17 bits LSB
self.inst_immediate_bin = "{0:b}".format(int(self.inst_fields[3].split('#')[1])).rjust(17, '0')
#print("Instruction Immediate:",self.inst_immediate)
#print("Instruction Immediate Binary:",self.inst_immediate_bin)
return self.inst_immediate_bin
def constructByteCode(self):
'''
This method compiles all of the binary fields into a single 32-bit binary string to be passed
to other stages of the pipeline.
'''
# Combine OP, Dest, Source1, and Source2 into compiled binary
#if self.immediate == 1:
#self.inst_bin = self.inst_op_bin + self.inst_dest_bin + self.inst_source1_bin + self.inst_immediate_bin
#elif self.immediate == 0:
#self.inst_bin = self.inst_op_bin + self.inst_dest_bin + self.inst_source1_bin + self.inst_source1_bin
print(self.field_array[0] + self.field_array[1] + self.field_array[2] + self.field_array[3])
self.inst_bin = self.field_array[0] + self.field_array[1] + self.field_array[2] + self.field_array[3]
self.inst_bin_len = len(self.inst_bin)
#print("Instruction Length:",len(self.inst_bin))
# Force 32 bit length
self.inst_bin = self.inst_bin.ljust(32, '0')
#print("Instruction Length:",len(self.inst_bin))
#print("Complete Instruction Binary:",self.inst_bin)
return self.inst_bin
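# --- illustrative sketch (an assumption, not part of the original file) ---
# The byte code produced above for a register arithmetic instruction such as
# "add,$1,$2,$3" is laid out as
#   | op (5 bits) | dest (5) | source1 (5) | source2 (5) | zero padding |
# padded on the right to 32 bits. A minimal stand-in for the packing step:
def pack_fields(op_bin, dest_bin, src1_bin, src2_bin):
    # Concatenate the binary fields and force a 32-bit width, mirroring
    # the ljust(32, '0') in constructByteCode
    return (op_bin + dest_bin + src1_bin + src2_bin).ljust(32, '0')
# Example (the op code for "add" here is hypothetical):
# pack_fields("00101", "00001", "00010", "00011")
# -> '00101000010001000011000000000000'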
| {"/pipeline.py": ["/hw.py", "/classes/decode.py", "/classes/alu.py"], "/assemblyfile2bin.py": ["/classes/encode.py"], "/classes/alu.py": ["/hw.py"], "/__init__.py": ["/hw.py", "/assemblyfile2bin.py", "/pipeline.py"], "/classes/decode.py": ["/hw.py"]} |
76,660 | lucashalbert/OOA | refs/heads/master | /__init__.py | from hw import register
from hw import mem_collection
from assemblyfile2bin import FileToBin
from pipeline import pipeline
import sys
NUM_OF_REG=32  # registers $0-$31; the encoder/decoder allow register numbers up to 31
SIZE_OF_INST_MEM=2048
SIZE_OF_DATA_MEM=32768
# Pull source file from command line arguments
# Pull source file from command line arguments, falling back to a default
SOURCE_FILE = sys.argv[1] if len(sys.argv) > 1 else "assemblySource.txt"
BIN_FILE = "binFile.txt"
def main():
# Create Stack Pointer
stack_ptr=register()
# Create Instruction Register
inst_reg=register()
ALU_in=None # Input to the ALU
ALU_out=None # Output of ALU
MEM_out=None # Result of reading from MEM
WB_addr=None # Address to write back to
# Create registers
data_reg=[]
for it in range (0,NUM_OF_REG):
data_reg.append(register())
#create memory
inst_mem=mem_collection("inst", SIZE_OF_INST_MEM)
data_mem=mem_collection("data", SIZE_OF_DATA_MEM)
print("Done initializing mem and reg")
# All data Registers
print("\nData Registers")
for it in range (0,NUM_OF_REG):
print(data_reg[it].read())
# Instruction Mem
print("\nInstruction Memory")
print(inst_mem.load(2))
# Data Mem
print("\nData Memory")
print(data_mem.load(2))
print("\n\nRead assembly file and convert to binary")
f = FileToBin(SOURCE_FILE, BIN_FILE)
# Read source file
f.read()
# write binary to bin file and return binary
inst_binary_array = f.write()
# print entire inst_binary_array
print(inst_binary_array)
# store all encoded binary to instruction memory
inst_mem.save_all(inst_binary_array)
'''
Pipeline Starts here
'''
pipeline(stack_ptr, inst_reg, data_reg, data_mem, inst_mem, ALU_in, ALU_out, MEM_out, WB_addr)
# print element 2 of instruction memory
#print(inst_mem.load(1))
if __name__ == "__main__":
main()
| {"/pipeline.py": ["/hw.py", "/classes/decode.py", "/classes/alu.py"], "/assemblyfile2bin.py": ["/classes/encode.py"], "/classes/alu.py": ["/hw.py"], "/__init__.py": ["/hw.py", "/assemblyfile2bin.py", "/pipeline.py"], "/classes/decode.py": ["/hw.py"]} |
76,661 | lucashalbert/OOA | refs/heads/master | /classes/decode.py | '''
Filename: decode.py
Author: Lucas Halbert
Date: 4/17/15
Modified: 4/29/15
Description: decodes binary to assembly
'''
import sys
import re
import json
import hw
'''
Class declarations for each stage of the pipeline
'''
class INSTRUCTIONDecode(object):
'''
This class is used to decode instructions passed to it. The instruction dictionary
contains all specific bit mappings for each instruction operand.
'''
def __init__(self, instruction, data_reg, WB_addr, ALU_in, ALU_out):
'''
This constructor initializes the instruction variable and splits it into its proper
fields based on the last character of the op portion. If an "i" is present, the
instruction is expecting field[3] to be an immediate value.
'''
# Read and Open dictionary file relative to root of project
self.inst_dict = json.loads(open("dictionaries/instruction_dictionary.py").read())
# Initialize instuction
self.instruction = instruction
# Initialize data register
self.data_reg = data_reg
#print("Instruction:",self.instruction)
# Initialize Write Back address
self.WB_addr = WB_addr
# Create ALU_IN array
self.ALU_in = []
#print("Instruction:",self.instruction)
# Start the decode process
self.decodeField0()
print("decode-self.ALU_in",self.ALU_in)
#return self.ALU_in
def decodeField0(self):
'''
This method decodes the OP field of the instruction.
'''
# Extract the first 5 characters of the binary instruction
inst_op_bin = self.instruction[:5]
print("Instruction Op Binary:", inst_op_bin)
# Lookup the operation of the extracted binary
self.inst_op = self.inst_dict[inst_op_bin]
print("Instruction Op:",type(self.inst_op),self.inst_op)
# Check if last character of Operator specifies immediate value
#if self.inst_op[len(self.inst_op)-1] == "i":
# self.immediate = 1
#else:
# self.immediate = 0
#print("Immediate?:",self.immediate)
#return self.inst_op
# Append instruction operation to ALU_in as element 0
self.ALU_in.append(self.inst_op)
#########################
# Memory Operations #
#########################
if (self.inst_op == "ld") or (self.inst_op == "st") or (self.inst_op == "move") or (self.inst_op == "swap"):
'''
Memory Operation Structure
|-----------------------|
| OP , dest , source |
| ld , $1 , 0($2) |
|-----------------------|
| OP , source , dest |
| st , $1 , 0($2) |
|-----------------------|
| OP , dest , source |
| move , $1 , 0($2) |
|-----------------------|
| OP , dest , dest |
| swap , $1 , 0($2) |
|-----------------------|
'''
# Decode Destination Field
destination = int(self.decodeField1().split("$")[1])
# Decode Mem operation
source = self.decodeMem()
print("Source",source)
# split source into index and source register address
index = int(source.split("(")[0], 2)
print("source?:",source.split("(")[1].split(")")[0].split("$")[1])
source = int(source.split("(")[1].split(")")[0].split("$")[1])
# fetch register value and convert to int
print("reg address",source)
mem_address = int(self.data_reg[source].read(), 2)
# add index to memory address
self.mem_address = (index + mem_address)
print(self.mem_address)
# Append destination register to ALU_in as element 1
self.ALU_in.append(destination)
# Append index to ALU_in as element 2
self.ALU_in.append(index)
# Append source to ALU_in as element 3
self.ALU_in.append(source)
########################
# Immediate Operations #
########################
elif (self.inst_op == "ldi") or (self.inst_op == "sti"):
'''
Immediate Operation Structure
|-------------------------|
| OP , dest , Immediate |
| ldi , $2 , #34266 |
|-------------------------|
'''
# Decode Destination Field
destination = int(self.decodeField1().split("$")[1])
# Decode Immediate operation
immediate = int(self.decodeImmediateValue().split("#")[1])
# Append destination register to ALU_in as element 1
self.ALU_in.append(destination)
# Append immediate value to ALU_in as element 2
self.ALU_in.append(immediate)
#########################
# Arithmetic Operations #
#########################
elif (self.inst_op == "add") or (self.inst_op == "sub") or (self.inst_op == "mul") or (self.inst_op == "div"):
'''
Arithmetic Operation Structure
|--------------------------|
| OP , dest , src1 | src2 |
| add , $1 , $2 | $3 |
|--------------------------|
| OP , dest , src1 | src2 |
| sub , $1 , $2 | $3 |
|--------------------------|
| OP , dest , src1 | src2 |
| mul , $1 , $2 | $3 |
|--------------------------|
| OP , dest , src1 | src2 |-----> Remainder placed in remainder register???
| div , $1 , $2 | $3 |
|--------------------------|
'''
# Decode Destination Field
destination = int(self.decodeField1().split("$")[1])
# Decode Source 1 & 2 Fields
source1 = int(self.decodeSource1Field().split("$")[1])
source2 = int(self.decodeSource2Field().split("$")[1])
# Print everything
print(self.inst_op,destination,source1,source2)
# Append destination register to ALU_in as element 1
self.ALU_in.append(destination)
# Append source1 and source2 to ALU_in as elements 2 and 3
self.ALU_in.append(source1)
self.ALU_in.append(source2)
###################################
# Immediate Arithmetic Operations #
###################################
elif (self.inst_op == "addi") or (self.inst_op == "subi"):
'''
Immediate Arithmetic Operation Structure
|--------------------------------|
| OP , dest , src1 | immediate |
| addi , $1 , $2 | #34233 |
|--------------------------------|
| OP , dest , src1 | immediate |
| subi , $1 , $2 | #34233 |
|--------------------------------|
'''
# Decode Destination Field
destination = int(self.decodeField1().split("$")[1])
# Decode Source 1 Field
source1 = int(self.decodeSource1Field().split("$")[1])
# Decode Immediate
immediate = int(self.decodeImmediateValue().split("#")[1])
print(self.inst_op,destination,source1,immediate)
# Append destination register to ALU_in as element 1
self.ALU_in.append(destination)
# Append source1 to ALU_in as element 2
self.ALU_in.append(source1)
# Append immediate to ALU_in as element 3
self.ALU_in.append(immediate)
######################
# logical Operations #
######################
elif (self.inst_op == "and") or (self.inst_op == "or") or (self.inst_op == "not") or (self.inst_op == "nand") or (self.inst_op == "nor"):
'''
Logical Operation Structure
|---------------------------|
| OP , dest , src1 , src2 |
| and , $1 , $2 , $3 |
| or , $1 , $2 , $3 |
| not , $1 , $2 , $3 |
| nand , $1 , $2 , $3 |
| nor , $1 , $2 , $3 |
|---------------------------|
'''
#####################
# Branch Operations #
#####################
elif (self.inst_op == "beq") or (self.inst_op == "bne") or (self.inst_op == "bez") or (self.inst_op == "bnz") or (self.inst_op == "bgt") or (self.inst_op == "blt") or (self.inst_op == "bge") or (self.inst_op == "ble"):
'''
Branch Operation Structure
|---------------------------|
| OP , src1 , src2 , LABEL |
| beq , $1 , $2 , Loop |
| bne , $1 , $2 , Loop |
| bgt , $1 , $2 , Loop |
| blt , $1 , $2 , Loop |
| bge , $1 , $2 , Loop |
| ble , $1 , $2 , Loop |
|---------------------------|
| OP , src1 , LABEL |
| bez , $1 , Loop |
|---------------------------|
'''
if (self.inst_op == "bez") or (self.inst_op == "bnz"):
# Decode Source 1 Field
source1 = int(self.decodeField1().split("$")[1])
# fetch register value and convert to int
value1 = int(self.data_reg[source1].read(), 2)
# Compare value to zero
if (self.inst_op == "bez"):
if value1 == 0:
print("true! value == 0")
else:
print("false! value != 0")
if (self.inst_op == "bnz"):
if not value1 == 0:
print("true! value != 0")
else:
print("false! value == 0")
else:
# Decode Source 1 Field
source1 = self.decodeField1()
# Decode Source 2 Field
source2 = self.decodeSource1Field()
# split source into index and source register address
source1 = source1.split("$")[1]
source2 = source2.split("$")[1]
# Decode label??
# fetch register value and convert to int
print("source1 address",source1)
print("source2 address",source2)
value1 = int(self.data_reg[int(source1)].read(), 2)
value2 = int(self.data_reg[int(source2)].read(), 2)
if (self.inst_op == "beq"):
if value1 == value2:
print("true! value1 == value2")
else:
print("false! value1 != value2")
elif (self.inst_op == "bne"):
if not value1 == value2:
print("true! value1 != value2")
else:
print("false! value1 == value2")
elif (self.inst_op == "bgt"):
if value1 > value2:
print("true! value1 > value2")
else:
print("false! value1 < value2")
elif (self.inst_op == "blt"):
if value1 < value2:
print("true! value1 < value2")
else:
print("false! value1 > value2")
elif (self.inst_op == "bge"):
if value1 >= value2:
print("true! value1 >= value2")
else:
print("false! value1 <= value2")
elif (self.inst_op == "ble"):
if value1 <= value2:
print("true! value1 <= value2")
else:
print("false! value1 >= value2")
########################################################
def decodeField1(self):
'''
This method decodes the destination field of the instruction
'''
# Extract the next 5 characters of the binary instruction
self.inst_dest_bin = self.instruction[5:10]
# Error check to confirm that the destination field does not contain a register that does not exist (> 31)
if int(self.inst_dest_bin, 2) > 31:
print("Register",int(self.inst_dest_bin, 2),"does not exist")
sys.exit(4)
# Convert the extracted binary to a register number
self.inst_dest = "$" + str(int(self.inst_dest_bin, 2))
print("Instruction Dest:",self.inst_dest)
return self.inst_dest
def decodeSource1Field(self):
'''
This method decodes the source1 field of the instruction
'''
# Extract the next 5 characters of the binary instruction
self.inst_source1_bin = self.instruction[10:15]
#print("Instruction Source1 Binary:",self.inst_source1_bin)
# Error check to confirm that the source1 field does not contain a register that does not exist (> 31)
if int(self.inst_source1_bin, 2) > 31:
print("Register",int(self.inst_source1_bin, 2),"does not exist")
sys.exit(4)
# Convert the extracted binary to a register number
self.inst_source1 = "$" + str(int(self.inst_source1_bin, 2))
#print("Instruction Dest:",self.inst_source1)
return self.inst_source1
def decodeSource2Field(self):
'''
This method decodes the source2 field of the instruction
'''
# Extract the next 5 characters of the binary instruction
self.inst_source2_bin = self.instruction[15:20]
#print("Instruction Source2 Binary:",self.inst_source2_bin)
# Error check to confirm that the source2 field does not contain a register that does not exist (> 31)
if int(self.inst_source2_bin, 2) > 31:
print("Register",int(self.inst_source2_bin, 2),"does not exist")
sys.exit(4)
# Convert the extracted binary to a register number
self.inst_source2 = "$" + str(int(self.inst_source2_bin, 2))
#print("Instruction Dest:",self.inst_source2)
return self.inst_source2
def decodeImmediateValue(self):
'''
This method decodes the immediate value field of the instruction (used when self.immediate == 1)
'''
# Extract the final 17 characters of the binary instruction
self.inst_immediate_bin = self.instruction[15:32]
#print("Length:",len(str(self.inst_immediate_bin)))
#print("Instruction Immediate Binary:",self.inst_immediate_bin)
# Error check to confirm that the immediate value does not exceed 131071 (17 bits)
if int(self.inst_immediate_bin, 2) > 131071:
print("Cannot use numbers large than 131071")
sys.exit(4)
        # Convert the extracted binary to an integer immediate value
        self.inst_immediate = "#" + str(int(self.inst_immediate_bin, 2))
#print("Immediate Value:",self.inst_immediate)
return self.inst_immediate
def decodeMem(self):
'''
This method decodes the index and register location for a memory operation
'''
        # Extract bits 10-26 of the binary as the 17-bit index
        source_index_bin = self.instruction[10:27]
# Error check to confirm that destination field does not contain a register that does not exist(>32)
if int(source_index_bin, 2) > 131071:
print("Cannot use index numbers large than 131071")
sys.exit(4)
# Convert extracted binary to an int
source_index = str(int(source_index_bin, 2)) + "("
        # Extract bits 27-31 of the binary as the 5-bit source register
        source_reg_bin = self.instruction[27:32]
# Convert extracted binary to a register number
source_reg = "$" + str(int(source_reg_bin, 2)) + ")"
# Combine index and register number
source = source_index + source_reg
print("mem operation decode:", source)
return source
def constructInstruction(self):
'''
This method compiles all of the binary fields into a single 32-bit binary string to be passed
to other stages of the pipeline.
'''
# Combine OP, Dest, Source1, and Source2 into compiled binary
if self.immediate == 1:
self.inst = self.inst_op + "," + self.inst_dest + "," + self.inst_source1 + "," + self.inst_immediate
elif self.immediate == 0:
self.inst = self.inst_op + "," + self.inst_dest + "," + self.inst_source1 + "," + self.inst_source2
#print("Instruction Length:",len(self.inst))
print("Complete Instruction:",self.inst)
self.checkIfStallNeeded()
return self.inst
    def checkIfStallNeeded(self):
        # Stall when either source register matches the pending write-back address
        if self.inst_source1 == self.WB_addr or self.inst_source2 == self.WB_addr:
            self.need_stall = True
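# --- illustrative sketch (an assumption, not part of the original file) ---
# How the decoder above slices a 32-bit instruction for a register
# arithmetic operation: bits [0:5] op, [5:10] dest, [10:15] source1,
# [15:20] source2. A minimal stand-in for that slicing logic:
def slice_arithmetic_fields(instruction):
    # Returns (op_bits, dest, source1, source2), registers as ints
    return (instruction[:5],
            int(instruction[5:10], 2),
            int(instruction[10:15], 2),
            int(instruction[15:20], 2))
# slice_arithmetic_fields("00101" "00001" "00010" "00011" + "0" * 12)
# -> ('00101', 1, 2, 3)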
| {"/pipeline.py": ["/hw.py", "/classes/decode.py", "/classes/alu.py"], "/assemblyfile2bin.py": ["/classes/encode.py"], "/classes/alu.py": ["/hw.py"], "/__init__.py": ["/hw.py", "/assemblyfile2bin.py", "/pipeline.py"], "/classes/decode.py": ["/hw.py"]} |
76,684 | guchio3/kaggle-plasticc | refs/heads/master | /tools/_feature_tools.py | import pandas as pd
import numpy as np
from scipy import signal
import gc
from multiprocessing import Pool
from tqdm import tqdm
import warnings
import cesium.featurize as featurize
from tsfresh.feature_extraction import extract_features
warnings.simplefilter('ignore', RuntimeWarning)
np.random.seed(71)
# =======================================
# util functions
# =======================================
def split_idxes(df, nthread, logger, nclass=14):
logger.info('calculating uniq object_id num')
object_ids = df.object_id.unique()
logger.info('getting groups')
groups = np.array_split(object_ids, nclass)
logger.info('splitting df')
idxes = [df[df.object_id.isin(group)].index for group in groups]
return idxes
def get_group_df(df_and_group):
df, group = df_and_group
return df[df.object_id.isin(set(group))]
def split_dfs(df, nthread, logger, save_flg=False):
logger.info('calculating uniq object_id num')
object_ids = df.object_id.unique()
logger.info('getting groups')
groups = np.array_split(object_ids, nthread)
logger.info('splitting df')
dfs = []
for group in tqdm(list(groups)):
dfs.append(df[df.object_id.isin(set(group))])
if save_flg:
logger.info('saving the split dfs...')
for i, df in tqdm(list(enumerate(dfs))):
df.reset_index().to_feather('./test_dfs/{}.fth'.format(i))
return dfs
def load_test_set_dfs(nthread, logger):
logger.info('loading dfs...')
dfs_paths = [
'/home/naoya.taguchi/workspace/kaggle/plasticc-2018/test_dfs/{}.fth'.format(i) for i in range(62)]
p = Pool(nthread)
dfs = p.map(pd.read_feather, dfs_paths)
p.close()
p.join()
logger.info('done')
return dfs
# def normalize_flux(set_df, new_flux_name='flux'):
# normalize_base_df = set_df.groupby('object_id').\
# flux.median().\
# reset_index().\
# rename(columns={'flux': 'flux_median'})
# normalize_bases = set_df.merge(
# normalize_base_df,
# on='object_id',
# how='left').flux_median
# set_df[new_flux_name] = set_df.flux
# set_df[new_flux_name] /= normalize_bases
# return set_df
def _normalize_flux(set_df):
flux_band_stat_df = set_df.groupby(['object_id', 'passband']).\
agg({'flux': ['mean', 'std']}).\
reset_index()
    # Flatten the aggregated MultiIndex columns while keeping the bare
    # object_id/passband names intact so the merge below still works
    flux_band_stat_df.columns = pd.Index(
        ['_'.join(filter(None, e)) for e in flux_band_stat_df.columns.tolist()])
stats_for_normalize = set_df.merge(
flux_band_stat_df,
on=['object_id', 'passband'],
how='left')
set_df['flux'] -= stats_for_normalize.flux_mean
set_df['flux'] /= stats_for_normalize.flux_std
del flux_band_stat_df, stats_for_normalize
gc.collect()
return set_df
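def _normalize_flux_demo():
    # Illustrative only (an assumption, not part of the original file):
    # per-(object_id, passband) standardization leaves each group with
    # mean ~0 and sample std ~1.
    demo_df = pd.DataFrame({'object_id': [1, 1, 1, 1],
                            'passband': [0, 0, 0, 0],
                            'flux': [1., 2., 3., 4.]})
    # -> roughly [-1.16, -0.39, 0.39, 1.16]
    return _normalize_flux(demo_df)['flux'].tolist()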
def normalise(ts):
return (ts - ts.mean()) / ts.std()
def get_phase_features(set_df):
groups = set_df[['object_id', 'passband', 'mjd', 'flux', 'flux_err']].\
groupby(['object_id', 'passband'])
# times = groups.apply(lambda block: block['phase'].values).\
times = groups.apply(lambda block: block['mjd'].values).\
reset_index().\
rename(columns={0: 'seq'})
flux = groups.apply(lambda block: normalise(block['flux']).values).\
reset_index().\
rename(columns={0: 'seq'})
flux_err = groups.apply(lambda block: normalise(block['flux_err']).values).\
reset_index().\
rename(columns={0: 'seq'})
times_list = times.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
flux_list = flux.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
flux_err_list = flux_err.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
warnings.simplefilter('ignore', RuntimeWarning)
phase_df = featurize.\
featurize_time_series(times=times_list,
values=flux_list,
errors=flux_err_list,
features_to_use=[
# 'amplitude',
'freq1_freq',
# 'freq1_signif',
# 'freq1_amplitude1',
# 'freq2_freq',
# 'freq2_amplitude1',
# 'percent_beyond_1_std',
# 'freq3_freq',
### 'flux_percentile_ratio_mid20',
### 'max_slope',
# 'period_fast'
### 'qso_log_chi2_qsonu',
],
scheduler=None)
# print(phase_df.head(10))
phase_df.columns = [str(e[0]) + '_' + str(e[1])
for e in phase_df.columns.tolist()]
phase_df['object_id'] = times.object_id
del times, flux, times_list, flux_list
gc.collect()
return phase_df
def _get_astro_distance(z, c=299790, h=67.15):
# http://micha072.blog.fc2.com/blog-entry-1378.html
_pow_z = np.power(z+1, 2)
v = c * (-1 + _pow_z) / (1 + _pow_z)
d = v / h
return d
def _get_pogson_magnitude(flux):
return 22.5 - 2.5 * np.log10(flux)
def add_corrected_flux(set_df, set_metadata_df):
# _set_metadata_df = set_metadata_df[
# (set_metadata_df.hostgal_photoz_err < 0.5) &
# (set_metadata_df.hostgal_photoz_err > 0.)]
_set_metadata_df = set_metadata_df
set_df = set_df.merge(
_set_metadata_df[['object_id', 'hostgal_photoz']],
on='object_id',
how='left')
# set_df['corrected_flux'] = set_df.flux * (set_df.hostgal_photoz.apply(_get_astro_distance)**2)
set_df['corrected_flux'] = set_df.flux * (set_df.hostgal_photoz**2)
set_df['pogson_magnitude'] = set_df.flux.apply(_get_pogson_magnitude)
# set_df['corrected_flux'] = set_df.flux / (set_df.hostgal_photoz**2)
return set_df
# =======================================
# feature functions
# =======================================
def weighted_mean(flux, dflux):
return np.sum(flux * (flux / dflux)**2) /\
np.sum((flux / dflux)**2)
def normalized_flux_std(flux, wMeanFlux):
return np.std(flux / wMeanFlux, ddof=1)
def normalized_amplitude(flux, wMeanFlux):
return (np.max(flux) - np.min(flux)) / wMeanFlux
def normalized_MAD(flux, wMeanFlux):
return np.median(np.abs((flux - np.median(flux)) / wMeanFlux))
def beyond_1std(flux, wMeanFlux):
return sum(np.abs(flux - wMeanFlux) > np.std(flux, ddof=1)) / len(flux)
def get_starter_features(_id_grouped_df):
f = _id_grouped_df.flux
df = _id_grouped_df.flux_err
m = weighted_mean(f, df)
std = normalized_flux_std(f, df)
amp = normalized_amplitude(f, m)
mad = normalized_MAD(f, m)
beyond = beyond_1std(f, m)
return m, std, amp, mad, beyond
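def _starter_features_demo():
    # Illustrative only (an assumption, not part of the original file):
    # weighted_mean weights each point by its squared signal-to-noise
    # (flux / flux_err)**2, so bright, well-measured points dominate.
    flux = pd.Series([10., 20., 30.])
    flux_err = pd.Series([1., 1., 1.])
    # (10*100 + 20*400 + 30*900) / (100 + 400 + 900) = 25.714...
    return weighted_mean(flux, flux_err)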
def diff_mean(x):
return x.diff().mean()
def diff_max(x):
return x.diff().max()
def diff_min(x):
return x.diff().min()
def diff_std(x):
return x.diff().std()
def diff_sum(x):
return x.diff().sum()
def get_max_min_diff(x):
return x.max() - x.min()
# =======================================
# feature engineering part
# =======================================
def _for_set_df(set_df):
# set_df = normalize_flux(set_df)
# min_fluxes = set_df.groupby('object_id').\
# flux.min().\
# reset_index().\
# rename(columns={'flux': '_temp_flux_min'})
# set_df = set_df.merge(min_fluxes, on='object_id', how='left')
# set_df['minused_flux'] = set_df.flux - set_df._temp_flux_min
# set_df.flux -= 0.
# a flux_err of 25 is roughly the average in the training set
# set_df = set_df[set_df.flux_err < 25]
set_df['flux_ratio_to_flux_err'] = \
set_df['flux'] / set_df['flux_err']
# 'kurtosis' cannot be used...?
aggregations = {
# 'passband': ['mean', 'std', 'var'],
# 'mjd': ['max', 'min', 'var'],
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', 'count'],
#### 'corrected_flux': ['min', 'max', 'mean', 'median', 'skew', ],
#### 'pogson_magnitude': ['min', 'max', 'mean', 'median', 'skew', ],
'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew'],
'flux_ratio_to_flux_err': ['min', 'max', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew'],
'flux_by_flux_ratio_sq': ['sum', 'skew'],
#### 'corrected_flux_ratio_sq': ['sum', 'skew'],
#### 'corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'minused_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew'],
# 'normed_flux': ['mean', 'median', 'skew'],
}
detected_aggregations = {
'mjd': [get_max_min_diff, 'var', ],
}
# non_detected_aggregations = {
# 'flux': ['var'],
# }
mean_upper_flux_aggregations = {
'mjd': [get_max_min_diff, 'var', ],
'flux': ['mean', ]
# 'phase': [get_max_min_diff, 'var', ],
# 'mjd': ['min', 'max', 'var', ],
}
passband_aggregations = {
'flux': ['min', 'max', 'count', 'var', 'mean', 'skew', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew'],
'flux_by_flux_ratio_sq': ['sum', 'skew'],
}
# === run aggregations ===
# feature engineering before aggregations
set_df['flux_ratio_sq'] = np.power(
set_df['flux'] / set_df['flux_err'], 2.0)
set_df['flux_by_flux_ratio_sq'] = set_df['flux'] * \
set_df['flux_ratio_sq']
#### set_df['corrected_flux_ratio_sq'] = np.power(
#### set_df['corrected_flux'] / set_df['flux_err'], 2.0)
#### set_df['corrected_flux_by_flux_ratio_sq'] = set_df['corrected_flux'] * \
#### set_df['flux_ratio_sq']
fe_set_df = set_df.groupby('object_id').agg({**aggregations})
fe_set_df.columns = pd.Index(
[e[0] + "_" + e[1] for e in fe_set_df.columns.tolist()])
# === run mean upper aggregation ===
    # Transform so we can use the mjd-wise spread of the flux values that sit
    # above the mean; in short, this is meant to capture the period.
object_flux_mean_df = set_df[['object_id', 'flux']].\
groupby('object_id').\
mean().\
rename(columns={'flux': 'flux_mean'})
mean_upper_flux_df = set_df.merge(
object_flux_mean_df, on='object_id', how='left')
mean_upper_flux_df = mean_upper_flux_df[mean_upper_flux_df.flux >
mean_upper_flux_df.flux_mean]
fe_mean_upper_flux_df = mean_upper_flux_df.groupby('object_id').\
agg({**mean_upper_flux_aggregations})
fe_mean_upper_flux_df.columns = pd.Index(
['mean_upper_' + e[0] + "_" + e[1]
for e in fe_mean_upper_flux_df.columns.tolist()])
# fe_mean_upper_flux_df['mean_upper_mjd_diff'] = \
# fe_mean_upper_flux_df['mean_upper_mjd_max'] - \
# fe_mean_upper_flux_df['mean_upper_mjd_min']
# fe_mean_upper_flux_df.drop(['mjd_max', 'mjd_min'], axis=1, inplace=True)
fe_set_df = fe_set_df.merge(
fe_mean_upper_flux_df,
on='object_id',
how='left')
del object_flux_mean_df, mean_upper_flux_df, fe_mean_upper_flux_df
gc.collect()
# === detected aggregation ===
detected_df = set_df[set_df.detected == 1]
fe_detected_df = detected_df.groupby('object_id').\
agg({**detected_aggregations})
fe_detected_df.columns = pd.Index(
['detected_' + e[0] + "_" + e[1]
for e in fe_detected_df.columns.tolist()])
fe_set_df = fe_set_df.merge(
fe_detected_df,
on='object_id',
how='left')
del detected_df, fe_detected_df
gc.collect()
# === non_detected aggregation ===
# non_detected_df = set_df[set_df.detected == 0]
# fe_non_detected_df = non_detected_df.groupby('object_id').\
# agg({**non_detected_aggregations})
# fe_non_detected_df.columns = pd.Index(
# ['non_detected_' + e[0] + "_" + e[1]
# for e in fe_non_detected_df.columns.tolist()])
# fe_set_df = fe_set_df.merge(
# fe_non_detected_df,
# on='object_id',
# how='left')
# del non_detected_df, fe_non_detected_df
# gc.collect()
# === per-passband processing ===
passband_df = pd.DataFrame(fe_set_df[['flux_count', 'flux_mean']])
passbands = [0, 1, 2, 3, 4, 5]
for passband in passbands:
band_prefix = 'band-{}_'.format(passband)
# _passband_set_df = normalize_flux(set_df[set_df.passband == passband])
_passband_set_df = set_df[set_df.passband == passband]
# starter kit type fe
starter_fe_series = _passband_set_df.\
groupby('object_id').\
apply(get_starter_features)
starter_fe_df = starter_fe_series.\
apply(lambda x: pd.Series(x)).\
rename(columns={
0: band_prefix + 'wmean',
1: band_prefix + 'normed_std',
2: band_prefix + 'normed_amp',
3: band_prefix + 'normed_mad',
4: band_prefix + 'beyond_1std',
})
# aggregation type fe
band_fe_set_df = _passband_set_df.\
groupby('object_id').\
agg({**passband_aggregations})
band_fe_set_df.columns = pd.Index(
['band-{}_'.format(passband) + e[0] + "_" + e[1]
for e in band_fe_set_df.columns.tolist()])
band_fe_set_df[band_prefix + 'flux_diff'] = \
band_fe_set_df[band_prefix + 'flux_max'] - \
band_fe_set_df[band_prefix + 'flux_min']
# drop these since there would be too many features otherwise
passband_df = passband_df.merge(
starter_fe_df, on='object_id', how='left')
passband_df = passband_df.merge(
band_fe_set_df, on='object_id', how='left')
# passband_df['band-{}_flux_count'.format(passband)] = \
# passband_df['band-{}_flux_count'.format(passband)]\
# / passband_df['flux_count']
# feature engineering for passband_df
for lpb in passbands:
rpb = (lpb + 1) % 6
lMean = passband_df['band-{}_wmean'.format(lpb)]
rMean = passband_df['band-{}_wmean'.format(rpb)]
lstd = passband_df['band-{}_normed_std'.format(lpb)]
rstd = passband_df['band-{}_normed_std'.format(rpb)]
lamp = passband_df['band-{}_normed_amp'.format(lpb)]
ramp = passband_df['band-{}_normed_amp'.format(rpb)]
# lmad = passband_df['band-{}_normed_mad'.format(lpb)]
# rmad = passband_df['band-{}_normed_mad'.format(rpb)]
# l1std = passband_df['band-{}_beyond_1std'.format(lpb)]
# r1std = passband_df['band-{}_beyond_1std'.format(rpb)]
mean_diff = -2.5 * np.log10(lMean / rMean)
std_diff = lstd - rstd
amp_diff = lamp - ramp
# mad_diff = lmad-rmad
# beyond_diff = l1std-r1std
mean_diff_colname = '{}_minus_{}_wmean'.format(lpb, rpb)
std_diff_colname = '{}_minus_{}_std'.format(lpb, rpb)
amp_diff_colname = '{}_minus_{}_amp'.format(lpb, rpb)
# mad_diff_colname = '{}_minus_{}_mad'.format(lpb, rpb)
# beyond_diff_colname = '{}_minus_{}_beyond'.format(lpb, rpb)
passband_df[mean_diff_colname] = mean_diff
passband_df[std_diff_colname] = std_diff
passband_df[amp_diff_colname] = amp_diff  # without this the score drops by about 0.0001
# passband_df[mad_diff_colname] = mad_diff
# passband_df[beyond_diff_colname] = beyond_diff
# passband_df[(lMean <= 0) | (rMean <= 0)][mean_diff_colname] = -999
fe_set_df = fe_set_df.merge(
passband_df.drop([
'flux_count',
'flux_mean',
],
axis=1),
on='object_id',
how='left')
del _passband_set_df, starter_fe_series, starter_fe_df, \
band_fe_set_df, passband_df
gc.collect()
# feature engineering after aggregations
fe_set_df['flux_diff'] = fe_set_df['flux_max'] - fe_set_df['flux_min']
fe_set_df['flux_dif2'] = (fe_set_df['flux_max'] - fe_set_df['flux_min'])\
/ fe_set_df['flux_mean']
fe_set_df['flux_w_mean'] = fe_set_df['flux_by_flux_ratio_sq_sum'] / \
fe_set_df['flux_ratio_sq_sum']
fe_set_df['flux_dif3'] = (fe_set_df['flux_max'] - fe_set_df['flux_min'])\
/ fe_set_df['flux_w_mean']
#### fe_set_df['corrected_flux_diff'] = fe_set_df['corrected_flux_max'] - fe_set_df['corrected_flux_min']
#### fe_set_df['corrected_flux_dif2'] = (fe_set_df['corrected_flux_max'] - fe_set_df['corrected_flux_min'])\
#### / fe_set_df['corrected_flux_mean']
#### fe_set_df['corrected_flux_w_mean'] = fe_set_df['corrected_flux_by_flux_ratio_sq_sum'] / \
#### fe_set_df['corrected_flux_ratio_sq_sum']
#### fe_set_df['corrected_flux_dif3'] = (fe_set_df['corrected_flux_max'] - fe_set_df['corrected_flux_min'])\
#### / fe_set_df['corrected_flux_w_mean']
passband_flux_maxes = \
['band-{}_flux_max'.format(i) for i in passbands]
# fe_set_df['passband_flux_maxes_var'] = \
# fe_set_df[passband_flux_maxes].var(axis=1)
for passband_flux_max in passband_flux_maxes:
fe_set_df[passband_flux_max + '_ratio_to_the_max'] = \
fe_set_df[passband_flux_max] / fe_set_df['flux_max']
passband_flux_mins = \
['band-{}_flux_min'.format(i) for i in passbands]
fe_set_df['passband_flux_min_var'] = \
fe_set_df[passband_flux_mins].var(axis=1)
# for passband_flux_min in passband_flux_mins:
# fe_set_df[passband_flux_min + '_ratio_to_the_min'] = \
# fe_set_df[passband_flux_min] / fe_set_df['flux_min']
passband_flux_means = \
['band-{}_flux_mean'.format(i) for i in passbands]
fe_set_df['passband_flux_means_var'] = \
fe_set_df[passband_flux_means].var(axis=1)
passband_flux_counts = \
['band-{}_flux_count'.format(i) for i in passbands]
fe_set_df['passband_flux_counts_var'] = \
fe_set_df[passband_flux_counts].var(axis=1)
passband_detected_means = \
['band-{}_detected_mean'.format(i) for i in passbands]
fe_set_df['passband_detected_means_var'] = \
fe_set_df[passband_detected_means].var(axis=1)
# passband_flux_ratio_sq_sum = \
# ['band-{}_flux_ratio_sq_sum'.format(i) for i in passbands]
# fe_set_df['passband_flux_ratio_sq_sum_var'] = \
# fe_set_df[passband_flux_ratio_sq_sum].var(axis=1)
# passband_flux_ratio_sq_skew = \
# ['band-{}_flux_ratio_sq_skew'.format(i) for i in passbands]
# fe_set_df['passband_flux_ratio_sq_skew_var'] = \
# fe_set_df[passband_flux_ratio_sq_skew].var(axis=1)
    # the variance of each band's missing-value rate also seems promising
    # finally, drop the features that are no longer needed
drop_cols = [
'flux_ratio_sq_sum',
]
drop_cols += passband_flux_counts
drop_cols += passband_flux_maxes
drop_cols += passband_flux_mins
drop_cols += passband_flux_means
# drop_cols += passband_flux_ratio_sq_sum
fe_set_df.drop(drop_cols, axis=1, inplace=True)
return fe_set_df
def feature_engineering(set_df, set_metadata_df, nthread,
logger, test_flg=False):
logger.info('getting split dfs ...')
if test_flg:
set_dfs = load_test_set_dfs(nthread, logger)
#set_dfs = split_dfs(set_df, nthread, logger, save_flg=True)
else:
set_dfs = split_dfs(set_df, nthread, logger)
#### logger.info('adding corrected flux...')
#### for i, _set_df in tqdm(enumerate(set_dfs)):
#### set_dfs[i] = add_corrected_flux(_set_df, set_metadata_df)
#### del _set_df
gc.collect()
logger.info('start feature engineering ...')
logger.info('feature engineering ...')
p = Pool(nthread)
set_res_list = p.map(_for_set_df, set_dfs)
p.close()
p.join()
set_res_df = pd.concat(set_res_list, axis=0)
set_res_df.reset_index(inplace=True)
gc.collect()
# logger.info('cesium features ...')
# p = Pool(nthread)
# phase_res_list = p.map(get_phase_features, set_dfs)
# p.close()
# p.join()
# phase_df = pd.concat(phase_res_list, axis=0).reset_index(drop=True)
# gc.collect()
### if test_flg:
### _phase_df = pd.read_csv('/home/naoya.taguchi/src/train_set_full_features.csv')
### phase_df = pd.read_csv('/home/naoya.taguchi/src/single_output_test_ts_features.csv')
### phase_df.columns = _phase_df.columns[:-5]
### else:
### phase_df = pd.read_csv('/home/naoya.taguchi/src/train_set_full_features.csv')
### phase_df = phase_df[[
### 'object_id',
### '__max_slope___0_',
### '__max_slope___1_',
### '__max_slope___2_',
### '__max_slope___3_',
### '__max_slope___4_',
### '__max_slope___5_',
### '__median_absolute_deviation___0_',
### '__median_absolute_deviation___1_',
### '__median_absolute_deviation___2_',
### '__median_absolute_deviation___3_',
### '__median_absolute_deviation___4_',
### '__median_absolute_deviation___5_',
### '__freq_varrat___0_',
### '__freq_varrat___1_',
### '__freq_varrat___2_',
### '__freq_varrat___3_',
### '__freq_varrat___4_',
### '__freq_varrat___5_',
### ]]
# phase_dfs = []
# for df in tqdm(set_dfs):
# phase_dfs.append(get_phase_features(df))
# phase_df = pd.concat(phase_dfs, axis=0).reset_index(drop=True)
# phase_df.set_index('object_id', inplace=True)
# phase_df.to_csv('./temp.csv', index=False)
# phase_df = pd.read_csv('./temp.csv').reset_index(drop=True)
# print(phase_df)
# print(set_res_df)
# fe_set_df = fe_set_df.merge(phase_df, on='object_id')
# set_res_df = pd.concat([set_res_df, phase_df], axis=1)
# logger.info('adding fft features ...')
# fcp = {'fft_coefficient': [{'coeff': 0, 'attr': 'abs'},{'coeff': 1, 'attr': 'abs'}],
# 'kurtosis' : None,
# 'skewness' : None}
# agg_df_ts = extract_features(
# set_df,
# column_id='object_id',
# column_sort='mjd',
# column_kind='passband',
# column_value = 'flux',
# default_fc_parameters = fcp,
# n_jobs=nthread)
# agg_df_ts.index.rename('object_id',inplace=True)
### set_res_df = set_res_df.merge(phase_df, on='object_id', how='left')
# set_res_df = set_res_df.merge(agg_df_ts, on='object_id', how='left')
# del set_df, phase_df
del set_df
gc.collect()
logger.info('post processing ...')
res_df = set_metadata_df.merge(set_res_df, on='object_id', how='left')
# res_df = res_df.merge(phase_df, on='object_id')
res_df['internal'] = res_df.hostgal_photoz == 0.
# res_df['astrodist'] = res_df.hostgal_photoz.apply(_get_astro_distance)
# res_df['hostgal_photoz_square'] = np.power(res_df.hostgal_photoz, 2)
# res_df.drop(['object_id', 'hostgal_specz', 'ra', 'decl',
res_df.drop(['object_id', 'hostgal_specz', 'hostgal_photoz', 'ra', 'decl',
'gal_l', 'gal_b', 'ddf', 'mwebv'], axis=1, inplace=True)
passbands = [0, 1, 2, 3, 4, 5]
band_x_beyound_1stds = ['band-{}_beyond_1std'.format(i) for i in passbands]
# band_x_flux_by_flux_ratio_sq_sums = ['band-{}_flux_by_flux_ratio_sq_sum'.format(i) for i in passbands]
# band_x_flux_max_ratio_to_the_max = ['band-{}_flux_max_ratio_to_the_max'.format(i) for i in passbands]
# band_x_flux_diff = ['band-{}_flux_diff'.format(i) for i in passbands]
band_x_normed_mad = ['band-{}_normed_mad'.format(i) for i in passbands]
# band_x_normed_std = ['band-{}_normed_std'.format(i) for i in passbands]
# band_x_flux_var = ['band-{}_flux_var'.format(i) for i in passbands]
# band_x_wmean = ['band-{}_wmean'.format(i) for i in passbands]
# band_x_flux_by_flux_ratio_sq_skew = ['band-{}_flux_by_flux_ratio_sq_skew'.format(i) for i in passbands]
# band_x_flux_skew = ['band-{}_flux_skew'.format(i) for i in passbands]
# band_x_flux_ratio_sq_sum = ['band-{}_flux_ratio_sq_sum'.format(i) for i in passbands]
others = []
#res_df.drop(band_x_beyound_1stds + band_x_normed_mad + others, axis=1, inplace=True)
del set_res_df
gc.collect()
return res_df
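if __name__ == '__main__':
    # Hedged usage sketch: the CSV names below are the standard PLAsTiCC file
    # names and are an assumption here, not taken from this repository.
    import logging
    logging.basicConfig(level=logging.INFO)
    demo_logger = logging.getLogger(__name__)
    train_set_df = pd.read_csv('training_set.csv')
    train_metadata_df = pd.read_csv('training_set_metadata.csv')
    features = feature_engineering(train_set_df, train_metadata_df,
                                   nthread=4, logger=demo_logger,
                                   test_flg=False)
    print(features.shape)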
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,685 | guchio3/kaggle-plasticc | refs/heads/master | /tools/plasticc_features.py | import pandas as pd
import numpy as np
from scipy import signal
from scipy.stats import kurtosis
import gc
from multiprocessing import Pool
from tqdm import tqdm
import warnings
import cesium.featurize as featurize
from tsfresh.feature_extraction import extract_features
from features import featureCreator, MulHelper, toapply
from astropy.cosmology import FlatLambdaCDM
warnings.simplefilter('ignore', RuntimeWarning)
warnings.filterwarnings('ignore')
np.random.seed(71)
# =======================================
# feature functions
# =======================================
def weighted_mean(flux, dflux):
return np.sum(flux * (flux / dflux)**2) /\
np.sum((flux / dflux)**2)
def normalized_flux_std(flux, wMeanFlux):
return np.std(flux / wMeanFlux, ddof=1)
def normalized_amplitude(flux, wMeanFlux):
return (np.max(flux) - np.min(flux)) / wMeanFlux
def normalized_MAD(flux, wMeanFlux):
return np.median(np.abs((flux - np.median(flux)) / wMeanFlux))
def beyond_1std(flux, wMeanFlux):
return sum(np.abs(flux - wMeanFlux) > np.std(flux, ddof=1)) / len(flux)
def get_starter_features(_id_grouped_df):
f = _id_grouped_df.flux
df = _id_grouped_df.flux_err
m = weighted_mean(f, df)
std = normalized_flux_std(f, df)
amp = normalized_amplitude(f, m)
mad = normalized_MAD(f, m)
beyond = beyond_1std(f, m)
return m, std, amp, mad, beyond
def get_flux_mjd_diff(df):
return df.flux.diff()/df.mjd.diff()
def get_flux_mjd_diff_mean(df):
return get_flux_mjd_diff(df).mean()
def get_flux_mjd_diff_max(df):
return get_flux_mjd_diff(df).max()
def get_flux_mjd_diff_min(df):
return get_flux_mjd_diff(df).min()
def get_flux_mjd_diff_std(df):
return get_flux_mjd_diff(df).std()
def get_flux_mjd_diff_var(df):
return get_flux_mjd_diff(df).var()
def diff_mean(x):
return x.diff().mean()
def diff_max(x):
return x.diff().max()
def diff_std(x):
return x.diff().std()
def diff_var(x):
return x.diff().var()
def diff_sum(x):
return x.diff().sum()
def get_max_min_diff(x):
return x.max() - x.min()
def quantile10(x):
return x.quantile(0.10)
def quantile25(x):
return x.quantile(0.25)
def quantile75(x):
return x.quantile(0.75)
def quantile90(x):
return x.quantile(0.90)
def quantile95(x):
return x.quantile(0.95)
def minmax_range(x):
return x.max() - x.min()
def quantile2575_range(x):
return quantile75(x) - quantile25(x)
def quantile1090_range(x):
return quantile90(x) - quantile10(x)
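def _quantile_helpers_demo():
    # Illustrative only (an assumption, not part of the original file).
    # These tiny named wrappers exist because pandas .agg() labels output
    # columns with the function __name__; a bare lambda would be labeled
    # '<lambda>' instead.
    s = pd.Series([0., 1., 2., 3., 4.])
    return quantile2575_range(s)  # 3.0 - 1.0 == 2.0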
def calc_flux_mjd_skewness(df):
mjd = df.mjd
flux = df.flux.clip(0., None)
mean = (df.mjd * flux).sum() / flux.sum()
std = np.abs(np.sqrt(((mjd - mean)**2 * flux).sum() / flux.sum()))
fm_skew = ((((mjd - mean) * flux).sum())/flux.sum())**3 / std**3
return fm_skew
def calc_flux_mjd_kurtosis(df):
mjd = df.mjd
flux = df.flux.clip(0., None)
mean = (df.mjd * flux).sum() / flux.sum()
std = np.abs(np.sqrt(((mjd - mean)**2 * flux).sum() / flux.sum()))
    # flux-weighted fourth standardized moment of mjd
    fm_kurt = (((mjd - mean)**4 * flux).sum() / flux.sum()) / std**4
return fm_kurt
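# A minimal sanity check of the weighted moments on toy data (hypothetical
# values, illustration only):
#   >>> toy = pd.DataFrame({'mjd': [0., 1., 2.], 'flux': [1., 2., 1.]})
#   >>> calc_flux_mjd_skewness(toy)  # symmetric pulse -> roughly 0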
# =======================================
# feature creator
# =======================================
class featureCreatorPreprocess(featureCreator):
def __init__(self, load_dir, save_dir,
src_df_dict=None, logger=None, nthread=1, train=True):
super(featureCreatorPreprocess, self).\
__init__(load_dir=load_dir,
save_dir=save_dir,
src_df_dict=src_df_dict,
logger=logger,
nthread=nthread)
self.train = train
def _load(self):
if self.train:
path_dict = {
'set_df': self.load_dir + 'training_set.csv',
'set_metadata_df': self.load_dir + 'training_set_metadata.csv'}
self.src_df_dict['set_df'] = pd.read_hdf('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/kyle_final_augment.h5', 'df')
self.src_df_dict['set_metadata_df'] = pd.read_hdf('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/kyle_final_augment.h5', 'meta')
else:
path_dict = {'set_metadata_df': self.load_dir + 'test_set_metadata.csv'}
            for i in tqdm(range(62)):
path_dict[f'test_set_{i}_df'] = f'../test_dfs/{i}.fth'
# self._load_dfs_from_paths(path_dict=path_dict)
def _split_dfs(self, df, nthread, save_flg=False):
        self._log_print('calculating unique object_id count')
object_ids = df.object_id.unique()
self._log_print('getting groups')
groups = np.array_split(object_ids, nthread)
self._log_print('splitting df')
dfs = []
for group in tqdm(list(groups)):
dfs.append(df[df.object_id.isin(set(group))])
if save_flg:
self._log_print('saving the split dfs...')
for i, df in tqdm(list(enumerate(dfs))):
df.reset_index().to_feather('./test_dfs/{}.fth'.format(i))
return dfs
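    # splitting on object_id (rather than raw rows) keeps each light curve
    # intact within a single chunk, which the per-object groupby features
    # downstream rely on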
def _add_corrected_flux(self, set_df, set_metadata_df):
# _set_metadata_df = set_metadata_df[
# (set_metadata_df.hostgal_photoz_err < 0.5) &
# (set_metadata_df.hostgal_photoz_err > 0.)]
# cosmo = FlatLambdaCDM(H0=70, Om0=0.3, Tcmb0=2.725)
# distance_modulus = cosmo.distmod(set_metadata_df.hostgal_specz)
# set_metadata_df['z_distmod'] = distance_modulus
set_metadata_df['lumi_dist'] = 10**((set_metadata_df.distmod+5)/5)
# set_metadata_df['z_lumi_dist'] = 10**((set_metadata_df.distmod+5)/5)
_set_metadata_df = set_metadata_df
set_df = set_df.merge(
_set_metadata_df[['object_id', 'hostgal_photoz', 'lumi_dist', 'distmod', 'hostgal_specz']],
on='object_id',
how='left')
set_df['corrected_flux'] = set_df.flux / (set_df.hostgal_photoz**2)
set_df['z_corrected_flux'] = set_df.flux / (set_df.hostgal_specz**2)
set_df['normed_flux'] = (set_df.flux - set_df.flux.min()) / set_df.flux.max()
# set_df['luminosity'] = 4*np.pi*(set_df.lumi_dist**2)*set_df.flux
# set_df['z_luminosity'] = 4*np.pi*(set_df.z_lumi_dist**2)*set_df.flux
# set_df['magnitude'] = -2.5*np.log10(set_df.flux)
# set_df['abs_magnitude'] = set_df.magnitude - set_df.distmod
del set_df['distmod'], set_df['hostgal_specz'], set_df['lumi_dist']#, set_df['z_lumi_dist'], set_df['magnitude']
gc.collect()
return set_df
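    # note: corrected_flux divides by hostgal_photoz**2 (and hostgal_specz**2)
    # as a rough inverse-square distance correction; galactic objects with
    # photoz == 0 yield non-finite values here, left for downstream handling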
def _create_features(self):
if self.train:
set_dfs = self._split_dfs(self.src_df_dict['set_df'], self.nthread)
            for i in tqdm(range(62)):
splitted_set_df = set_dfs[i]
set_df_name = f'test_set_{i}_df'
self.src_df_dict[set_df_name] = splitted_set_df
        # apply the flux correction
self._log_print('adding corrected flux...')
        for i in tqdm(range(62)):
set_df_name = f'test_set_{i}_df'
self.src_df_dict[set_df_name] = \
self._add_corrected_flux(
self.src_df_dict[set_df_name],
self.src_df_dict['set_metadata_df']
)
self._log_print('pre-processing set dfs ...')
        for i in tqdm(range(62)):
set_df_name = f'test_set_{i}_df'
_set_df = self.src_df_dict[set_df_name]
# preprocess
_set_df['flux_ratio_to_flux_err'] = _set_df['flux'] / _set_df['flux_err']
_set_df['flux_ratio_sq'] = np.power(
_set_df['flux'] / _set_df['flux_err'], 2.0)
_set_df['flux_by_flux_ratio_sq'] = _set_df['flux'] * \
_set_df['flux_ratio_sq']
_set_df['corrected_flux_ratio_sq'] = np.power(
_set_df['corrected_flux'] / _set_df['flux_err'], 2.0)
_set_df['corrected_flux_by_flux_ratio_sq'] = _set_df['corrected_flux'] * \
_set_df['flux_ratio_sq']
_set_df['z_corrected_flux_ratio_sq'] = np.power(
_set_df['z_corrected_flux'] / _set_df['flux_err'], 2.0)
_set_df['z_corrected_flux_by_flux_ratio_sq'] = _set_df['z_corrected_flux'] * \
_set_df['flux_ratio_sq']
# replace
self.src_df_dict[set_df_name] = _set_df
def fe_set_df_base(corrected_set_df):
aggregations = {
# 'passband': ['mean', 'std', 'var'],
# 'mjd': ['max', 'min', 'var'],
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', 'count', kurtosis],
# 'abs_magnitude': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
'corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', ],
'z_corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', ],
'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
'flux_ratio_to_flux_err': ['min', 'max', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', 'mean', kurtosis, 'max'],
'flux_by_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
'z_corrected_flux_ratio_sq': ['sum', 'skew', ],
'z_corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'luminosity': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
# 'z_luminosity': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
# 'minused_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew'],
# 'normed_flux': ['mean', 'median', 'skew'],
# 'diff_flux_by_diff_mjd': ['min', 'max', 'var', ],
}
fe_set_df = corrected_set_df.groupby('object_id').agg({**aggregations})
fe_set_df.columns = pd.Index([e[0] + "_" + e[1] for e in fe_set_df.columns.tolist()])
return fe_set_df
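# fe_set_df_base uses the standard groupby/agg pattern: agg() yields a
# MultiIndex of (column, statistic) pairs that is flattened into names like
# 'flux_max'. A minimal sketch of the same pattern on toy data:
#   >>> toy = pd.DataFrame({'object_id': [1, 1, 2], 'flux': [1., 3., 2.]})
#   >>> out = toy.groupby('object_id').agg({'flux': ['min', 'max']})
#   >>> out.columns = pd.Index([a + '_' + b for a, b in out.columns])
#   >>> list(out.columns)
#   ['flux_min', 'flux_max']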
def fe_set_df_detected(corrected_set_df):
detected_corrected_set_df = corrected_set_df[corrected_set_df.detected == 1]
detected_aggregations = {
'mjd': [get_max_min_diff, 'skew'],
}
fe_set_df = detected_corrected_set_df.groupby('object_id').agg({**detected_aggregations})
fe_set_df.columns = pd.Index(['detected_' + e[0] + "_" + e[1] for e in fe_set_df.columns.tolist()])
return fe_set_df
def fe_set_df_std_upper_and_lower(corrected_set_df):
object_flux_std_df = corrected_set_df[['object_id', 'flux']].\
groupby('object_id').\
std().\
rename(columns={'flux': 'flux_std'})
object_flux_mean_df = corrected_set_df[['object_id', 'flux']].\
groupby('object_id').\
mean().\
rename(columns={'flux': 'flux_mean'})
corrected_set_df = corrected_set_df.merge(
object_flux_std_df, on='object_id', how='left')
corrected_set_df = corrected_set_df.merge(
object_flux_mean_df, on='object_id', how='left')
std_upper_corrected_set_df = corrected_set_df[corrected_set_df.flux >
corrected_set_df.flux_std + corrected_set_df.flux_mean]
    # mirror the upper band: keep points more than one std below the mean
    std_lower_corrected_set_df = corrected_set_df[corrected_set_df.flux <
                                                  corrected_set_df.flux_mean - corrected_set_df.flux_std]
std_upper_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', ],
# 'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
'flux': ['count', 'min'],
# 'mjd': ['min', 'max', 'var', ],
}
std_lower_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', ],
'flux': ['count', 'max'],
}
std_upper_fe_set_df = std_upper_corrected_set_df.groupby('object_id').agg({**std_upper_aggregations})
std_upper_fe_set_df.columns = pd.Index(['std_upper_' + e[0] + "_" + e[1] for e in std_upper_fe_set_df.columns.tolist()])
std_lower_fe_set_df = std_lower_corrected_set_df.groupby('object_id').agg({**std_lower_aggregations})
std_lower_fe_set_df.columns = pd.Index(['std_lower_' + e[0] + "_" + e[1] for e in std_lower_fe_set_df.columns.tolist()])
fe_set_df = std_upper_fe_set_df.merge(std_lower_fe_set_df, on='object_id', how='left')
return fe_set_df
def fe_set_df_passband(corrected_set_df):
passband_aggregations = {
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'count', 'var', 'mean', 'skew', kurtosis, quantile10,quantile25, quantile75, quantile90, quantile2575_range, quantile1090_range, get_max_min_diff],
'normed_flux': [diff_mean, ],
#'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
# 'flux_err': ['var'],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', 'max', 'min', get_max_min_diff],
'flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'luminosity': ['max', kurtosis],
'corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew',
diff_var, get_max_min_diff],
'z_corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew',
diff_var, get_max_min_diff],
}
fe_set_df = pd.DataFrame()
passbands = [0, 1, 2, 3, 4, 5]
for passband in passbands:
_passband_set_df = corrected_set_df[corrected_set_df.passband == passband]
# starter kit type fe
starter_fe_series = _passband_set_df.\
groupby('object_id').\
apply(get_starter_features)
starter_fe_df = starter_fe_series.\
apply(lambda x: pd.Series(x)).\
rename(columns={
0: 'band-{}_wmean'.format(passband),
1: 'band-{}_normed_std'.format(passband),
2: 'band-{}_normed_amp'.format(passband),
3: 'band-{}_normed_mad'.format(passband),
4: 'band-{}_beyond_1std'.format(passband),
})
# the other aggregations
band_fe_set_df = _passband_set_df.\
groupby('object_id').\
agg({**passband_aggregations})
band_fe_set_df.columns = pd.Index(
['band-{}_'.format(passband) + e[0] + "_" + e[1]
for e in band_fe_set_df.columns.tolist()])
if fe_set_df.shape[0] != 0:
fe_set_df = fe_set_df.merge(
starter_fe_df, on='object_id', how='left')
else:
fe_set_df = starter_fe_df
fe_set_df = fe_set_df.merge(
band_fe_set_df, on='object_id', how='left')
return fe_set_df
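# Per-band frames above are accumulated with a merge-or-assign pattern: the
# first band seeds fe_set_df, and every later band is left-joined on
# object_id.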
def fe_set_df_passband_std_upper(corrected_set_df):
band_std_upper_flux_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', diff_mean],
'flux': ['count', diff_mean, quantile10, quantile25, quantile75, quantile90, quantile2575_range, quantile1090_range],
}
fe_set_df = pd.DataFrame()
passbands = [0, 1, 2, 3, 4, 5]
for passband in passbands:
_passband_set_df = corrected_set_df[corrected_set_df.passband == passband]
band_object_flux_std_df = _passband_set_df[['object_id', 'flux']].\
groupby('object_id').\
std().\
rename(columns={'flux': 'flux_std'})
band_object_flux_mean_df = _passband_set_df[['object_id', 'flux']].\
groupby('object_id').\
mean().\
rename(columns={'flux': 'flux_mean'})
_passband_set_df = _passband_set_df.merge(
band_object_flux_std_df, on='object_id', how='left')
_passband_set_df = _passband_set_df.merge(
band_object_flux_mean_df, on='object_id', how='left')
band_std_upper_flux_df = _passband_set_df[_passband_set_df.flux >
_passband_set_df.flux_std +
_passband_set_df.flux_mean]
band_fe_std_upper_flux_df = band_std_upper_flux_df.groupby('object_id').\
agg({**band_std_upper_flux_aggregations})
band_fe_std_upper_flux_df.columns = pd.Index(
['band-{}_std_upper_'.format(passband) + e[0] + "_" + e[1]
for e in band_fe_std_upper_flux_df.columns.tolist()])
if fe_set_df.shape[0] != 0:
fe_set_df = fe_set_df.merge(
band_fe_std_upper_flux_df, on='object_id', how='left')
else:
fe_set_df = band_fe_std_upper_flux_df
return fe_set_df
def fe_set_df_passband_detected(corrected_set_df):
band_detected_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', diff_mean],
}
fe_set_df = pd.DataFrame()
passbands = [0, 1, 2, 3, 4, 5]
for passband in passbands:
_passband_set_df = corrected_set_df[corrected_set_df.passband == passband]
band_detected_df = _passband_set_df[_passband_set_df.detected == 1]
band_fe_detected_df = band_detected_df.groupby('object_id').\
agg({**band_detected_aggregations})
band_fe_detected_df.columns = pd.Index(
['band-{}_detected_'.format(passband) + e[0] + "_" + e[1]
for e in band_fe_detected_df.columns.tolist()])
if fe_set_df.shape[0] != 0:
fe_set_df = fe_set_df.merge(
band_fe_detected_df, on='object_id', how='left')
else:
fe_set_df = band_fe_detected_df
return fe_set_df
def _get_peak_mjd(df):
return df[df.flux == df.flux.max()].iloc[0].mjd
def fe_set_df_peak_around(corrected_set_df):
date_lwidths = [14, 14, 0, 30, 0, 30, 90, 0, 90]
date_rwidths = [14, 0, 14, 30, 30, 0, 90, 90, 0]
    # without restricting to detected == 1, this overfits
det_corrected_set_df = corrected_set_df.query('detected == 1')
fe_set_df = pd.DataFrame(det_corrected_set_df.object_id.unique(), columns=['object_id'])
for date_lwidth, date_rwidth in zip(date_lwidths, date_rwidths):
peak_df = det_corrected_set_df.\
merge(det_corrected_set_df.groupby('object_id').
apply(_get_peak_mjd).
reset_index().
rename(columns={0: 'peak_mjd'}),
on='object_id', how='left')
peak_df = peak_df[
(peak_df.mjd <= peak_df.peak_mjd + date_rwidth) &
(peak_df.mjd >= peak_df.peak_mjd - date_lwidth)]
peak_aggregations = {
# 'passband': ['mean', 'std', 'var'],
# 'mjd': ['max', 'min', 'var'],
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', 'count', kurtosis,
diff_var, get_max_min_diff],
# 'abs_magnitude': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis,
# diff_var, get_max_min_diff],
'corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew',
diff_var, get_max_min_diff],
'flux_ratio_to_flux_err': ['min', 'max', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', 'mean', kurtosis, ],
'flux_by_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'luminosity': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
# 'minused_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew'],
# 'normed_flux': ['mean', 'median', 'skew'],
# 'diff_flux_by_diff_mjd': ['min', 'max', 'var', ],
}
_fe_set_df = peak_df.groupby('object_id').agg({**peak_aggregations})
_fe_set_df.columns = pd.Index([f'peak-{date_lwidth}-{date_rwidth}_' + e[0] + "_" + e[1] for e in _fe_set_df.columns.tolist()])
fe_set_df = fe_set_df.merge(_fe_set_df, on='object_id', how='left')
return fe_set_df
def fe_set_df_passband_peak_around(corrected_set_df):
passbands = [0, 1, 2, 3, 4, 5]
date_lwidths = [14, 14, 0, 30, 0, 30, 90, 0, 90]
date_rwidths = [14, 0, 14, 30, 30, 0, 90, 90, 0]
fe_set_df = pd.DataFrame(corrected_set_df.object_id.unique(), columns=['object_id'])
for date_lwidth, date_rwidth in zip(date_lwidths, date_rwidths):
        for passband in passbands:
            # restrict to the current passband so each iteration produces
            # genuinely per-band features
            _passband_set_df = corrected_set_df[corrected_set_df.passband == passband]
            peak_df = _passband_set_df.\
                merge(_passband_set_df.groupby('object_id').
                      apply(_get_peak_mjd).
                      reset_index().
                      rename(columns={0: 'peak_mjd'}),
                      on='object_id', how='left')
peak_df = peak_df[
(peak_df.mjd <= peak_df.peak_mjd + date_rwidth) &
(peak_df.mjd >= peak_df.peak_mjd - date_lwidth)]
passband_peak_aggregations = {
# 'passband': ['mean', 'std', 'var'],
# 'mjd': ['max', 'min', 'var'],
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', 'count', kurtosis,
diff_var],
# 'abs_magnitude': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis,
# diff_var],
'corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew',
diff_var],
'z_corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew',
diff_var],
'flux_ratio_to_flux_err': ['min', 'max', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', 'mean', kurtosis],
'flux_by_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_ratio_sq': ['sum', 'skew', ],
'z_corrected_flux_ratio_sq': ['sum', 'skew', ],
'z_corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'luminosity': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
# 'minused_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew'],
# 'normed_flux': ['mean', 'median', 'skew'],
# 'diff_flux_by_diff_mjd': ['min', 'max', 'var', ],
}
_fe_set_df = peak_df.groupby('object_id').agg({**passband_peak_aggregations})
            _fe_set_df.columns = pd.Index([f'band-{passband}_peak-{date_lwidth}-{date_rwidth}_' + e[0] + "_" + e[1] for e in _fe_set_df.columns.tolist()])
fe_set_df = fe_set_df.merge(_fe_set_df, on='object_id', how='left')
return fe_set_df
def _get_ratsq_peak_mjd(df):
return df[df.flux_ratio_sq == df.flux_ratio_sq.max()].iloc[0].mjd
def fe_set_df_ratsq_peak_around(corrected_set_df):
date_lwidths = [14, 14, 0, 30, 0, 30, 90, 0, 90]
date_rwidths = [14, 0, 14, 30, 30, 0, 90, 90, 0]
fe_set_df = pd.DataFrame(corrected_set_df.object_id.unique(), columns=['object_id'])
for date_lwidth, date_rwidth in zip(date_lwidths, date_rwidths):
peak_df = corrected_set_df.\
merge(corrected_set_df.groupby('object_id').
apply(_get_ratsq_peak_mjd).
reset_index().
rename(columns={0: 'peak_mjd'}),
on='object_id', how='left')
peak_df = peak_df[
(peak_df.mjd <= peak_df.peak_mjd + date_rwidth) &
(peak_df.mjd >= peak_df.peak_mjd - date_lwidth)]
peak_aggregations = {
# 'passband': ['mean', 'std', 'var'],
# 'mjd': ['max', 'min', 'var'],
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', 'count', kurtosis,
diff_var, get_max_min_diff],
# 'abs_magnitude': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis,
# diff_var, get_max_min_diff],
'corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew',
diff_var, get_max_min_diff],
'flux_ratio_to_flux_err': ['min', 'max', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', 'mean', kurtosis, 'var', get_max_min_diff],
'flux_by_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'luminosity': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
# 'minused_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew'],
# 'normed_flux': ['mean', 'median', 'skew'],
# 'diff_flux_by_diff_mjd': ['min', 'max', 'var', ],
}
_fe_set_df = peak_df.groupby('object_id').agg({**peak_aggregations})
_fe_set_df.columns = pd.Index([f'ratsq-peak-{date_lwidth}-{date_rwidth}_' + e[0] + "_" + e[1] for e in _fe_set_df.columns.tolist()])
fe_set_df = fe_set_df.merge(_fe_set_df, on='object_id', how='left')
return fe_set_df
def fe_set_df_my_skew_kurt(corrected_set_df):
skew_df = corrected_set_df.groupby('object_id').\
apply(calc_flux_mjd_skewness).\
rename('my_skew')
skew_df = (skew_df * 1e40).reset_index()
kurt_df = corrected_set_df.groupby('object_id').\
apply(calc_flux_mjd_kurtosis).\
rename('my_kurt')
kurt_df = (kurt_df * 1e55).reset_index()
fe_set_df = skew_df.merge(kurt_df, on='object_id', how='left')
# detected type
det_skew_df = corrected_set_df.query('detected==1').groupby('object_id').\
apply(calc_flux_mjd_skewness).\
rename('det_my_skew')
det_skew_df = (det_skew_df * 1e40).reset_index()
det_kurt_df = corrected_set_df.query('detected==1').groupby('object_id').\
apply(calc_flux_mjd_kurtosis).\
rename('det_my_kurt')
det_kurt_df = (det_kurt_df * 1e55).reset_index()
fe_set_df = fe_set_df.merge(det_skew_df, on='object_id', how='left')
fe_set_df = fe_set_df.merge(det_kurt_df, on='object_id', how='left')
for passband in range(6):
band_df = corrected_set_df[corrected_set_df.passband == passband]
band_skew_df = band_df.\
groupby('object_id').\
apply(calc_flux_mjd_skewness).\
rename(f'band-{passband}_my_skew')
band_skew_df = (band_skew_df * 1e40).reset_index()
band_kurt_df = band_df.\
groupby('object_id').\
apply(calc_flux_mjd_kurtosis).\
rename(f'band-{passband}_my_kurt')
band_kurt_df = (band_kurt_df * 1e55).reset_index()
fe_set_df = fe_set_df.merge(band_skew_df, on='object_id', how='left')
fe_set_df = fe_set_df.merge(band_kurt_df, on='object_id', how='left')
return fe_set_df
def fe_set_df_deficits(corrected_set_df):
    det_mjd_diff = corrected_set_df[corrected_set_df['detected'] == 1].pivot_table('mjd', 'object_id', aggfunc=[min, max])
det_mjd_diff.columns = ['min_mjd', 'max_mjd']
    # add the observation gaps immediately before and after the detected == 1 window
mjd_diff_ = corrected_set_df[['object_id','mjd']].merge(right=det_mjd_diff, on=['object_id'], how='left')
max_mjd_bf_det1 = mjd_diff_[mjd_diff_.mjd < mjd_diff_.min_mjd].groupby('object_id')[['object_id','mjd', 'min_mjd']].max().rename(columns={'mjd': 'max_mjd_bf_det1'})
mjd_diff_bf_det1 = max_mjd_bf_det1['min_mjd'] - max_mjd_bf_det1['max_mjd_bf_det1']
mjd_diff_bf_det1 = mjd_diff_bf_det1.rename('mjd_diff_bf_det1').reset_index()
min_mjd_af_det1 = mjd_diff_[mjd_diff_.mjd > mjd_diff_.max_mjd].groupby('object_id')[['object_id','mjd', 'max_mjd']].min().rename(columns={'mjd': 'min_mjd_af_det1'})
mjd_diff_af_det1 = min_mjd_af_det1['min_mjd_af_det1'] - min_mjd_af_det1['max_mjd']
mjd_diff_af_det1 = mjd_diff_af_det1.rename('mjd_diff_af_det1').reset_index()
fe_set_df = mjd_diff_bf_det1.merge(mjd_diff_af_det1, on ='object_id', how='left')
fe_set_df['mjd_diff_ab_sum'] = fe_set_df['mjd_diff_af_det1'] + fe_set_df['mjd_diff_bf_det1']
return fe_set_df.set_index('object_id')
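# fe_set_df_deficits measures the observation gap immediately before the
# first detected == 1 epoch and after the last one; their sum is a crude
# proxy for how isolated the detected window is within the survey cadence.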
class featureCreatorSet(featureCreator):
def __init__(self, fe_set_df, set_res_df_name, load_dir, save_dir,
src_df_dict=None, logger=None, nthread=1):
super(featureCreatorSet, self).\
__init__(load_dir=load_dir,
save_dir=save_dir,
src_df_dict=src_df_dict,
logger=logger,
nthread=nthread)
self.fe_set_df = fe_set_df
self.set_res_df_name = set_res_df_name
def _load(self, ):
        pass
# def _fe_set_df(self, set_df):
# aggregations = {
# # 'passband': ['mean', 'std', 'var'],
# # 'mjd': ['max', 'min', 'var'],
# # 'mjd': [diff_mean, diff_max],
# # 'phase': [diff_mean, diff_max],
# 'flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', 'count', kurtosis],
# 'corrected_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew', ],
# 'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
# 'flux_ratio_to_flux_err': ['min', 'max', ],
# 'detected': ['mean', ],
# 'flux_ratio_sq': ['sum', 'skew', 'mean', kurtosis],
# 'flux_by_flux_ratio_sq': ['sum', 'skew', ],
# 'corrected_flux_ratio_sq': ['sum', 'skew', ],
# 'corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# # 'luminosity': ['median', 'var', 'skew', kurtosis],
# # 'minused_flux': ['min', 'max', 'mean', 'median',
# # 'std', 'var', 'skew'],
# # 'normed_flux': ['mean', 'median', 'skew'],
# # 'diff_flux_by_diff_mjd': ['min', 'max', 'var', ],
# }
#
# fe_set_df = set_df.groupby('object_id').agg({**aggregations})
# fe_set_df.columns = pd.Index(
# [e[0] + "_" + e[1] for e in fe_set_df.columns.tolist()])
#
# return fe_set_df
def _create_features(self):
set_df_name = [f'test_set_{i}_df' for i in range(62)]
set_dfs = [self.src_df_dict[f].copy() for f in set_df_name]
#set_dfs = [self.src_df_dict[f] for f in set_df_name]
with Pool(self.nthread) as p:
            self._log_print('start feature engineering ...')
#set_res_list = p.map(self._fe_set_df_base, set_dfs)
set_res_list = p.map(self.fe_set_df, set_dfs)
#set_res_list = p.map(MulHelper(self, '_fe_set_df'), set_dfs)
#set_res_list = p.apply(toapply, (self, '_fe_set_df', set_dfs))
p.close()
p.join()
set_res_df = pd.concat(set_res_list, axis=0)
gc.collect()
# set the result in df_dict
set_res_df.reset_index(inplace=True)
self.df_dict[self.set_res_df_name] = set_res_df
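# Note: Pool.map requires fe_set_df to be a picklable, module-level callable;
# the commented-out MulHelper/toapply lines above appear to be earlier
# workarounds for mapping bound methods.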
def fe_meta(meta_df):
# band feature engineerings
passbands = [0, 1, 2, 3, 4, 5]
for passband in passbands:
meta_df[f'band-{passband}_flux_count_ratio'] = \
meta_df[f'band-{passband}_flux_count'] / meta_df['flux_count']
meta_df[f'band-{passband}_std_upper_flux_count_ratio'] = \
meta_df[f'band-{passband}_std_upper_flux_count'] / meta_df['flux_count']
meta_df[f'band-{passband}_flux_ratio_sq_max_ratio'] = \
meta_df[f'band-{passband}_flux_ratio_sq_max'] / meta_df['flux_ratio_sq_max']
# starter type fe
lpb = passband
rpb = (lpb + 1) % 6
lMean = meta_df['band-{}_wmean'.format(lpb)]
rMean = meta_df['band-{}_wmean'.format(rpb)]
lstd = meta_df['band-{}_normed_std'.format(lpb)]
rstd = meta_df['band-{}_normed_std'.format(rpb)]
lamp = meta_df['band-{}_normed_amp'.format(lpb)]
ramp = meta_df['band-{}_normed_amp'.format(rpb)]
lmad = meta_df['band-{}_normed_mad'.format(lpb)]
rmad = meta_df['band-{}_normed_mad'.format(rpb)]
l1std = meta_df['band-{}_beyond_1std'.format(lpb)]
r1std = meta_df['band-{}_beyond_1std'.format(rpb)]
ldmgmmd = meta_df[f'band-{lpb}_detected_mjd_get_max_min_diff']
rdmgmmd = meta_df[f'band-{rpb}_detected_mjd_get_max_min_diff']
lskew = meta_df[f'band-{lpb}_flux_skew']
rskew = meta_df[f'band-{rpb}_flux_skew']
lkurt = meta_df[f'band-{lpb}_flux_kurtosis']
rkurt = meta_df[f'band-{rpb}_flux_kurtosis']
lq2575_rng = meta_df[f'band-{lpb}_flux_quantile2575_range']
rq2575_rng = meta_df[f'band-{rpb}_flux_quantile2575_range']
lmax = meta_df['band-{}_flux_max'.format(lpb)]
rmax = meta_df['band-{}_flux_max'.format(rpb)]
lratsqmax = meta_df['band-{}_flux_ratio_sq_max'.format(lpb)]
rratsqmax = meta_df['band-{}_flux_ratio_sq_max'.format(rpb)]
rcorrmax = meta_df['band-{}_corrected_flux_max'.format(rpb)]
lcorrmax = meta_df['band-{}_corrected_flux_max'.format(lpb)]
rzcorrmax = meta_df['band-{}_z_corrected_flux_max'.format(rpb)]
lzcorrmax = meta_df['band-{}_z_corrected_flux_max'.format(lpb)]
mean_diff = -2.5 * np.log10(lMean / rMean)
std_diff = lstd - rstd
amp_diff = lamp - ramp
mad_diff = lmad-rmad
beyond_diff = l1std-r1std
dmgmmd_diff = ldmgmmd - rdmgmmd
skew_diff = lskew - rskew
kurt_diff = lkurt - rkurt
q2575_rng_diff = lq2575_rng - rq2575_rng
max_diff = lmax - rmax
ratsqmax_diff = lratsqmax - rratsqmax
corrmax_diff = lcorrmax - rcorrmax
zcorrmax_diff = lzcorrmax - rzcorrmax
ratsqmax_diff_log = -2.5 * np.log10(lratsqmax/rratsqmax)
mean_diff_colname = '{}_minus_{}_wmean'.format(lpb, rpb)
std_diff_colname = '{}_minus_{}_std'.format(lpb, rpb)
amp_diff_colname = '{}_minus_{}_amp'.format(lpb, rpb)
mad_diff_colname = '{}_minus_{}_mad'.format(lpb, rpb)
beyond_diff_colname = '{}_minus_{}_beyond'.format(lpb, rpb)
dmgmmd_diff_colname = f'{lpb}_minus_{rpb}_dmgmmd'
skew_diff_colname = f'{lpb}_minus_{rpb}_skew'
kurt_diff_colname = f'{lpb}_minus_{rpb}_kurt'
q2575_rng_diff_colname = f'{lpb}_minus_{rpb}_q2575_rng'
max_diff_colname = f'{lpb}_minus_{rpb}_max'
ratsqmax_diff_colname = f'{lpb}_minus_{rpb}_ratsqmax'
ratsqmax_diff_log_colname = f'{lpb}_minus_{rpb}_ratsqmax_log'
corrmax_diff_colname = f'{lpb}_minus_{rpb}_corrmax_diff'
zcorrmax_diff_colname = f'{lpb}_minus_{rpb}_zcorrmax_diff'
meta_df[mean_diff_colname] = mean_diff
meta_df[std_diff_colname] = std_diff
        meta_df[amp_diff_colname] = amp_diff
        meta_df[mad_diff_colname] = mad_diff
        meta_df[beyond_diff_colname] = beyond_diff
meta_df[dmgmmd_diff_colname] = dmgmmd_diff
meta_df[skew_diff_colname] = skew_diff
meta_df[kurt_diff_colname] = kurt_diff
meta_df[q2575_rng_diff_colname] = q2575_rng_diff
meta_df[max_diff_colname] = max_diff
meta_df[ratsqmax_diff_colname] = ratsqmax_diff
meta_df[ratsqmax_diff_log_colname] = ratsqmax_diff_log
meta_df[corrmax_diff_colname] = corrmax_diff
meta_df[zcorrmax_diff_colname] = zcorrmax_diff
# non band feature engineering
meta_df['flux_diff'] = meta_df['flux_max'] - meta_df['flux_min']
meta_df['flux_dif2'] = (meta_df['flux_max'] - meta_df['flux_min'])\
/ meta_df['flux_mean']
meta_df['flux_w_mean'] = meta_df['flux_by_flux_ratio_sq_sum'] / \
meta_df['flux_ratio_sq_sum']
meta_df['flux_dif3'] = (meta_df['flux_max'] - meta_df['flux_min'])\
/ meta_df['flux_w_mean']
meta_df['corrected_flux_diff'] = meta_df['corrected_flux_max'] - meta_df['corrected_flux_min']
meta_df['corrected_flux_dif2'] = (meta_df['corrected_flux_max'] - meta_df['corrected_flux_min'])\
/ meta_df['corrected_flux_mean']
meta_df['corrected_flux_w_mean'] = meta_df['corrected_flux_by_flux_ratio_sq_sum'] / \
meta_df['corrected_flux_ratio_sq_sum']
meta_df['corrected_flux_dif3'] = (meta_df['corrected_flux_max'] - meta_df['corrected_flux_min'])\
/ meta_df['corrected_flux_w_mean']
meta_df['z_corrected_flux_diff'] = meta_df['z_corrected_flux_max'] - meta_df['z_corrected_flux_min']
meta_df['z_corrected_flux_dif2'] = (meta_df['z_corrected_flux_max'] - meta_df['z_corrected_flux_min'])\
/ meta_df['z_corrected_flux_mean']
meta_df['z_corrected_flux_w_mean'] = meta_df['z_corrected_flux_by_flux_ratio_sq_sum'] / \
meta_df['z_corrected_flux_ratio_sq_sum']
meta_df['z_corrected_flux_dif3'] = (meta_df['z_corrected_flux_max'] - meta_df['z_corrected_flux_min'])\
/ meta_df['z_corrected_flux_w_mean']
meta_df['std_upper_rat'] = meta_df['std_upper_flux_count'] / meta_df['flux_count']
passband_flux_maxes = \
['band-{}_flux_max'.format(i) for i in passbands]
# meta_df['passband_flux_maxes_var'] = \
# meta_df[passband_flux_maxes].var(axis=1)
for passband_flux_max in passband_flux_maxes:
meta_df[passband_flux_max + '_ratio_to_the_max'] = \
meta_df[passband_flux_max] / meta_df['flux_max']
# passband_maxes = meta_df[passband_flux_maxes].values
# passband_maxes_argmaxes = np.argmax(passband_maxes, axis=1)
# meta_df['passband_maxes_argmaxes'] = passband_maxes_argmaxes
# meta_df[passband_flux_max + '_from_the_max'] = \
# meta_df['flux_max'] - meta_df[passband_flux_max]
# passband_flux_maxes_from_the_max = \
# ['band-{}_flux_max_from_the_max'.format(i) for i in passbands]
# passband_flux_maxes_from_the_max_value = meta_df[passband_flux_maxes_from_the_max].values
# passband_flux_maxes_from_the_max_value.sort(axis=1)
# meta_df['2nd_passband_flux_max_diff'] = passband_flux_maxes_from_the_max_value[:,1]
# meta_df['3rd_passband_flux_max_diff'] = passband_flux_maxes_from_the_max_value[:,2]
# meta_df['2nd_passband_flux_max_diff_rat'] = meta_df['2nd_passband_flux_max_diff'] / meta_df.flux_max
# meta_df['3rd_passband_flux_max_diff_rat'] = meta_df['3rd_passband_flux_max_diff'] / meta_df.flux_max
passband_flux_mins = \
['band-{}_flux_min'.format(i) for i in passbands]
meta_df['passband_flux_min_var'] = \
meta_df[passband_flux_mins].var(axis=1)
# for passband_flux_min in passband_flux_mins:
# meta_df[passband_flux_min + '_ratio_to_the_min'] = \
# meta_df[passband_flux_min] / meta_df['flux_min']
passband_flux_means = \
['band-{}_flux_mean'.format(i) for i in passbands]
meta_df['passband_flux_means_var'] = \
meta_df[passband_flux_means].var(axis=1)
passband_flux_counts = \
['band-{}_flux_count_ratio'.format(i) for i in passbands]
meta_df['passband_flux_counts_var'] = \
meta_df[passband_flux_counts].var(axis=1)
passband_detected_means = \
['band-{}_detected_mean'.format(i) for i in passbands]
meta_df['passband_detected_means_var'] = \
meta_df[passband_detected_means].var(axis=1)
# passband_flux_ratio_sq_sum = \
# ['band-{}_flux_ratio_sq_sum'.format(i) for i in passbands]
# meta_df['passband_flux_ratio_sq_sum_var'] = \
# meta_df[passband_flux_ratio_sq_sum].var(axis=1)
# passband_flux_ratio_sq_skew = \
# ['band-{}_flux_ratio_sq_skew'.format(i) for i in passbands]
# meta_df['passband_flux_ratio_sq_skew_var'] = \
# meta_df[passband_flux_ratio_sq_skew].var(axis=1)
    # the variance of the per-band missing-data rate and similar stats also look promising
passband_flux_vars = \
['band-{}_flux_var'.format(i) for i in passbands]
passband_flux_diffs = \
['band-{}_flux_get_max_min_diff'.format(i) for i in passbands]
meta_df['band_flux_diff_max'] = meta_df[passband_flux_diffs].max(axis=1)
meta_df['band_flux_diff_min'] = meta_df[passband_flux_diffs].min(axis=1)
meta_df['band_flux_diff_diff'] = meta_df['band_flux_diff_max'] - meta_df['band_flux_diff_min']
meta_df['band_flux_diff_diff_rat'] = meta_df['band_flux_diff_diff'] / meta_df['band_flux_diff_max']
meta_df['band_flux_max_min_rat'] = meta_df['band_flux_diff_min'] / meta_df['band_flux_diff_max']
meta_df['internal'] = meta_df.hostgal_photoz == 0.
meta_df['lumi_dist'] = 10**((meta_df.distmod+5)/5)
# peak around features
meta_df['peak_kurt_14to30'] = meta_df['peak-14-14_flux_kurtosis'] - meta_df['peak-30-30_flux_kurtosis']
meta_df['peak_kurt_14to90'] = meta_df['peak-14-14_flux_kurtosis'] - meta_df['peak-90-90_flux_kurtosis']
meta_df['peak_kurt_30to90'] = meta_df['peak-30-30_flux_kurtosis'] - meta_df['peak-90-90_flux_kurtosis']
meta_df['peak_skew_14to30'] = meta_df['peak-14-14_flux_skew'] - meta_df['peak-30-30_flux_skew']
meta_df['peak_skew_14to90'] = meta_df['peak-14-14_flux_skew'] - meta_df['peak-90-90_flux_skew']
meta_df['peak_skew_30to90'] = meta_df['peak-30-30_flux_skew'] - meta_df['peak-90-90_flux_skew']
return meta_df
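# In fe_meta, the -2.5 * np.log10(ratio) transforms express band-to-band flux
# ratios as astronomical magnitude differences, i.e. colors.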
class featureCreatorMeta(featureCreator):
def __init__(self, fe_set_df, set_res_df_name, load_dir, save_dir,
src_df_dict=None, logger=None, nthread=1, train=True):
super(featureCreatorMeta, self).\
__init__(load_dir=load_dir,
save_dir=save_dir,
src_df_dict=src_df_dict,
logger=logger,
nthread=nthread)
self.fe_set_df = fe_set_df
self.set_res_df_name = set_res_df_name
self.train = train
if self.train:
self.meta_file = '/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set_metadata.csv'
else:
self.meta_file = '/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/test_set_metadata.csv'
def _load(self):
path_dict = {
# 'meta_features': self.meta_file,
'set_base_features': self.save_dir + 'set_base_features.ftr',
'set_passband_std_upper_features': self.save_dir + 'set_passband_std_upper_features.ftr',
'set_passband_detected_features': self.save_dir + 'set_passband_detected_features.ftr',
'set_detected_features': self.save_dir + 'set_detected_features.ftr',
'set_std_upper_and_lower_features': self.save_dir + 'set_std_upper_and_lower_features.ftr',
'set_passband_features': self.save_dir + 'set_passband_features.ftr',
'set_tsfresh_features': self.save_dir + 'set_tsfresh_features.ftr',
'set_peak_around_features': self.save_dir + 'set_peak_around_features.ftr',
'set_ratsq_peak_around_features': self.save_dir + 'set_ratsq_peak_around_features.ftr',
'set_skkt_features': self.save_dir + 'set_skkt_features.ftr',
'set_deficits_features': self.save_dir + 'set_deficits_features.ftr',
}
self._load_dfs_from_paths(path_dict=path_dict)
self.src_df_dict['meta_features'] = pd.read_hdf('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/kyle_final_augment.h5', 'meta')
self.src_df_dict['merged_meta_df'] = self.src_df_dict['meta_features']
self._log_print('merging meta dfs ...')
for key in tqdm(self.src_df_dict.keys()):
print(key)
if key == 'meta_features' or key == 'merged_meta_df':
continue
self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
merge(self.src_df_dict[key], on='object_id', how='left')
# del self.src_df_dict[key]
gc.collect()
if self.train:
okumura_df1 = pd.read_pickle('../lcfit/LCfit_feature_train_v4_20181205.pkl.gz', compression='gzip')
self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
merge(okumura_df1, on='object_id', how='left')
del okumura_df1
gc.collect()
#okumura_df2 = pd.read_pickle('../lcfit/train_v2_20181213.pkl.gz', compression='gzip')
#self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
# merge(okumura_df2, on='object_id', how='left')
#del okumura_df2
#gc.collect()
okumura_df2 = pd.read_pickle('../lcfit/the_last_train_okumurasan_feats.pkl.gz', compression='gzip')
okumura_df2['dmax_g_std'] = okumura_df2[['c42_g_z0_dmax', 'c42_g_z1_dmax', 'c42_g_z2_dmax', 'c42_g_z3_dmax']].std(axis=1)
okumura_df2['dmax_i_std'] = okumura_df2[['c42_i_z0_dmax', 'c42_i_z1_dmax', 'c42_i_z2_dmax', 'c42_i_z3_dmax']].std(axis=1)
okumura_df2['dmax_z_std'] = okumura_df2[['c42_z_z0_dmax', 'c42_z_z1_dmax', 'c42_z_z2_dmax', 'c42_z_z3_dmax']].std(axis=1)
okumura_df2['dmax_r_std'] = okumura_df2[['c42_r_z0_dmax', 'c42_r_z1_dmax', 'c42_r_z2_dmax', 'c42_r_z3_dmax']].std(axis=1)
okumura_df2['dmax_g_mean'] = okumura_df2[['c42_g_z0_dmax', 'c42_g_z1_dmax', 'c42_g_z2_dmax', 'c42_g_z3_dmax']].mean(axis=1)
okumura_df2['dmax_i_mean'] = okumura_df2[['c42_i_z0_dmax', 'c42_i_z1_dmax', 'c42_i_z2_dmax', 'c42_i_z3_dmax']].mean(axis=1)
okumura_df2['dmax_z_mean'] = okumura_df2[['c42_z_z0_dmax', 'c42_z_z1_dmax', 'c42_z_z2_dmax', 'c42_z_z3_dmax']].mean(axis=1)
okumura_df2['dmax_r_mean'] = okumura_df2[['c42_r_z0_dmax', 'c42_r_z1_dmax', 'c42_r_z2_dmax', 'c42_r_z3_dmax']].mean(axis=1)
self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
merge(okumura_df2, on='object_id', how='left')
del okumura_df2
gc.collect()
else:
okumura_df1 = pd.read_pickle('../lcfit/LCfit_feature_test_v4_20181205.pkl.gz', compression='gzip')
self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
merge(okumura_df1, on='object_id', how='left')
del okumura_df1
#okumura_df2 = pd.read_pickle('../lcfit/test_v2_20181213.pkl.gz', compression='gzip')
#self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
# merge(okumura_df2, on='object_id', how='left')
#del okumura_df2
#gc.collect()
okumura_df2 = pd.read_pickle('../lcfit/the_last_test_okumurasan_feats.pkl.gz', compression='gzip')
okumura_df2['dmax_g_std'] = okumura_df2[['c42_g_z0_dmax', 'c42_g_z1_dmax', 'c42_g_z2_dmax', 'c42_g_z3_dmax']].std(axis=1)
okumura_df2['dmax_i_std'] = okumura_df2[['c42_i_z0_dmax', 'c42_i_z1_dmax', 'c42_i_z2_dmax', 'c42_i_z3_dmax']].std(axis=1)
okumura_df2['dmax_z_std'] = okumura_df2[['c42_z_z0_dmax', 'c42_z_z1_dmax', 'c42_z_z2_dmax', 'c42_z_z3_dmax']].std(axis=1)
okumura_df2['dmax_r_std'] = okumura_df2[['c42_r_z0_dmax', 'c42_r_z1_dmax', 'c42_r_z2_dmax', 'c42_r_z3_dmax']].std(axis=1)
okumura_df2['dmax_g_mean'] = okumura_df2[['c42_g_z0_dmax', 'c42_g_z1_dmax', 'c42_g_z2_dmax', 'c42_g_z3_dmax']].mean(axis=1)
okumura_df2['dmax_i_mean'] = okumura_df2[['c42_i_z0_dmax', 'c42_i_z1_dmax', 'c42_i_z2_dmax', 'c42_i_z3_dmax']].mean(axis=1)
okumura_df2['dmax_z_mean'] = okumura_df2[['c42_z_z0_dmax', 'c42_z_z1_dmax', 'c42_z_z2_dmax', 'c42_z_z3_dmax']].mean(axis=1)
okumura_df2['dmax_r_mean'] = okumura_df2[['c42_r_z0_dmax', 'c42_r_z1_dmax', 'c42_r_z2_dmax', 'c42_r_z3_dmax']].mean(axis=1)
self.src_df_dict['merged_meta_df'] = self.src_df_dict['merged_meta_df'].\
merge(okumura_df2, on='object_id', how='left')
del okumura_df2
gc.collect()
def _create_features(self):
object_ids = self.src_df_dict['merged_meta_df'].object_id.unique()
meta_dfs = [self.src_df_dict['merged_meta_df'][
self.src_df_dict['merged_meta_df'].object_id.isin(obj_id_grp)]
for obj_id_grp in np.array_split(object_ids, 62)]
with Pool(self.nthread) as p:
            self._log_print('start feature engineering ...')
set_res_list = p.map(self.fe_set_df, meta_dfs)
p.close()
p.join()
set_res_df = pd.concat(set_res_list, axis=0)
gc.collect()
# set the result in df_dict
set_res_df.reset_index(inplace=True, drop=True)
self._log_print(set_res_df.columns.tolist())
self.df_dict[self.set_res_df_name] = set_res_df
class featureCreatorTsfresh(featureCreator):
def __init__(self, load_dir, save_dir, src_df_dict=None, logger=None, nthread=1, train=True):
super(featureCreatorTsfresh, self).\
__init__(load_dir=load_dir,
save_dir=save_dir,
src_df_dict=src_df_dict,
logger=logger,
nthread=nthread)
self.train = train
def _load(self):
if self.train:
path_dict = {
'set_df': self.load_dir + 'training_set.csv'}
#'set_df': self.load_dir + 'training_set.csv'}
self.src_df_dict['set_df'] = pd.read_hdf('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/kyle_final_augment.h5', 'df')
else:
path_dict = {'set_df': self.load_dir + 'test_set.fth'}
self._load_dfs_from_paths(path_dict=path_dict)
def _get_tsfresh_feats(self, set_df, nthread):
# tsfresh features
fcp = {
'flux': {
'longest_strike_above_mean': None,
'longest_strike_below_mean': None,
'mean_change': None,
'mean_abs_change': None,
'length': None,
# 'number_peaks': [{'n': 1}],
# 'fft_coefficient': [
# {'coeff': 0, 'attr': 'abs'},
# {'coeff': 1, 'attr': 'abs'}
# ],
# 'binned_entropy': [{'max_bin': 20}],
# 'agg_linear_trend': None,
# 'number_cwt_peaks': None,
},
'flux_by_flux_ratio_sq': {
'longest_strike_above_mean': None,
'longest_strike_below_mean': None,
},
'mjd': {
'maximum': None,
'minimum': None,
'mean_change': None,
'mean_abs_change': None,
},
}
        # tsfresh extraction: one feature set per (passband, flux) series
fe_set_df = extract_features(
set_df,
column_id='object_id',
column_sort='mjd',
column_kind='passband',
            column_value='flux',
            default_fc_parameters=fcp['flux'],
n_jobs=nthread)
return fe_set_df
def _create_features(self):
set_res_df = self._get_tsfresh_feats(self.src_df_dict['set_df'], self.nthread).\
reset_index().\
rename(columns={'id': 'object_id'})
# set the result in df_dict
set_res_df.reset_index(inplace=True, drop=True)
self.df_dict['set_tsfresh_features'] = set_res_df
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,686 | guchio3/kaggle-plasticc | refs/heads/master | /utils/linear_polation_exp.py | import numpy as np
import pandas as pd
import pickle
test_set_metadata_df = pd.read_csv('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/test_set_metadata.csv')
object_ids = test_set_metadata_df.object_id
with open('../temp/Booster_weight-multi-logloss-0.612193_2018-11-11-04-49-01_res.csv', 'rb') as fin:
test_reses = pickle.load(fin)
lin_pure_res = np.clip(test_reses[-1], 10**(-15), 1 - 10**(-15))
lin_pure_preds_99 = np.ones((lin_pure_res.shape[0]))
for i in range(lin_pure_res.shape[1]):
lin_pure_preds_99 *= (1 - lin_pure_res[:, i])
lin_pure_preds_99 = 0.14 * lin_pure_preds_99 / np.mean(lin_pure_preds_99)
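# class_99 heuristic: treat the unknown class as "none of the known classes",
# i.e. the product of (1 - p_i) over the predicted classes, then rescale so
# its mean matches an assumed ~0.14 prior for class 99.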
lin_pure_res_df = pd.DataFrame(lin_pure_res, columns=[
'class_6',
'class_15',
'class_16',
'class_42',
'class_52',
'class_53',
'class_62',
'class_64',
'class_65',
'class_67',
'class_88',
'class_90',
'class_92',
'class_95',
])
lin_pure_res_df['class_99'] = lin_pure_preds_99
lin_pure_res_df['object_id'] = object_ids
lin_pure_res_df.to_csv('../temp/Booster_weight-multi-logloss-0.612193_2018-11-11-04-49-01_res_lin_pure.csv', index=False)
all_mean_res = np.clip(np.mean(test_reses, axis=0), 10**(-15), 1 - 10**(-15))
all_mean_preds_99 = np.ones((all_mean_res.shape[0]))
for i in range(all_mean_res.shape[1]):
all_mean_preds_99 *= (1 - all_mean_res[:, i])
all_mean_preds_99 = 0.14 * all_mean_preds_99 / np.mean(all_mean_preds_99)
all_mean_res_df = pd.DataFrame(all_mean_res, columns=[
'class_6',
'class_15',
'class_16',
'class_42',
'class_52',
'class_53',
'class_62',
'class_64',
'class_65',
'class_67',
'class_88',
'class_90',
'class_92',
'class_95',
])
all_mean_res_df['class_99'] = all_mean_preds_99
all_mean_res_df['object_id'] = object_ids
all_mean_res_df.to_csv('../temp/Booster_weight-multi-logloss-0.612193_2018-11-11-04-49-01_res_all_mean.csv', index=False)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,687 | guchio3/kaggle-plasticc | refs/heads/master | /tools/model_io.py | import pickle
def load_models(filename):
with open(filename, 'rb') as fin:
models = pickle.load(fin)
return models
def save_models(models, filename):
with open(filename, 'wb') as fout:
pickle.dump(models, fout)
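# Example round trip (hypothetical path):
#   save_models(trained_models, './trained_models/example.pkl')
#   trained_models = load_models('./trained_models/example.pkl')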
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,688 | guchio3/kaggle-plasticc | refs/heads/master | /tools/my_logging.py | import os
from logging import Formatter, StreamHandler, FileHandler, DEBUG
def logInit(logger, log_dir, log_filename):
log_fmt = Formatter('%(asctime)s %(name)s \
%(lineno)d [%(levelname)s] [%(funcName)s] %(message)s ')
handler = StreamHandler()
handler.setLevel('INFO')
handler.setFormatter(log_fmt)
logger.addHandler(handler)
handler = FileHandler(log_dir + log_filename, 'a')
handler.setLevel(DEBUG)
handler.setFormatter(log_fmt)
logger.setLevel(DEBUG)
logger.addHandler(handler)
return logger
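# Typical usage (hypothetical names): the console handler emits INFO and
# above, while the file handler captures everything at DEBUG:
#   logger = logInit(getLogger(__name__), './log/', 'train.log')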
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,689 | guchio3/kaggle-plasticc | refs/heads/master | /softmax_train_using_features.py | import numpy as np
import pandas as pd
from sklearn.model_selection import StratifiedKFold
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import confusion_matrix
from imblearn.over_sampling import SMOTE, RandomOverSampler
import lightgbm
from logging import getLogger
from tqdm import tqdm
import argparse
import datetime
import pickle
import warnings
from matplotlib import pyplot as plt
import seaborn as sns
from tools.my_logging import logInit
from tools.feature_tools import feature_engineering
from tools.objective_function import weighted_multi_logloss, lgb_multi_weighted_logloss, wloss_objective, wloss_metric, softmax, calc_team_score, wloss_metric_for_zeropad
from tools.model_io import save_models, load_models
from tools.fold_resampling import get_fold_resampling_dict
np.random.seed(71)
np.set_printoptions(threshold=np.inf)
pd.set_option('display.max_columns', 1000)
pd.set_option('display.max_rows', 1000)
warnings.simplefilter('ignore', RuntimeWarning)
warnings.simplefilter('ignore', UserWarning)
plt.switch_backend('agg')
BASE_DIR = '/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/'
#BASE_DIR = '/Users/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/'
FOLD_NUM = 5
SAMPLING_LOWER = 60
# SAMPLING_LOWER = 10
SAMPLING_LOWER_RATE = 2.
def parse_args():
parser = argparse.ArgumentParser(
prog='train.py',
usage='ex) python train.py --with_test',
description='easy explanation',
epilog='end',
add_help=True,
)
parser.add_argument('-w', '--with_test',
help='flg to specify test type.',
action='store_true',
default=False)
parser.add_argument('-n', '--nthread',
                        help='number of available threads.',
type=int,
required=True)
parser.add_argument('-z', '--specz',
help='flg to use specz',
action='store_true',
default=False)
args = parser.parse_args()
return args
def get_params(args):
PARAMS = {
# 'objective': wloss_objective,
'objective': 'multiclass',
# 'metric': ['multi_logloss', ],
'metric': 'None',
'num_class': 14,
'nthread': args.nthread,
'learning_rate': 0.4,
# 'learning_rate': 0.02,
# 'num_leaves': 32,
'max_depth': 3,
'subsample': .8,
'colsample_bytree': .7,
'reg_alpha': .01,
'reg_lambda': .01,
'min_split_gain': 0.01,
'min_child_weight': 200,
# 'n_estimators': 10000,
'verbose': -1,
'silent': -1,
'random_state': 71,
'seed': 71,
# 'early_stopping_rounds': 100,
# 'min_data_in_leaf': 30,
'max_bin': 20,
# 'min_data_in_leaf': 300,
# 'bagging_fraction': 0.1,
'bagging_freq': 1,
}
return PARAMS
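# Note: although 'objective' is set to 'multiclass', the lightgbm.train calls
# below pass a custom fobj (wloss_objective) and feval (wloss_metric), which
# take precedence over the built-in objective and metric.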
# Display/plot feature importance
def display_importances(feature_importance_df_,
filename='importance_application'):
# cols = feature_importance_df_[["feature",
# "importance"]].groupby("feature").mean().sort_values(by="importance",
# ascending=False).index
csv_df = feature_importance_df_[["feature", "importance"]].groupby(
"feature").agg({'importance': ['mean', 'std']})
csv_df.columns = pd.Index(
[e[0] + "_" + e[1].upper()
for e in csv_df.columns.tolist()])
csv_df['importance_RAT'] = csv_df['importance_STD'] / \
csv_df['importance_MEAN']
csv_df.sort_values(
by="importance_MEAN",
ascending=False).to_csv(
filename +
'.csv')
# best_features = feature_importance_df_.loc[feature_importance_df_.feature.isin(cols)]
# plt.figure(figsize=(8, 10))
# sns.barplot(x="importance", y="feature", data=best_features.sort_values(by="importance", ascending=False))
# plt.title('LightGBM Features (avg over folds)')
# plt.tight_layout()
# plt.savefig(filename + '.png')
def save_importance(df, filename):
df.set_index('feature', inplace=True)
imp_mean = df.mean(axis=1)
imp_std = df.std(axis=1)
df['importance_mean'] = imp_mean
df['importance_std'] = imp_std
df['importance_cov'] = df['importance_std'] / df['importance_mean']
df.sort_values(by="importance_cov", ascending=True).to_csv(filename[:-4] + '.csv')
df.reset_index(inplace=True)
plt.figure(figsize=(8, 30))
sns.barplot(x="importance_mean", y="feature", data=df.sort_values(by="importance_mean", ascending=False))
plt.title('LightGBM Features (avg over folds)')
plt.tight_layout()
plt.savefig(filename)
def plt_confusion_matrics():
    pass
def main(args, features):
FEATURES_TO_USE = features
#FEATURES_TO_USE = pd.read_csv('./importances/Booster_weight-multi-logloss-0.521646_2018-12-17-13-29-29_importance.csv').sort_values('importance_mean', ascending=False).head(150).feature.tolist()# + ['object_id']
FEATURES_TO_USE = pd.read_csv('./importances/Booster_weight-multi-logloss-0.528846_2018-12-17-06-30-21_importance.csv').sort_values('importance_mean', ascending=False).head(220).feature.tolist()# + ['object_id']
##### FEATURES_TO_USE = pd.read_csv('./importances/Booster_weight-multi-logloss-0.534367_2018-12-15-18-49-06_importance.csv').sort_values('importance_mean', ascending=False).head(165).feature.tolist()# + ['object_id']
# FEATURES_TO_USE = pd.read_csv('./importances/Booster_weight-multi-logloss-0.534367_2018-12-15-18-49-06_importance.csv').head(165).feature.tolist()# + ['object_id']
logger = getLogger(__name__)
logInit(logger, log_dir='./log/', log_filename='train.log')
logger.info(
'''
start main, the args settings are ...
--with_test : {}
'''.format(args.with_test))
start_time = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
logger.info('start training, the starting time is {}'.format(start_time))
PARAMS = get_params(args)
##### logger.info('loading training_set.csv')
##### training_set_df = pd.read_csv(
##### BASE_DIR + 'training_set.csv')
##### logger.info('loading training_set_metadata.csv')
##### training_set_metadata_df = pd.read_csv(
##### BASE_DIR + 'training_set_metadata.csv')
# training_set_metadata_df =
# training_set_metadata_df[training_set_metadata_df.ddf == 1]
##### logger.info('start feagture engineering')
##### train_df = feature_engineering(
##### training_set_df,
##### training_set_metadata_df,
##### nthread=args.nthread,
##### logger=logger)
logger.info('loading train_df ...')
train_df = pd.read_feather('./features/train/meta_features.ftr')
#with open('./lcfit/LCfit_features_train_20181129.pkl', 'rb') as fin:
# train_df = train_df.merge(pickle.load(fin), on='object_id', how='left')
#train_df.drop('object_id', axis=1, inplace=True)
train_df = train_df[FEATURES_TO_USE + ['target']]
    # lgbm will not recognise the targets without label encoding
    # (it would be nice if smaller classes got the smaller labels, though...)
le = LabelEncoder()
le.fit(train_df['target'].values)
x_train = train_df.drop('target', axis=1).values
y_train = le.transform(train_df.target)
train_set = lightgbm.Dataset(
data=train_df.drop('target', axis=1).values,
label=le.transform(train_df['target'].values),
)
skf = StratifiedKFold(n_splits=FOLD_NUM, shuffle=True, random_state=71)
# folds = skf.split(
# train_df.drop('target', axis=1), le.transform(train_df.target))
folds = skf.split(x_train, y_train)
logger.info('the shape of x_train : {}'.format(x_train.shape))
# logger.info('the shape of train_df : {}'.format(train_df.shape))
logger.debug('the cols of train_df : {}'.
format(train_df.drop('target', axis=1).columns.tolist()))
# categotical_features = ['passband_maxes_argmaxes', ]
# categorical_features_idx = np.argwhere(train_df.drop('target', axis=1).columns == 'passband_maxes_argmaxes')[0]
# logger.debug('categorical features are : {}'.format(categotical_features))
# logger.debug('categorical features indexes are : {}'.format(categotical_features))
# PARAMS['categorical_feature'] = categorical_features_idx
if False: # args.with_test:
cv_hist = lightgbm.cv(
params=PARAMS,
folds=folds,
train_set=train_set,
nfold=FOLD_NUM,
verbose_eval=100,
feval=lgb_multi_weighted_logloss,
)
logger.info('best_scores : {}'.format(
np.min(cv_hist['multi_logloss-mean'])))
logger.debug(cv_hist)
elif False:
best_scores = []
trained_models = []
x_train = train_df.drop('target', axis=1).values
y_train = train_df['target'].values
train_columns = train_df.drop('target', axis=1).columns
feature_importance_df = pd.DataFrame()
i = 1
for trn_idx, val_idx in tqdm(list(folds)):
x_trn, x_val = x_train[trn_idx], x_train[val_idx]
y_trn, y_val = y_train[trn_idx], y_train[val_idx]
lgb = lightgbm.LGBMClassifier(**PARAMS)
lgb.fit(x_trn, y_trn,
eval_set=[(x_trn, y_trn), (x_val, y_val)],
verbose=100,
eval_metric=lgb_multi_weighted_logloss,
# eval_metric=weighted_multi_logloss,
# eval_metric='multi_logloss',
)
# logger.info('best_itr : {}'.format(lgb.best_iteration_))
logger.info('best_scores : {}'.format(lgb.best_score_))
best_scores.append(lgb.best_score_['valid_1']['wloss'])
trained_models.append(lgb)
fold_importance_df = pd.DataFrame()
fold_importance_df["feature"] = train_columns
fold_importance_df["importance"] = lgb.feature_importances_
fold_importance_df["fold"] = i
feature_importance_df = pd.concat(
[feature_importance_df, fold_importance_df], axis=0)
i += 1
else:
best_scores = []
team_scores = []
zeropad_scores = []
val_pred_score_zeropads = []
trained_models = []
best_iterations = []
oof = []
x_train = train_df.drop('target', axis=1).values
y_train = le.transform(train_df['target'].values)
train_columns = train_df.drop('target', axis=1).columns
distmod_col = np.where(train_columns == 'distmod')[0]
feature_importance_df = pd.DataFrame()
feature_importance_df['feature'] = train_columns
conf_y_true = []
conf_y_pred = []
i = 1
for trn_idx, val_idx in tqdm(list(folds)):
x_trn, x_val = x_train[trn_idx], x_train[val_idx]
y_trn, y_val = y_train[trn_idx], y_train[val_idx]
fold_resampling_dict = \
get_fold_resampling_dict(
y_trn,
logger,
SAMPLING_LOWER,
SAMPLING_LOWER_RATE)
ros = RandomOverSampler(
ratio=fold_resampling_dict,
random_state=71)
x_trn, y_trn = ros.fit_sample(x_trn, y_trn)
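            # oversample only the training fold so the validation fold keeps
            # its original class balance; the ratio dict presumably upsamples
            # classes below SAMPLING_LOWER (see get_fold_resampling_dict)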
train_dataset = lightgbm.Dataset(x_trn, y_trn)
valid_dataset = lightgbm.Dataset(x_val, y_val)
booster = lightgbm.train(
PARAMS.copy(), train_dataset,
num_boost_round=2000,
fobj=wloss_objective,
feval=wloss_metric,
valid_sets=[train_dataset, valid_dataset],
verbose_eval=100,
early_stopping_rounds=100,
)
logger.debug('valid info : {}'.format(booster.best_score))
logger.info('best score : {}'.format(booster.best_score['valid_1']['wloss']))
logger.info('best iteration : {}'.format(booster.best_iteration))
best_scores.append(booster.best_score['valid_1']['wloss'])
best_iterations.append(booster.best_iteration)
trained_models.append(booster)
fold_importance_df = pd.DataFrame()
fold_importance_df["feature"] = train_columns
fold_importance_df["importance_{}".format(i)] = booster.feature_importance('gain')
feature_importance_df = feature_importance_df.merge(fold_importance_df, on='feature', how='left')
#feature_importance_df = pd.concat(
# [feature_importance_df, fold_importance_df], axis=0)
val_pred_score = softmax(booster.predict(x_val, raw_score=False))
val_pred_score_zeropad = booster.predict(x_val, raw_score=False)
oof.append([val_pred_score_zeropad, y_val, val_idx])
gal_cols = [0, 2, 5, 8, 12]
ext_gal_cols = [1, 3, 4, 6, 7, 9, 10, 11, 13]
gal_rows = np.where(np.isnan(np.array(x_val[:, distmod_col], dtype=float)))[0]
ext_gal_rows = np.where(~np.isnan(np.array(x_val[:, distmod_col], dtype=float)))[0]
#val_pred_score_zeropad.loc[ext_gal_rows, gal_cols] = 0.
#val_pred_score_zeropad.loc[gal_rows, ext_gal_cols] = 0.
zeropad_score = wloss_metric_for_zeropad(
val_pred_score_zeropad, valid_dataset,
gal_cols=gal_cols, ext_gal_cols=ext_gal_cols,
gal_rows=gal_rows, ext_gal_rows=ext_gal_rows)
logger.info('zeropad score : {}'.format(zeropad_score))
team_score = calc_team_score(y_val, val_pred_score)
logger.info('team score : {}'.format(team_score))
team_scores.append(team_score)
zeropad_scores.append(zeropad_score)
val_pred_score_zeropads.append(pd.concat([pd.DataFrame(val_pred_score_zeropad), pd.Series(y_val)], axis=1))
conf_y_true.append(y_val)
conf_y_pred.append(np.argmax(val_pred_score, axis=1))
# conf_y_true.append(np.argmax(val_pred_score, axis=1))
# conf_y_pred.append(y_val)
i += 1
mean_best_score = np.mean(best_scores)
mean_team_score = np.mean(team_scores)
mean_best_iteration = np.mean(best_iterations)
mean_zeropads_score = np.mean(np.array(zeropad_scores, dtype=float))
logger.info('mean valid score is {}'.format(mean_best_score))
logger.info('mean team score is {}'.format(mean_team_score))
logger.info('mean best iteration is {}'.format(mean_best_iteration))
#logger.info('mean zeropad score is {}'.format(mean_zeropads_score))
val_pred_score_zeropads_path = './val_pred_score_zeropads/{}_weight-multi-logloss-{:.6}_{}.pkl'\
.format(trained_models[0].__class__.__name__,
mean_zeropads_score,
start_time, )
with open(val_pred_score_zeropads_path, 'wb') as fout:
pickle.dump(val_pred_score_zeropads, fout)
oof_path = './oof/{}_weight-multi-logloss-{:.6}_{}.pkl'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
with open(oof_path, 'wb') as fout:
pickle.dump(oof, fout)
models_path = './trained_models/{}_weight-multi-logloss-{:.6}_{}.pkl'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving models to {} ...'.format(models_path))
save_models(trained_models, models_path)
imp_path = './importances/{}_weight-multi-logloss-{:.6}_{}_importance.png'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving importance to {} ...'.format(imp_path))
save_importance(feature_importance_df, imp_path)
conf_path = './confusion_matrices/{}_weight-multi-logloss-{:.6}_{}_confusion.png'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving confusion matrix to {} ...'.format(conf_path))
conf_y_pred = np.concatenate(conf_y_pred)
conf_y_true = np.concatenate(conf_y_true)
cm = confusion_matrix(conf_y_true, conf_y_pred)
classes = ['class_' + str(clnum)
for clnum in [6, 15, 16, 42, 52, 53, 62, 64, 65, 67, 88, 90, 92, 95]]
cm_df = pd.DataFrame(cm, index=classes, columns=classes)
cm_df[cm_df.columns] = cm_df.values / cm_df.sum(axis=1).values.reshape(-1, 1)
plt.figure(figsize=(14, 14))
sns.heatmap(cm_df, annot=True, cmap=plt.cm.Blues)
#plt.imshow(cm_df.values, interpolation='nearest', cmap=plt.cm.Blues)
plt.title('score : {}'.format(mean_best_score))
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.savefig(conf_path)
if args.with_test:
logger.info('start linear interpolation training')
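# Each CV model sees (FOLD_NUM - 1) / FOLD_NUM of the data, so when
# retraining on everything the mean best iteration is scaled by
# FOLD_NUM / (FOLD_NUM - 1). Note that only the training set is monitored
# below, so early stopping effectively never fires and the full round
# count is used.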
interpolated_num_boost_round =\
int(mean_best_iteration * FOLD_NUM / (FOLD_NUM - 1))
logger.info('the num boost round is {}'.format(interpolated_num_boost_round))
fold_resampling_dict = \
get_fold_resampling_dict(
y_train,
logger,
SAMPLING_LOWER,
SAMPLING_LOWER_RATE)
ros = RandomOverSampler(
ratio=fold_resampling_dict,
random_state=71)
x_train, y_train = ros.fit_sample(x_train, y_train)
train_dataset = lightgbm.Dataset(x_train, y_train)
lin_booster = lightgbm.train(
PARAMS.copy(), train_dataset,
num_boost_round=interpolated_num_boost_round,
fobj=wloss_objective,
feval=wloss_metric,
valid_sets=[train_dataset, ],
verbose_eval=100,
early_stopping_rounds=100
)
logger.info('best score : {}'.format(lin_booster.best_score))
models_path = './trained_models/{}_weight-multi-logloss-{:.6}_{}_linear_interpolated.pkl'\
.format(lin_booster.__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving models to {} ...'.format(models_path))
save_models(lin_booster, models_path)
# logger.info('loading test_set.csv')
# test_set_df = pd.read_feather(
# BASE_DIR + 'test_set.fth', nthreads=args.nthread)
##### logger.info('loading test_set_metadata.csv')
##### test_set_metadata_df = pd.read_csv(
##### BASE_DIR + 'test_set_metadata.csv')
# object_ids = test_set_metadata_df.object_id
##### logger.info('feature engineering for test set...')
##### test_df = feature_engineering(
##### None,
##### test_set_metadata_df,
##### nthread=args.nthread,
##### test_flg=True,
##### logger=logger)
logger.info('loading test_df ...')
test_df = pd.read_feather('./features/test/meta_features.ftr', nthreads=args.nthread)
# with open('./lcfit/LCfit_feature_test_v1_20181203.pkl', 'rb') as fin:
# test_df = test_df[list(set(test_df.columns.tolist()) & set(FEATURES_TO_USE)) + ['object_id']].\
# merge(pickle.load(fin), on='object_id', how='left')
object_ids = test_df.object_id
test_df.drop('object_id', axis=1, inplace=True)
test_df = test_df[FEATURES_TO_USE]
# test_df = feature_engineering(
# test_set_df,
# test_set_metadata_df,
# nthread=args.nthread,
# test_flg=True,
# logger=logger)
test_df.reset_index(drop=True).to_feather('./test_dfs/test_df_for_nn.fth')
##### test_df.drop('object_id', axis=1, inplace=True)
logger.info(f'test cols {test_df.columns.tolist()}')
x_test = test_df.values
logger.info(f'test size: {x_test.shape}')
logger.info('predicting')
test_reses = []
for lgb in tqdm(trained_models):
test_reses.append(
softmax(lgb.predict(x_test, raw_score=False)))
# test_reses.append(lgb.predict_proba(x_test, raw_score=False))
# res = np.clip(np.mean(test_reses, axis=0),
# 10**(-15), 1 - 10**(-15))
# prediction of linear interpolated
lin_test_res = \
softmax(lin_booster.predict(x_test, raw_score=False))
# test_reses.append(lin_test_res)
# temp_filename = './temp/{}_weight-multi-logloss-{:.6}_{}_res.csv'\
# .format(trained_models[0].__class__.__name__,
# mean_best_score,
# start_time,)
# with open(temp_filename, 'wb') as fout:
# pickle.dump(test_reses + [lin_test_res], fout)
res = np.clip(np.mean(
[np.mean(test_reses, axis=0),
lin_test_res],
axis=0),
10**(-15), 1 - 10**(-15))
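# class_99 heuristic: treat "none of the known classes" as the product of
# (1 - p_i) over the 14 known-class probabilities, then rescale so its
# mean is 0.14 (an apparently hand-tuned prior for the unseen-class share).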
preds_99 = np.ones((res.shape[0]))
for i in range(res.shape[1]):
preds_99 *= (1 - res[:, i])
preds_99 = 0.14 * preds_99 / np.mean(preds_99)
#res *= 8/9
#preds_99 = 1/9
# res = np.concatenate((res, preds_99), axis=1)
# res = np.concatenate((res, np.zeros((res.shape[0], 1))), axis=1)
logger.info('now creating the submission file ...')
res_df = pd.DataFrame(res, columns=[
'class_6',
'class_15',
'class_16',
'class_42',
'class_52',
'class_53',
'class_62',
'class_64',
'class_65',
'class_67',
'class_88',
'class_90',
'class_92',
'class_95',
# 'class_99',
])
res_df['class_99'] = preds_99
submission_file_name = './submissions/{}_weight-multi-logloss-{:.6}_{}.csv'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time,)
logger.info(
'saving the test result to {}'.format(submission_file_name))
pd.concat([object_ids, res_df], axis=1)\
.to_csv(submission_file_name, index=False)
logger.info('finish !')
if __name__ == '__main__':
args = parse_args()
FEATURES_TO_USE = [
# 'hostgal_photoz',
'hostgal_photoz_err',
# 'distmod',
'lumi_dist',
'flux_min',
'flux_max',
'flux_mean',
'flux_median',
'flux_std',
'flux_var',
'flux_skew',
'flux_count',
'flux_kurtosis',
'flux_err_min',
'flux_err_max',
'flux_err_mean',
'flux_err_median',
'flux_err_std',
'flux_err_var',
'flux_err_skew',
'flux_err_kurtosis',
'flux_ratio_to_flux_err_min',
'flux_ratio_to_flux_err_max',
'detected_mean',
'flux_ratio_sq_skew',
'flux_ratio_sq_mean',
'flux_ratio_sq_kurtosis',
'flux_by_flux_ratio_sq_sum',
'flux_by_flux_ratio_sq_skew',
'std_upper_mjd_get_max_min_diff',
'std_upper_mjd_var',
'std_upper_mjd_skew',
'std_upper_flux_count',
'std_upper_flux_min',
'detected_mjd_get_max_min_diff',
'detected_mjd_skew',
'band-0_wmean',
'band-0_normed_std',
'band-0_normed_amp',
'band-0_normed_mad',
'band-0_beyond_1std',
'band-0_flux_var',
'band-0_flux_skew',
'band-0_flux_kurtosis',
'band-0_flux_quantile10',
'band-0_flux_quantile25',
'band-0_flux_quantile75',
'band-0_flux_quantile90',
'band-0_flux_quantile2575_range',
'band-0_flux_quantile1090_range',
'band-0_normed_flux_diff_mean',
'band-0_detected_mean',
'band-0_flux_ratio_sq_sum',
'band-0_flux_ratio_sq_skew',
'band-0_flux_by_flux_ratio_sq_sum',
'band-0_flux_by_flux_ratio_sq_skew',
'band-0_flux_get_max_min_diff',
'band-0_std_upper_mjd_get_max_min_diff',
'band-0_std_upper_mjd_var',
'band-0_std_upper_mjd_skew',
'band-0_std_upper_mjd_diff_mean',
'band-0_std_upper_flux_count',
# 'band-0_std_upper_flux_count_ratio',
'band-0_std_upper_flux_diff_mean',
'band-1_wmean',
'band-1_normed_std',
'band-1_normed_amp',
'band-1_normed_mad',
'band-1_beyond_1std',
'band-1_flux_var',
'band-1_flux_skew',
'band-1_flux_kurtosis',
'band-1_flux_quantile10',
'band-1_flux_quantile25',
'band-1_flux_quantile75',
'band-1_flux_quantile90',
'band-1_flux_quantile2575_range',
'band-1_flux_quantile1090_range',
'band-1_normed_flux_diff_mean',
'band-1_detected_mean',
'band-1_flux_ratio_sq_sum',
'band-1_flux_ratio_sq_skew',
'band-1_flux_by_flux_ratio_sq_sum',
'band-1_flux_by_flux_ratio_sq_skew',
'band-1_flux_get_max_min_diff',
'band-1_std_upper_mjd_get_max_min_diff',
'band-1_std_upper_mjd_var',
'band-1_std_upper_mjd_skew',
'band-1_std_upper_mjd_diff_mean',
'band-1_std_upper_flux_count',
# 'band-1_std_upper_flux_count_ratio',
'band-1_std_upper_flux_diff_mean',
'band-2_wmean',
'band-2_normed_std',
'band-2_normed_amp',
'band-2_normed_mad',
'band-2_beyond_1std',
'band-2_flux_var',
'band-2_flux_skew',
'band-2_flux_kurtosis',
'band-2_flux_quantile10',
'band-2_flux_quantile25',
'band-2_flux_quantile75',
'band-2_flux_quantile90',
'band-2_flux_quantile2575_range',
'band-2_flux_quantile1090_range',
'band-2_normed_flux_diff_mean',
'band-2_detected_mean',
'band-2_flux_ratio_sq_sum',
'band-2_flux_ratio_sq_skew',
'band-2_flux_by_flux_ratio_sq_sum',
'band-2_flux_by_flux_ratio_sq_skew',
'band-2_flux_get_max_min_diff',
'band-2_std_upper_mjd_get_max_min_diff',
'band-2_std_upper_mjd_var',
'band-2_std_upper_mjd_skew',
'band-2_std_upper_mjd_diff_mean',
'band-2_std_upper_flux_count',
# 'band-2_std_upper_flux_count_ratio',
'band-2_std_upper_flux_diff_mean',
'band-3_wmean',
'band-3_normed_std',
'band-3_normed_amp',
'band-3_normed_mad',
'band-3_beyond_1std',
'band-3_flux_var',
'band-3_flux_skew',
'band-3_flux_kurtosis',
'band-3_flux_quantile10',
'band-3_flux_quantile25',
'band-3_flux_quantile75',
'band-3_flux_quantile90',
'band-3_flux_quantile2575_range',
'band-3_flux_quantile1090_range',
'band-3_normed_flux_diff_mean',
'band-3_detected_mean',
'band-3_flux_ratio_sq_sum',
'band-3_flux_ratio_sq_skew',
'band-3_flux_by_flux_ratio_sq_sum',
'band-3_flux_by_flux_ratio_sq_skew',
'band-3_flux_get_max_min_diff',
'band-3_std_upper_mjd_get_max_min_diff',
'band-3_std_upper_mjd_var',
'band-3_std_upper_mjd_skew',
'band-3_std_upper_mjd_diff_mean',
'band-3_std_upper_flux_count',
# 'band-3_std_upper_flux_count_ratio',
'band-3_std_upper_flux_diff_mean',
'band-4_wmean',
'band-4_normed_std',
'band-4_normed_amp',
'band-4_normed_mad',
'band-4_beyond_1std',
'band-4_flux_var',
'band-4_flux_skew',
'band-4_flux_kurtosis',
'band-4_flux_quantile10',
'band-4_flux_quantile25',
'band-4_flux_quantile75',
'band-4_flux_quantile90',
'band-4_flux_quantile2575_range',
'band-4_flux_quantile1090_range',
'band-4_normed_flux_diff_mean',
'band-4_detected_mean',
'band-4_flux_ratio_sq_sum',
'band-4_flux_ratio_sq_skew',
'band-4_flux_by_flux_ratio_sq_sum',
'band-4_flux_by_flux_ratio_sq_skew',
'band-4_flux_get_max_min_diff',
'band-4_std_upper_mjd_get_max_min_diff',
'band-4_std_upper_mjd_var',
'band-4_std_upper_mjd_skew',
'band-4_std_upper_mjd_diff_mean',
'band-4_std_upper_flux_count',
# 'band-4_std_upper_flux_count_ratio',
'band-4_std_upper_flux_diff_mean',
'band-5_wmean',
'band-5_normed_std',
'band-5_normed_amp',
'band-5_normed_mad',
'band-5_beyond_1std',
'band-5_flux_var',
'band-5_flux_skew',
'band-5_flux_kurtosis',
'band-5_flux_quantile10',
'band-5_flux_quantile25',
'band-5_flux_quantile75',
'band-5_flux_quantile90',
'band-5_flux_quantile2575_range',
'band-5_flux_quantile1090_range',
'band-5_normed_flux_diff_mean',
'band-5_detected_mean',
'band-5_flux_ratio_sq_sum',
'band-5_flux_ratio_sq_skew',
'band-5_flux_by_flux_ratio_sq_sum',
'band-5_flux_by_flux_ratio_sq_skew',
'band-5_flux_get_max_min_diff',
'band-5_std_upper_mjd_get_max_min_diff',
'band-5_std_upper_mjd_var',
'band-5_std_upper_mjd_skew',
'band-5_std_upper_mjd_diff_mean',
'band-5_std_upper_flux_count',
# 'band-5_std_upper_flux_count_ratio',
'band-5_std_upper_flux_diff_mean',
'0_minus_1_wmean',
'0_minus_1_std',
'0_minus_1_amp',
'1_minus_2_wmean',
'1_minus_2_std',
'1_minus_2_amp',
'2_minus_3_wmean',
'2_minus_3_std',
'2_minus_3_amp',
'3_minus_4_wmean',
'3_minus_4_std',
'3_minus_4_amp',
'4_minus_5_wmean',
'4_minus_5_std',
'4_minus_5_amp',
'5_minus_0_wmean',
'5_minus_0_std',
'5_minus_0_amp',
'flux_diff',
'flux_dif2',
'flux_w_mean',
'flux_dif3',
'std_upper_rat',
'band-0_flux_max_ratio_to_the_max',
'band-1_flux_max_ratio_to_the_max',
'band-2_flux_max_ratio_to_the_max',
'band-3_flux_max_ratio_to_the_max',
'band-4_flux_max_ratio_to_the_max',
'band-5_flux_max_ratio_to_the_max',
'passband_flux_min_var',
'passband_flux_means_var',
'passband_flux_counts_var',
'passband_detected_means_var',
'band_flux_diff_max',
'band_flux_diff_min',
'band_flux_diff_diff',
'band_flux_diff_diff_rat',
'band_flux_max_min_rat',
'0__length',
'0__longest_strike_above_mean',
'0__longest_strike_below_mean',
'0__mean_abs_change',
'0__mean_change',
'1__length',
'1__longest_strike_above_mean',
'1__longest_strike_below_mean',
'1__mean_abs_change',
'1__mean_change',
'2__length',
'2__longest_strike_above_mean',
'2__longest_strike_below_mean',
'2__mean_abs_change',
'2__mean_change',
'3__length',
'3__longest_strike_above_mean',
'3__longest_strike_below_mean',
'3__mean_abs_change',
'3__mean_change',
'4__length',
'4__longest_strike_above_mean',
'4__longest_strike_below_mean',
'4__mean_abs_change',
'4__mean_change',
'5__length',
'5__longest_strike_above_mean',
'5__longest_strike_below_mean',
'5__mean_abs_change',
'5__mean_change',
'internal',
# 'c90_z_z1',
# 'c90_y_z1',
# 'c52_y_z1',
# 'c67_g_z2',
# 'c67_i_z2',
# 'c67_y_z2',
# 'c52_r_z3',
# 'c42_i_z4',
# 'c42_z_z4',
### 'band-0_detected_mjd_get_max_min_diff', # features of this kind raise CV but hurt LB
# 'band-0_detected_mjd_var',
# 'band-0_detected_mjd_skew',
# 'band-0_detected_mjd_diff_mean',
### 'band-1_detected_mjd_get_max_min_diff',
# 'band-1_detected_mjd_var',
# 'band-1_detected_mjd_skew',
# 'band-1_detected_mjd_diff_mean',
### 'band-2_detected_mjd_get_max_min_diff',
# 'band-2_detected_mjd_var',
# 'band-2_detected_mjd_skew',
# 'band-2_detected_mjd_diff_mean',
### 'band-3_detected_mjd_get_max_min_diff',
# 'band-3_detected_mjd_var',
# 'band-3_detected_mjd_skew',
# 'band-3_detected_mjd_diff_mean',
### 'band-4_detected_mjd_get_max_min_diff',
# 'band-4_detected_mjd_var',
# 'band-4_detected_mjd_skew',
# 'band-4_detected_mjd_diff_mean',
### 'band-5_detected_mjd_get_max_min_diff',
# 'band-5_detected_mjd_var',
# 'band-5_detected_mjd_skew',
# 'band-5_detected_mjd_diff_mean'
# '0_minus_1_dmgmmd',
# '1_minus_2_dmgmmd',
# '2_minus_3_dmgmmd',
# '3_minus_4_dmgmmd',
# '4_minus_5_dmgmmd',
# '5_minus_0_dmgmmd',
# 'std_lower_mjd_get_max_min_diff',
# 'std_lower_mjd_var',
# 'std_lower_mjd_skew',
# 'std_lower_flux_count',
# 'std_lower_flux_max'
# '0_minus_1_skew',
# '1_minus_2_skew',
# '2_minus_3_skew',
# '3_minus_4_skew',
# '4_minus_5_skew',
# '5_minus_0_skew',
# '0_minus_1_kurt',
# '1_minus_2_kurt',
# '2_minus_3_kurt',
# '3_minus_4_kurt',
# '4_minus_5_kurt',
# '5_minus_0_kurt',
'0_minus_1_q2575_rng',
'1_minus_2_q2575_rng',
'2_minus_3_q2575_rng',
'3_minus_4_q2575_rng',
'4_minus_5_q2575_rng',
'5_minus_0_q2575_rng',
'band-0_std_upper_flux_quantile10',
'band-1_std_upper_flux_quantile10',
'band-2_std_upper_flux_quantile10',
'band-3_std_upper_flux_quantile10',
'band-4_std_upper_flux_quantile10',
'band-5_std_upper_flux_quantile10',
# 'band-0_std_upper_flux_quantile25',
# 'band-1_std_upper_flux_quantile25',
# 'band-2_std_upper_flux_quantile25',
# 'band-3_std_upper_flux_quantile25',
# 'band-4_std_upper_flux_quantile25',
# 'band-5_std_upper_flux_quantile25',
# 'band-0_std_upper_flux_quantile75',
# 'band-1_std_upper_flux_quantile75',
# 'band-2_std_upper_flux_quantile75',
# 'band-3_std_upper_flux_quantile75',
# 'band-4_std_upper_flux_quantile75',
# 'band-5_std_upper_flux_quantile75',
'band-0_std_upper_flux_quantile90',
'band-1_std_upper_flux_quantile90',
'band-2_std_upper_flux_quantile90',
'band-3_std_upper_flux_quantile90',
'band-4_std_upper_flux_quantile90',
'band-5_std_upper_flux_quantile90',
# 'band-0_std_upper_flux_quantile2575_range',
# 'band-1_std_upper_flux_quantile2575_range',
# 'band-2_std_upper_flux_quantile2575_range',
# 'band-3_std_upper_flux_quantile2575_range',
# 'band-4_std_upper_flux_quantile2575_range',
# 'band-5_std_upper_flux_quantile2575_range',
# 'band-0_std_upper_flux_quantile1090_range',
# 'band-1_std_upper_flux_quantile1090_range',
# 'band-2_std_upper_flux_quantile1090_range',
# 'band-3_std_upper_flux_quantile1090_range',
# 'band-4_std_upper_flux_quantile1090_range',
# 'band-5_std_upper_flux_quantile1090_range',
# 'band-0_flux_max',
# 'band-1_flux_max',
# 'band-2_flux_max',
# 'band-3_flux_max',
# 'band-4_flux_max',
# 'band-5_flux_max',
'0_minus_1_max',
'1_minus_2_max',
'2_minus_3_max',
'3_minus_4_max',
'4_minus_5_max',
'5_minus_0_max',
# 'abs_magnitude_min',
# 'abs_magnitude_max',
###### 'abs_magnitude_mean',
###### 'abs_magnitude_median',
###### 'abs_magnitude_std',
###### 'abs_magnitude_var',
###### 'abs_magnitude_skew',
# 'abs_magnitude_kurtosis',
##### 'luminosity_max',
##### 'peak-14-14_flux_mean',
##### 'peak-30-30_flux_mean',
##### 'peak-90-90_flux_mean',
##### 'peak-14-14_flux_kurtosis',
##### 'peak-30-30_flux_kurtosis',
##### 'peak-90-90_flux_kurtosis',
##### 'peak_kurt_14to30',
# 'peak_kurt_14to90',
##### 'peak_kurt_30to90',
##### 'peak-14-14_flux_skew',
# 'peak-30-30_flux_skew',
# 'peak-90-90_flux_skew',
##### 'peak_skew_14to30',
# 'peak_skew_14to90',
# 'peak_skew_30to90',
############################### 'peak-0-14_flux_diff_var',
############################### 'peak-0-30_flux_diff_var',
##### 'peak-0-90_flux_diff_var',
# 'peak-14-0_flux_diff_var',
# 'peak-30-0_flux_diff_var',
# 'peak-14-14_flux_get_max_min_diff',
# 'peak-30-30_flux_get_max_min_diff',
# 'peak-90-90_flux_get_max_min_diff',
# 'peak-30-30_luminosity_kurtosis',
# 'peak-14-14_detected_mean',
# 'peak-0-30_abs_magnitude_diff_var',
# 'peak-0-90_abs_magnitude_diff_var',
# 'peak-14-14_abs_magnitude_skew',
##### 'peak-30-30_abs_magnitude_skew',
##### 'peak-90-90_abs_magnitude_skew',
# 'peak-14-14_abs_magnitude_kurtosis',
##### 'peak-30-30_abs_magnitude_kurtosis',
##### 'peak-90-90_abs_magnitude_kurtosis',
# 'peak-14-14_flux_ratio_sq_sum',
# 'peak-30-30_flux_ratio_sq_sum',
# 'peak-90-90_flux_ratio_sq_sum',
##### 'ratsq-peak-14-14_flux_ratio_sq_skew',
# 'peak-30-30_flux_ratio_sq_skew',
##### 'ratsq-peak-90-90_flux_ratio_sq_skew',
# 'peak-14-14_flux_ratio_sq_kurtosis',
# 'peak-30-30_flux_ratio_sq_kurtosis',
# 'peak-90-90_flux_ratio_sq_kurtosis',
# 'peak-0-14_flux_ratio_sq_skew',
# 'peak-14-14_flux_ratio_sq_mean',
# 'peak-30-30_flux_ratio_sq_mean',
# 'peak-90-90_flux_ratio_sq_mean',
# 'peak-14-14_corrected_flux_by_flux_ratio_sq_skew',
# 'peak-30-30_corrected_flux_by_flux_ratio_sq_skew',
# 'band-0_flux_ratio_sq_get_max_min_diff',
# 'band-1_flux_ratio_sq_get_max_min_diff',
# 'band-2_flux_ratio_sq_get_max_min_diff',
# 'band-3_flux_ratio_sq_get_max_min_diff',
# 'band-4_flux_ratio_sq_get_max_min_diff',
# 'band-5_flux_ratio_sq_get_max_min_diff',
'0_minus_1_ratsqmax',
'1_minus_2_ratsqmax',
'2_minus_3_ratsqmax',
'3_minus_4_ratsqmax',
'4_minus_5_ratsqmax',
'5_minus_0_ratsqmax',
# '0_minus_1_ratsqmax_log',
# '1_minus_2_ratsqmax_log',
# '2_minus_3_ratsqmax_log',
# '3_minus_4_ratsqmax_log',
# '4_minus_5_ratsqmax_log',
# '5_minus_0_ratsqmax_log',
# 'band-0_flux_ratio_sq_max_ratio',
# 'band-1_flux_ratio_sq_max_ratio',
# 'band-2_flux_ratio_sq_max_ratio',
# 'band-3_flux_ratio_sq_max_ratio',
# 'band-4_flux_ratio_sq_max_ratio',
# 'band-5_flux_ratio_sq_max_ratio',
# 'flux_ratio_sq_max',
# 'ddf'
'my_skew',
'my_kurt',
'mjd_diff_af_det1',
# 'mjd_diff_bf_det1',
'mjd_diff_ab_sum',
# 'band-0_my_skew',
# 'band-1_my_skew',
# 'band-2_my_skew',
# 'band-3_my_skew',
# 'band-4_my_skew',
# 'band-5_my_skew',
# 'band-0_my_kurt',
# 'band-1_my_kurt',
# 'band-2_my_kurt',
# 'band-3_my_kurt',
# 'band-4_my_kurt',
# 'band-5_my_kurt',
# 'hostgal_photoz',
# 'det_my_skew',
# 'det_my_kurt',
'c52_z_z1',
'c52_y_z1',
'c62_g_z1',
'c67_g_z1',
'c90_z_z1',
'c90_y_z1',
'c52_y_z2',
'c62_g_z2',
'c62_r_z2',
'c62_i_z2',
'c62_y_z2',
# 'c67_g_z2',
# 'c67_i_z2',
'c67_y_z2',
# 'c42_g_z3',
# 'c52_r_z3',
'c52_i_z3',
'c52_z_z3',
'c62_z_z3',
'c90_g_z3',
'c90_i_z3',
'c90_z_z3',
'c42_g_z4',
'c42_r_z4',
'c90_g_z4',
'c90_r_z4',
'c90_i_z4',
'c90_z_z4',
'c42_i_z0_chisq',
'c42_i_z0_redchisq',
'c42_i_z0_dmax',
'c42_i_z0_fmax',
'c42_i_z0_dof',
'c42_i_z1_chisq',
'c42_i_z1_redchisq',
'c42_i_z1_dmax',
'c42_i_z1_fmax',
'c42_i_z1_dof',
'c42_i_z2_chisq',
'c42_i_z2_redchisq',
'c42_i_z2_dmax',
'c42_i_z2_fmax',
'c42_i_z2_dof',
'c42_i_z3_chisq',
'c42_i_z3_redchisq',
'c42_i_z3_dmax',
'c42_i_z3_fmax',
'c42_i_z3_dof',
'c52_i_z0_chisq',
'c52_i_z0_redchisq',
'c52_i_z0_dmax',
'c52_i_z0_fmax',
'c52_i_z0_dof',
'c52_i_z1_chisq',
'c52_i_z1_redchisq',
'c52_i_z1_dmax',
'c52_i_z1_fmax',
'c52_i_z1_dof',
'c52_i_z2_chisq',
'c52_i_z2_redchisq',
'c52_i_z2_dmax',
'c52_i_z2_fmax',
'c52_i_z2_dof',
'c62_i_z0_chisq',
'c62_i_z0_redchisq',
'c62_i_z0_dmax',
'c62_i_z0_fmax',
'c62_i_z0_dof',
'c62_i_z1_chisq',
'c62_i_z1_redchisq',
'c62_i_z1_dmax',
'c62_i_z1_fmax',
'c62_i_z1_dof',
'c62_i_z2_chisq',
'c62_i_z2_redchisq',
'c62_i_z2_dmax',
'c62_i_z2_fmax',
'c62_i_z2_dof',
'c67_i_z0_chisq',
'c67_i_z0_redchisq',
'c67_i_z0_dmax',
'c67_i_z0_fmax',
'c67_i_z0_dof',
'c67_i_z1_chisq',
'c67_i_z1_redchisq',
'c67_i_z1_dmax',
'c67_i_z1_fmax',
'c67_i_z1_dof',
'c67_i_z2_chisq',
'c67_i_z2_redchisq',
'c67_i_z2_dmax',
'c67_i_z2_fmax',
'c67_i_z2_dof',
'c90_i_z0_chisq',
'c90_i_z0_redchisq',
'c90_i_z0_dmax',
'c90_i_z0_fmax',
'c90_i_z0_dof',
'c90_i_z1_chisq',
'c90_i_z1_redchisq',
'c90_i_z1_dmax',
'c90_i_z1_fmax',
'c90_i_z1_dof',
'c90_i_z2_chisq',
'c90_i_z2_redchisq',
'c90_i_z2_dmax',
'c90_i_z2_fmax',
'c90_i_z2_dof',
'c90_i_z3_chisq',
'c90_i_z3_redchisq',
'c90_i_z3_dmax',
'c90_i_z3_fmax',
'c90_i_z3_dof',
'c42_g_z0_chisq',
'c42_g_z0_redchisq',
'c42_g_z0_dmax',
'c42_g_z0_fmax',
'c42_g_z0_dof',
'c42_g_z1_chisq',
'c42_g_z1_redchisq',
'c42_g_z1_dmax',
'c42_g_z1_fmax',
'c42_g_z1_dof',
'c42_g_z2_chisq',
'c42_g_z2_redchisq',
'c42_g_z2_dmax',
'c42_g_z2_fmax',
'c42_g_z2_dof',
'c42_g_z3_chisq',
'c42_g_z3_redchisq',
'c42_g_z3_dmax',
'c42_g_z3_fmax',
'c42_g_z3_dof',
'c52_g_z0_chisq',
'c52_g_z0_redchisq',
'c52_g_z0_dmax',
'c52_g_z0_fmax',
'c52_g_z0_dof',
'c52_g_z1_chisq',
'c52_g_z1_redchisq',
'c52_g_z1_dmax',
'c52_g_z1_fmax',
'c52_g_z1_dof',
'c52_g_z2_chisq',
'c52_g_z2_redchisq',
'c52_g_z2_dmax',
'c52_g_z2_fmax',
'c52_g_z2_dof',
'c62_g_z0_chisq',
'c62_g_z0_redchisq',
'c62_g_z0_dmax',
'c62_g_z0_fmax',
'c62_g_z0_dof',
'c62_g_z1_chisq',
'c62_g_z1_redchisq',
'c62_g_z1_dmax',
'c62_g_z1_fmax',
'c62_g_z1_dof',
'c62_g_z2_chisq',
'c62_g_z2_redchisq',
'c62_g_z2_dmax',
'c62_g_z2_fmax',
'c62_g_z2_dof',
'c67_g_z0_chisq',
'c67_g_z0_redchisq',
'c67_g_z0_dmax',
'c67_g_z0_fmax',
'c67_g_z0_dof',
'c67_g_z1_chisq',
'c67_g_z1_redchisq',
'c67_g_z1_dmax',
'c67_g_z1_fmax',
'c67_g_z1_dof',
'c67_g_z2_chisq',
'c67_g_z2_redchisq',
'c67_g_z2_dmax',
'c67_g_z2_fmax',
'c67_g_z2_dof',
'c90_g_z0_chisq',
'c90_g_z0_redchisq',
'c90_g_z0_dmax',
'c90_g_z0_fmax',
'c90_g_z0_dof',
'c90_g_z1_chisq',
'c90_g_z1_redchisq',
'c90_g_z1_dmax',
'c90_g_z1_fmax',
'c90_g_z1_dof',
'c90_g_z2_chisq',
'c90_g_z2_redchisq',
'c90_g_z2_dmax',
'c90_g_z2_fmax',
'c90_g_z2_dof',
'c90_g_z3_chisq',
'c90_g_z3_redchisq',
'c90_g_z3_dmax',
'c90_g_z3_fmax',
'c90_g_z3_dof',
'c42_r_z0_chisq',
'c42_r_z0_redchisq',
'c42_r_z0_dmax',
'c42_r_z0_fmax',
'c42_r_z0_dof',
'c42_r_z1_chisq',
'c42_r_z1_redchisq',
'c42_r_z1_dmax',
'c42_r_z1_fmax',
'c42_r_z1_dof',
'c42_r_z2_chisq',
'c42_r_z2_redchisq',
'c42_r_z2_dmax',
'c42_r_z2_fmax',
'c42_r_z2_dof',
'c42_r_z3_chisq',
'c42_r_z3_redchisq',
'c42_r_z3_dmax',
'c42_r_z3_fmax',
'c42_r_z3_dof',
'c52_r_z0_chisq',
'c52_r_z0_redchisq',
'c52_r_z0_dmax',
'c52_r_z0_fmax',
'c52_r_z0_dof',
'c52_r_z1_chisq',
'c52_r_z1_redchisq',
'c52_r_z1_dmax',
'c52_r_z1_fmax',
'c52_r_z1_dof',
'c52_r_z2_chisq',
'c52_r_z2_redchisq',
'c52_r_z2_dmax',
'c52_r_z2_fmax',
'c52_r_z2_dof',
'c62_r_z0_chisq',
'c62_r_z0_redchisq',
'c62_r_z0_dmax',
'c62_r_z0_fmax',
'c62_r_z0_dof',
'c62_r_z1_chisq',
'c62_r_z1_redchisq',
'c62_r_z1_dmax',
'c62_r_z1_fmax',
'c62_r_z1_dof',
'c62_r_z2_chisq',
'c62_r_z2_redchisq',
'c62_r_z2_dmax',
'c62_r_z2_fmax',
'c62_r_z2_dof',
'c67_r_z0_chisq',
'c67_r_z0_redchisq',
'c67_r_z0_dmax',
'c67_r_z0_fmax',
'c67_r_z0_dof',
'c67_r_z1_chisq',
'c67_r_z1_redchisq',
'c67_r_z1_dmax',
'c67_r_z1_fmax',
'c67_r_z1_dof',
'c67_r_z2_chisq',
'c67_r_z2_redchisq',
'c67_r_z2_dmax',
'c67_r_z2_fmax',
'c67_r_z2_dof',
'c90_r_z0_chisq',
'c90_r_z0_redchisq',
'c90_r_z0_dmax',
'c90_r_z0_fmax',
'c90_r_z0_dof',
'c90_r_z1_chisq',
'c90_r_z1_redchisq',
'c90_r_z1_dmax',
'c90_r_z1_fmax',
'c90_r_z1_dof',
'c90_r_z2_chisq',
'c90_r_z2_redchisq',
'c90_r_z2_dmax',
'c90_r_z2_fmax',
'c90_r_z2_dof',
'c90_r_z3_chisq',
'c90_r_z3_redchisq',
'c90_r_z3_dmax',
'c90_r_z3_fmax',
'c90_r_z3_dof',
'c42_z_z0_chisq',
'c42_z_z0_redchisq',
'c42_z_z0_dmax',
'c42_z_z0_fmax',
'c42_z_z0_dof',
'c42_z_z1_chisq',
'c42_z_z1_redchisq',
'c42_z_z1_dmax',
'c42_z_z1_fmax',
'c42_z_z1_dof',
'c42_z_z2_chisq',
'c42_z_z2_redchisq',
'c42_z_z2_dmax',
'c42_z_z2_fmax',
'c42_z_z2_dof',
'c42_z_z3_chisq',
'c42_z_z3_redchisq',
'c42_z_z3_dmax',
'c42_z_z3_fmax',
'c42_z_z3_dof',
'c52_z_z0_chisq',
'c52_z_z0_redchisq',
'c52_z_z0_dmax',
'c52_z_z0_fmax',
'c52_z_z0_dof',
'c52_z_z1_chisq',
'c52_z_z1_redchisq',
'c52_z_z1_dmax',
'c52_z_z1_fmax',
'c52_z_z1_dof',
'c52_z_z2_chisq',
'c52_z_z2_redchisq',
'c52_z_z2_dmax',
'c52_z_z2_fmax',
'c52_z_z2_dof',
'c62_z_z0_chisq',
'c62_z_z0_redchisq',
'c62_z_z0_dmax',
'c62_z_z0_fmax',
'c62_z_z0_dof',
'c62_z_z1_chisq',
'c62_z_z1_redchisq',
'c62_z_z1_dmax',
'c62_z_z1_fmax',
'c62_z_z1_dof',
'c62_z_z2_chisq',
'c62_z_z2_redchisq',
'c62_z_z2_dmax',
'c62_z_z2_fmax',
'c62_z_z2_dof',
'c67_z_z0_chisq',
'c67_z_z0_redchisq',
'c67_z_z0_dmax',
'c67_z_z0_fmax',
'c67_z_z0_dof',
'c67_z_z1_chisq',
'c67_z_z1_redchisq',
'c67_z_z1_dmax',
'c67_z_z1_fmax',
'c67_z_z1_dof',
'c67_z_z2_chisq',
'c67_z_z2_redchisq',
'c67_z_z2_dmax',
'c67_z_z2_fmax',
'c67_z_z2_dof',
'c90_z_z0_chisq',
'c90_z_z0_redchisq',
'c90_z_z0_dmax',
'c90_z_z0_fmax',
'c90_z_z0_dof',
'c90_z_z1_chisq',
'c90_z_z1_redchisq',
'c90_z_z1_dmax',
'c90_z_z1_fmax',
'c90_z_z1_dof',
'c90_z_z2_chisq',
'c90_z_z2_redchisq',
'c90_z_z2_dmax',
'c90_z_z2_fmax',
'c90_z_z2_dof',
'c90_z_z3_chisq',
'c90_z_z3_redchisq',
'c90_z_z3_dmax',
'c90_z_z3_fmax',
'c90_z_z3_dof',
'dmax_r_std',
'dmax_r_mean',
'dmax_i_std',
'dmax_i_mean',
'dmax_g_std',
'dmax_g_mean',
'dmax_z_std',
'dmax_z_mean',
'u_switch_cnt',
'g_switch_cnt',
'r_switch_cnt',
'i_switch_cnt',
'z_switch_cnt',
'Y_switch_cnt',
'switch_cnt_mean',
'switch_cnt_std',
'switch_cnt_max',
]
if args.specz:
FEATURES_TO_USE += [
'hostgal_specz',
'z_corrected_flux_diff',
'z_corrected_flux_dif2',
'z_corrected_flux_w_mean',
'z_corrected_flux_dif3',
'z_corrected_flux_min',
'z_corrected_flux_max',
'z_corrected_flux_mean',
'z_corrected_flux_median',
'z_corrected_flux_std',
'z_corrected_flux_var',
'z_corrected_flux_skew',
'z_corrected_flux_ratio_sq_sum',
'z_corrected_flux_ratio_sq_skew',
'z_corrected_flux_by_flux_ratio_sq_sum',
'z_corrected_flux_by_flux_ratio_sq_skew',
# 'band-0_z_corrected_flux_min',
# 'band-1_z_corrected_flux_min',
# 'band-2_z_corrected_flux_min',
# 'band-3_z_corrected_flux_min',
# 'band-4_z_corrected_flux_min',
# 'band-5_z_corrected_flux_min',
'0_minus_1_zcorrmax_diff',
'1_minus_2_zcorrmax_diff',
'2_minus_3_zcorrmax_diff',
'3_minus_4_zcorrmax_diff',
'4_minus_5_zcorrmax_diff',
'5_minus_0_zcorrmax_diff',
]
else:
# FEATURES_TO_USE = FEATURES_TO_USE
FEATURES_TO_USE += [
'corrected_flux_diff',
'corrected_flux_dif2',
'corrected_flux_w_mean',
'corrected_flux_dif3',
'corrected_flux_min',
'corrected_flux_max',
'corrected_flux_mean',
'corrected_flux_median',
'corrected_flux_std',
'corrected_flux_var',
'corrected_flux_skew',
'corrected_flux_ratio_sq_sum',
'corrected_flux_ratio_sq_skew',
'corrected_flux_by_flux_ratio_sq_sum',
'corrected_flux_by_flux_ratio_sq_skew',
]
main(args, FEATURES_TO_USE)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,690 | guchio3/kaggle-plasticc | refs/heads/master | /utils/csv_to_fth.py | import pandas as pd
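# One-off conversion: read the huge test_set.csv once and persist it as
# Feather, so other scripts can reload it much faster via pd.read_feather.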
df = pd.read_csv('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/test_set.csv')
df.to_feather('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/test_set.fth')
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,691 | guchio3/kaggle-plasticc | refs/heads/master | /tools/fold_resampling.py | #
# the tool for resamping each target for column
#
# ros = RandomOverSampler(
# ratio={
# 0: max(151, SAMPLING_LOWER),
# 1: max(495, SAMPLING_LOWER),
# 2: max(924, SAMPLING_LOWER),
# 3: max(1193, SAMPLING_LOWER),
# 4: max(183, SAMPLING_LOWER),
# 5: max(30, SAMPLING_LOWER),
# 6: max(484, SAMPLING_LOWER),
# 7: max(102, SAMPLING_LOWER),
# 8: max(981, SAMPLING_LOWER),
# 9: max(208, SAMPLING_LOWER),
# 10: max(370, SAMPLING_LOWER),
# 11: max(2313, SAMPLING_LOWER),
# 12: max(239, SAMPLING_LOWER),
# 13: max(175, SAMPLING_LOWER),
# }, random_state=71)
# x_train, y_train = ros.fit_sample(x_train, y_train)
def get_fold_resampling_dict(y_sample, logger,
sampling_lower, sampling_lower_rate):
fold_resampling_dict = {}
targets = list(range(14))
for target in targets:
fold_resampling_dict[target] = y_sample[y_sample == target].shape[0]
logger.debug('fold_samples_num : {}'.format(fold_resampling_dict))
for target in fold_resampling_dict.keys():
fold_resampling_dict[target] = \
int(max(fold_resampling_dict[target], sampling_lower))
# if sampling_lower > fold_resampling_dict[target]:
# fold_resampling_dict[target] = \
# int(max(fold_resampling_dict[target], sampling_lower))
# int(fold_resampling_dict[target] * sampling_lower_rate)
# fold_resampling_dict = {
# 0: 121,
# 1: 396,
# 2: 740,
# 3: 955,
# 4: 147,
# 5: 60,
# 6: 388,
# 7: 82,
# 8: 85,
# 9: 167,
# 10: 296,
# 11: 1851,
# 12: 192,
# 13: 140}
# change_dict = {166: 500, 146: 500}
# for key in fold_resampling_dict:
# if fold_resampling_dict[key] in change_dict:
# print(fold_resampling_dict[key])
# fold_resampling_dict[key] = change_dict[fold_resampling_dict[key]]
# fold_resampling_dict[4] = 300
# fold_resampling_dict[9] = 300
logger.info('resampled fold_samples_num : {}'.format(fold_resampling_dict))
return fold_resampling_dict
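# A minimal usage sketch (mirroring train.py; logger is any standard
# logging.Logger and RandomOverSampler comes from imblearn):
# resampling_dict = get_fold_resampling_dict(y_trn, logger, 60, 2.)
# ros = RandomOverSampler(ratio=resampling_dict, random_state=71)
# x_trn, y_trn = ros.fit_sample(x_trn, y_trn)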
def haradasan_get_fold_resampling_dict(y_sample, logger,
sampling_lower, sampling_lower_rate):
fold_resampling_dict = {}
targets = list(range(14))
for target in targets:
fold_resampling_dict[target] = y_sample[y_sample == target].shape[0]
logger.info('fold_samples_num : {}'.format(fold_resampling_dict))
fold_max_num = max(list(fold_resampling_dict.values()))
for target in fold_resampling_dict.keys():
fold_resampling_dict[target] = fold_max_num
logger.info('resampled fold_samples_num : {}'.format(fold_resampling_dict))
return fold_resampling_dict
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,692 | guchio3/kaggle-plasticc | refs/heads/master | /train.py | import numpy as np
import pandas as pd
from sklearn.model_selection import StratifiedKFold
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import confusion_matrix
from imblearn.over_sampling import SMOTE, RandomOverSampler
import lightgbm
from logging import getLogger
from tqdm import tqdm
import argparse
import datetime
import pickle
import warnings
from matplotlib import pyplot as plt
import seaborn as sns
from tools.my_logging import logInit
from tools.feature_tools import feature_engineering
from tools.objective_function import weighted_multi_logloss, lgb_multi_weighted_logloss, wloss_objective, wloss_metric, softmax, calc_team_score, wloss_metric_for_zeropad
from tools.model_io import save_models, load_models
from tools.fold_resampling import get_fold_resampling_dict
np.random.seed(71)
np.set_printoptions(threshold=np.inf)
pd.set_option('display.max_columns', 1000)
pd.set_option('display.max_rows', 1000)
warnings.simplefilter('ignore', RuntimeWarning)
warnings.simplefilter('ignore', UserWarning)
plt.switch_backend('agg')
BASE_DIR = '/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/'
#BASE_DIR = '/Users/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/'
FOLD_NUM = 5
SAMPLING_LOWER = 60
# SAMPLING_LOWER = 10
SAMPLING_LOWER_RATE = 2.
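# These feed tools.fold_resampling.get_fold_resampling_dict: every class is
# oversampled up to at least SAMPLING_LOWER rows per fold, while
# SAMPLING_LOWER_RATE is currently only referenced by commented-out code there.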
def parse_args():
parser = argparse.ArgumentParser(
prog='train.py',
usage='ex) python train.py --with_test',
description='train and evaluate LightGBM models for PLAsTiCC',
epilog='end',
add_help=True,
)
parser.add_argument('-w', '--with_test',
help='flag to also run prediction on the test set.',
action='store_true',
default=False)
parser.add_argument('-n', '--nthread',
help='number of available threads.',
type=int,
required=True)
args = parser.parse_args()
return args
def get_params(args):
PARAMS = {
# 'objective': wloss_objective,
'objective': 'multiclass',
# 'metric': ['multi_logloss', ],
'num_class': 14,
'nthread': args.nthread,
'learning_rate': 0.4,
# 'learning_rate': 0.02,
# 'num_leaves': 32,
'max_depth': 3,
'subsample': .9,
'colsample_bytree': .7,
'reg_alpha': .01,
'reg_lambda': .01,
'min_split_gain': 0.01,
'min_child_weight': 200,
# 'n_estimators': 10000,
'verbose': -1,
'silent': -1,
'random_state': 71,
'seed': 71,
# 'early_stopping_rounds': 100,
# 'min_data_in_leaf': 30,
'max_bin': 20,
# 'min_data_in_leaf': 300,
# 'bagging_fraction': 0.1,
# 'bagging_freq': 10,
}
return PARAMS
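# Note: 'objective' is set to 'multiclass' here, but lightgbm.train() is
# later called with fobj=wloss_objective / feval=wloss_metric, so the
# custom weighted-logloss objective takes precedence during training.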
# Display/plot feature importance
def display_importances(feature_importance_df_,
filename='importance_application'):
# cols = feature_importance_df_[["feature",
# "importance"]].groupby("feature").mean().sort_values(by="importance",
# ascending=False).index
csv_df = feature_importance_df_[["feature", "importance"]].groupby(
"feature").agg({'importance': ['mean', 'std']})
csv_df.columns = pd.Index(
[e[0] + "_" + e[1].upper()
for e in csv_df.columns.tolist()])
csv_df['importance_RAT'] = csv_df['importance_STD'] / \
csv_df['importance_MEAN']
csv_df.sort_values(by="importance_MEAN", ascending=False)\
.to_csv(filename + '.csv')
# best_features = feature_importance_df_.loc[feature_importance_df_.feature.isin(cols)]
# plt.figure(figsize=(8, 10))
# sns.barplot(x="importance", y="feature", data=best_features.sort_values(by="importance", ascending=False))
# plt.title('LightGBM Features (avg over folds)')
# plt.tight_layout()
# plt.savefig(filename + '.png')
def save_importance(df, filename):
df.set_index('feature', inplace=True)
imp_mean = df.mean(axis=1)
imp_std = df.std(axis=1)
df['importance_mean'] = imp_mean
df['importance_std'] = imp_std
df['importance_cov'] = df['importance_std'] / df['importance_mean']
df.sort_values(by="importance_cov", ascending=True).to_csv(filename[:-4] + '.csv')
df.reset_index(inplace=True)
plt.figure(figsize=(8, 30))
sns.barplot(x="importance_mean", y="feature", data=df.sort_values(by="importance_mean", ascending=False))
plt.title('LightGBM Features (avg over folds)')
plt.tight_layout()
plt.savefig(filename)
def plt_confusion_matrics():
pass  # unused placeholder; confusion-matrix plotting is done inline in main()
def main(args):
logger = getLogger(__name__)
logInit(logger, log_dir='./log/', log_filename='train.log')
logger.info(
'''
start main, the args settings are ...
--with_test : {}
'''.format(args.with_test))
start_time = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
logger.info('start training, the starting time is {}'.format(start_time))
PARAMS = get_params(args)
logger.info('loading training_set.csv')
training_set_df = pd.read_csv(
BASE_DIR + 'training_set.csv')
logger.info('loading training_set_metadata.csv')
training_set_metadata_df = pd.read_csv(
BASE_DIR + 'training_set_metadata.csv')
# training_set_metadata_df =
# training_set_metadata_df[training_set_metadata_df.ddf == 1]
logger.info('start feature engineering')
train_df = feature_engineering(
training_set_df,
training_set_metadata_df,
nthread=args.nthread,
logger=logger)
with open('./lcfit/LCfit_features_train_20181129.pkl', 'rb') as fin:
train_df = train_df.merge(pickle.load(fin), on='object_id', how='left')
train_df.drop('object_id', axis=1, inplace=True)
# lgbm does not accept the raw targets unless we label-encode them first
# (it would be nice if smaller classes got smaller labels, though...)
le = LabelEncoder()
le.fit(train_df['target'].values)
x_train = train_df.drop('target', axis=1).values
y_train = le.transform(train_df.target)
train_set = lightgbm.Dataset(
data=train_df.drop('target', axis=1).values,
label=le.transform(train_df['target'].values),
)
skf = StratifiedKFold(n_splits=FOLD_NUM, shuffle=True, random_state=71)
# folds = skf.split(
# train_df.drop('target', axis=1), le.transform(train_df.target))
folds = skf.split(x_train, y_train)
logger.info('the shape of x_train : {}'.format(x_train.shape))
# logger.info('the shape of train_df : {}'.format(train_df.shape))
logger.debug('the cols of train_df : {}'.
format(train_df.drop('target', axis=1).columns.tolist()))
# categotical_features = ['passband_maxes_argmaxes', ]
# categorical_features_idx = np.argwhere(train_df.drop('target', axis=1).columns == 'passband_maxes_argmaxes')[0]
# logger.debug('categorical features are : {}'.format(categotical_features))
# logger.debug('categorical features indexes are : {}'.format(categotical_features))
# PARAMS['categorical_feature'] = categorical_features_idx
if False: # args.with_test:
cv_hist = lightgbm.cv(
params=PARAMS,
folds=folds,
train_set=train_set,
nfold=FOLD_NUM,
verbose_eval=100,
feval=lgb_multi_weighted_logloss,
)
logger.info('best_scores : {}'.format(
np.min(cv_hist['multi_logloss-mean'])))
logger.debug(cv_hist)
elif False:
best_scores = []
trained_models = []
x_train = train_df.drop('target', axis=1).values
y_train = train_df['target'].values
train_columns = train_df.drop('target', axis=1).columns
feature_importance_df = pd.DataFrame()
i = 1
for trn_idx, val_idx in tqdm(list(folds)):
x_trn, x_val = x_train[trn_idx], x_train[val_idx]
y_trn, y_val = y_train[trn_idx], y_train[val_idx]
lgb = lightgbm.LGBMClassifier(**PARAMS)
lgb.fit(x_trn, y_trn,
eval_set=[(x_trn, y_trn), (x_val, y_val)],
verbose=100,
eval_metric=lgb_multi_weighted_logloss,
# eval_metric=weighted_multi_logloss,
# eval_metric='multi_logloss',
)
# logger.info('best_itr : {}'.format(lgb.best_iteration_))
logger.info('best_scores : {}'.format(lgb.best_score_))
best_scores.append(lgb.best_score_['valid_1']['wloss'])
trained_models.append(lgb)
fold_importance_df = pd.DataFrame()
fold_importance_df["feature"] = train_columns
fold_importance_df["importance"] = lgb.feature_importances_
fold_importance_df["fold"] = i
feature_importance_df = pd.concat(
[feature_importance_df, fold_importance_df], axis=0)
i += 1
else:
best_scores = []
team_scores = []
zeropad_scores = []
val_pred_score_zeropads = []
trained_models = []
best_iterations = []
oof = []
x_train = train_df.drop('target', axis=1).values
y_train = le.transform(train_df['target'].values)
train_columns = train_df.drop('target', axis=1).columns
distmod_col = np.where(train_columns == 'distmod')[0]
feature_importance_df = pd.DataFrame()
feature_importance_df['feature'] = train_columns
conf_y_true = []
conf_y_pred = []
i = 1
for trn_idx, val_idx in tqdm(list(folds)):
x_trn, x_val = x_train[trn_idx], x_train[val_idx]
y_trn, y_val = y_train[trn_idx], y_train[val_idx]
fold_resampling_dict = \
get_fold_resampling_dict(
y_trn,
logger,
SAMPLING_LOWER,
SAMPLING_LOWER_RATE)
ros = RandomOverSampler(
ratio=fold_resampling_dict,
random_state=71)
x_trn, y_trn = ros.fit_sample(x_trn, y_trn)
train_dataset = lightgbm.Dataset(x_trn, y_trn)
valid_dataset = lightgbm.Dataset(x_val, y_val)
booster = lightgbm.train(
PARAMS.copy(), train_dataset,
num_boost_round=2000,
fobj=wloss_objective,
feval=wloss_metric,
valid_sets=[train_dataset, valid_dataset],
verbose_eval=100,
early_stopping_rounds=100
)
logger.debug('valid info : {}'.format(booster.best_score))
logger.info('best score : {}'.format(booster.best_score['valid_1']['wloss']))
logger.info('best iteration : {}'.format(booster.best_iteration))
best_scores.append(booster.best_score['valid_1']['wloss'])
best_iterations.append(booster.best_iteration)
trained_models.append(booster)
fold_importance_df = pd.DataFrame()
fold_importance_df["feature"] = train_columns
fold_importance_df["importance_{}".format(i)] = booster.feature_importance('gain')
feature_importance_df = feature_importance_df.merge(fold_importance_df, on='feature', how='left')
#feature_importance_df = pd.concat(
# [feature_importance_df, fold_importance_df], axis=0)
val_pred_score = softmax(booster.predict(x_val, raw_score=False))
val_pred_score_zeropad = booster.predict(x_val, raw_score=False)
oof.append([val_pred_score_zeropad, y_val])
gal_cols = [0, 2, 5, 8, 12]
ext_gal_cols = [1, 3, 4, 6, 7, 9, 10, 11, 13]
gal_rows = np.where(np.isnan(np.array(x_val[:, distmod_col], dtype=float)))[0]
ext_gal_rows = np.where(~np.isnan(np.array(x_val[:, distmod_col], dtype=float)))[0]
#val_pred_score_zeropad.loc[ext_gal_rows, gal_cols] = 0.
#val_pred_score_zeropad.loc[gal_rows, ext_gal_cols] = 0.
zeropad_score = wloss_metric_for_zeropad(
val_pred_score_zeropad, valid_dataset,
gal_cols=gal_cols, ext_gal_cols=ext_gal_cols,
gal_rows=gal_rows, ext_gal_rows=ext_gal_rows)
logger.info('zeropad score : {}'.format(zeropad_score))
team_score = calc_team_score(y_val, val_pred_score)
logger.info('team score : {}'.format(team_score))
team_scores.append(team_score)
zeropad_scores.append(zeropad_score)
val_pred_score_zeropads.append(pd.concat([pd.DataFrame(val_pred_score_zeropad), pd.Series(y_val)], axis=1))
conf_y_true.append(y_val)
conf_y_pred.append(np.argmax(val_pred_score, axis=1))
i += 1
mean_best_score = np.mean(best_scores)
mean_team_score = np.mean(team_scores)
mean_best_iteration = np.mean(best_iterations)
mean_zeropads_score = np.mean(np.array(zeropad_scores, dtype=float))
logger.info('mean valid score is {}'.format(mean_best_score))
logger.info('mean team score is {}'.format(mean_team_score))
logger.info('mean best iteration is {}'.format(mean_best_iteration))
#logger.info('mean zeropad score is {}'.format(mean_zeropads_score))
val_pred_score_zeropads_path = './val_pred_score_zeropads/{}_weight-multi-logloss-{:.6}_{}.pkl'\
.format(trained_models[0].__class__.__name__,
mean_zeropads_score,
start_time, )
with open(val_pred_score_zeropads_path, 'wb') as fout:
pickle.dump(val_pred_score_zeropads, fout)
oof_path = './oof/{}_weight-multi-logloss-{:.6}_{}.pkl'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
with open(oof_path, 'wb') as fout:
pickle.dump(oof, fout)
models_path = './trained_models/{}_weight-multi-logloss-{:.6}_{}.pkl'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving models to {} ...'.format(models_path))
save_models(trained_models, models_path)
imp_path = './importances/{}_weight-multi-logloss-{:.6}_{}_importance.png'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving importance to {} ...'.format(imp_path))
save_importance(feature_importance_df, imp_path)
conf_path = './confusion_matrices/{}_weight-multi-logloss-{:.6}_{}_confusion.png'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving confusion matrix to {} ...'.format(conf_path))
conf_y_pred = np.concatenate(conf_y_pred)
conf_y_true = np.concatenate(conf_y_true)
cm = confusion_matrix(conf_y_true, conf_y_pred)
classes = ['class_' + str(clnum)
for clnum in [6, 15, 16, 42, 52, 53, 62, 64, 65, 67, 88, 90, 92, 95]]
cm_df = pd.DataFrame(cm, index=classes, columns=classes)
cm_df[cm_df.columns] = cm_df.values / cm_df.sum(axis=1).values.reshape(-1, 1)
plt.figure(figsize=(14, 14))
sns.heatmap(cm_df, annot=True, cmap=plt.cm.Blues)
#plt.imshow(cm_df.values, interpolation='nearest', cmap=plt.cm.Blues)
plt.title('score : {}'.format(mean_best_score))
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.savefig(conf_path)
if args.with_test:
logger.info('start linear interpolation training')
interpolated_num_boost_round =\
int(mean_best_iteration * FOLD_NUM / (FOLD_NUM - 1))
logger.info('the num boost round is {}'.format(interpolated_num_boost_round))
fold_resampling_dict = \
get_fold_resampling_dict(
y_train,
logger,
SAMPLING_LOWER,
SAMPLING_LOWER_RATE)
ros = RandomOverSampler(
ratio=fold_resampling_dict,
random_state=71)
x_train, y_train = ros.fit_sample(x_train, y_train)
train_dataset = lightgbm.Dataset(x_train, y_train)
lin_booster = lightgbm.train(
PARAMS.copy(), train_dataset,
num_boost_round=interpolated_num_boost_round,
fobj=wloss_objective,
feval=wloss_metric,
valid_sets=[train_dataset, ],
verbose_eval=100,
early_stopping_rounds=100
)
logger.info('best score : {}'.format(lin_booster.best_score))
models_path = './trained_models/{}_weight-multi-logloss-{:.6}_{}_linear_interpolated.pkl'\
.format(lin_booster.__class__.__name__,
mean_best_score,
start_time, )
logger.info('saving models to {} ...'.format(models_path))
save_models(lin_booster, models_path)
# logger.info('loading test_set.csv')
# test_set_df = pd.read_feather(
# BASE_DIR + 'test_set.fth', nthreads=args.nthread)
logger.info('loading test_set_metadata.csv')
test_set_metadata_df = pd.read_csv(
BASE_DIR + 'test_set_metadata.csv')
# object_ids = test_set_metadata_df.object_id
logger.info('feature engineering for test set...')
test_df = feature_engineering(
None,
test_set_metadata_df,
nthread=args.nthread,
test_flg=True,
logger=logger)
with open('./lcfit/LCfit_features_test_20181130.pkl', 'rb') as fin:
test_df = test_df.merge(pickle.load(fin), on='object_id', how='left')
# test_df = feature_engineering(
# test_set_df,
# test_set_metadata_df,
# nthread=args.nthread,
# test_flg=True,
# logger=logger)
test_df.reset_index(drop=True).to_feather('./test_dfs/test_df_for_nn.fth')
object_ids = test_df.object_id
test_df.drop('object_id', axis=1, inplace=True)
logger.info(f'test cols {test_df.columns.tolist()}')
x_test = test_df.values
logger.info(f'test size: {x_test.shape}')
logger.info('predicting')
test_reses = []
for lgb in tqdm(trained_models):
test_reses.append(
softmax(lgb.predict(x_test, raw_score=False)))
# test_reses.append(lgb.predict_proba(x_test, raw_score=False))
# res = np.clip(np.mean(test_reses, axis=0),
# 10**(-15), 1 - 10**(-15))
# prediction of linear interpolated
lin_test_res = \
softmax(lin_booster.predict(x_test, raw_score=False))
# test_reses.append(lin_test_res)
# temp_filename = './temp/{}_weight-multi-logloss-{:.6}_{}_res.csv'\
# .format(trained_models[0].__class__.__name__,
# mean_best_score,
# start_time,)
# with open(temp_filename, 'wb') as fout:
# pickle.dump(test_reses + [lin_test_res], fout)
res = np.clip(np.mean(
[np.mean(test_reses, axis=0),
lin_test_res],
axis=0),
10**(-15), 1 - 10**(-15))
preds_99 = np.ones((res.shape[0]))
for i in range(res.shape[1]):
preds_99 *= (1 - res[:, i])
preds_99 = 0.14 * preds_99 / np.mean(preds_99)
#res *= 8/9
#preds_99 = 1/9
# res = np.concatenate((res, preds_99), axis=1)
# res = np.concatenate((res, np.zeros((res.shape[0], 1))), axis=1)
logger.info('now creating the submission file ...')
res_df = pd.DataFrame(res, columns=[
'class_6',
'class_15',
'class_16',
'class_42',
'class_52',
'class_53',
'class_62',
'class_64',
'class_65',
'class_67',
'class_88',
'class_90',
'class_92',
'class_95',
# 'class_99',
])
res_df['class_99'] = preds_99
submission_file_name = './submissions/{}_weight-multi-logloss-{:.6}_{}.csv'\
.format(trained_models[0].__class__.__name__,
mean_best_score,
start_time,)
logger.info(
'saving the test result to {}'.format(submission_file_name))
pd.concat([object_ids, res_df], axis=1)\
.to_csv(submission_file_name, index=False)
logger.info('finish !')
if __name__ == '__main__':
args = parse_args()
main(args)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,693 | guchio3/kaggle-plasticc | refs/heads/master | /tools/features.py | import re
import time
import os
from tqdm import tqdm
from abc import ABCMeta, abstractmethod
from pathlib import Path
from contextlib import contextmanager
import pickle
import pandas as pd
def tomap(args):
return getattr(args[0], args[1])(*args[2:])
def toapply(cls, mtd_name, *args, **kwargs):
return getattr(cls, mtd_name)(*args, **kwargs)
class MulHelper(object):
def __init__(self, cls, mtd_name):
self.cls = cls
self.mtd_name = mtd_name
def __call__(self, *args, **kwargs):
return getattr(self.cls, self.mtd_name)(*args, **kwargs)
class featureCreator(metaclass=ABCMeta):
"""
If features were loaded and saved one by one, concatenating tens of
thousands of them would get very slow, so this class handles features
per feature-group instead. When the same data is reused repeatedly,
pass it in via src_df_dict at init; any other data is loaded at the
dataframe level in _load(), and saving is also done at the dataframe
level.
"""
def __init__(self, load_dir=None, save_dir=None,
src_df_dict=None, logger=None, nthread=1):
if load_dir:
self.load_dir = load_dir if load_dir[-1] == '/' else load_dir + '/'
elif not src_df_dict:
raise ValueError('please set load_dir or src_df_dict at least.')
if save_dir:
self.save_dir = save_dir if save_dir[-1] == '/' else save_dir + '/'
self.logger = logger
self.nthread = nthread
self.name = self.__class__.__name__
if src_df_dict:
self.src_df_dict = src_df_dict
else:
self.src_df_dict = {}
self.df_dict = {}
def _log_print(self, message):
if self.logger:
self.logger.info(message)
else:
print(message)
@contextmanager
def _timer(self):
t0 = time.time()
start_str = f'[{self.name}] start'
self._log_print(start_str)
try:
yield
finally:
end_str = f'[{self.name}] done in {time.time() - t0:.0f} s'
self._log_print(end_str)
@abstractmethod
def _create_features(self):
'''
create features, and hold the result df as self.df.
'''
raise NotImplementedError
def _load_dfs_from_paths(self, path_dict):
'''
path_dict maps df_name -> path_name
'''
self._log_print('now loading features ...')
self._log_print(f'the path dict is {path_dict}')
for df_name in tqdm(path_dict):
path = path_dict[df_name]
ext_name = path.split('.')[-1]
if ext_name == 'csv':
_df = pd.read_csv(path)
elif ext_name == 'ftr' or ext_name == 'fth':
_df = pd.read_feather(path, nthreads=self.nthread)
elif ext_name == 'pkl':
with open(path, 'rb') as fin:
_df = pickle.load(fin)
else:
self._log_print(f'the extension {ext_name} is not supported yet.')
raise NotImplementedError
self.src_df_dict[df_name] = _df
@abstractmethod
def _load(self):
raise NotImplementedError
#loaded_features = []
#for col in tqdm(load_cols):
# load_filename = self.load_dir + str(col) + '.ftr'
# self._log_print(f'loading {col} from {load_filename}')
# loaded_features.append(pd.read_feather(load_filename, nthreads=self.nthread))
def run(self):
with self._timer():
self._load()
with self._timer():
self._create_features()
return self
def save(self):
if len(self.df_dict) > 0:
for key in self.df_dict:
save_filename = self.save_dir + key + '.ftr'
self.df_dict[key].to_feather(save_filename)
#for col in tqdm(self.df.columns):
# save_filename = self.save_dir + str(col) + '.ftr'
# if os.path.isfile(save_filename):
# self._log_print(f'saving {col} to {save_filename}')
# self.df[col].to_feather(save_filename)
else:
self._log_print('The creator does not have any dfs to save.')
self._log_print('Try creating features using run() at first.')
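# A minimal subclass sketch (hypothetical names, for illustration only):
# class MetaFeatureCreator(featureCreator):
#     def _load(self):
#         self._load_dfs_from_paths({'meta': self.load_dir + 'meta.csv'})
#     def _create_features(self):
#         self.df_dict['meta_features'] = self.src_df_dict['meta'].copy()
# MetaFeatureCreator(load_dir='./features/', save_dir='./features/').run().save()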
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,694 | guchio3/kaggle-plasticc | refs/heads/master | /tools/feature_tools.py | import pandas as pd
import numpy as np
from scipy import signal
from scipy.stats import kurtosis
import gc
from multiprocessing import Pool
from tqdm import tqdm
import warnings
import cesium.featurize as featurize
from tsfresh.feature_extraction import extract_features
warnings.simplefilter('ignore', RuntimeWarning)
warnings.filterwarnings('ignore')
np.random.seed(71)
# =======================================
# util functions
# =======================================
def split_idxes(df, nthread, logger, nclass=14):
logger.info('calculating uniq object_id num')
object_ids = df.object_id.unique()
logger.info('getting groups')
groups = np.array_split(object_ids, nclass)
logger.info('splitting df')
idxes = [df[df.object_id.isin(group)].index for group in groups]
return idxes
def get_group_df(df_and_group):
df, group = df_and_group
return df[df.object_id.isin(set(group))]
def split_dfs(df, nthread, logger, save_flg=False):
logger.info('calculating uniq object_id num')
object_ids = df.object_id.unique()
logger.info('getting groups')
groups = np.array_split(object_ids, nthread)
logger.info('splitting df')
dfs = []
for group in tqdm(list(groups)):
dfs.append(df[df.object_id.isin(set(group))])
if save_flg:
logger.info('saving the split dfs...')
for i, df in tqdm(list(enumerate(dfs))):
df.reset_index().to_feather('./test_dfs/{}.fth'.format(i))
return dfs
def load_test_set_dfs(nthread, logger):
logger.info('loading dfs...')
dfs_paths = [
'/home/naoya.taguchi/workspace/kaggle/plasticc-2018_after_pack/test_dfs/{}.fth'.format(i) for i in range(62)]
p = Pool(nthread)
dfs = p.map(pd.read_feather, dfs_paths)
p.close()
p.join()
logger.info('done')
return dfs
# def normalize_flux(set_df, new_flux_name='flux'):
# normalize_base_df = set_df.groupby('object_id').\
# flux.median().\
# reset_index().\
# rename(columns={'flux': 'flux_median'})
# normalize_bases = set_df.merge(
# normalize_base_df,
# on='object_id',
# how='left').flux_median
# set_df[new_flux_name] = set_df.flux
# set_df[new_flux_name] /= normalize_bases
# return set_df
def _normalize_flux(set_df):
flux_band_stat_df = set_df.groupby(['object_id', 'passband']).\
agg({'flux': ['mean', 'std']}).\
reset_index()
flux_band_stat_df.columns = pd.Index(
[e[0] + "_" + e[1] for e in flux_band_stat_df.columns.tolist()])
stats_for_normalize = set_df.merge(
flux_band_stat_df,
on=['object_id', 'passband'],
how='left')
set_df['flux'] -= stats_for_normalize.flux_mean
set_df['flux'] /= stats_for_normalize.flux_std
del flux_band_stat_df, stats_for_normalize
gc.collect()
return set_df
def normalise(ts):
return (ts - ts.mean()) / ts.std()
def get_phase_features(set_df):
groups = set_df.groupby(['object_id', 'passband'])
# times = groups.apply(lambda block: block['mjd'].values).\
times = groups.apply(lambda block: block['phase'].values).\
reset_index().\
rename(columns={0: 'seq'})
flux = groups.apply(lambda block: normalise(block['flux']).values).\
reset_index().\
rename(columns={0: 'seq'})
times_list = times.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
flux_list = flux.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
warnings.simplefilter('ignore', RuntimeWarning)
phase_df = featurize.\
featurize_time_series(times=times_list,
values=flux_list,
features_to_use=['freq1_freq',
# 'freq1_signif',
# 'freq1_amplitude1',
'freq2_freq',
# 'freq2_amplitude1',
# 'percent_beyond_1_std',
'freq3_freq',
],
scheduler=None)
phase_df.columns = [str(e[0]) + '_' + str(e[1])
for e in phase_df.columns.tolist()]
# phase_df['object_id'] = times.object_id
del times, flux, times_list, flux_list
gc.collect()
return phase_df
def _calc_pogson_magnitude(flux):
return 22.5 - 2.5 * np.log10(flux)
#def calc_luminosity(flux, lumi_dist):
# luminosity = 4*np.pi*(lumi_dist)
# return luminosity
def add_corrected_flux(set_df, set_metadata_df):
# _set_metadata_df = set_metadata_df[
# (set_metadata_df.hostgal_photoz_err < 0.5) &
# (set_metadata_df.hostgal_photoz_err > 0.)]
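# Distance modulus relation: mu = 5*log10(d_pc) - 5, hence
# d_pc = 10**((mu + 5) / 5); distmod plays the role of mu here.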
set_metadata_df['lumi_dist'] = 10**((set_metadata_df.distmod+5)/5)
_set_metadata_df = set_metadata_df
set_df = set_df.merge(
_set_metadata_df[['object_id', 'hostgal_photoz', 'lumi_dist']],
on='object_id',
how='left')
set_df['corrected_flux'] = set_df.flux / (set_df.hostgal_photoz**2)
set_df['normed_flux'] = (set_df.flux - set_df.flux.min()) / set_df.flux.max()
set_df['luminosity'] = 4*np.pi*(set_df.lumi_dist**2)*set_df.flux
return set_df
# =======================================
# feature functions
# =======================================
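# The "starter" statistics below weight each observation by its squared
# signal-to-noise, w_i = (flux_i / flux_err_i)**2, so for example
# weighted_mean = sum(flux_i * w_i) / sum(w_i).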
def weighted_mean(flux, dflux):
return np.sum(flux * (flux / dflux)**2) /\
np.sum((flux / dflux)**2)
def normalized_flux_std(flux, wMeanFlux):
return np.std(flux / wMeanFlux, ddof=1)
def normalized_amplitude(flux, wMeanFlux):
return (np.max(flux) - np.min(flux)) / wMeanFlux
def normalized_MAD(flux, wMeanFlux):
return np.median(np.abs((flux - np.median(flux)) / wMeanFlux))
def beyond_1std(flux, wMeanFlux):
return sum(np.abs(flux - wMeanFlux) > np.std(flux, ddof=1)) / len(flux)
def get_starter_features(_id_grouped_df):
f = _id_grouped_df.flux
df = _id_grouped_df.flux_err
m = weighted_mean(f, df)
std = normalized_flux_std(f, df)
amp = normalized_amplitude(f, m)
mad = normalized_MAD(f, m)
beyond = beyond_1std(f, m)
return m, std, amp, mad, beyond
def get_flux_mjd_diff(df):
return df.flux.diff()/df.mjd.diff()
def get_flux_mjd_diff_mean(df):
return get_flux_mjd_diff(df).mean()
def get_flux_mjd_diff_max(df):
return get_flux_mjd_diff(df).max()
def get_flux_mjd_diff_min(df):
return get_flux_mjd_diff(df).min()
def get_flux_mjd_diff_std(df):
return get_flux_mjd_diff(df).std()
def get_flux_mjd_diff_var(df):
return get_flux_mjd_diff(df).var()
def diff_mean(x):
return x.diff().mean()
def diff_max(x):
return x.diff().max()
def diff_std(x):
return x.diff().std()
def diff_var(x):
return x.diff().var()
def diff_sum(x):
return x.diff().sum()
def get_max_min_diff(x):
return x.max() - x.min()
def quantile10(x):
return x.quantile(0.10)
def quantile25(x):
return x.quantile(0.25)
def quantile75(x):
return x.quantile(0.75)
def quantile90(x):
return x.quantile(0.90)
def quantile95(x):
return x.quantile(0.95)
def minmax_range(x):
return x.max() - x.min()
def quantile2575_range(x):
return quantile75(x) - quantile25(x)
def quantile1090_range(x):
return quantile90(x) - quantile10(x)
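# Example: for x = pd.Series(range(1, 101)), quantile2575_range(x) returns
# 49.5 (the interquartile range) and quantile1090_range(x) returns 79.2.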
# =======================================
# feature engineering part
# =======================================
def _for_set_df(set_df):
# set_df = normalize_flux(set_df)
# min_fluxes = set_df.groupby('object_id').\
# flux.min().\
# reset_index().\
# rename(columns={'flux': '_temp_flux_min'})
# set_df = set_df.merge(min_fluxes, on='object_id', how='left')
# set_df['minused_flux'] = set_df.flux - set_df._temp_flux_min
# set_df.flux -= 0.
    # 25 is roughly the train-set average (of flux_err)
# set_df = set_df[set_df.flux_err < 25]
set_df['flux_ratio_to_flux_err'] = \
set_df['flux'] / set_df['flux_err']
    # 'kurtosis' doesn't seem usable...?
aggregations = {
# 'passband': ['mean', 'std', 'var'],
# 'mjd': ['max', 'min', 'var'],
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
'flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', 'count', kurtosis],
'corrected_flux': ['min', 'max', 'mean', 'median',
'std', 'var', 'skew', ],
'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
'flux_ratio_to_flux_err': ['min', 'max', ],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', 'mean', kurtosis],
'flux_by_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_ratio_sq': ['sum', 'skew', ],
'corrected_flux_by_flux_ratio_sq': ['sum', 'skew'],
# 'luminosity': ['median', 'var', 'skew', kurtosis],
# 'minused_flux': ['min', 'max', 'mean', 'median',
# 'std', 'var', 'skew'],
# 'normed_flux': ['mean', 'median', 'skew'],
# 'diff_flux_by_diff_mjd': ['min', 'max', 'var', ],
}
detected_aggregations = {
'mjd': [get_max_min_diff, 'skew'],
}
# non_detected_aggregations = {
# 'flux': ['var'],
# }
mean_upper_flux_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', ],
# 'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
'flux': ['count'],
# 'mjd': ['min', 'max', 'var', ],
}
std_upper_flux_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', ],
# 'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
'flux': ['count', 'min'],
# 'mjd': ['min', 'max', 'var', ],
}
#quantile10, quantile25, quantile75, quantile90, quantile2575_range, quantile1090_range
passband_aggregations = {
# 'mjd': [diff_mean, diff_max],
# 'phase': [diff_mean, diff_max],
        'flux': ['min', 'max', 'count', 'var', 'mean', 'skew', kurtosis,
                 quantile10, quantile25, quantile75, quantile90,
                 quantile2575_range, quantile1090_range],
'normed_flux': [diff_mean, ],
#'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
# 'flux_err': ['var'],
'detected': ['mean', ],
'flux_ratio_sq': ['sum', 'skew', ],
'flux_by_flux_ratio_sq': ['sum', 'skew'],
}
band_std_upper_flux_aggregations = {
'mjd': [get_max_min_diff, 'var', 'skew', diff_mean],
# 'flux_err': ['min', 'max', 'mean', 'median', 'std', 'var', 'skew', kurtosis],
# 'diff_from_flux_abs_std': ['var'],
'flux': ['count', diff_mean, ],
# 'mjd': ['min', 'max', 'var', ],
# 'diff_flux_by_diff_mjd': ['min', 'max', 'var'],
# 'flux_mjd_diff_rat': [quantile10, quantile25, quantile75, quantile90, quantile2575_range, quantile1090_range],
}
# === run aggregations ===
    # fe before aggregations
set_df['flux_ratio_sq'] = np.power(
set_df['flux'] / set_df['flux_err'], 2.0)
set_df['flux_by_flux_ratio_sq'] = set_df['flux'] * \
set_df['flux_ratio_sq']
set_df['corrected_flux_ratio_sq'] = np.power(
set_df['corrected_flux'] / set_df['flux_err'], 2.0)
set_df['corrected_flux_by_flux_ratio_sq'] = set_df['corrected_flux'] * \
set_df['flux_ratio_sq']
# set_df['diff_flux_by_diff_mjd'] =\
# set_df['flux'].diff() / set_df['mjd'].diff()
fe_set_df = set_df.groupby('object_id').agg({**aggregations})
fe_set_df.columns = pd.Index(
[e[0] + "_" + e[1] for e in fe_set_df.columns.tolist()])
# === run mean upper aggregation ===
    # Transform so we can use the mjd-wise distances between flux points
    # sitting above the mean value.
    # In short, we want to capture the period.
object_flux_mean_df = set_df[['object_id', 'flux']].\
groupby('object_id').\
mean().\
rename(columns={'flux': 'flux_mean'})
mean_upper_flux_df = set_df.merge(
object_flux_mean_df, on='object_id', how='left')
mean_upper_flux_df = mean_upper_flux_df[mean_upper_flux_df.flux >
mean_upper_flux_df.flux_mean]
fe_mean_upper_flux_df = mean_upper_flux_df.groupby('object_id').\
agg({**mean_upper_flux_aggregations})
fe_mean_upper_flux_df.columns = pd.Index(
['mean_upper_' + e[0] + "_" + e[1]
for e in fe_mean_upper_flux_df.columns.tolist()])
# fe_mean_upper_flux_df['mean_upper_mjd_diff'] = \
# fe_mean_upper_flux_df['mean_upper_mjd_max'] - \
# fe_mean_upper_flux_df['mean_upper_mjd_min']
# fe_mean_upper_flux_df.drop(['mjd_max', 'mjd_min'], axis=1, inplace=True)
#### fe_set_df = fe_set_df.merge(
#### fe_mean_upper_flux_df,
#### on='object_id',
#### how='left')
del object_flux_mean_df, mean_upper_flux_df, fe_mean_upper_flux_df
gc.collect()
# === run std upper aggregation ===
object_flux_std_df = set_df[['object_id', 'flux']].\
groupby('object_id').\
std().\
abs().\
rename(columns={'flux': 'flux_abs_std'})
object_flux_mean_df = set_df[['object_id', 'flux']].\
groupby('object_id').\
mean().\
rename(columns={'flux': 'flux_mean'})
std_upper_flux_df = set_df.merge(
object_flux_std_df, on='object_id', how='left')
std_upper_flux_df = std_upper_flux_df.merge(
object_flux_mean_df, on='object_id', how='left')
std_upper_flux_df = std_upper_flux_df[std_upper_flux_df.flux >
abs(std_upper_flux_df.flux_abs_std) +
std_upper_flux_df.flux_mean]
fe_std_upper_flux_df = std_upper_flux_df.groupby('object_id').\
agg({**std_upper_flux_aggregations})
fe_std_upper_flux_df.columns = pd.Index(
['std_upper_' + e[0] + "_" + e[1]
for e in fe_std_upper_flux_df.columns.tolist()])
# fe_std_upper_flux_df['std_upper_mjd_diff'] = \
# fe_std_upper_flux_df['std_upper_mjd_max'] - \
# fe_std_upper_flux_df['std_upper_mjd_min']
# fe_std_upper_flux_df.drop(['mjd_max', 'mjd_min'], axis=1, inplace=True)
fe_set_df = fe_set_df.merge(
fe_std_upper_flux_df,
on='object_id',
how='left')
del object_flux_std_df, std_upper_flux_df, fe_std_upper_flux_df
gc.collect()
# === detected aggregation ===
detected_df = set_df[set_df.detected == 1]
fe_detected_df = detected_df.groupby('object_id').\
agg({**detected_aggregations})
fe_detected_df.columns = pd.Index(
['detected_' + e[0] + "_" + e[1]
for e in fe_detected_df.columns.tolist()])
fe_set_df = fe_set_df.merge(
fe_detected_df,
on='object_id',
how='left')
del detected_df, fe_detected_df
gc.collect()
# phase_feats = pd.DataFrame(set_df.sort_values(['object_id', 'phase']).groupby('object_id').phase.apply(diff_mean))
# fe_set_df = fe_set_df.merge(phase_feats, on='object_id', how='left')
# === non_detected aggregation ===
# non_detected_df = set_df[set_df.detected == 0]
# fe_non_detected_df = non_detected_df.groupby('object_id').\
# agg({**non_detected_aggregations})
# fe_non_detected_df.columns = pd.Index(
# ['non_detected_' + e[0] + "_" + e[1]
# for e in fe_non_detected_df.columns.tolist()])
# fe_set_df = fe_set_df.merge(
# fe_non_detected_df,
# on='object_id',
# how='left')
# del non_detected_df, fe_non_detected_df
# gc.collect()
    # === per-passband processing ===
passband_df = pd.DataFrame(fe_set_df[['flux_count', 'flux_mean']])
passbands = [0, 1, 2, 3, 4, 5]
for passband in passbands:
# _passband_set_df = normalize_flux(set_df[set_df.passband == passband])
_passband_set_df = set_df[set_df.passband == passband]
flux_mjd_diff_rat = _passband_set_df.groupby('object_id').apply(lambda x: x.flux.diff()/x.mjd.diff())
flux_mjd_diff_rat = flux_mjd_diff_rat.reset_index().\
drop(['level_1', 'object_id'], axis=1).\
rename(columns={0: 'flux_mjd_diff_rat'})
_passband_set_df = pd.concat([_passband_set_df, flux_mjd_diff_rat], axis=1)
flux_mjd_diff_rat_rat = _passband_set_df.groupby('object_id').apply(lambda x: x.flux_mjd_diff_rat.diff()/x.mjd.diff())
flux_mjd_diff_rat_rat = flux_mjd_diff_rat_rat.reset_index().\
drop(['level_1', 'object_id'], axis=1).\
rename(columns={0: 'flux_mjd_diff_rat_rat'})
_passband_set_df = pd.concat([_passband_set_df, flux_mjd_diff_rat_rat], axis=1)
# starter kit type fe
starter_fe_series = _passband_set_df.\
groupby('object_id').\
apply(get_starter_features)
starter_fe_df = starter_fe_series.\
apply(lambda x: pd.Series(x)).\
rename(columns={
0: 'band-{}_wmean'.format(passband),
1: 'band-{}_normed_std'.format(passband),
2: 'band-{}_normed_amp'.format(passband),
3: 'band-{}_normed_mad'.format(passband),
4: 'band-{}_beyond_1std'.format(passband),
})
# std upper type fe for each passband
band_object_flux_std_df = _passband_set_df[['object_id', 'flux']].\
groupby('object_id').\
std().\
abs().\
rename(columns={'flux': 'flux_abs_std'})
band_object_flux_mean_df = _passband_set_df[['object_id', 'flux']].\
groupby('object_id').\
mean().\
rename(columns={'flux': 'flux_mean'})
_passband_set_df = _passband_set_df.merge(
band_object_flux_std_df, on='object_id', how='left')
_passband_set_df = _passband_set_df.merge(
band_object_flux_mean_df, on='object_id', how='left')
band_std_upper_flux_df = _passband_set_df[_passband_set_df.flux >
abs(_passband_set_df.flux_abs_std) +
_passband_set_df.flux_mean]
# band_std_upper_flux_df['diff_from_flux_abs_std'] =\
# band_std_upper_flux_df.flux - band_std_upper_flux_df.flux_abs_std
# band_std_upper_flux_df['diff_flux_by_diff_mjd'.format(passband)] =\
# band_std_upper_flux_df['flux'].diff() / band_std_upper_flux_df['mjd'].diff()
band_fe_std_upper_flux_df = band_std_upper_flux_df.groupby('object_id').\
agg({**band_std_upper_flux_aggregations})
band_fe_std_upper_flux_df.columns = pd.Index(
['band-{}_std_upper_'.format(passband) + e[0] + "_" + e[1]
for e in band_fe_std_upper_flux_df.columns.tolist()])
# aggregation type fe
band_fe_set_df = _passband_set_df.\
groupby('object_id').\
agg({**passband_aggregations})
band_fe_set_df.columns = pd.Index(
['band-{}_'.format(passband) + e[0] + "_" + e[1]
for e in band_fe_set_df.columns.tolist()])
band_fe_set_df['band-{}_flux_diff'.format(passband)] = \
band_fe_set_df['band-{}_flux_max'.format(passband)] - \
band_fe_set_df['band-{}_flux_min'.format(passband)]
        # drop because there would be too many features
passband_df = passband_df.merge(
starter_fe_df, on='object_id', how='left')
passband_df = passband_df.merge(
band_fe_set_df, on='object_id', how='left')
passband_df = passband_df.merge(
band_fe_std_upper_flux_df,
on='object_id',
how='left')
# fe after agg merge
passband_df['band-{}_flux_count'.format(passband)] = \
passband_df['band-{}_flux_count'.format(passband)]\
/ passband_df['flux_count']
# passband_df['band-{}_flux_mean_diff'.format(passband)] = \
# passband_df['flux_mean'.format(passband)] - \
# passband_df['band-{}_flux_mean'.format(passband)]
### passband_df['band-{}_std_upper_count_rat'.format(passband)] = \
### passband_df['band-{}_std_upper_flux_count'.format(passband)]\
### / passband_df['band-{}_flux_count'.format(passband)]
gc.collect()
# feature engineering for passband_df
for lpb in passbands:
rpb = (lpb + 1) % 6
lMean = passband_df['band-{}_wmean'.format(lpb)]
rMean = passband_df['band-{}_wmean'.format(rpb)]
lstd = passband_df['band-{}_normed_std'.format(lpb)]
rstd = passband_df['band-{}_normed_std'.format(rpb)]
lamp = passband_df['band-{}_normed_amp'.format(lpb)]
ramp = passband_df['band-{}_normed_amp'.format(rpb)]
# lmad = passband_df['band-{}_normed_mad'.format(lpb)]
# rmad = passband_df['band-{}_normed_mad'.format(rpb)]
# l1std = passband_df['band-{}_beyond_1std'.format(lpb)]
# r1std = passband_df['band-{}_beyond_1std'.format(rpb)]
mean_diff = -2.5 * np.log10(lMean / rMean)
std_diff = lstd - rstd
amp_diff = lamp - ramp
# mad_diff = lmad-rmad
# beyond_diff = l1std-r1std
mean_diff_colname = '{}_minus_{}_wmean'.format(lpb, rpb)
std_diff_colname = '{}_minus_{}_std'.format(lpb, rpb)
amp_diff_colname = '{}_minus_{}_amp'.format(lpb, rpb)
# mad_diff_colname = '{}_minus_{}_mad'.format(lpb, rpb)
# beyond_diff_colname = '{}_minus_{}_beyond'.format(lpb, rpb)
passband_df[mean_diff_colname] = mean_diff
passband_df[std_diff_colname] = std_diff
passband_df[amp_diff_colname] = amp_diff
# passband_df[mad_diff_colname] = mad_diff
# passband_df[beyond_diff_colname] = beyond_diff
# passband_df[(lMean <= 0) | (rMean <= 0)][mean_diff_colname] = -999
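    # The -2.5 * log10 form above is the standard astronomical magnitude
    # difference (a colour index) between adjacent passbands; e.g. with
    # hypothetical means lMean = 100 and rMean = 10, mean_diff = -2.5.
    # It is undefined (NaN/inf) when either mean is non-positive, hence the
    # commented-out -999 fallback above.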
fe_set_df = fe_set_df.merge(
passband_df.drop([
'flux_count',
'flux_mean',
],
axis=1),
on='object_id',
how='left')
del _passband_set_df, starter_fe_series, starter_fe_df, \
band_fe_set_df, passband_df
gc.collect()
# feature engineering after aggregations
fe_set_df['flux_diff'] = fe_set_df['flux_max'] - fe_set_df['flux_min']
fe_set_df['flux_dif2'] = (fe_set_df['flux_max'] - fe_set_df['flux_min'])\
/ fe_set_df['flux_mean']
fe_set_df['flux_w_mean'] = fe_set_df['flux_by_flux_ratio_sq_sum'] / \
fe_set_df['flux_ratio_sq_sum']
fe_set_df['flux_dif3'] = (fe_set_df['flux_max'] - fe_set_df['flux_min'])\
/ fe_set_df['flux_w_mean']
fe_set_df['corrected_flux_diff'] = fe_set_df['corrected_flux_max'] - fe_set_df['corrected_flux_min']
fe_set_df['corrected_flux_dif2'] = (fe_set_df['corrected_flux_max'] - fe_set_df['corrected_flux_min'])\
/ fe_set_df['corrected_flux_mean']
fe_set_df['corrected_flux_w_mean'] = fe_set_df['corrected_flux_by_flux_ratio_sq_sum'] / \
fe_set_df['corrected_flux_ratio_sq_sum']
fe_set_df['corrected_flux_dif3'] = (fe_set_df['corrected_flux_max'] - fe_set_df['corrected_flux_min'])\
/ fe_set_df['corrected_flux_w_mean']
fe_set_df['std_upper_rat'] = fe_set_df['std_upper_flux_count'] / fe_set_df['flux_count']
passband_flux_maxes = \
['band-{}_flux_max'.format(i) for i in passbands]
# fe_set_df['passband_flux_maxes_var'] = \
# fe_set_df[passband_flux_maxes].var(axis=1)
for passband_flux_max in passband_flux_maxes:
fe_set_df[passband_flux_max + '_ratio_to_the_max'] = \
fe_set_df[passband_flux_max] / fe_set_df['flux_max']
# passband_maxes = fe_set_df[passband_flux_maxes].values
# passband_maxes_argmaxes = np.argmax(passband_maxes, axis=1)
# fe_set_df['passband_maxes_argmaxes'] = passband_maxes_argmaxes
# fe_set_df[passband_flux_max + '_from_the_max'] = \
# fe_set_df['flux_max'] - fe_set_df[passband_flux_max]
# passband_flux_maxes_from_the_max = \
# ['band-{}_flux_max_from_the_max'.format(i) for i in passbands]
# passband_flux_maxes_from_the_max_value = fe_set_df[passband_flux_maxes_from_the_max].values
# passband_flux_maxes_from_the_max_value.sort(axis=1)
# fe_set_df['2nd_passband_flux_max_diff'] = passband_flux_maxes_from_the_max_value[:,1]
# fe_set_df['3rd_passband_flux_max_diff'] = passband_flux_maxes_from_the_max_value[:,2]
# fe_set_df['2nd_passband_flux_max_diff_rat'] = fe_set_df['2nd_passband_flux_max_diff'] / fe_set_df.flux_max
# fe_set_df['3rd_passband_flux_max_diff_rat'] = fe_set_df['3rd_passband_flux_max_diff'] / fe_set_df.flux_max
passband_flux_mins = \
['band-{}_flux_min'.format(i) for i in passbands]
fe_set_df['passband_flux_min_var'] = \
fe_set_df[passband_flux_mins].var(axis=1)
# for passband_flux_min in passband_flux_mins:
# fe_set_df[passband_flux_min + '_ratio_to_the_min'] = \
# fe_set_df[passband_flux_min] / fe_set_df['flux_min']
passband_flux_means = \
['band-{}_flux_mean'.format(i) for i in passbands]
fe_set_df['passband_flux_means_var'] = \
fe_set_df[passband_flux_means].var(axis=1)
passband_flux_counts = \
['band-{}_flux_count'.format(i) for i in passbands]
fe_set_df['passband_flux_counts_var'] = \
fe_set_df[passband_flux_counts].var(axis=1)
passband_detected_means = \
['band-{}_detected_mean'.format(i) for i in passbands]
fe_set_df['passband_detected_means_var'] = \
fe_set_df[passband_detected_means].var(axis=1)
# passband_flux_ratio_sq_sum = \
# ['band-{}_flux_ratio_sq_sum'.format(i) for i in passbands]
# fe_set_df['passband_flux_ratio_sq_sum_var'] = \
# fe_set_df[passband_flux_ratio_sq_sum].var(axis=1)
# passband_flux_ratio_sq_skew = \
# ['band-{}_flux_ratio_sq_skew'.format(i) for i in passbands]
# fe_set_df['passband_flux_ratio_sq_skew_var'] = \
# fe_set_df[passband_flux_ratio_sq_skew].var(axis=1)
    # the variance of each band's missing-data rate also looks promising
passband_flux_vars = \
['band-{}_flux_var'.format(i) for i in passbands]
passband_flux_diffs = \
['band-{}_flux_diff'.format(i) for i in passbands]
fe_set_df['band_flux_diff_max'] = fe_set_df[passband_flux_diffs].max(axis=1)
fe_set_df['band_flux_diff_min'] = fe_set_df[passband_flux_diffs].min(axis=1)
fe_set_df['band_flux_diff_diff'] = fe_set_df['band_flux_diff_max'] - fe_set_df['band_flux_diff_min']
fe_set_df['band_flux_diff_diff_rat'] = fe_set_df['band_flux_diff_diff'] / fe_set_df['band_flux_diff_max']
fe_set_df['band_flux_max_min_rat'] = fe_set_df['band_flux_diff_min'] / fe_set_df['band_flux_diff_max']
    # finally, drop the features that are no longer needed
drop_cols = [
'flux_ratio_sq_sum',
]
drop_cols += passband_flux_counts
drop_cols += passband_flux_maxes
drop_cols += passband_flux_mins
drop_cols += passband_flux_means
# drop_cols += passband_flux_maxes_from_the_max
# drop_cols += passband_flux_ratio_sq_sum
fe_set_df.drop(drop_cols, axis=1, inplace=True)
# clear memory
# del set_df
# gc.collect()
return fe_set_df
def get_tsfresh_feats(set_df, nthread):
# tsfresh features
fcp = {
'flux': {
'longest_strike_above_mean': None,
'longest_strike_below_mean': None,
'mean_change': None,
'mean_abs_change': None,
'length': None,
# 'number_peaks': [{'n': 1}],
# 'fft_coefficient': [
# {'coeff': 0, 'attr': 'abs'},
# {'coeff': 1, 'attr': 'abs'}
# ],
# 'binned_entropy': [{'max_bin': 20}],
# 'agg_linear_trend': None,
# 'number_cwt_peaks': None,
},
'flux_by_flux_ratio_sq': {
'longest_strike_above_mean': None,
'longest_strike_below_mean': None,
},
'mjd': {
'maximum': None,
'minimum': None,
'mean_change': None,
'mean_abs_change': None,
},
}
    # alternative tsfresh feature settings (kept for reference)
#fcp = {'fft_coefficient': [{'coeff': 0, 'attr': 'abs'},{'coeff': 1, 'attr': 'abs'}],
# 'kurtosis' : None,
# 'skewness' : None}
agg_df_ts = extract_features(
set_df,
column_id='object_id',
column_sort='mjd',
column_kind='passband',
        column_value='flux',
        default_fc_parameters=fcp['flux'],
n_jobs=nthread)
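    # Note: since column_kind='passband' is passed, tsfresh computes each
    # feature once per passband, yielding columns like '0__length'; the result
    # is indexed by 'id' (the object_id), which is renamed downstream. Only
    # fcp['flux'] is passed here, so the other fcp entries are unused.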
return agg_df_ts
def feature_engineering(set_df, set_metadata_df, nthread,
logger, test_flg=False):
logger.info('getting split dfs ...')
if test_flg:
set_dfs = load_test_set_dfs(nthread, logger)
#set_dfs = split_dfs(set_df, nthread, logger, save_flg=True)
else:
set_dfs = split_dfs(set_df, nthread, logger)
logger.info('adding corrected flux...')
for i, _set_df in tqdm(enumerate(set_dfs)):
set_dfs[i] = add_corrected_flux(_set_df, set_metadata_df)
del _set_df
gc.collect()
    logger.info('start feature engineering ...')
p = Pool(nthread)
set_res_list = p.map(_for_set_df, set_dfs)
p.close()
p.join()
set_res_df = pd.concat(set_res_list, axis=0)
gc.collect()
if test_flg:
ts_set_df = pd.read_feather('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/' + 'test_set.fth', nthreads=nthread)
else:
ts_set_df = pd.read_csv('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/' + 'training_set.csv')
tsfresh_df = get_tsfresh_feats(ts_set_df, nthread).reset_index().rename(columns={'id': 'object_id'})
set_res_df = set_res_df.merge(tsfresh_df, on='object_id', how='left')
set_res_df.reset_index(inplace=True)
# p = Pool(nthread)
# phase_res_list = p.map(get_phase_features, set_dfs)
# p.close()
# p.join()
# phase_df = pd.concat(phase_res_list, axis=0).reset_index(drop=True)
# phase_dfs = []
# for df in tqdm(set_dfs):
# phase_dfs.append(get_phase_features(df))
# phase_df = pd.concat(phase_dfs, axis=0).reset_index(drop=True)
# phase_df.to_csv('./temp.csv', index=False)
# phase_df = pd.read_csv('./temp.csv').reset_index(drop=True)
# print(phase_df)
# print(set_res_df)
# fe_set_df = fe_set_df.merge(phase_df, on='object_id')
# set_res_df = pd.concat([set_res_df, phase_df], axis=1)
# del set_df, phase_df
del set_df
gc.collect()
logger.info('post processing ...')
res_df = set_metadata_df.merge(set_res_df, on='object_id', how='left')
res_df['internal'] = res_df.hostgal_photoz == 0.
#res_df['ihostcal_photoz_cetain'] = np.multiply(res_df['hostgal_photoz'].values, np.exp(res_df['hostgal_photoz_err'].values))
# res_df['hostgal_photoz_square'] = np.power(res_df.hostgal_photoz, 2)
# res_df['detected_mjd_get_max_min_diff_corrected'] =\
# res_df['detected_mjd_get_max_min_diff'] / (1 + res_df['hostgal_photoz'])
#res_df.drop(['object_id', 'hostgal_specz', 'hostgal_photoz', 'ra', 'decl',
res_df.drop(['hostgal_specz', 'hostgal_photoz', 'ra', 'decl',
'gal_l', 'gal_b', 'ddf', 'mwebv', 'index'], axis=1, inplace=True)
#feats_df = pd.read_csv('./importances/Booster_weight-multi-logloss-0.579991_2018-11-20-13-06-10_importance.csv')
#feats_df = pd.read_csv('./importances/Booster_weight-multi-logloss-0.577933_2018-11-29-19-53-14_importance.csv')
#res_df = res_df.drop(list(reversed(feats_df.feature.tolist()))[:132], axis=1)
#res_df = res_df.drop(feats_df.feature.tolist()[:170], axis=1)
#res_df = res_df.replace(np.inf, np.nan)
#res_df = res_df.replace(-np.inf, np.nan)
del set_res_df
gc.collect()
return res_df
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,695 | guchio3/kaggle-plasticc | refs/heads/master | /from_onoderasan.py | import numpy as np
import pandas as pd
import sys, os, gc
sys.path.append(f'/home/{os.environ.get("USER")}/PythonLibrary')
#import lgbextension as ex
import lightgbm as lgb
from multiprocessing import cpu_count
#import utils, utils_metric
import tools.objective_function as utils_metric
X = pd.read_pickle('./features/onodera_feats/X_train_1_1217-1.pkl.gz')
#y = utils.load_target().target
y = pd.read_csv('/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set_metadata.csv').target
target_dict = {}
target_dict_r = {}
for i,e in enumerate(y.sort_values().unique()):
target_dict[e] = i
target_dict_r[i] = e
y = y.replace(target_dict)
if X.columns.duplicated().sum()>0:
raise Exception(f'duplicated!: { X.columns[X.columns.duplicated()] }')
print('no dup :) ')
print(f'X.shape {X.shape}')
gc.collect()
SEED = np.random.randint(9999)
np.random.seed(SEED)
print('SEED:', SEED)
NFOLD = 5
param = {
'objective': 'multiclass',
'num_class': 14,
'metric': 'multi_logloss',
'learning_rate': 0.5,
'max_depth': 3,
'num_leaves': 63,
'max_bin': 127,
'min_child_weight': 10,
'min_data_in_leaf': 150,
'reg_lambda': 0.5, # L2 regularization term on weights.
'reg_alpha': 0.5, # L1 regularization term on weights.
'colsample_bytree': 0.5,
'subsample': 0.9,
# 'nthread': 32,
'nthread': cpu_count(),
'bagging_freq': 1,
'verbose':-1,
}
dtrain = lgb.Dataset(X, y.values, #categorical_feature=CAT,
free_raw_data=False)
gc.collect()
param['seed'] = np.random.randint(9999)
ret, models = lgb.cv(param, dtrain, 99999, nfold=NFOLD,
fobj=utils_metric.wloss_objective,
feval=utils_metric.wloss_metric,
early_stopping_rounds=100, verbose_eval=50,
seed=SEED)
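# Note: with a custom fobj, LightGBM returns raw scores rather than
# probabilities, so predictions from these models need a softmax (as done in
# tools/objective_function.py) before scoring or submission.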
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,696 | guchio3/kaggle-plasticc | refs/heads/master | /utils/resubmit_w_oliver_99.py | import pandas as pd
import numpy as np
df = pd.read_csv('../submissions/LGBMClassifier_weight-multi-logloss-0.935157_2018-10-28-13-14-25.csv')
_df = df.drop(['object_id', 'class_99'], axis=1).values
preds_99 = np.ones(_df.shape[0])
for i in range(_df.shape[1]):
preds_99 *= (1 - _df[:, i])
df['class_99'] = 0.14 * preds_99 / np.mean(preds_99)
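# Sketch of the heuristic above: a row dominated by one class (p ~ 0.9) gives
# prod(1 - p_i) ~ 0.09, while a flat 14-class row (p = 1/14 each) gives
# (13/14)**14 ~ 0.35, so class_99 mass flows to objects that no known class
# explains well; the 0.14 factor only fixes the mean of the column.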
df.to_csv('../submissions/LGBMClassifier_weight-multi-logloss-0.935157_2018-10-28-13-14-25_ovliver-99.csv', index=False)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,697 | guchio3/kaggle-plasticc | refs/heads/master | /utils/set_class_99_to_1above9.py | import pandas as pd
filename='../../plasticc-2018/submissions/Booster_weight-multi-logloss-0.612193_2018-11-10-22-58-58.csv'
df = pd.read_csv(filename)
df.class_99 = 1/9
cols = list(df.columns)
cols.remove('class_99')
cols.remove('object_id')
df[cols] *= 8/9
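# Assuming the 14 known-class columns originally summed to 1 per row, each
# row now totals 8/9 + 1/9 = 1 again, with a flat 1/9 assigned to class_99.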
df.to_csv('../submissions/' + filename.split('/')[-1][:-4] + '_class_99_1above9.csv', index=False)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,698 | guchio3/kaggle-plasticc | refs/heads/master | /utils/apply_sigmoid.py | import numpy as np
import pandas as pd
def sigmoid(x, derivative=False):
return x*(1-x) if derivative else 1/(1+np.exp(-x))
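# Reference values for the mapping used below: sigmoid(-2) ~ 0.119,
# sigmoid(0) = 0.5, sigmoid(2) ~ 0.881, so the rescaled class_99 scores land
# roughly in (0.12, 0.88).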
df = pd.read_csv('../submissions/LGBMClassifier_weight-multi-logloss-0.890562_2018-11-06-13-06-21.csv')
df.class_99 = sigmoid(df.class_99 / np.max(df.class_99) * 4 - 2)
df.to_csv('../submissions/LGBMClassifier_weight-multi-logloss-0.890562_2018-11-06-13-06-21_sigmoid.csv', index=False)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,699 | guchio3/kaggle-plasticc | refs/heads/master | /utils/add_phase.py | import numpy as np
import pandas as pd
import cesium.featurize as featurize
import warnings
from multiprocessing import Pool
from logging import getLogger
from tqdm import tqdm
import sys
sys.path.append('../tools/')
from feature_tools import load_test_set_dfs, split_dfs
from my_logging import logInit
warnings.simplefilter('ignore', FutureWarning)
warnings.simplefilter('ignore', RuntimeWarning)
def normalise(ts):
return (ts - ts.mean()) / ts.std()
def get_phase(set_df):
groups = set_df.groupby(['object_id', 'passband'])
times = groups.apply(lambda block: block['mjd'].values).\
reset_index().\
rename(columns={0: 'seq'})
flux = groups.apply(lambda block: normalise(block['flux']).values).\
reset_index().\
rename(columns={0: 'seq'})
times_list = times.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
flux_list = flux.groupby('object_id').\
apply(lambda x: x['seq'].tolist()).\
tolist()
warnings.simplefilter('ignore', RuntimeWarning)
if np.prod(np.isnan(np.array(times_list))) * np.prod(np.isnan(np.array(flux_list))) > 0:
freq_df = featurize.featurize_time_series(times=times_list,
values=flux_list,
features_to_use=['freq1_freq'],
scheduler=None)
freqs = pd.DataFrame(freq_df.median(axis=1)).rename(columns={0: 'freq_median'})
freqs['object_id'] = set_df.object_id.unique()
set_df = set_df.merge(
freqs,
on='object_id',
how='left').reset_index(drop=True)
set_df['phase'] = set_df['mjd'] * set_df['freq_median'] % 1
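        # Phase-folding sketch with hypothetical numbers: for
        # freq_median = 0.25 cycles/day and mjd = 59580.3,
        # phase = (59580.3 * 0.25) % 1 ~ 0.075, i.e. each observation is
        # mapped to its position within one period.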
set_df.drop(['freq_median'], axis=1, inplace=True)
else:
set_df['phase'] = np.nan
return set_df
def _main(nthread, test_flg):
logger = getLogger(__name__)
logInit(logger, log_dir='../log/', log_filename='add_phase.log')
if test_flg:
set_dfs = load_test_set_dfs(nthread, logger)
for i, df in tqdm(list(enumerate(set_dfs))):
df = get_phase(df)
df.reset_index(drop=True).to_feather('./test_dfs/{}.fth'.format(i))
else:
set_df = pd.read_csv(
'/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv')
#'/Users/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv')
phase_df = get_phase(set_df)
phase_df.to_csv(
'/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv',
#'/Users/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv',
index=False)
def main(nthread, test_flg):
logger = getLogger(__name__)
logInit(logger, log_dir='../log/', log_filename='add_phase.log')
if test_flg:
set_dfs = load_test_set_dfs(nthread, logger)
else:
set_df = pd.read_csv(
#'/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv')
'/Users/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv')
set_dfs = split_dfs(set_df, nthread, logger)
logger.info('start multiprocessing')
p = Pool(nthread)
phase_df_list = p.map(get_phase, set_dfs)
p.close()
p.join()
logger.info('done multiprocessing')
if test_flg:
for i, df in tqdm(list(enumerate(phase_df_list))):
df.reset_index(drop=True).to_feather('/home/naoya.taguchi/workspace/kaggle/plasticc-2018/test_dfs/{}.fth'.format(i))
else:
phase_df = pd.concat(phase_df_list, axis=0).reset_index(drop=True)
phase_df.to_csv(
#'/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv',
'/Users/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/training_set.csv',
index=False)
if __name__ == '__main__':
main(62, True)
#main(62, False)
#main(2, False)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,700 | guchio3/kaggle-plasticc | refs/heads/master | /tools/objective_function.py | import numpy as np
import pandas as pd
from sklearn import preprocessing
import torch
import torch.nn.functional as F
from torch.autograd import grad
from torch.autograd import Variable
# this will probably need to be updated
class_weights_dict = {
6: 18.8086925422,
15: 18.2715315897,
16: 18.8086925422,
42: 18.8086925422,
52: 18.8086925422,
53: 18.809252663,
62: 18.8086925422,
64: 18.8086925422,
65: 18.8086925422,
67: 18.8086925422,
88: 18.8086925422,
90: 18.8086925422,
92: 18.8086925422,
95: 18.8086925422,
# 99: 18.2712515266,
}
labeled_class_weights_dict = {
0: 18.8086925422,
1: 18.2715315897,
2: 18.8086925422,
3: 18.8086925422,
4: 18.8086925422,
5: 18.809252663,
6: 18.8086925422,
7: 18.8086925422,
8: 18.8086925422,
9: 18.8086925422,
10: 18.8086925422,
11: 18.8086925422,
12: 18.8086925422,
13: 18.8086925422,
# 14: 18.2712515266,
}
lb = preprocessing.LabelBinarizer()
lb.fit(sorted(labeled_class_weights_dict.keys()))
class_weight_dict = labeled_class_weights_dict
def softmax(x, axis=1):
z = np.exp(x)
return z / np.sum(z, axis=axis, keepdims=True)
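# Example: softmax(np.array([[0., 0.]])) -> [[0.5, 0.5]]. Note this plain form
# can overflow for large inputs; subtracting x.max(axis=1, keepdims=True)
# before np.exp is the usual hardening.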
def weighted_multi_logloss(y_true, y_pred):
'''
$B"-(B $B$N$h$&$J(B input $B$r4|BT(B
[
[0.1, 0.3, 0.6, 0.0, 0.0, ...],
[0.0, 0.0, 0.8, 0.1, 0.0, ...],
[0.1, 0.0, 0.2, 0.0, 0.0, ...],
]
'''
y_pred = np.clip(y_pred, 10**(-15), 1 - 10**(-15))
y_pred = np.reshape(y_pred, (-1, 14))
weights = np.array([class_weight_dict[key]
for key in sorted(class_weight_dict.keys())])
num_classes = [np.sum(y_true == key)
for key in sorted(class_weight_dict.keys())]
true_mask = lb.transform(y_true)
score = -np.sum((weights / num_classes) * true_mask *
np.log(y_pred)) / np.sum(weights)
return 'wloss', score, False
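# The score above implements
#   wloss = -(1 / sum_c w_c) * sum_c (w_c / N_c) * sum_i y_ic * log(p_ic),
# i.e. a class-weighted, per-class-averaged log loss matching the competition
# metric (class 99 is handled separately elsewhere).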
def lgb_multi_weighted_logloss(y_true, y_preds):
"""
@author olivier https://www.kaggle.com/ogrellier
multi logloss for PLAsTiCC challenge
"""
# class_weights taken from Giba's topic : https://www.kaggle.com/titericz
# https://www.kaggle.com/c/PLAsTiCC-2018/discussion/67194
# with Kyle Boone's post https://www.kaggle.com/kyleboone
#classes = [6, 15, 16, 42, 52, 53, 62, 64, 65, 67, 88, 90, 92, 95]
classes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
#class_weight = labeled_class_weights_dict
if len(np.unique(y_true)) > 14:
classes.append(14)
# classes.append(99)
class_weight[14] = 2
y_p = y_preds.reshape(y_true.shape[0], len(classes), order='F')
# Trasform y_true in dummies
y_ohe = pd.get_dummies(y_true)
# Normalize rows and limit y_preds to 1e-15, 1-1e-15
y_p = np.clip(a=y_p, a_min=1e-15, a_max=1 - 1e-15)
# Transform to log
y_p_log = np.log(y_p)
# Get the log for ones, .values is used to drop the index of DataFrames
# Exclude class 99 for now, since there is no class99 in the training set
# we gave a special process for that class
y_log_ones = np.sum(y_ohe.values * y_p_log, axis=0)
# Get the number of positives for each class
nb_pos = y_ohe.sum(axis=0).values.astype(float)
# Weight average and divide by the number of positives
class_arr = np.array([class_weight[k]
for k in sorted(class_weight.keys())])
y_w = y_log_ones * class_arr / nb_pos
loss = - np.sum(y_w) / np.sum(class_arr)
return 'wloss', loss, False
def wloss_metric(preds, train_data):
classes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
weight_tensor = torch.tensor(list(class_weight.values()),
requires_grad=False).type(torch.FloatTensor)
y_t = torch.tensor(train_data.get_label(), requires_grad=False).type(torch.LongTensor)
y_h = torch.zeros(
y_t.shape[0], len(classes), requires_grad=False).scatter(1, y_t.reshape(-1, 1), 1)
y_h /= y_h.sum(dim=0, keepdim=True)
y_p = torch.tensor(preds, requires_grad=False).type(torch.FloatTensor)
if len(y_p.shape) == 1:
y_p = y_p.reshape(len(classes), -1).transpose(0, 1)
ln_p = torch.log_softmax(y_p, dim=1)
wll = torch.sum(y_h * ln_p, dim=0)
loss = -torch.dot(weight_tensor, wll) / torch.sum(weight_tensor)
return 'wloss', loss.numpy() * 1., False
def wloss_objective(preds, train_data):
classes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
#class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 2, 53: 1, 62: 1, 64: 2, 65: 1, 67: 2, 88: 1, 90: 1, 92: 1, 95: 1}
weight_tensor = torch.tensor(list(class_weight.values()),
requires_grad=False).type(torch.FloatTensor)
class_dict = {c: i for i, c in enumerate(classes)}
y_t = torch.tensor(train_data.get_label(), requires_grad=False).type(torch.LongTensor)
y_h = torch.zeros(
y_t.shape[0], len(classes), requires_grad=False).scatter(1, y_t.reshape(-1, 1), 1)
ys = y_h.sum(dim=0, keepdim=True)
y_h /= ys
y_p = torch.tensor(preds, requires_grad=True).type(torch.FloatTensor)
y_r = y_p.reshape(len(classes), -1).transpose(0, 1)
ln_p = torch.log_softmax(y_r, dim=1)
wll = torch.sum(y_h * ln_p, dim=0)
loss = -torch.dot(weight_tensor, wll)
grads = grad(loss, y_p, create_graph=True)[0]
grads *= float(len(classes)) / torch.sum(1 / ys) # scale up grads
hess = torch.ones(y_p.shape) # haven't bothered with properly doing hessian yet
return grads.detach().numpy(), \
hess.detach().numpy()
def calc_team_score(y_true, y_preds):
'''
    y_true: 1-dimensional np.array
    y_pred: 14-dimensional np.array of softmax outputs
'''
class99_prob = 1/9
class99_weight = 2
y_p = y_preds * (1-class99_prob)
y_p = np.clip(a=y_p, a_min=1e-15, a_max=1 - 1e-15)
y_p_log = np.log(y_p)
y_true_ohe = pd.get_dummies(y_true).values
nb_pos = y_true_ohe.sum(axis=0).astype(float)
classes = [6, 15, 16, 42, 52, 53, 62, 64, 65, 67, 88, 90, 92, 95]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
class_arr = np.array([class_weight[k] for k in sorted(class_weight.keys())])
y_log_ones = np.sum(y_true_ohe * y_p_log, axis=0)
y_w = y_log_ones * class_arr / nb_pos
score = - np.sum(y_w) / (np.sum(class_arr)+class99_weight)\
+ (class99_weight/(np.sum(class_arr)+class99_weight))*(-np.log(class99_prob))
return score
def wloss_metric_for_zeropad(preds, train_data, gal_cols, ext_gal_cols, gal_rows, ext_gal_rows):
classes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
weight_tensor = torch.tensor(list(class_weight.values()),
requires_grad=False).type(torch.FloatTensor)
y_t = torch.tensor(train_data.get_label(), requires_grad=False).type(torch.LongTensor)
y_h = torch.zeros(
y_t.shape[0], len(classes), requires_grad=False).scatter(1, y_t.reshape(-1, 1), 1)
y_h /= y_h.sum(dim=0, keepdim=True)
y_p = torch.tensor(preds, requires_grad=False).type(torch.FloatTensor)
if len(y_p.shape) == 1:
y_p = y_p.reshape(len(classes), -1).transpose(0, 1)
p = pd.DataFrame(torch.softmax(y_p, dim=0).numpy())
p.loc[ext_gal_rows, gal_cols] = 0.
p.loc[gal_rows, ext_gal_cols] = 0.
p = np.clip(a=p.values/np.sum(p.values, axis=1).reshape((-1, 1)), a_min=1e-15, a_max=1 - 1e-15)
ln_p = np.log(p)
ln_p = torch.tensor(ln_p, requires_grad=False).type(torch.FloatTensor)
# ln_p = torch.log_softmax(y_p, dim=1)
wll = torch.sum(y_h * ln_p, dim=0)
loss = -torch.dot(weight_tensor, wll) / torch.sum(weight_tensor)
return loss.numpy() * 1.
def _sample_gumbel(shape, eps=1e-10, out=None):
"""
Sample from Gumbel(0, 1)
based on
https://github.com/ericjang/gumbel-softmax/blob/3c8584924603869e90ca74ac20a6a03d99a91ef9/Categorical%20VAE.ipynb ,
(MIT license)
"""
U = out.resize_(shape).uniform_() if out is not None else torch.rand(shape)
return - torch.log(eps - torch.log(U + eps))
def _gumbel_softmax_sample(logits, tau=0.1, eps=1e-10):
"""
Draw a sample from the Gumbel-Softmax distribution
based on
https://github.com/ericjang/gumbel-softmax/blob/3c8584924603869e90ca74ac20a6a03d99a91ef9/Categorical%20VAE.ipynb
(MIT license)
"""
dims = logits.dim()
gumbel_noise = Variable(_sample_gumbel(logits.size(), eps=eps, out=logits.data.new()))
y = logits + gumbel_noise
return F.softmax(y / tau, dims - 1)
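# Sketch of the sampler above: adding Gumbel(0, 1) noise to logits and taking
# a low-temperature softmax (tau=0.1) gives nearly one-hot but differentiable
# samples; for hypothetical logits [2.0, 0.5, 0.1], roughly 73% of draws
# concentrate on index 0 (the Gumbel-max property).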
def wloss_objective_gumbel(preds, train_data):
classes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
#class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 2, 53: 1, 62: 1, 64: 2, 65: 1, 67: 2, 88: 1, 90: 1, 92: 1, 95: 1}
weight_tensor = torch.tensor(list(class_weight.values()),
requires_grad=False).type(torch.FloatTensor)
class_dict = {c: i for i, c in enumerate(classes)}
y_t = torch.tensor(train_data.get_label(), requires_grad=False).type(torch.LongTensor)
y_h = torch.zeros(
y_t.shape[0], len(classes), requires_grad=False).scatter(1, y_t.reshape(-1, 1), 1)
ys = y_h.sum(dim=0, keepdim=True)
y_h /= ys
y_p = torch.tensor(preds, requires_grad=True).type(torch.FloatTensor)
y_r = y_p.reshape(len(classes), -1).transpose(0, 1)
y_r = torch.clamp(y_r, 1e-15, 1 - 1e-15)
ln_p = _gumbel_softmax_sample(torch.log(y_r))
# ln_p = torch.log_softmax(y_r, dim=1)
wll = torch.sum(y_h * ln_p, dim=0)
loss = -torch.dot(weight_tensor, wll)
grads = grad(loss, y_p, create_graph=True)[0]
grads *= float(len(classes)) / torch.sum(1 / ys) # scale up grads
hess = torch.ones(y_p.shape) # haven't bothered with properly doing hessian yet
return grads.detach().numpy(), \
hess.detach().numpy()
def wloss_metric_gumbel(preds, train_data):
classes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
class_weight = {6: 1, 15: 2, 16: 1, 42: 1, 52: 1, 53: 1, 62: 1, 64: 2, 65: 1, 67: 1, 88: 1, 90: 1, 92: 1, 95: 1}
weight_tensor = torch.tensor(list(class_weight.values()),
requires_grad=False).type(torch.FloatTensor)
y_t = torch.tensor(train_data.get_label(), requires_grad=False).type(torch.LongTensor)
y_h = torch.zeros(
y_t.shape[0], len(classes), requires_grad=False).scatter(1, y_t.reshape(-1, 1), 1)
y_h /= y_h.sum(dim=0, keepdim=True)
y_p = torch.tensor(preds, requires_grad=False).type(torch.FloatTensor)
if len(y_p.shape) == 1:
y_p = y_p.reshape(len(classes), -1).transpose(0, 1)
#ln_p = torch.log_softmax(y_p, dim=1)
y_p = torch.clamp(y_p, 1e-15, 1 - 1e-15)
ln_p = _gumbel_softmax_sample(torch.log(y_p))
wll = torch.sum(y_h * ln_p, dim=0)
loss = -torch.dot(weight_tensor, wll) / torch.sum(weight_tensor)
return 'wloss', loss.numpy() * 1., False
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,701 | guchio3/kaggle-plasticc | refs/heads/master | /tools/create_plasticc_features.py | import datetime
import argparse
import gc
from logging import getLogger
from my_logging import logInit
from plasticc_features import featureCreatorPreprocess, featureCreatorSet
from plasticc_features import (
    fe_set_df_base, fe_set_df_detected, fe_set_df_std_upper_and_lower,
    fe_set_df_passband, fe_set_df_passband_std_upper, featureCreatorTsfresh,
    featureCreatorMeta, fe_meta, fe_set_df_passband_detected,
    fe_set_df_peak_around, fe_set_df_ratsq_peak_around,
    fe_set_df_my_skew_kurt, fe_set_df_deficits)
LOAD_DIR = '/home/naoya.taguchi/.kaggle/competitions/PLAsTiCC-2018/'
SAVE_DIR_BASE = '../features/'
def parse_args():
parser = argparse.ArgumentParser(
prog='train.py',
usage='ex) python train.py --with_test',
description='easy explanation',
epilog='end',
add_help=True,
)
parser.add_argument('-t', '--train',
help='flg to specify test type.',
action='store_true',
default=False)
parser.add_argument('-n', '--nthread',
help='number of avalable threads.',
type=int,
required=True)
args = parser.parse_args()
return args
def main(args):
logger = getLogger(__name__)
logInit(logger, log_dir='../log/', log_filename='feature_engineering.log')
logger.info(
'''
start main, the args settings are ...
--train : {}
'''.format(args.train))
start_time = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
logger.info('start training, the starting time is {}'.format(start_time))
if args.train:
SAVE_DIR = SAVE_DIR_BASE + 'train/'
else:
SAVE_DIR = SAVE_DIR_BASE + 'test/'
# preprocess
logger.info('preprocessing set dfs ...')
prep_feat_creator = featureCreatorPreprocess(
load_dir=LOAD_DIR,
save_dir=None,
src_df_dict=None,
logger=logger,
nthread=args.nthread,
train=args.train)
prep_feat_creator.run()
### feature engineerings using preprocessed_src_df_dict
preprocessed_src_df_dict = prep_feat_creator.src_df_dict
# basic aggregations
logger.info('creating basic features ...')
base_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_base,
set_res_df_name='set_base_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# base_feat_creator.run().save()
del base_feat_creator
gc.collect()
# detected aggregations
logger.info('creating detected features ...')
detected_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_detected,
set_res_df_name='set_detected_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# detected_feat_creator.run().save()
del detected_feat_creator
gc.collect()
# std upper aggregation
logger.info('creating std upper features ...')
std_upper_and_lower_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_std_upper_and_lower,
set_res_df_name='set_std_upper_and_lower_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# std_upper_and_lower_feat_creator.run().save()
del std_upper_and_lower_feat_creator
gc.collect()
# passband aggregation
logger.info('creating passband features ...')
passband_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_passband,
set_res_df_name='set_passband_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# passband_feat_creator.run().save()
del passband_feat_creator
gc.collect()
# passband std upper aggregation
logger.info('creating passband std upper features ...')
passband_std_upper_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_passband_std_upper,
set_res_df_name='set_passband_std_upper_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# passband_std_upper_feat_creator.run().save()
del passband_std_upper_feat_creator
gc.collect()
# passband detected aggregation
logger.info('creating passband detected features ...')
passband_detected_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_passband_detected,
set_res_df_name='set_passband_detected_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# passband_detected_feat_creator.run().save()
del passband_detected_feat_creator
gc.collect()
# peak around
logger.info('creating ratsq peak around features ...')
ratsq_peak_around_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_ratsq_peak_around,
set_res_df_name='set_ratsq_peak_around_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# ratsq_peak_around_feat_creator.run().save()
del ratsq_peak_around_feat_creator
gc.collect()
# peak around
logger.info('creating peak around features ...')
peak_around_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_peak_around,
set_res_df_name='set_peak_around_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# peak_around_feat_creator.run().save()
del peak_around_feat_creator
gc.collect()
# my_skew, my_kurt
logger.info('creating my skkt features ...')
my_skkt_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_my_skew_kurt,
set_res_df_name='set_skkt_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# my_skkt_feat_creator.run().save()
del my_skkt_feat_creator
gc.collect()
    # deficits
    logger.info('creating deficits features ...')
deficits_feat_creator = featureCreatorSet(
fe_set_df=fe_set_df_deficits,
set_res_df_name='set_deficits_features',
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=preprocessed_src_df_dict,
logger=logger,
nthread=args.nthread)
# deficits_feat_creator.run().save()
del deficits_feat_creator
gc.collect()
del preprocessed_src_df_dict
gc.collect()
### ts fresh features
logger.info('creating tsfresh features ...')
tsfresh_feat_creator = featureCreatorTsfresh(
load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=None,
logger=logger,
nthread=args.nthread,
train=args.train)
tsfresh_feat_creator.run().save()
del tsfresh_feat_creator
gc.collect()
### feature engineerings using created features
logger.info('feature engineering on aggregated df ...')
meta_feat_creator = featureCreatorMeta(
fe_set_df=fe_meta,
set_res_df_name='meta_features',
load_dir=SAVE_DIR,
#load_dir=LOAD_DIR,
save_dir=SAVE_DIR,
src_df_dict=None,
logger=logger,
nthread=args.nthread,
train=args.train)
meta_feat_creator.run().save()
del meta_feat_creator
gc.collect()
if __name__ == '__main__':
args = parse_args()
main(args)
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,702 | guchio3/kaggle-plasticc | refs/heads/master | /tools/preprocessing.py | import pandas as pd
import numpy as np
from tqdm import tqdm
def unpack_passbands(set_df):
    # unique (object_id, mjd) rows, hence axis=0
    res_df = pd.DataFrame(np.unique(set_df[['object_id', 'mjd']], axis=0))
res_df.columns = ['object_id', 'mjd']
for i in tqdm([0, 1, 2, 3, 4, 5]):
res_df = res_df.merge(
set_df[set_df.passband == i].drop('passband', axis=1).rename(
columns={
'flux': 'flux_{}'.format(i),
'flux_err': 'flux_err_{}'.format(i),
'detected': 'detected_{}'.format(i)}),
on=['object_id', 'mjd'],
how='left')
return res_df
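# Usage sketch (assumes long-format PLAsTiCC light curves as input):
#   wide_df = unpack_passbands(set_df)
# wide_df then has one row per (object_id, mjd) and columns flux_0..flux_5,
# flux_err_0..flux_err_5, detected_0..detected_5, with NaN where a band was
# not observed at that mjd.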
| {"/softmax_train_using_features.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/train.py": ["/tools/my_logging.py", "/tools/feature_tools.py", "/tools/objective_function.py", "/tools/model_io.py", "/tools/fold_resampling.py"], "/from_onoderasan.py": ["/tools/objective_function.py"]} |
76,706 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaInUser.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class IngresoUser(QMainWindow):
"""docstring for IngresoUser"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/VingresoUsuario.ui",self)
self.BotonIngresar.clicked.connect(self.valUser)
self.BotonRegresar.clicked.connect(self.Regresa)
def Regresa (self):
self.tipo.show()
self.setVisible(False)
def Ingresar(self):
self.bienvenido.show()
self.bienvenido.user(True,self.name)
self.setVisible(False)
def valUser(self):
user=self.CajaUser.text()
passw=self.CajaContra.text()
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
Userboo=False
try:
cursor.execute("select user from Usuario")
for i in cursor:
if str(i[0])==user :
Userboo=True
except :
Userboo=False
passwboo=self.valPass(user,passw)
if Userboo and passwboo:
            self.name=user
self.Ingresar()
else :
if Userboo==False:
reply =QMessageBox.information(self, 'Error',"Usuario Incorrecto")
elif passwboo==False:
reply =QMessageBox.information(self, 'Error',"Aun no esta dado de alta")
else:
reply =QMessageBox.information(self, 'Error',"Aun no esta dado de alta")
self.CajaUser.setText("")
self.CajaContra.setText("")
def valPass(self,user,passw):
boo=False
try:
conn=mysql.connector.Connect(host='localhost',user=user,password=passw,database='Curso')
cursor=conn.cursor()
boo=True
except:
boo=False
return boo
def antTipo(self,_tipo):
self.tipo=_tipo
def sigBienvenido(self,_bienvenido):
self.bienvenido=_bienvenido
| {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,707 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaMenuPrincipal.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class Menu(QMainWindow):
"""docstring for ventana"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/Vmenu.ui",self)
self.BotonSalir.clicked.connect(self.cerrar)
self.BotonRegistrarse.clicked.connect(self.Registro)
self.BotonIngresar.clicked.connect(self.TipoUser)
def Registro(self):
self.setVisible(False)
self.registro.show()
def TipoUser(self):
self.setVisible(False)
self.tipo.show()
def cerrar(self):
self.destroy()
def sigRegistro(self,_registro):
self.registro=_registro
def sigIngreso(self,_tipo):
self.tipo=_tipo | {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,708 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaregistro.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class Registro(QMainWindow):
"""docstring for VentRegistro"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/VRegistroAlumn.ui",self)
self.BotonValidar.clicked.connect(self.Validar)
self.BotonResgitro.clicked.connect(self.Registra)
self.BotonFile.clicked.connect(self.file)
def file(self):
fileName, _ = QFileDialog.getOpenFileName(self, "Foto",'/home',"Images (*.png *.xpm *.jpg)")
if fileName:
image = QImage(fileName)
if image.isNull():
QMessageBox.information(self, "Image Viewer","Nose puede cargar %s." % fileName)
return
self.imagen.setPixmap(QPixmap.fromImage(image))
self.cajaFoto.setText(fileName)
def Registra(self):
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
dateN=self.dateNacer.date().toPyDate()
sentencia=("insert into Usuario(user,nombre,paterno,materno,sexo,nacimiento,telefono,direccion,email,foto,registro) values('"
+self.CajaNombreU.text()+"','"+self.cajaNombre.text()+"','"+self.cajaPaterno.text()+"','"+self.cajaMaterno.text()+"','"+self.comboSexo.currentText()+"',"+
str(dateN)+",'"+self.cajaTelefono.text()+"','"+self.cajaDireccion.text()+"','"+self.cajaEmail.text()+"','"+self.cajaFoto.text()+"',"+str(time.strftime("%Y-%m-%d"))+");")
try:
cursor.execute(sentencia)
except :
QMessageBox.information(self, "Error Nombre","Nombre de Usuario Existente")
conn.commit()
cursor.close()
conn.close()
self.creausuario()
self.Regresa()
def Regresa(self):
self.menu.show()
self.setVisible(False)
def creausuario(self):
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
sentencia=("Create user '"+self.CajaNombreU.text()+"'@'localhost' identified by '"+
self.CajaPass.text()+"'")
try:
cursor.execute(sentencia)
except :
reply =QMessageBox.information(self, 'Error',"Datos Incorrectos")
def Validar(self):
if self.CajaPass.text()==self.CajaRep.text():
self.ValidarContra.setText("OK")
self.ValidarContra_2.setText("OK")
self.ValidarUsuario()
else :
self.ValidarContra.setText("X")
self.ValidarContra_2.setText("X")
def ValidarUsuario(self):
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
boo=False
try:
cursor.execute("select user from Usuario")
for i in cursor:
if str(self.CajaNombreU.text())==str(i[0]):
boo=True
except :
boo=False
if boo:
self.ValidarUser.setText("X")
else:
self.ValidarUser.setText("OK")
conn.commit()
cursor.close()
conn.close()
def antMenu(self,_menu):
self.menu=_menu
| {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,709 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaTipoUser.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class Tipo(QMainWindow):
"""docstring for tipo"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/Vtipousuario.ui",self)
self.BotonAdmin.clicked.connect(self.admin)
self.BotonUser.clicked.connect(self.user)
self.BotonRegresa.clicked.connect(self.Regresa)
def Regresa(self):
self.menu.show()
self.setVisible(False)
def admin(self):
self.ingadmin.show()
self.setVisible(False)
def user(self):
self.inguser.show()
self.setVisible(False)
def antMenu(self,_menu):
self.menu=_menu
def sigIngadmin(self,_ingadmin):
self.ingadmin=_ingadmin
def sigIngUser(self,_inguser):
self.inguser=_inguser | {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,710 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaBase.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class Base(QMainWindow):
"""docstring for Vbase"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/Vconsultas.ui",self)
self.BotonEnabled.clicked.connect(self.habilita)
self.BotonDisEnabled.clicked.connect(self.deshabilita)
self.BotonSalir.clicked.connect(self.Regresar)
self.checkTodos.clicked.connect(self.Todos)
self.BotonVer.clicked.connect(self.Vertodo)
self.BotonAltas.clicked.connect(self.darAlta)
self.BotonBajas.clicked.connect(self.darBaja)
self.BotonBuscar.clicked.connect(self.Busqueda)
self.BotonActualizar.clicked.connect(self.Actualizar)
    def Actualizar(self):
        # Push every edited table row back to the database; one connection
        # serves the whole loop. The values are still spliced in as text, so
        # parameter placeholders would be the safer long-term approach.
        conn = mysql.connector.Connect(host='localhost', user='root', password='200388', database='Curso')
        cursor = conn.cursor()
        arr = ["user", "nombre", "paterno", "materno", "sexo", "nacimiento",
               "telefono", "direccion", "email", "foto", "registro"]
        for row in range(self.tableConsultas.rowCount()):
            sentencia = "update Usuario set "
            for column in range(self.tableConsultas.columnCount()):
                if column == 0:
                    sentencia = sentencia + arr[column] + "='" + self.tableConsultas.item(row, column).text() + "'"
                else:
                    sentencia = sentencia + "," + arr[column] + "='" + self.tableConsultas.item(row, column).text() + "' "
            sentencia = sentencia + "where " + arr[0] + "='" + self.tableConsultas.item(row, 0).text() + "'"
            try:
                cursor.execute(sentencia)
            except mysql.connector.Error:
                QMessageBox.information(self, 'Error', "Error al actualizar datos")
        conn.commit()
        cursor.close()
        conn.close()
def Busqueda(self):
self.limpia()
camp=""
if self.checkTodos.isChecked():
camp="*"
columnas=["Usuario","Nombre",
"Paterno","Materno","Sexo","Nacimiento"
,"Telefono","Direccion","Email","Foto","Registro"]
else:
columnas, campos=self.arreglo()
for i in range (len (campos)):
if i==0:
camp=campos[i]
else:
camp=camp+","+campos[i]
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
where=self.where()
sentencia="select "+camp+" from Usuario where "+ where
        # TODO: wire up the search connection and the whole Actualizar method
row=0
try:
cursor.execute(sentencia)
self.tableConsultas.setColumnCount(len(columnas))
self.tableConsultas.setHorizontalHeaderLabels(columnas)
for ncontrol in cursor:
self.tableConsultas.insertRow(row)
for co in range(len(ncontrol)):
nco=QTableWidgetItem(str(ncontrol[co]))
self.tableConsultas.setItem(row,co,nco)
                row = row + 1
        except mysql.connector.Error:
            QMessageBox.information(self, 'Error', "Busqueda sin resultados")
        conn.close()
def limpia(self):
self.tableConsultas.setRowCount(0)
    def where(self):
        # Build the WHERE clause from whichever filter widgets are enabled,
        # joining the individual conditions with "and" so the result is
        # valid SQL for Busqueda to append after "from Usuario where ".
        conditions = []
        if self.CajaUser.isEnabled():
            conditions.append("user='" + self.CajaUser.text() + "'")
        if self.CajaNombre.isEnabled():
            conditions.append("nombre='" + self.CajaNombre.text() + "'")
        if self.CajaPaterno.isEnabled():
            conditions.append("paterno='" + self.CajaPaterno.text() + "'")
        if self.CajaMaterno.isEnabled():
            conditions.append("materno='" + self.CajaMaterno.text() + "'")
        if self.dateNacimiento.isEnabled():
            dateN = self.dateNacimiento.date().toPyDate().strftime('%Y-%m-%d')
            conditions.append("nacimiento='" + dateN + "'")
        if self.CajaTelefono.isEnabled():
            conditions.append("telefono='" + self.CajaTelefono.text() + "'")
        if self.CajaDireccion.isEnabled():
            conditions.append("direccion='" + self.CajaDireccion.text() + "'")
        if self.comboSexo.isEnabled():
            conditions.append("sexo='" + self.comboSexo.currentText() + "'")
        if self.CajaFoto.isEnabled():
            conditions.append("foto='" + self.CajaFoto.text() + "'")
        if self.dateRegistro.isEnabled():
            dateR = self.dateRegistro.date().toPyDate().strftime('%Y-%m-%d')
            conditions.append("registro='" + dateR + "'")
        return " and ".join(conditions)
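    # For example, with only the user filter enabled where() returns
    # "user='mike'", and with the telefono filter also on it returns
    # "user='mike' and telefono='555'" (illustrative values). Since the
    # values are still spliced in as text, mysql-connector placeholders
    # would be the safer long-term approach here as well.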
def arreglo (self):
arr =[]
campos=[]
if self.checkUser.isChecked():
campos.append("user")
arr.append(self.checkUser.text())
if self.checkNombre.isChecked():
campos.append("nombre")
arr.append(self.checkNombre.text())
if self.checkPaterno.isChecked():
campos.append("paterno")
arr.append(self.checkPaterno.text())
if self.checkMaterno.isChecked():
campos.append("materno")
arr.append(self.checkMaterno.text())
if self.checkSexo.isChecked():
campos.append("sexo")
arr.append(self.checkSexo.text())
if self.checkNacimiento.isChecked():
campos.append("nacimiento")
arr.append(self.checkNacimiento.text())
if self.checkTelefono.isChecked():
campos.append("telefono")
arr.append(self.checkTelefono.text())
if self.checkDireccion.isChecked():
campos.append("direccion")
arr.append(self.checkDireccion.text())
if self.checkEmail.isChecked():
campos.append("email")
arr.append(self.checkEmail.text())
if self.checkFotos.isChecked():
campos.append("foto")
arr.append(self.checkFotos.text())
if self.checkRegistro.isChecked():
campos.append("registro")
arr.append(self.checkRegistro.text())
return arr , campos
def darBaja(self):
corrent=str(self.tableConsultas.currentItem().text())
print(corrent)
responder =QMessageBox.question (self, 'Mensaje',
"¿Desea dar e baja a este usuario?",QMessageBox.Yes |
QMessageBox.No,QMessageBox.No)
if responder==QMessageBox.Yes:
self.borrarUsuario()
self.borrarRegistro()
def borrarUsuario(self):
corrent=str(self.tableConsultas.currentItem().text())
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
sentencia="drop user '"+corrent+"'@'localhost'";
try:
cursor.execute(sentencia)
        except mysql.connector.Error:
QMessageBox.information(self, 'Error',"Error usuario no Existente")
conn.close()
def borrarRegistro(self):
corrent=str(self.tableConsultas.currentItem().text())
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
sentencia="delete from Usuario where user='"+corrent+"'";
try:
cursor.execute(sentencia)
        except mysql.connector.Error:
QMessageBox.information(self, 'Error',"Selecionar toda la Fila 2")
conn.commit()
cursor.close()
conn.close()
def darAlta(self):
corrent=str(self.tableConsultas.currentItem().text())
responder = QMessageBox.question (self, 'Mensaje',
"¿Desea dar de alta a este usuario?",QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if responder==QMessageBox.Yes:
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
sentencia="grant select on Curso.Usuario to '"+corrent+"'@'localhost'";
try:
cursor.execute(sentencia)
            except mysql.connector.Error:
QMessageBox.information(self, 'Error',"Selecionar toda la Fila")
conn.close()
def Vertodo (self):
self.checkTodos.setChecked(True)
self.checkUser.setEnabled(False)
self.checkUser.setChecked(False)
self.checkNombre.setEnabled(False)
self.checkNombre.setChecked(False)
self.checkPaterno.setEnabled(False)
self.checkPaterno.setChecked(False)
self.checkMaterno.setEnabled(False)
self.checkMaterno.setChecked(False)
self.checkNacimiento.setEnabled(False)
self.checkNacimiento.setChecked(False)
self.checkTelefono.setEnabled(False)
self.checkTelefono.setChecked(False)
self.checkDireccion.setEnabled(False)
self.checkDireccion.setChecked(False)
self.checkSexo.setEnabled(False)
self.checkSexo.setChecked(False)
self.checkFotos.setEnabled(False)
self.checkFotos.setChecked(False)
self.checkRegistro.setEnabled(False)
self.checkRegistro.setChecked(False)
self.checkEmail.setEnabled(False)
self.checkEmail.setChecked(False)
self.limpia()
arr=["Usuario","Nombre",
"Paterno","Materno","Sexo","Nacimiento"
,"Telefono","Direccion","Email","Foto","Registro"]
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
sentencia=("select * from Usuario")
row=0
try:
cursor.execute(sentencia)
self.tableConsultas.setColumnCount(11)
self.tableConsultas.setHorizontalHeaderLabels(arr)
for ncontrol in cursor:
self.tableConsultas.insertRow(row)
for co in range(len(ncontrol)):
nco=QTableWidgetItem(str(ncontrol[co]))
self.tableConsultas.setItem(row,co,nco)
row=row+1
        except mysql.connector.Error:
QMessageBox.information(self, 'Error',"Error en la Base de Datos")
conn.close()
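    # Vertodo, Todos and Regresar all repeat the same eleven
    # setEnabled/setChecked pairs; a helper would collapse them. A sketch
    # (`_set_checks` is illustrative, not part of the original code):
    #
    #     def _set_checks(self, enabled):
    #         for box in (self.checkUser, self.checkNombre, self.checkPaterno,
    #                     self.checkMaterno, self.checkNacimiento,
    #                     self.checkTelefono, self.checkDireccion,
    #                     self.checkSexo, self.checkFotos, self.checkRegistro,
    #                     self.checkEmail):
    #             box.setEnabled(enabled)
    #             if not enabled:
    #                 box.setChecked(False)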
def Todos(self):
if self.checkTodos.isChecked():
self.checkUser.setEnabled(False)
self.checkUser.setChecked(False)
self.checkNombre.setEnabled(False)
self.checkNombre.setChecked(False)
self.checkPaterno.setEnabled(False)
self.checkPaterno.setChecked(False)
self.checkMaterno.setEnabled(False)
self.checkMaterno.setChecked(False)
self.checkNacimiento.setEnabled(False)
self.checkNacimiento.setChecked(False)
self.checkTelefono.setEnabled(False)
self.checkTelefono.setChecked(False)
self.checkDireccion.setEnabled(False)
self.checkDireccion.setChecked(False)
self.checkSexo.setEnabled(False)
self.checkSexo.setChecked(False)
self.checkFotos.setEnabled(False)
self.checkFotos.setChecked(False)
self.checkRegistro.setEnabled(False)
self.checkRegistro.setChecked(False)
self.checkEmail.setEnabled(False)
self.checkEmail.setChecked(False)
else :
self.checkUser.setEnabled(True)
self.checkNombre.setEnabled(True)
self.checkPaterno.setEnabled(True)
self.checkMaterno.setEnabled(True)
self.checkNacimiento.setEnabled(True)
self.checkTelefono.setEnabled(True)
self.checkDireccion.setEnabled(True)
self.checkSexo.setEnabled(True)
self.checkFotos.setEnabled(True)
self.checkRegistro.setEnabled(True)
self.checkEmail.setEnabled(True)
def Regresar(self):
self.checkUser.setEnabled(False)
self.checkUser.setChecked(False)
self.checkNombre.setEnabled(False)
self.checkNombre.setChecked(False)
self.checkPaterno.setEnabled(False)
self.checkPaterno.setChecked(False)
self.checkMaterno.setEnabled(False)
self.checkMaterno.setChecked(False)
self.checkNacimiento.setEnabled(False)
self.checkNacimiento.setChecked(False)
self.checkTelefono.setEnabled(False)
self.checkTelefono.setChecked(False)
self.checkDireccion.setEnabled(False)
self.checkDireccion.setChecked(False)
self.checkSexo.setEnabled(False)
self.checkSexo.setChecked(False)
self.checkFotos.setEnabled(False)
self.checkFotos.setChecked(False)
self.checkRegistro.setEnabled(False)
self.checkRegistro.setChecked(False)
self.checkEmail.setEnabled(False)
self.checkEmail.setChecked(False)
self.tableConsultas.setRowCount(0)
self.tableConsultas.setColumnCount(0)
self.checkTodos.setChecked(True)
self.CajaUser.setEnabled(False)
self.CajaNombre.setEnabled(False)
self.CajaPaterno.setEnabled(False)
self.CajaMaterno.setEnabled(False)
self.dateNacimiento.setEnabled(False)
self.CajaTelefono.setEnabled(False)
self.CajaDireccion.setEnabled(False)
self.comboSexo.setEnabled(False)
self.CajaFoto.setEnabled(False)
self.Botonfoto.setEnabled(False)
self.dateRegistro.setEnabled(False)
self.bienvenido.show()
self.setVisible(False)
def habilita(self):
if self.comboBox.currentText()=='user':
self.CajaUser.setEnabled(True)
elif self.comboBox.currentText()=='nombre':
self.CajaNombre.setEnabled(True)
elif self.comboBox.currentText()=='paterno':
self.CajaPaterno.setEnabled(True)
elif self.comboBox.currentText()=='materno':
self.CajaMaterno.setEnabled(True)
elif self.comboBox.currentText()=='nacimiento':
self.dateNacimiento.setEnabled(True)
elif self.comboBox.currentText()=='telefono':
self.CajaTelefono.setEnabled(True)
elif self.comboBox.currentText()=='direccion':
self.CajaDireccion.setEnabled(True)
elif self.comboBox.currentText()=='sexo':
self.comboSexo.setEnabled(True)
elif self.comboBox.currentText()=='foto':
self.CajaFoto.setEnabled(True)
self.Botonfoto.setEnabled(True)
elif self.comboBox.currentText()=='registro':
self.dateRegistro.setEnabled(True)
def deshabilita(self):
if self.comboBox.currentText()=='user':
self.CajaUser.setEnabled(False)
elif self.comboBox.currentText()=='nombre':
self.CajaNombre.setEnabled(False)
elif self.comboBox.currentText()=='paterno':
self.CajaPaterno.setEnabled(False)
elif self.comboBox.currentText()=='materno':
self.CajaMaterno.setEnabled(False)
elif self.comboBox.currentText()=='nacimiento':
self.dateNacimiento.setEnabled(False)
elif self.comboBox.currentText()=='telefono':
self.CajaTelefono.setEnabled(False)
elif self.comboBox.currentText()=='direccion':
self.CajaDireccion.setEnabled(False)
elif self.comboBox.currentText()=='sexo':
self.comboSexo.setEnabled(False)
elif self.comboBox.currentText()=='foto':
self.CajaFoto.setEnabled(False)
self.Botonfoto.setEnabled(False)
elif self.comboBox.currentText()=='registro':
self.dateRegistro.setEnabled(False)
def otorgaperomiso(self):
conn=mysql.connector.Connect(host='localhost',user='root',password='200388',database='Curso')
cursor=conn.cursor()
sentencia=("grant select on Curso.Usuario to '"+self.CajaNombreU.text()+"'@'localhost'")
try:
cursor.execute(sentencia)
        except mysql.connector.Error:
reply =QMessageBox.information(self, 'Error',"NO hay nombre de usuario")
def user(self,_admin,_name):
self.admin=_admin
self.name=_name
self.derechos()
def derechos(self):
if self.admin:
self.BotonAltas.setEnabled(False)
self.BotonBajas.setEnabled(False)
self.BotonActualizar.setEnabled(False)
def antBienvenido(self,_bienvenido):
self.bienvenido=_bienvenido | {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,711 | toodaniels/SCA-Python3 | refs/heads/master | /main.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
from Clases.ventanaBase import *
from Clases.ventanaBienvenido import *
from Clases.ventanaInAdmin import *
from Clases.ventanaInUser import *
from Clases.ventanaMenuPrincipal import *
from Clases.ventanaTipoUser import *
from Clases.ventanaregistro import *
app=QApplication(sys.argv)
_menu=Menu()
_registro=Registro()
_tipo=Tipo()
_ingadmin=IngresoAdmin()
_inguser=IngresoUser()
_bienvenido=Bienvenido()
_base=Base()
_menu.sigRegistro(_registro)
_menu.sigIngreso(_tipo)
_registro.antMenu(_menu)
_tipo.antMenu(_menu)
_tipo.sigIngadmin(_ingadmin)
_tipo.sigIngUser(_inguser)
_ingadmin.antTipo(_tipo)
_ingadmin.sigBienvenido(_bienvenido)
_inguser.antTipo(_tipo)
_inguser.sigBienvenido(_bienvenido)
_bienvenido.antTipo(_tipo)
_bienvenido.sigBase(_base)
_base.antBienvenido(_bienvenido)
_menu.show()
app.exec_() | {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,712 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaInAdmin.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class IngresoAdmin(QMainWindow):
"""docstring for IngresoAdmin"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/VIngresoAdmin.ui",self)
self.BotonIngresar.clicked.connect(self.Ingresar)
self.BotonRegresa.clicked.connect(self.Regresa)
def Regresa(self):
self.tipo.show()
self.setVisible(False)
def Ingresar(self):
if self.ValAdmin(self.CajaAdmin.text(),self.CajaContra.text()):
self.bienvenido.show()
self.bienvenido.user(False,self.CajaAdmin.text())
self.setVisible(False)
else :
reply =QMessageBox.information(self, 'Error',"Datos Incorrectos")
self.CajaAdmin.setText("")
self.CajaContra.setText("")
    def ValAdmin(self, admin, passw):
        # Validate the credentials by attempting a real connection, closing
        # it again on success so the login probe does not leak connections.
        boo = False
        try:
            conn = mysql.connector.Connect(host='localhost', user=admin, password=passw, database='Curso')
            conn.close()
            boo = True
        except mysql.connector.Error:
            boo = False
        return boo
def antTipo(self,_tipo):
self.tipo=_tipo
def sigBienvenido(self,_bienvenido):
self.bienvenido=_bienvenido
| {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,713 | toodaniels/SCA-Python3 | refs/heads/master | /Clases/ventanaBienvenido.py | from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
import sys
import mysql.connector
import time
class Bienvenido(QMainWindow):
"""docstring for Vbienvenido"""
def __init__(self):
QMainWindow.__init__(self)
uic.loadUi("/home/deadpool/Escritorio/Practica 2/xml/Vbienvenido.ui",self)
self.BotonBase.clicked.connect(self.Base)
self.BotonRegresar.clicked.connect(self.Regresa)
def Regresa(self):
self.tipo.show()
self.setVisible(False)
def Base(self):
self.base.show()
self.base.user(self.admin,self.name)
self.admin=False
self.setVisible(False)
def user(self,adm,_name):
self.admin=adm
self.name=_name
self.usuario.setText(_name)
def antTipo(self,_tipo):
self.tipo=_tipo
def sigBase(self,_base):
self.base=_base | {"/main.py": ["/Clases/ventanaBase.py", "/Clases/ventanaBienvenido.py", "/Clases/ventanaInAdmin.py", "/Clases/ventanaInUser.py", "/Clases/ventanaMenuPrincipal.py", "/Clases/ventanaTipoUser.py", "/Clases/ventanaregistro.py"]} |
76,733 | sebkaster/CarND-Behavioral-Cloning-P3 | refs/heads/master | /model.py | import keras
import cv2
import numpy as np
import pandas as pd
import os
from math import ceil
from preprocess import img_crop, img_normalization
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
def img_visualization(img):
plt.imshow(img)
plt.tight_layout()
plt.axis('off')
plt.gca().xaxis.set_major_locator(ticker.NullLocator())
plt.gca().yaxis.set_major_locator(ticker.NullLocator())
plt.savefig("original.png", dpi=300, bbox_inches='tight', pad_inches=0)
plt.close()
img_cropped = img_crop(img)
plt.imshow(img_cropped)
plt.tight_layout()
plt.axis('off')
plt.gca().xaxis.set_major_locator(ticker.NullLocator())
plt.gca().yaxis.set_major_locator(ticker.NullLocator())
plt.savefig("cropped.png", dpi=300, bbox_inches='tight', pad_inches=0)
plt.close()
img_random_brighness = random_brightness(img)
plt.imshow(img_random_brighness)
plt.tight_layout()
plt.axis('off')
plt.gca().xaxis.set_major_locator(ticker.NullLocator())
plt.gca().yaxis.set_major_locator(ticker.NullLocator())
plt.savefig("random-brightness.png", dpi=300, bbox_inches='tight', pad_inches=0)
plt.close()
img_normalized = img_normalization(img)
plt.imshow(img_normalized)
plt.tight_layout()
plt.axis('off')
plt.gca().xaxis.set_major_locator(ticker.NullLocator())
plt.gca().yaxis.set_major_locator(ticker.NullLocator())
plt.savefig("normalized.png", dpi=300, bbox_inches='tight', pad_inches=0)
plt.close()
flipped_img, _ = img_flip(img, 1)
plt.imshow(flipped_img)
plt.tight_layout()
plt.axis('off')
plt.gca().xaxis.set_major_locator(ticker.NullLocator())
plt.gca().yaxis.set_major_locator(ticker.NullLocator())
plt.savefig("flipped.png", dpi=300, bbox_inches='tight', pad_inches=0)
plt.close()
def random_brightness(img):
    # Scale the V channel of the HSV representation by a factor in
    # [0.25, 1.25); clip before casting back so factors above 1.0 cannot
    # wrap around the uint8 range.
    img = cv2.cvtColor(np.asarray(img), cv2.COLOR_RGB2HSV)
    random_bright = 0.25 + np.random.uniform()
    v = img[:, :, 2].astype(np.float32) * random_bright
    img[:, :, 2] = np.clip(v, 0, 255).astype(np.uint8)
    img = cv2.cvtColor(img, cv2.COLOR_HSV2RGB)
    return img
def img_flip(img, angle):
img = cv2.flip(img, 1)
return img, -1.0 * angle
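# cv2.flip(img, 1) mirrors the frame left/right, so the steering label must
# change sign: img_flip(frame, 0.3) returns the mirrored frame with -0.3.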
def image_generator(data, validation_flag):
    # Shuffle a copy of the rows that were passed in and iterate that frame,
    # so the training and validation splits each see only their own data.
    data = data.sample(frac=1).reset_index(drop=True)
    for index, row in data.iterrows():
# Select Left,Center,Right image
select_camera_image = np.random.randint(3)
if select_camera_image == 0:
fname = os.path.basename(row['left'])
steering = np.float32(row['steering']) + 0.25
elif select_camera_image == 1:
fname = os.path.basename(row['center'])
steering = np.float32(row['steering'])
else:
fname = os.path.basename(row['right'])
steering = np.float32(row['steering']) - 0.25
        img = keras.preprocessing.image.load_img('./data/IMG/' + fname)
        img = np.array(img)
        # Crop and resize the image
        img = img_crop(img)
        # Augment while the image is still 8-bit RGB: random_brightness
        # round-trips through HSV with cv2.cvtColor, which expects uint8
        # input, so augmentation has to happen before normalization.
        # np.random.randint(0, 2) flips a fair coin; validation data is
        # left unaugmented.
        if not validation_flag and np.random.randint(0, 2):
            # Add random brightness
            img = random_brightness(img)
        if not validation_flag and np.random.randint(0, 2):
            # Flip image and negate the steering angle
            img, steering = img_flip(img, steering)
        # Change the color space
        img = cv2.cvtColor(np.asarray(img), cv2.COLOR_RGB2YUV)
        # Normalize the image to float32 in [-1, 1]
        img = img_normalization(img)
        # Keep the Keras-default channels-last layout: (66, 200, 3)
        yield img, steering
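# A quick sanity check of the generator output (a sketch; assumes the
# driving_log frame loaded further down):
#
#     gen = image_generator(driving_log, validation_flag=True)
#     img, angle = next(gen)
#     img.shape   # expected: (66, 200, 3), float32 in [-1, 1]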
def batch_generator(driving_log, validation_flag=False, batch_size=32):
num_rows = len(driving_log.index)
    train_images = np.zeros((batch_size, 66, 200, 3))
train_steering = np.zeros(batch_size)
line_num = 0
while True:
for j in range(batch_size):
# Reset generator if over bounds
if line_num >= num_rows:
line_num = 0
images = image_generator(driving_log, validation_flag)
elif line_num == 0:
images = image_generator(driving_log, validation_flag)
train_images[j], train_steering[j] = next(images)
line_num += 1
yield train_images, train_steering
# Cut off 75% of the low-steering-angle rows so straight-line driving does
# not dominate training
def remove_low_angles(driving_log, angle_threshold=0.1):
    low_angles = driving_log[np.abs(driving_log["steering"]) <= angle_threshold]
    num_drops = int(len(low_angles) * 0.75)
    # Drop from the same low-angle subset that num_drops was computed from
    drop_lows = low_angles["index"].values[0:num_drops]
    return driving_log.drop(drop_lows, axis=0).sample(frac=1.0)
driving_log = pd.read_csv("./data/driving_log.csv").reset_index()
print("Number of Original Data", len(driving_log))
revised_log = remove_low_angles(driving_log)
print("Number of Revised Data", len(revised_log))
num_training = (int(len(revised_log) * 0.8))
training_data = revised_log[0:num_training]
print("Num of Training data", len(training_data))
validation_data = revised_log[num_training:]
print("Num of Validation data", len(validation_data))
# Make dataset
train_data = batch_generator(training_data)
val_data = batch_generator(validation_data, validation_flag=True)
model = keras.Sequential()
model.add(
keras.layers.Conv2D(24, kernel_size=5, strides=2, activation='elu', padding='same',
                        kernel_regularizer=keras.regularizers.l2(0.001), input_shape=(66, 200, 3)))
model.add(
keras.layers.Conv2D(36, kernel_size=5, strides=2, activation='elu', padding='same',
kernel_regularizer=keras.regularizers.l2(0.001)))
model.add(
keras.layers.Conv2D(48, kernel_size=5, strides=2, activation='elu', padding='same',
kernel_regularizer=keras.regularizers.l2(0.001)))
model.add(
keras.layers.Conv2D(64, kernel_size=3, activation='elu', padding='same',
kernel_regularizer=keras.regularizers.l2(0.001)))
model.add(
keras.layers.Conv2D(64, kernel_size=3, activation='elu', padding='same',
kernel_regularizer=keras.regularizers.l2(0.001)))
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(100, kernel_regularizer=keras.regularizers.l2(0.001), activation='elu'))
model.add(keras.layers.Dense(50, kernel_regularizer=keras.regularizers.l2(0.001), activation='elu'))
model.add(keras.layers.Dense(10, kernel_regularizer=keras.regularizers.l2(0.001), activation='elu'))
model.add(keras.layers.Dense(1))
model.compile(loss='mse', optimizer='adam')
model.fit_generator(train_data, steps_per_epoch=ceil(len(training_data) / 32),
validation_data=val_data, validation_steps=ceil(len(validation_data) / 32),
epochs=10)
#model.save('model.h5')
| {"/model.py": ["/preprocess.py"]} |
76,734 | sebkaster/CarND-Behavioral-Cloning-P3 | refs/heads/master | /preprocess.py | import cv2
import numpy as np
def img_crop(img):
img = img[40:135,:]
return cv2.resize(img, (200, 66), interpolation=cv2.INTER_AREA)
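# Assuming the simulator's 160x320 input frames, this keeps rows 40..134
# (sky and car hood removed) and rescales to the 200x66 network input.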
def img_normalization(img):
img = img / 127.5 - 1.0
img = img.astype(np.float32)
return img
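# Maps 8-bit pixel values linearly: 0 -> -1.0, 127.5 -> 0.0, 255 -> 1.0.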
def preprocess_img(img):
    # Mirror the training pipeline: crop/resize first, convert to YUV while
    # the image is still 8-bit, then normalize to [-1, 1].
    img = img_crop(img)
    img = cv2.cvtColor(img, cv2.COLOR_RGB2YUV)
    return img_normalization(img)
| {"/model.py": ["/preprocess.py"]} |
76,736 | HuBot2020/Localized-Image-Style-Transfer | refs/heads/master | /object_detection.py | import os
import sys
from mrcnn import visualize
import mrcnn.model as modellib
from mrcnn import utils
import random
import math
import numpy as np
import skimage.io
import matplotlib
import matplotlib.pyplot as plt
import itertools
import colorsys
from skimage.measure import find_contours
from skimage import measure
from matplotlib import patches, lines
from matplotlib.patches import Polygon
from PIL import Image
import coco
import tensorflow as tf
import cv2
import uuid
class InferenceConfig(coco.CocoConfig):
GPU_COUNT = 1
IMAGES_PER_GPU = 1
class_names = ['BG', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
'bus', 'train', 'truck', 'boat', 'traffic light',
'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird',
'cat', 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear',
'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie',
'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
'kite', 'baseball bat', 'baseball glove', 'skateboard',
'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup',
'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed',
'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
'keyboard', 'cell phone', 'microwave', 'oven', 'toaster',
'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors',
'teddy bear', 'hair drier', 'toothbrush']
def random_colors(N):
hsv = [(i / N, 1, 1) for i in range(N)]
colors = list(map(lambda c: colorsys.hsv_to_rgb(*c), hsv))
random.shuffle(colors)
return colors
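# For N = 3 the generated hues are 0, 1/3 and 2/3 (pure red, green and blue
# at full saturation/value); the shuffle keeps adjacent detections from
# getting near-identical colors.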
def apply_mask(image, mask, color, alpha=0.5):
"""Apply the given mask to the image."""
for c in range(3):
image[:, :, c] = np.where(mask == 1,
image[:, :, c] *
(1 - alpha) + alpha * color[c] * 255,
image[:, :, c])
return image
def apply_mask_image(bg, image, mask):
"""Apply the given mask to the image."""
for c in range(3):
bg[:, :, c] = np.where(mask == 1,
image[:, :, c], bg[:, :, c],)
return bg
def apply_mask_inverse_image(bg, image, mask):
"""Apply the inverse of given mask to the image."""
for c in range(3):
image[:, :, c] = np.where(mask == 1,
bg[:, :, c], image[:, :, c],)
return image
def load_object(file_name, model):
""" Show all objects detected in the photo"""
image = load_img(file_name)
results = model.detect([image], verbose=1)
r = results[0]
N = len(r['rois'])
colors = random_colors(N)
figsize = (16, 16)
_, ax = plt.subplots(1, figsize=figsize)
ax.axis('off')
ax.margins(0, 0)
captions = None
masked_image = image.astype(np.uint32).copy()
counts = {}
output = []
for i in range(N):
y1, x1, y2, x2 = r['rois'][i]
# Add captions to the detected objects in the format of
# label number + class name + appeared times
if not captions:
caption = class_names[r['class_ids'][i]]
if caption not in counts:
counts[caption] = 1
caption = str(i)+" "+caption+str(counts[caption])
else:
counts[caption] += 1
caption = str(i)+" "+caption+str(counts[caption])
else:
caption = captions[i]
ax.text(x1, y1 + 8, caption,
color='w', size=11, backgroundcolor="none")
output.append(caption)
# Apply color masks to detected objects
mask = r['masks'][:, :, i]
masked_image = apply_mask(masked_image, mask, colors[i])
padded_mask = np.zeros(
(mask.shape[0] + 2, mask.shape[1] + 2), dtype=np.uint8)
padded_mask[1:-1, 1:-1] = mask
contours = find_contours(padded_mask, 0.5)
for verts in contours:
verts = np.fliplr(verts) - 1
p = Polygon(verts, facecolor="none", edgecolor=colors[i])
ax.add_patch(p)
fig = ax.imshow(masked_image.astype(np.uint8))
fig.axes.get_xaxis().set_visible(False)
fig.axes.get_yaxis().set_visible(False)
all = 'static/out/all_'+uuid.uuid4().hex[:10]+'.jpg'
plt.savefig(all, bbox_inches='tight',
pad_inches=0)
return r, all
def show_selection_outlines(raw_input, image, r):
"""Contour Outlines of selected objects"""
image = skimage.io.imread(image)
figsize = (16, 16)
_, ax = plt.subplots(1, figsize=figsize)
masked_image = image.astype(np.uint32).copy()
contour_outlines = []
# Input 1000 equals select all objects
if raw_input == [1000]:
raw_input = list(range(len(r['rois'])))
# Draw only the outlines of the objects
for i in raw_input:
        if i >= len(r['rois']) or i < 0:
continue
mask = r['masks'][:, :, i]
padded_mask = np.zeros(
(mask.shape[0] + 2, mask.shape[1] + 2), dtype=np.uint8)
padded_mask[1:-1, 1:-1] = mask
# Find coutour outlines of the objects
contours = find_contours(padded_mask, 0.5)
contour_outlines.append(contours)
for n, contour in enumerate(contours):
ax.plot(contour[:, 1], contour[:, 0], linewidth=2,)
fig = ax.imshow(masked_image.astype(np.uint8))
fig.axes.get_xaxis().set_visible(False)
fig.axes.get_yaxis().set_visible(False)
outlines = 'static/out/selected_'+uuid.uuid4().hex[:10]+'.jpg'
plt.savefig(outlines, bbox_inches='tight',
pad_inches=0)
return outlines
def show_selection_crop(raw_input, image, r):
"""Crop image according to selected contours"""
image = skimage.io.imread(image)
figsize = (16, 16)
_, ax = plt.subplots(1, figsize=figsize)
ax.axis('off')
ax.margins(0, 0)
background_image = np.zeros_like(image)
masked_image = image.astype(np.uint32).copy()
# Input 1000 equals select all objects
if raw_input == [1000]:
raw_input = list(range(len(r['rois'])))
# Crop out the instances selected with black backgroud
for i in raw_input:
        if i >= len(r['rois']) or i < 0:
continue
mask = r['masks'][:, :, i]
background_image = apply_mask_image(
background_image, masked_image, mask,)
fig = ax.imshow(background_image.astype(np.uint8))
fig.axes.get_xaxis().set_visible(False)
fig.axes.get_yaxis().set_visible(False)
location = 'static/out/crop_'+uuid.uuid4().hex[:10]+'.jpg'
plt.savefig(location, bbox_inches='tight', pad_inches=0)
return location, background_image
def show_selection_inverse(raw_input, image, r):
"""Crop image according to selected inverse contours"""
image = skimage.io.imread(image)
figsize = (16, 16)
_, ax = plt.subplots(1, figsize=figsize)
ax.axis('off')
ax.margins(0, 0)
background_image = np.zeros_like(image)
masked_image = image.astype(np.uint32).copy()
# Input 1000 equals select all objects
if raw_input == [1000]:
raw_input = list(range(len(r['rois'])))
# Crop out the inverse of instances selected with black backgroud
for i in raw_input:
        if i >= len(r['rois']) or i < 0:
continue
mask = r['masks'][:, :, i]
masked_image = apply_mask_inverse_image(
background_image, masked_image, mask,)
fig = ax.imshow(masked_image.astype(np.uint8))
fig.axes.get_xaxis().set_visible(False)
fig.axes.get_yaxis().set_visible(False)
location = 'static/out/crop_inverse_'+uuid.uuid4().hex[:10]+'.jpg'
plt.savefig(location, bbox_inches='tight', pad_inches=0)
return location, masked_image
def load_img(path_to_img):
"""Load image using skimage"""
img = skimage.io.imread(path_to_img)
return img
def blending(crop_path, original_path, style_path):
"""Blending technique using GaussianBlur"""
styled = cv2.imread(style_path).astype('uint8')
crop = cv2.imread(crop_path).astype('uint8')
original = cv2.imread(original_path).astype('uint8')
# Resize cropped image and orignal image to styled image size
# Styled image size is set to 512
crop = cv2.resize(crop, (styled.shape[1], styled.shape[0]))
original = cv2.resize(original, (styled.shape[1], styled.shape[0]))
# Create mask image
non_black_pixels_mask = np.any(np.logical_and(
crop != [0, 0, 0], crop != [255, 255, 255]), axis=-1)
original_copy = original
mask = crop
styled = styled.astype(float)
original_copy = original_copy.astype(float)
# Set non black pixels to white and create new mask
mask[non_black_pixels_mask] = [255, 255, 255]
m = mask
# Blur the edges
blurSigma = 5
m = m.astype(float)/255.0
m = cv2.GaussianBlur(m, (2*blurSigma+1, 2*blurSigma+1), blurSigma)
# apply alpha blending
style_layer = cv2.multiply(m, styled)
regular_layer = cv2.multiply(1.0-m, original_copy)
out = style_layer + regular_layer
out = out.astype('uint8')
output_str = 'static/final/styled_final_'+uuid.uuid4().hex[:10]+'.jpg'
cv2.imwrite(output_str, out)
return output_str
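# Typical call order around blending() (a sketch; the file names are
# illustrative):
#
#     crop_path, _ = show_selection_crop([0], content_path, r)
#     # ...style-transfer the cropped image to produce styled_path...
#     final = blending(crop_path, content_path, styled_path)
#
# The Gaussian-blurred mask m drives the alpha blend
# out = m * styled + (1 - m) * original, which fades the stylized object
# smoothly into the untouched background.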
| {"/main.py": ["/object_detection.py"]} |
76,737 | HuBot2020/Localized-Image-Style-Transfer | refs/heads/master | /main.py | import logging
import os
import sys
import time
from flask import (Flask, flash, make_response, redirect, render_template,
request, send_file, session, url_for)
from PIL import Image
import mrcnn.model as modellib
from mrcnn import utils
from object_detection import load_object, show_selection_outlines, show_selection_crop, show_selection_inverse, InferenceConfig, blending
from werkzeug.utils import secure_filename
from six.moves.urllib.request import urlopen
import tarfile
app = Flask(__name__)
RESULTS = None
SHOW_OBJECTS = None
STYLE_URL = None
CONTENT_URL = None
LOCATION = None
SELECTION = None
ROOT_DIR = os.path.abspath("")
MaskRCNN_DIR = ROOT_DIR
MODEL_DIR = os.path.join(MaskRCNN_DIR, "coco.py")
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5")
if not os.path.exists(COCO_MODEL_PATH):
utils.download_trained_weights(COCO_MODEL_PATH)
config = InferenceConfig()
detection_model = modellib.MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=config)
detection_model.load_weights(COCO_MODEL_PATH, by_name=True)
detection_model.keras_model._make_predict_function()
def DownloadCheckpointFiles(checkpoint_dir=os.path.abspath("")):
"""Download checkpoint files if necessary """
url_prefix = 'http://download.magenta.tensorflow.org/models/'
checkpoints = ['arbitrary_style_transfer.tar.gz']
path = 'arbitrary_style_transfer'
for checkpoint in checkpoints:
full_checkpoint = os.path.join(checkpoint_dir, checkpoint)
if not os.path.exists(path):
print('Downloading {}'.format(full_checkpoint))
response = urlopen(url_prefix + checkpoint)
data = response.read()
with open(full_checkpoint, 'wb') as fh:
fh.write(data)
unzip_tar_gz()
def unzip_tar_gz():
"""Upzip checkpoint files """
tf = tarfile.open('arbitrary_style_transfer.tar.gz',"r:gz")
tf.extractall()
tf.close()
def upload_style_content_images(style,content):
""" Upload style image to style_images folder
and content image to input_images folder """
style_name = secure_filename(style.filename)
style_path = os.path.join('static/style_images', style_name)
style.save(style_path)
content_name = secure_filename(content.filename)
content_path = os.path.join('static/input_images', content_name)
content.save(content_path)
return style_path, content_path
@app.route('/')
def index():
return render_template('home.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/upload', methods=['POST'])
def upload():
transfer_option = request.form.get('transfer_select')
# Set global variable to access across different pages
global STYLE_URL, CONTENT_URL
global RESULTS, SHOW_OBJECTS
global LOCATION
global SELECTION
# Directly transform the whole image
if transfer_option == 'whole':
style = request.files['style_file']
content = request.files['image_file']
STYLE_URL, CONTENT_URL = upload_style_content_images(style,content)
content_img_name = os.path.basename(CONTENT_URL)[:-4]
style_img_name = os.path.basename(STYLE_URL)[:-4]
# Run 100% style transfer with arbitrary_image_stylization model
out = "arbitrary_image_stylization_with_weights \
--checkpoint=arbitrary_style_transfer/model.ckpt \
--output_dir=static/final \
--style_images_paths="+STYLE_URL+"\
--content_images_paths="+CONTENT_URL+"\
--image_size=512 \
--content_square_crop=False \
--style_image_size=512 \
--style_square_crop=False \
--logtostderr"
os.system(out)
path = 'static/final/'+('%s_stylized_%s_0.jpg' %
(content_img_name, style_img_name))
return render_template('upload.html', image_url=path)
# Transform the whole image with different weights of transfer
elif transfer_option == 'adjust':
style = request.files['style_file']
content = request.files['image_file']
STYLE_URL, CONTENT_URL = upload_style_content_images(style,content)
content_img_name = os.path.basename(CONTENT_URL)[:-4]
style_img_name = os.path.basename(STYLE_URL)[:-4]
# Run different weights of style transfer from 20% to 100%
INTERPOLATION_WEIGHTS='[0.2,0.4,0.6,0.8,1.0]'
output = "arbitrary_image_stylization_with_weights \
--checkpoint=arbitrary_style_transfer/model.ckpt \
--output_dir=static/final \
--style_images_paths="+STYLE_URL+"\
--content_images_paths="+CONTENT_URL+"\
--image_size=512 \
--content_square_crop=False \
--style_image_size=512 \
--style_square_crop=False \
--interpolation_weights="+INTERPOLATION_WEIGHTS+"\
--logtostderr"
os.system(output)
changed_paths = []
for i in range(5):
changed_paths.append('static/final/' + ('%s_stylized_%s_%d.jpg' %
(content_img_name, style_img_name,i)))
return render_template('wholeOptions.html', image_url=changed_paths)
# Object Detection
elif transfer_option == 'object':
SELECTION = 'object'
style = request.files['style_file']
content = request.files['image_file']
STYLE_URL, CONTENT_URL = upload_style_content_images(style,content)
# Run Object Detection
RESULTS, SHOW_OBJECTS = load_object(CONTENT_URL, detection_model)
return render_template('object.html', image_url=SHOW_OBJECTS)
# Inverse Object Detection
elif transfer_option == 'inverse':
SELECTION = 'inverse'
style = request.files['style_file']
content = request.files['image_file']
STYLE_URL, CONTENT_URL = upload_style_content_images(style,content)
# Run Object Detection
RESULTS, SHOW_OBJECTS = load_object(CONTENT_URL, detection_model)
return render_template('object.html', image_url=SHOW_OBJECTS)
@app.route("/select", methods=['POST'])
def select():
global LOCATION
# Run different crop strategies according to selections
selection = request.form.get('chosen_objects')
selection = [int(x) for x in " ".join(selection.split(",")).split()]
contour_outlines = show_selection_outlines(
selection, CONTENT_URL, RESULTS)
if SELECTION == 'object':
location, background_image = show_selection_crop(
selection, CONTENT_URL, RESULTS)
LOCATION = location
elif SELECTION == 'inverse':
location, background_image = show_selection_inverse(
selection, CONTENT_URL, RESULTS)
LOCATION = location
return render_template('crop.html', image_url=contour_outlines)
@app.route("/transform", methods=['POST'])
def transform():
# Transform object detection with options to adjust weights
scale_option = request.form.get('scale')
content_img_name = os.path.basename(LOCATION)[:-4]
style_img_name = os.path.basename(STYLE_URL)[:-4]
# Direct Transformation with 100% style transfer
if scale_option == 'no':
output = "arbitrary_image_stylization_with_weights \
--checkpoint=arbitrary_style_transfer/model.ckpt \
--output_dir=static/final \
--style_images_paths="+STYLE_URL+"\
--content_images_paths="+LOCATION+"\
--image_size=512 \
--content_square_crop=False \
--style_image_size=512 \
--style_square_crop=False \
--logtostderr"
os.system(output)
changed_path = 'static/final/' + ('%s_stylized_%s_0.jpg' %
(content_img_name, style_img_name))
output_str = blending(LOCATION, CONTENT_URL, changed_path)
return render_template('final.html', image_url=output_str)
# Transformation adjustable from 20% to 100% weights
elif scale_option == 'yes':
INTERPOLATION_WEIGHTS='[0.2,0.4,0.6,0.8,1.0]'
outputs = "arbitrary_image_stylization_with_weights \
--checkpoint=arbitrary_style_transfer/model.ckpt \
--output_dir=static/final \
--style_images_paths="+STYLE_URL+"\
--content_images_paths="+LOCATION+"\
--image_size=512 \
--content_square_crop=False \
--style_image_size=512 \
--style_square_crop=False \
--interpolation_weights="+INTERPOLATION_WEIGHTS+"\
--logtostderr"
os.system(outputs)
changed_paths = []
for i in range(5):
changed_paths.append('static/final/' + ('%s_stylized_%s_%d.jpg' %
(content_img_name, style_img_name,i)))
return render_template('options.html',image_url=changed_paths)
@app.route("/blend", methods=['POST'])
def blend():
# Blend the transformed cropped image with original image
content_img_name = os.path.basename(LOCATION)[:-4]
style_img_name = os.path.basename(STYLE_URL)[:-4]
select_number = request.form.get('weightScale')
changed_path_select = 'static/final/' + ('%s_stylized_%s_%s.jpg' %
(content_img_name, style_img_name,select_number))
output_str_select = blending(LOCATION, CONTENT_URL, changed_path_select)
return render_template('final.html',image_url=output_str_select)
@app.errorhandler(500)
def server_error(e):
# Log the error and stacktrace.
logging.exception('An error occurred during a request.')
return 'An internal error occurred.', 500
if __name__ == '__main__':
DownloadCheckpointFiles()
app.run(threaded=True)
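# INTERPOLATION_WEIGHTS='[0.2,0.4,0.6,0.8,1.0]' makes the stylization script
# emit five renderings per content/style pair, indexed _0.._4 in the
# '%s_stylized_%s_%d.jpg' names used above; _4 corresponds to the fully
# stylized (weight 1.0) version.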
| {"/main.py": ["/object_detection.py"]} |
76,745 | JamesMusyoka/InstaBoomz | refs/heads/master | /insters/views.py | from django.shortcuts import render, redirect
from django.http import HttpResponse
import datetime as dt
from .models import *
# Create your views here.
def index(request):
images = Images.objects.all()
return render(request, 'index.html',{"images": images})
def image(request):
date = dt.date.today()
return render(request, 'image.html')
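# convert_dates (below) maps a date to its weekday name; for example,
# dt.date(2019, 9, 15) falls on a Sunday, so convert_dates returns 'Sunday'.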
def convert_dates(dates):
# Function that gets the weekday number for the date.
day_number = dt.date.weekday(dates)
days = ['Monday','Tuesday','Wednesday','Thursday','Friday','Saturday',"Sunday"]
# Returning the actual day of the week
day = days[day_number]
return day | {"/insters/views.py": ["/insters/models.py"]} |
76,746 | JamesMusyoka/InstaBoomz | refs/heads/master | /insters/models.py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Images(models.Model):
name = models.CharField(max_length =30)
caption = models.CharField(max_length =150, default="")
user = models.ForeignKey(User, null=True, blank=True, on_delete=models.CASCADE,related_name="red")
likes = models.IntegerField(default=0)
comment = models.CharField(max_length=150)
pub_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.name | {"/insters/views.py": ["/insters/models.py"]} |
76,747 | JamesMusyoka/InstaBoomz | refs/heads/master | /insters/apps.py | from django.apps import AppConfig
class InstersConfig(AppConfig):
name = 'insters'
| {"/insters/views.py": ["/insters/models.py"]} |
76,790 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0016_auto_20190915_1406.py | # Generated by Django 2.1.5 on 2019-09-15 08:36
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0015_auto_20190915_1130'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 14, 6, 14, 520119)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,791 | sameer-raipure/vehicle_E-E | refs/heads/master | /register/migrations/0018_auto_20190915_1406.py | # Generated by Django 2.1.5 on 2019-09-15 08:36
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('register', '0017_auto_20190915_1130'),
]
operations = [
migrations.AlterField(
model_name='fac',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 14, 6, 14, 522115)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,792 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/views.py | from django.contrib import messages
from django.contrib.auth.mixins import(
LoginRequiredMixin,
PermissionRequiredMixin
)
from django.urls import reverse
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from django.views import generic
from groups.models import Guest
from . import models
from django.shortcuts import render,redirect
import datetime
def registerVehicle(request):
if request.method == 'POST':
obj = Guest()
obj.name = request.POST["name"]
obj.vehicle_no = request.POST["vehicle_no"]
obj.vehicle_type = request.POST["type"]
obj.purpose = request.POST["purpose"]
obj.in_out= request.POST["in_out"]
time = datetime.datetime.now()
obj.time = time
obj.save()
return redirect('/')
return render(request,'groups/group_base.html')
| {"/register/views.py": ["/register/models.py"]} |
76,793 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0009_auto_20190915_1038.py | # Generated by Django 2.1.5 on 2019-09-15 05:08
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0008_auto_20190915_1037'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 10, 38, 43, 814028)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,794 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0007_auto_20190915_1034.py | # Generated by Django 2.1.5 on 2019-09-15 05:04
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0006_auto_20190915_1021'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 10, 34, 32, 739138)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,795 | sameer-raipure/vehicle_E-E | refs/heads/master | /register/views.py | from django.contrib import messages
from django.contrib.auth.mixins import(
LoginRequiredMixin,
PermissionRequiredMixin
)
from django.urls import reverse
from django.db import IntegrityError
from django.shortcuts import get_object_or_404
from django.views import generic
from register.models import fac
from . import models
from django.shortcuts import render,redirect
import datetime
def registerfacVehicle(request):
if request.method == 'POST':
obj = fac()
print("jhgbu")
obj.name = request.POST["name"]
obj.vehicle_no = request.POST["vehicle_no"]
obj.vehicle_type = request.POST["type"]
obj.des = request.POST["des"]
obj.in_out= request.POST["in_out"]
time = datetime.datetime.now()
obj.time= time
print("jhgbu")
obj.save()
return redirect('/')
return render(request,'register/register_base.html')
def veh(request):
    # Look the vehicle up by number; fall back to the guest-registration
    # page when the number is missing or not registered as faculty.
    try:
        obj = fac.objects.get(vehicle_no=request.POST["vehno"])
        return render(request, 'register/register_base.html', {"obj": obj})
    except (KeyError, fac.DoesNotExist):
        return redirect('/groups')
| {"/register/views.py": ["/register/models.py"]} |
76,796 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0008_auto_20190915_1037.py | # Generated by Django 2.1.5 on 2019-09-15 05:07
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0007_auto_20190915_1034'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 10, 37, 15, 918365)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,797 | sameer-raipure/vehicle_E-E | refs/heads/master | /register/migrations/0012_auto_20190915_1043.py | # Generated by Django 2.1.5 on 2019-09-15 05:13
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('register', '0011_auto_20190915_1038'),
]
operations = [
migrations.AlterField(
model_name='fac',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 10, 43, 37, 282890)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,798 | sameer-raipure/vehicle_E-E | refs/heads/master | /register/migrations/0007_auto_20190915_0932.py | # Generated by Django 2.1.5 on 2019-09-15 04:02
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('register', '0006_auto_20190915_0927'),
]
operations = [
migrations.RemoveField(
model_name='fac',
name='time_in',
),
migrations.AddField(
model_name='fac',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 9, 32, 31, 851033)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,799 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0005_auto_20190915_0932.py | # Generated by Django 2.1.5 on 2019-09-15 04:02
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0004_auto_20190915_0927'),
]
operations = [
migrations.RemoveField(
model_name='guest',
name='time_in',
),
migrations.AddField(
model_name='guest',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 9, 32, 31, 851033)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,800 | sameer-raipure/vehicle_E-E | refs/heads/master | /register/migrations/0002_auto_20190915_0908.py | # Generated by Django 2.1.5 on 2019-09-15 03:38
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('register', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='fac',
name='in_out',
field=models.CharField(default='Entry', max_length=255),
),
migrations.AddField(
model_name='fac',
name='time_in',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 9, 8, 45, 448208)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,801 | sameer-raipure/vehicle_E-E | refs/heads/master | /register/models.py | from django.db import models
from django.conf import settings
from django.urls import reverse
from django.utils.text import slugify
import datetime
# from accounts.models import User
# pip install misaka
import misaka
from django.contrib.auth import get_user_model
User = get_user_model()
# https://docs.djangoproject.com/en/2.0/howto/custom-template-tags/#inclusion-tags
# This is for the in_group_members check template tag
from django import template
register = template.Library()
class fac(models.Model):
name = models.CharField(max_length=255)
vehicle_no = models.CharField(max_length=100)
vehicle_type = models.CharField(max_length=100)
des = models.CharField(max_length=1000)
in_out = models.CharField(max_length=255,default="Entry")
    # Pass the callable, not its result: datetime.datetime.now() is evaluated
    # once at import time, so every row would share that stale timestamp (and
    # every makemigrations run would bake in a new literal default).
    time = models.DateTimeField(default=datetime.datetime.now)
def __str__(self):
return self.vehicle_no
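# With a callable default, each row stamps its own creation time, e.g.
# (illustrative field values):
#
#     v = fac.objects.create(name='demo', vehicle_no='MH12AB1234',
#                            vehicle_type='car', des='demo entry')
#     v.time   # evaluated when the row is created, not at import time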
| {"/register/views.py": ["/register/models.py"]} |
76,802 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0003_auto_20190915_0627.py | # Generated by Django 2.1.5 on 2019-09-15 00:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0002_auto_20190915_0621'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='purpose',
field=models.CharField(max_length=1000),
),
migrations.AlterField(
model_name='guest',
name='vehicle_type',
field=models.CharField(max_length=10),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,803 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0018_auto_20190915_1411.py | # Generated by Django 2.1.5 on 2019-09-15 08:41
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0017_auto_20190915_1407'),
]
operations = [
migrations.AlterField(
model_name='guest',
name='time',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 14, 11, 35, 759060)),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,804 | sameer-raipure/vehicle_E-E | refs/heads/master | /groups/migrations/0004_auto_20190915_0927.py | # Generated by Django 2.1.5 on 2019-09-15 03:57
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('groups', '0003_auto_20190915_0627'),
]
operations = [
migrations.AddField(
model_name='guest',
name='in_out',
field=models.CharField(default='Entry', max_length=255),
),
migrations.AddField(
model_name='guest',
name='time_in',
field=models.DateTimeField(default=datetime.datetime(2019, 9, 15, 9, 27, 51, 760747)),
),
migrations.AlterField(
model_name='guest',
name='vehicle_no',
field=models.CharField(max_length=100),
),
]
| {"/register/views.py": ["/register/models.py"]} |
76,805 | kkmojo/week1 | refs/heads/master | /extra_exercise1.py | """
lab1 extra exercise
"""
class PatientRoster:
"""
an appointment system for a doctor's office.
===Attributes==
    @type patients: list
        each element in this list represents a single patient,
        stored as [ohip_number, first_name, last_name, gender]
    @type limit: int
        the maximum number of patients this doctor can take
    @type gender_rule: bool
        whether this doctor enforces the gender-ratio rule
    @type gender_limit: tuple(int, int)
        the first element is the maximum number of male patients,
        the second is the maximum for female patients
    @type current_num: list
        the first element is the current number of male patients
        this doctor has, the second is the female count
"""
def __init__(self, limit, rule=False):
"""
        initialize a new roster holding at most <limit>
        patients; pass <rule> as True to enable the gender
        rule (it defaults to False).
        @type self: PatientRoster
        @type limit: int
        @type rule: bool
@rtype: None
"""
self.gender_rule = rule
self.patients = []
self.limit = limit
self.gender_limit = ()
self.current_num = [0, 0]
def set_rule(self, male_ratio):
"""set up the gender ratio rule for this
doctor, this may not be changed once set.
<male_ratio> is a number between
1 and 0
@type self: Doctor
@type male_ratio: float
@rtype None
>>> new = PatientRoster(100,True)
>>> new.set_rule(0.6)
>>> new.gender_limit
(60, 40)
"""
if self.gender_rule is True:
if 0 <= male_ratio < 1:
male = int(self.limit * male_ratio)
female = self.limit - male
self.gender_limit = (male, female)
    def patient_register(self, ohip_number, name, gender):
        """
        Register a patient identified by <ohip_number>, with
        <name> given as a [first, last] list and <gender> either
        'male' or 'female'. Registration respects the overall
        limit and, when the gender rule is enabled, the
        per-gender quota.
        @type self: PatientRoster
        @type ohip_number: int
        @type name: list
        @type gender: str
        @rtype: None
        >>> new = PatientRoster(100,True)
        >>> new.set_rule(0.6)
        >>> new.patient_register(1234,['mike','lee'],'male')
        >>> new.patients
        [[1234, 'mike', 'lee', 'male']]
        >>> new.current_num
        [1, 0]
        """
        total = self.current_num[0] + self.current_num[1]
        if total >= self.limit:
            return
        if self.gender_rule:
            if gender == "male" and self.current_num[0] >= self.gender_limit[0]:
                return
            if gender == "female" and self.current_num[1] >= self.gender_limit[1]:
                return
        self.patients.append([ohip_number, name[0], name[1], gender])
        if gender == "male":
            self.current_num[0] += 1
        else:
            self.current_num[1] += 1
    def delete_patient(self, ohip_num):
        """
        delete a patient from the patient list if that
        <ohip_num> is in the list, keeping the gender
        counts in sync.
        @type self: PatientRoster
        @type ohip_num: int
        @rtype: None
        >>> new = PatientRoster(100)
        >>> new.set_rule(0.6)
        >>> new.patient_register(1234,['mike','lee'],'male')
        >>> new.delete_patient(1234)
        >>> new.patients
        []
        """
        for patient in self.patients:
            if patient[0] == ohip_num:
                self.patients.remove(patient)
                if patient[3] == "male":
                    self.current_num[0] -= 1
                else:
                    self.current_num[1] -= 1
                break
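    # For example, after registering one male patient current_num == [1, 0],
    # and delete_patient(1234) brings it back to [0, 0].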
class ClassList:
"""
a student records system like ROSI
===Attributes===
@type students: list
a list keeps track of all students in this course,
each identified by student number.
@type limit: int
a limit of how many students this course holds.
"""
def __init__(self, limit):
"""
create a new course with <limit>.
@type limit: int
@rtype: None
"""
self.limit = limit
self.students = []
def register(self, student_num):
"""
register a student to this course if it won't
break the course limit, students are identified
by <student_num>.
@type student_num: int
@rtype: None
>>> csc148 = ClassList(140)
>>> csc148.register(1234)
>>> csc148.students
[1234]
"""
if student_num not in self.students and len(self.students) < self.limit:
self.students.append(student_num)
def drop(self, student_num):
"""
delete a student from the course with its
<student_num>
@type student_num: int
@rtype: none
>>> csc148 = ClassList(140)
>>> csc148.register(1234)
>>> csc148.students
[1234]
>>> csc148.drop(1234)
>>> csc148.students
[]
"""
if student_num in self.students:
self.students.remove(student_num)
class Player:
"""
an app for a game like 2048 or PacMan, where players get
a score each time they play.
===Attributes===
    @type scores: list
        this player's most recent scores; the oldest one is
        dropped once 100 games have been recorded
    @type average: int
        the average of the currently stored scores, truncated
        to an int
"""
def __init__(self):
"""
        initialize a new player.
@type self: Player
@rtype: None
"""
self.scores = []
self.average = 0
def record(self, score):
"""
record a game with <score> for this player.
        once 100 scores have accumulated, the oldest one is
        dropped, so at most the 99 most recent are kept.
@type self: Player
@type score: int
@rtype: none
>>> new = Player()
>>> new.record(1000)
>>> new.scores
[1000]
>>> 1000 in new.scores
True
>>> for each in range(1,100): new.record(each)
>>> 1000 in new.scores
False
"""
self.scores.append(score)
if len(self.scores) >= 100:
self.scores.pop(0)
self.average = self.avg_helper(self.scores)
def avg_helper(self, lst):
"""
A helper function for calculating the average
score of a player.
@type self: Player
@type lst: list
@rtype: int
"""
        return int(sum(lst) / len(lst))
class InventoryItem:
"""
an inventory system
    ===Attributes===
    @type products: list
        a list storing each product's info as
        [number, name, price]
    @type categories: dict
        maps each category to the list of products
        in that category
"""
def __init__(self):
"""
Create a new inventoryItem
@type self: InventoryItem
@rtype: None
"""
self.products = []
self.categories = {}
def add_product(self, num, name, _type, price):
"""
add a new product to this inventory
@type self: InventoryItem
@type num: int
@type name: str
@type _type: str
@type price: int
@rtype: none
>>> new = InventoryItem()
>>> new.add_product(1234,'coke','beverage',10)
"""
product = [num, name, price]
self.products.append(product)
if _type not in self.categories:
self.categories[_type] = [product]
else:
self.categories[_type].append(product)
def get_price(self, num):
"""
        given the number of a product, return the
        price of the product, or None if there's
        no such product.
@type self: InventoryItem
@type num: int
        @rtype: int|None
>>> new = InventoryItem()
>>> new.add_product(1234,'coke','beverage',10)
>>> new.get_price(1234)
10
>>> new.get_price(2345)
"""
for each in self.products:
if num == each[0]:
return each[2]
def discount(self, num, percent):
"""
given the number of a product, discount its
price by <percent>, if that number is in this
system.
@type self: InventoryItem
@type num: int
@type percent: float
        @rtype: None
>>> new = InventoryItem()
>>> new.add_product(1234,'coke','beverage',10)
>>> new.discount(1234,0.5)
"""
new_rate = 1 - percent
for each in self.products:
if num == each[0]:
each[2] *= new_rate
break
def compare(self, num1, num2):
"""
        given the numbers of two products, compare
        their prices by printing both prices, if and
        only if num1 and num2 are in this system
        @type num1: int
        @type num2: int
        @rtype: None
>>> new = InventoryItem()
>>> new.add_product(1234,'coke','beverage',10)
>>> new.add_product(4321,'dry','beverage',20)
>>> new.compare(1234,4321)
price of first product: 10 ,price of second product: 20
"""
price1 = None
price2 = None
for each in self.products:
if num1 == each[0]:
price1 = each[2]
elif num2 == each[0]:
price2 = each[2]
            if price1 is not None and price2 is not None:
break
        if price1 is not None and price2 is not None:
print('price of first product:', price1,
',price of second product:', price2)
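# A hedged usage sketch (not part of the original exercise file): exercising
# InventoryItem end to end; the product numbers and names are made up.
if __name__ == '__main__':
    store = InventoryItem()
    store.add_product(1234, 'coke', 'beverage', 10)
    store.add_product(4321, 'dry', 'beverage', 20)
    store.compare(1234, 4321)      # price of first product: 10 ,price of second product: 20
    store.discount(1234, 0.5)
    print(store.get_price(1234))   # 5.0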
| {"/lab01tester.py": ["/lab01.py"]} |
76,806 | kkmojo/week1 | refs/heads/master | /lab01.py | class RaceRegistry:
"""A system for organizing a 5k running race
=== Attributes ===
@type runner_speed_cate: dict of {str:str}
    the key of the dict is the runner and the value is the runner's speed category
"""
    def __init__(self):
        """Initialize an empty race registry
        @type self: RaceRegistry
        @rtype: None
        >>> system = RaceRegistry()
        >>> system.add_runner('eason' , '<30')
        >>> system.runner_speed_cate
        {'eason': '<30'}
        """
        self.runner_speed_cate = {}
def __eq__(self, other):
"""to check whether the two object are the same
@type self: RaceRegistry
        @type other: RaceRegistry | object
the other object we would like to compare
        @rtype: bool
>>> system1 = RaceRegistry()
>>> system2 = RaceRegistry()
>>> system1.add_runner('eason', '<30')
>>> system1 == system2
False
"""
        if type(self) != type(other):
            return False
        if len(self.runner_speed_cate) != len(other.runner_speed_cate):
            return False
        for runner in self.runner_speed_cate:
            if (runner not in other.runner_speed_cate) or (
                    self.runner_speed_cate[runner] != other.runner_speed_cate[runner]):
                return False
        return True
def __str__(self):
"""Return a user friendly string representing the race registry system
@type self: RaceRegistry
@rtype: str
>>> system = RaceRegistry()
>>> system.add_runner('eason' , '<30')
>>> print(system)
eason:<30
"""
        result = ''
        for runner in self.runner_speed_cate:
            result += '{}:{}\n'.format(runner, self.runner_speed_cate[runner])
        return result.strip()
def add_runner(self, runner, speed):
"""Add a new runner to the system
@type self: RaceRegistry
@type runner: str
the runner's info we will add to the system
@type speed: str
the runner's related speed
@rtype: None
>>> system = RaceRegistry()
>>> system.add_runner('eason' , '<30')
>>> system.runner_speed_cate
        {'eason': '<30'}
"""
if runner not in self.runner_speed_cate:
self.runner_speed_cate[runner] = speed
def modify_runner(self, runner, speed):
"""modify the exiting runner's speed category
@type self: RaceRegistry
@type runner: str
the runner's info we will modify in the system
@type speed: str
the runner's related speed
@rtype: None
>>> system = RaceRegistry()
>>> system.add_runner('eason' , '<30')
>>> system.modify_runner('eason', '<20')
>>> system.runner_speed_cate
        {'eason': '<20'}
"""
if runner in self.runner_speed_cate:
self.runner_speed_cate[runner] = speed
def look_up_speed_by_runner(self, runner):
"""Return the runner's related speed category
@type self: RaceRegistry
@type runner: str
the runner we want to find its speed category
@rtype: str
>>> system = RaceRegistry()
>>> system.add_runner('eason' , '<30')
>>> system.look_up_speed_by_runner('eason')
'<30'
"""
return self.runner_speed_cate[runner]
def look_up_runners_by_speed_category(self, speed):
"""Return the list of runners of the given speed category
@type self: RaceRegistry
        @type speed: str
        the speed category whose runners we want to find
@rtype: [str]
>>> system = RaceRegistry()
>>> system.add_runner('eason' , '<30')
>>> system.look_up_runners_by_speed_category('<30')
['eason']
"""
result = []
for runner in self.runner_speed_cate:
if self.runner_speed_cate[runner] == speed:
result.append(runner)
return result | {"/lab01tester.py": ["/lab01.py"]} |
76,807 | kkmojo/week1 | refs/heads/master | /lab01tester.py | from lab01 import RaceRegistry
if __name__ == '__main__':
system = RaceRegistry()
system.add_runner('Gerhard', 'Under 40 minutes')
system.add_runner('Tom', 'Under 30 minutes')
system.add_runner('Toni', 'Under 20 minutes')
system.add_runner('Margot', 'Under 30 minutes')
system.modify_runner('Gerhard', 'Under 30 minutes')
print(system.look_up_runners_by_speed_category('Under 30 minutes')) | {"/lab01tester.py": ["/lab01.py"]} |
76,827 | einstein13/atos-mapping | refs/heads/master | /filesystem.py |
from os import path, pardir, makedirs, rename
from json import loads, dump
class FileSystem():
settings_file = 'settings.json'
mapppings_folder = 'mappings'
def get_project_path(self):
basic_folder_names = ["atos-mapping", "atos-mapping-master"]
file_path = path.abspath(__file__)
folder_path = file_path
while True:
splitted = folder_path.split("\\") # Windows
if len(splitted) == 1:
splitted = folder_path.split("/") # Linux
# now "splitted" is a path splitted into folders
if splitted[-1] in basic_folder_names:
# found correct path
break
# save old path
old_path = folder_path
# create new path - less by one folder
folder_path = path.abspath(path.join(folder_path, pardir))
if old_path == folder_path:
# if that is the end of the path
                # no output queue exists on this class, so report directly
                print('There was a problem with recognizing the path')
return None
return folder_path
def get_parent_project_path(self):
project_path = self.get_project_path()
if project_path is None:
return None
folder_path = path.abspath(path.join(project_path, pardir))
return folder_path
def create_mappings_folder(self):
parent = self.get_parent_project_path()
mapping_path = path.join(parent, self.mapppings_folder)
if path.isdir(mapping_path):
return
makedirs(mapping_path)
return
def find_settings_file_path(self):
project = self.get_project_path()
settings_file = path.join(project, self.settings_file)
return settings_file
def create_settings_file(self):
settings_file = self.find_settings_file_path()
if path.isfile(settings_file):
return
file = open(settings_file, "w")
file.write("{}")
file.close()
return
def read_settings_file(self):
settings_file = self.find_settings_file_path()
file = open(settings_file, "r")
content = file.read()
file.close()
try:
return loads(content)
except:
pass
return {}
def set_settings_file(self, dictionary):
settings_file = self.find_settings_file_path()
file = open(settings_file, "w")
# json = loads(dictionary)
dump(dictionary, file)
file.close()
return
def write_mapping_file(self, file_name, file_content):
file_path = self.get_parent_project_path()
file_path = path.join(file_path, self.mapppings_folder)
file_path = path.join(file_path, file_name + ".xml")
file = open(file_path, "w")
file.write(file_content)
file.close()
return | {"/core.py": ["/filesystem.py", "/mapping_search.py"], "/run.py": ["/core.py"]} |
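# Hedged usage sketch (not part of the repo): exercising FileSystem's settings
# helpers on their own. It assumes the script runs from inside an
# "atos-mapping" checkout, since get_project_path() walks upward looking for
# that folder name.
from filesystem import FileSystem

fs = FileSystem()
fs.create_settings_file()        # creates settings.json next to the sources if it is missing
fs.set_settings_file({'domain': 'https://example.service-now.com'})
print(fs.read_settings_file())   # {'domain': 'https://example.service-now.com'}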
76,828 | einstein13/atos-mapping | refs/heads/master | /core.py | from base64 import b64encode
from filesystem import FileSystem
from mapping_search import MappingSearch
class Core(FileSystem, MappingSearch):
settings = {}
def __init__(self):
super(Core, self).__init__()
self.settings = {}
self.init_project()
self.fill_mandatory_fields()
self.full_run()
self.finish_sequence()
return
def init_project(self):
self.create_mappings_folder()
self.create_settings_file()
self.read_settings()
print("Initial sequence completed\n* * * * * *\n")
return
def hash_password(self, username, password):
string = username + ":" + password
try:
# python 3
hashed = b64encode(bytes(string, "UTF-8")).decode("UTF-8")
except:
# python 2
hashed = b64encode(string)
return hashed
def set_user(self):
user = input('Enter username: ')
password = input('Enter password: ')
if user == '' or password == '':
return ''
return self.hash_password(user, password)
def domain_input_query(self):
domain = input('Enter subdomain [default: atosglobaldev]: ')
if not domain:
domain = 'atosglobaldev'
return domain
def set_domain(self):
domain = ''
while domain == '':
domain = self.domain_input_query()
            if 'service-now.com' in domain:
                print("please type only the subdomain, not the full service-now domain name")
domain = ''
full_domain = "https://" + domain + ".service-now.com"
return full_domain
def read_settings(self):
settings = self.read_settings_file()
if settings:
self.settings = settings
return
def update_settings(self, dict_to_update):
for key in dict_to_update.keys():
self.settings[key] = dict_to_update[key]
self.set_settings_file(self.settings)
return
def check_mandatory_settings(self):
keys_to_check = [
'credentials',
'domain'
]
settings_keys = list(self.settings.keys())
for key in keys_to_check:
if key not in settings_keys:
return False
return True
def run_mapping_sequence(self):
mapping_name = input('Enter mapping name: ')
first_block_content = self.one_block_search(mapping_name)
if first_block_content == -1:
return
mapping_content = self.find_full_xml(first_block_content)
self.write_mapping_file(first_block_content['u_name'], mapping_content)
return
def print_help_message(self):
text = """This program accepts commands:
* exit - terminates the program
* mapping - make mapping tree for everything
* configure - change settings for password/url
Aliases are configured in lines ~110 in core.py"""
print(text)
return
def input_command(self):
command = input('Input command: ')
command = command.lower().strip()
if command in ['quit', 'end', 'exit', 'q']:
return False
elif command in ['mapping', 'map']:
self.run_mapping_sequence()
return True
elif command in ['man', 'help']:
self.print_help_message()
return True
elif command in ['pass', 'password', 'credentials', 'user',
'update settings', 'update_settings', 'settings', 'domain',
'configure']:
self.set_connection_settings()
return True
print("Unknown command, try 'help'.")
return True
def set_connection_settings(self):
credentials = self.set_user()
domain = self.set_domain()
update = {}
update['domain'] = domain
update['credentials'] = credentials
self.update_settings(update)
self.test_connection()
return
def fill_mandatory_fields(self):
if not self.check_mandatory_settings():
self.set_connection_settings()
else:
self.test_connection()
return
def finish_sequence(self):
print("\n* * * * * *\nProgram finished\n* * * * * *")
return
def full_run(self):
work_flag = True
while work_flag:
work_flag = self.input_command() | {"/core.py": ["/filesystem.py", "/mapping_search.py"], "/run.py": ["/core.py"]} |
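# Side note (illustrative, not from the repo): hash_password() above builds the
# standard HTTP Basic auth token, base64("user:password"). Despite its name it
# is a reversible encoding, not a hash; a stdlib-only round trip:
from base64 import b64encode, b64decode

token = b64encode(b"admin:secret").decode("UTF-8")
print(token)                                # YWRtaW46c2VjcmV0
assert b64decode(token) == b"admin:secret"  # decodes straight back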
76,829 | einstein13/atos-mapping | refs/heads/master | /mapping_search.py | try: # Python 3+
from urllib.request import Request, urlopen
# from urllib.parse import urlencode, quote
except: # Python 2.7
from urllib2 import Request, urlopen
# from urllib import urlencode, quote
from json import loads, dumps
from re import compile as comp
from xml.etree.ElementTree import Element, SubElement
from xml.etree.ElementTree import tostring, dump
try:
# Python 3.5+
from html import unescape
except:
try:
# Python 3.4-
from html.parser import HTMLParser
except:
# Python 2.7
from HTMLParser import HTMLParser
unescape = HTMLParser().unescape
def indent(elem, level=0, more_sibs=False):
# based on https://stackoverflow.com/questions/749796/pretty-printing-xml-in-python
ind = " "
i = "\n"
if level:
i += (level-1) * ind
num_kids = len(elem)
if num_kids:
if not elem.text or not elem.text.strip():
elem.text = i + ind
if level:
elem.text += ind
count = 0
for kid in elem:
indent(kid, level+1, count < num_kids - 1)
count += 1
if not elem.tail or not elem.tail.strip():
elem.tail = i
if more_sibs:
elem.tail += ind
else:
if elem.text:
elem.text = elem.text.replace("\n", i+ind*2)
elem.text = elem.text.replace("\r", "")
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
if more_sibs:
elem.tail += ind
return elem
class MappingSearch(object):
mapping_block_table = "u_sr_mapping_block"
mapping_line_table = "u_sr_mapping_line"
used_mapping_blocks = []
def connect(self, table, query=None):
url = self.settings['domain'] + "/api/now/table/" + table
if query:
url += "?" + query
headers = {}
headers['Authorization'] = "Basic " + self.settings['credentials']
headers['Accept'] = "application/json"
headers['Content-Type'] = "application/json"
request_object = Request(url, headers=headers)
try:
connection = urlopen(request_object)
except Exception as e:
print("Connection error:")
print(e)
return False
result = connection.read().decode("UTF-8")
try:
result = loads(result)
except:
print("Error occured while parsing the output:")
print(result)
return False
return result
def test_connection(self):
query = "sysparm_limit=1"
print("Testing connection...")
result = self.connect(self.mapping_block_table, query)
if result:
print("Connection OK.")
else:
print("Testing connection failed. Please retry.")
return
def add_key_value_to_xml(self, xml, key, value=False):
subelement = SubElement(xml, key)
if value:
subelement.text = value
return subelement
def find_mapping_lines(self, block_data):
query = "sysparm_query=u_mapping_block%%3D%s%%5Eu_active%%3Dtrue%%5EORDERBYu_order" % (block_data['sys_id'], )
result = self.connect(self.mapping_line_table, query)
if not result:
return False
lines = result['result']
return lines
def find_mapping_block(self, block_name):
print("Retrieving info about block: " + block_name)
query = "sysparm_query=u_name%3D" + block_name
result = self.connect(self.mapping_block_table, query)
if not result:
print("ERR (Retrieving block): wrong response")
return False
if not 'result' in list(result.keys()):
print("ERR (Retrieving block): no result")
return False
if len(result['result']) == 0:
print("ERR (Retrieving block): no block data")
return False
block = result['result'][0]
return block
def mapping_block_names_search(self, script):
pattern = "\s*return [\"'](.*)[\"'];"
regex = comp(pattern)
results = []
for line in script.split("\n"):
match = regex.match(line)
if match:
results.append(match.group(1))
return results
def add_mapping_lines_to_xml(self, xml, line_data):
valid_keys = [["u_output_parm", "TargetParam"], ["u_type", "Type"],
["u_order", "Order"], ["u_value", "Value"],
["u_script", "Script"], ["u_comment", "Comment"]]
line_keys = list(line_data.keys())
for key in valid_keys:
if key[0] in line_keys and line_data[key[0]]:
self.add_key_value_to_xml(xml, key[1], line_data[key[0]])
if line_data['u_type'] == 'includeMap':
if 'u_value' in line_keys and line_data['u_value']:
included = SubElement(xml, "MappingBlock")
mapping_block_data = self.find_mapping_block(line_data['u_value'])
if mapping_block_data is not False:
self.add_mapping_block_to_xml(included, mapping_block_data)
if 'u_script' in line_keys and line_data['u_script']:
blocks = self.mapping_block_names_search(line_data['u_script'])
for name in blocks:
included = SubElement(xml, "MappingBlock")
mapping_block_data = self.find_mapping_block(name)
if mapping_block_data is not False:
self.add_mapping_block_to_xml(included, mapping_block_data)
if line_data['u_type'] == 'nextMap':
if 'u_value' in line_keys and line_data['u_value']:
return [line_data['u_value']]
if 'u_script' in line_keys and line_data['u_script']:
blocks = self.mapping_block_names_search(line_data['u_script'])
return blocks
return []
def add_lines_to_block_xml(self, xml, block_data):
lines = self.find_mapping_lines(block_data)
if not lines or len(lines) == 0:
return False
for line_data in lines:
line_xml = SubElement(xml, "Line")
next_maps = self.add_mapping_lines_to_xml(line_xml, line_data)
if len(next_maps) > 0:
return next_maps
return []
def add_mapping_block_to_xml(self, xml, block_data):
valid_keys = [['u_name', 'Name'], ['u_phase', 'Phase'],
['u_output_ps', 'TargetParamSet'] , ['u_selector', 'Selector']]
block_keys = list(block_data.keys())
for key in valid_keys:
if key[0] in block_keys and block_data[key[0]]:
self.add_key_value_to_xml(xml, key[1], block_data[key[0]])
lines = self.add_key_value_to_xml(xml, 'MappingLines')
next_maps = self.add_lines_to_block_xml(lines, block_data)
if 'u_name' in block_keys and block_data['u_name']:
self.used_mapping_blocks.append(block_data['u_name'])
return next_maps
def check_mapping_blocks_duplicates(self):
used_blocks = self.used_mapping_blocks
if len(set(used_blocks)) == len(used_blocks):
return
for block_name in set(used_blocks):
if used_blocks.count(block_name) > 1:
print("WARNING: duplicate include of \"%s\"" % (block_name,))
return
def find_full_xml(self, block_data):
self.used_mapping_blocks = []
basic_mapping = Element("mapping")
mapping_block_data = [block_data]
while len(mapping_block_data) > 0:
block_xml = SubElement(basic_mapping, "MappingBlock")
next_maps = self.add_mapping_block_to_xml(block_xml, mapping_block_data[0])
mapping_block_data.pop(0)
for one_map in next_maps:
mapping_block_data.append(self.find_mapping_block(one_map))
self.check_mapping_blocks_duplicates()
indent(basic_mapping)
string = tostring(basic_mapping).decode("UTF-8")
string = unescape(string)
return string
def one_block_search(self, block_name):
query = "sysparm_query=u_nameLIKE" + block_name
query += "%5EORDERBYu_output_ps"
query += "&sysparm_limit=30"
result = self.connect(self.mapping_block_table, query)
if not result:
return -1
lines = result['result']
if len(lines) == 1:
return lines[0]
print("\nFound more matching results (pick one-number):")
itr = 0
itr_max = len(lines)
while itr < itr_max:
print("%d: %s (%s)" %(itr, lines[itr]['u_name'], lines[itr]['u_output_ps']))
itr += 1
input_number = -2
while input_number < 0:
input_number = input("Select proper name (-1 for other search): ")
try:
input_number = int(input_number)
except:
input_number = -2
if input_number == -1:
return -1
if input_number > len(lines)-1:
input_number = -2
return lines[input_number]
| {"/core.py": ["/filesystem.py", "/mapping_search.py"], "/run.py": ["/core.py"]} |
76,830 | einstein13/atos-mapping | refs/heads/master | /run.py | from core import Core
c = Core() | {"/core.py": ["/filesystem.py", "/mapping_search.py"], "/run.py": ["/core.py"]} |
76,838 | fredmanre/criptocoinconection | refs/heads/master | /criptocoin.py | import requests, json, time
from bs4 import BeautifulSoup
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import exists
from settings.config import user, passwd, db
from database_setup import Base, CriptoCurrency
engine = create_engine('mysql+pymysql://'+user+':'+passwd+'@localhost/'+db)
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
Base.metadata.bind = engine
DBsession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
session = DBsession()
def main():
# we get the data from coinmarketcap
response = requests.get('https://api.coinmarketcap.com/v1/ticker/?limit=0')
coinmarketcap = response.json()
for coin in coinmarketcap:
identify = coin['id']
ret = session.query(exists().where(CriptoCurrency.identify==identify)).scalar()
        # if the cryptocurrency already exists we update it; otherwise we create it
if ret:
print('exists, updating...')
cripto = session.query(CriptoCurrency).filter_by(identify=identify).one()
            cripto.rank = coin['rank']
            cripto.price_usd = coin['price_usd']
            cripto.price_btc = coin['price_btc']
            cripto.volume_usd_24 = coin['24h_volume_usd']
            cripto.market_cap = coin['market_cap_usd']
            cripto.available_supply = coin['available_supply']
            cripto.total_supply = coin['total_supply']
            cripto.max_supply = coin['max_supply']
            cripto.percentaje_1h = coin['percent_change_1h']
            cripto.percentaje_24h = coin['percent_change_24h']
            cripto.percentaje_7d = coin['percent_change_7d']
cripto.last_update = coin['last_updated']
session.add(cripto)
session.commit()
else:
print('not exists, creating...')
model_create = CriptoCurrency(name=coin['name'],
symbol=coin['symbol'],
identify = coin['id'],
rank=coin['rank'],
price_usd=coin['price_usd'],
price_btc=coin['price_btc'],
volume_usd_24=coin['24h_volume_usd'],
market_cap=coin['market_cap_usd'],
available_supply=coin['available_supply'],
total_supply=coin['total_supply'],
max_supply=coin['max_supply'],
percentaje_1h=coin['percent_change_1h'],
percentaje_24h=coin['percent_change_24h'],
percentaje_7d=coin['percent_change_7d'],
last_update=coin['last_updated'])
session.add(model_create)
session.commit()
# to initial script
if __name__ == '__main__':
main()
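# Side note (illustrative, not part of the script): a trailing comma after an
# assignment silently creates a 1-tuple, a pitfall to watch for in the
# field-by-field update branch above.
rank = '42',
assert rank == ('42',)  # a tuple, not the string
rank = '42'
assert rank == '42'     # the plain value once the comma is dropped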
| {"/criptocoin.py": ["/settings/config.py", "/database_setup.py"]} |
76,839 | fredmanre/criptocoinconection | refs/heads/master | /database_setup.py | import os
import sys
# fields to tables
from sqlalchemy import Column, Integer, String
# allow works with the database
from sqlalchemy.ext.declarative import declarative_base
# for configurate your database
from sqlalchemy.ext.declarative import declared_attr
# allows create a database and more!
from sqlalchemy import create_engine
class Base(object):
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
    __table_args__ = {'mysql_engine': 'InnoDB'}
id = Column(Integer, primary_key=True)
Base = declarative_base(cls=Base)
class CriptoCurrency(Base):
__tablename__ = 'cripto_currency'
name = Column(String(50), nullable=False)
symbol = Column(String(10), nullable=False, index=True)
identify = Column(String(50), nullable=False, unique=True, index=True)
rank = Column(String(5), nullable=False)
price_usd = Column(String(50))
price_btc = Column(String(50))
volume_usd_24 = Column(String(50))
market_cap = Column(String(50))
available_supply = Column(String(50))
total_supply = Column(String(50))
max_supply = Column(String(50))
percentaje_1h = Column(String(8))
percentaje_24h = Column(String(8))
percentaje_7d = Column(String(8))
last_update = Column(String(20))
def __repr__(self):
return '{} {}'.format(self.name, self.symbol)
engine = create_engine('mysql+pymysql://fredmanre:perrodeagua@localhost/coinmarket_test')
Base.metadata.create_all(engine)
| {"/criptocoin.py": ["/settings/config.py", "/database_setup.py"]} |
76,840 | fredmanre/criptocoinconection | refs/heads/master | /settings/config.py | # configurations to criptoconnection
# connection to database
user = ''
passwd = ''
db = ''
| {"/criptocoin.py": ["/settings/config.py", "/database_setup.py"]} |
76,844 | zhaofeng-shu33/community | refs/heads/master | /test_interface.py | import networkx as nx
import unittest
from GN import GN
class TestGN(unittest.TestCase):
def test_6point(self):
G = nx.Graph()
G.add_edge(0, 2)
G.add_edge(0, 1)
G.add_edge(2, 1)
G.add_edge(3, 4)
G.add_edge(3, 5)
G.add_edge(4, 5)
G.add_edge(0, 5)
gn = GN()
gn.fit(G)
self.assertEqual(gn.Bestcomps, [{0, 1, 2}, {3, 4, 5}])
print(gn.tree)
def test_karate_dataset(self):
G = nx.readwrite.gml.read_gml('karate.gml', label=None)
n_nodes = len(G.nodes)
# relabel nodes, starting from zero
mapping = {i+1:i for i in range(n_nodes)}
G = nx.relabel_nodes(G, mapping)
gn = GN()
gn.fit(G)
print(gn.tree)
if __name__ == '__main__':
unittest.main()
| {"/test_interface.py": ["/GN.py"], "/__init__.py": ["/GN.py"]} |
76,845 | zhaofeng-shu33/community | refs/heads/master | /GN.py | '''
wrapper of Girvan-Newman community detection algorithm
'''
import networkx as nx
import numpy as np
from ete3 import Tree
try:
from cmty import cmty # cython version first
except ImportError:
import cmty
class GN:
def __init__(self):
self.reinit()
def reinit(self):
self.partition_num_list = []
self.partition_list = []
self.tree = Tree()
self.tree_depth = 0
def fit(self, G_outer, initialize_tree = True):
'''
G_outer: nx.Graph like object
returns the partition
'''
self.reinit()
self.G = G_outer.copy()
G = G_outer.copy()# copy the graph
n = G.number_of_nodes() #|V|
A = nx.adj_matrix(G) # adjacenct matrix
m_ = 0.0 # the weighted version for number of edges
for i in range(0,n):
for j in range(0,n):
m_ += A[i,j]
self.m_ = m_/2.0
# calculate the weighted degree for each node
self.Orig_deg = cmty.UpdateDeg(A, G.nodes())
# run Newman alg
self.runGirvanNewman()
if(initialize_tree):
self._get_hierarchical_tree()
return self
def runGirvanNewman(self):
# let's find the best split of the graph
BestQ = 0.0
Q = 0.0
self.partition_num_list.append(1)
nvertices = len(self.G.nodes)
self.partition_list.append([set(i for i in range(nvertices))])
while True:
cmty.CmtyGirvanNewmanStep(self.G)
partition = list(nx.connected_components(self.G))
self.partition_num_list.append(len(partition))
self.partition_list.append(partition)
Q = cmty._GirvanNewmanGetModularity(self.G, self.Orig_deg, self.m_)
if Q > BestQ:
BestQ = Q
Bestcomps = partition # Best Split
if self.G.number_of_edges() == 0:
break
if BestQ > 0.0:
self.Bestcomps = Bestcomps
def get_category(self, i):
index = 0
for ind,val in enumerate(self.partition_num_list):
if(val >= i):
index = ind
break
cat = np.zeros(len(self.Orig_deg))
t = 0
for j in self.partition_list[index]:
for r in j:
cat[r] = t
t += 1
return cat
def _add_node(self, root, node_list, num_index):
label_list = self.get_category(self.partition_num_list[num_index])
cat_list = []
for i in node_list:
if(cat_list.count(label_list[i]) == 0):
cat_list.append(label_list[i])
max_cat = len(cat_list)
label_list_list = [[] for i in range(max_cat)]
for i in node_list:
j = cat_list.index(label_list[i])
label_list_list[j].append(i)
for node_list_i in label_list_list:
node_name = ''.join([str(ii) for ii in node_list_i])
if(node_name != root.name):
root_i = root.add_child(name= node_name)
else:
root_i = root
if(len(node_list_i)>1):
self._add_node(root_i, node_list_i, num_index+1)
def _get_hierarchical_tree(self):
max_num = self.partition_num_list[-1]
node_list = [ i for i in range(0, max_num)]
self._add_node(self.tree, node_list, 1)
def _set_tree_depth(self, node, depth):
if(node.is_leaf()):
if(depth > self.tree_depth):
self.tree_depth = depth
return
for node_i in node.children: # depth first search
self._set_tree_depth(node_i, depth+1)
def get_tree_depth(self):
if(self.tree.is_leaf()):
self._get_hierarchical_tree()
if(self.tree_depth != 0):
return self.tree_depth
self._set_tree_depth(self.tree, 0)
return self.tree_depth
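# Hedged usage sketch (mirrors test_interface.py): fit the wrapper on a small
# two-triangle graph, then read back the best split, a 2-way labelling and the
# depth of the hierarchical tree.
if __name__ == '__main__':
    demo = nx.Graph([(0, 1), (0, 2), (1, 2), (3, 4), (3, 5), (4, 5), (0, 5)])
    gn = GN().fit(demo)
    print(gn.Bestcomps)         # expected: [{0, 1, 2}, {3, 4, 5}]
    print(gn.get_category(2))   # one community label per node
    print(gn.get_tree_depth())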
| {"/test_interface.py": ["/GN.py"], "/__init__.py": ["/GN.py"]} |
76,846 | zhaofeng-shu33/community | refs/heads/master | /__init__.py | from .GN import GN
from .cmty import UpdateDeg, CmtyGirvanNewmanStep, _GirvanNewmanGetModularity | {"/test_interface.py": ["/GN.py"], "/__init__.py": ["/GN.py"]} |
76,859 | ahcode/UCOSRA-API-python | refs/heads/master | /config.py | # -*- coding: utf-8 -*-
ConsultaReservasAsignaturaFormUrl = "https://www.uco.es/sra/index.php?go=sra/r2000430/r2000430.html"
ConsultaReservasAsignaturaPostUrl = "https://www.uco.es/sra/index.php?go=sra/r2000430/action/r2000430_00.php"
| {"/webscraping/asignatura.py": ["/config.py"], "/server.py": ["/webscraping/asignatura.py"]} |
76,860 | ahcode/UCOSRA-API-python | refs/heads/master | /webscraping/asignatura.py | # -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
from config import ConsultaReservasAsignaturaFormUrl, ConsultaReservasAsignaturaPostUrl
def lista_titulaciones():
page = requests.get(ConsultaReservasAsignaturaFormUrl)
soup = BeautifulSoup(page.content, 'html.parser')
titulaciones = list(soup.find('select', {'class':'listaG', 'name':'tpp1'}).find_all('option'))[1:]
titulaciones = [(item['value'], item.get_text().split(' ', 2)[2]) for item in titulaciones]
return titulaciones
def lista_asignaturas(titulacion):
page = requests.get(ConsultaReservasAsignaturaFormUrl, params={'tTit':titulacion, 'tAsig':'---'})
soup = BeautifulSoup(page.content, 'html.parser')
asignaturas = list(soup.find('select', {'class':'listaG', 'name':'ttp2'}).find_all('option'))[1:]
asignaturas = [(item['value'].split(' ', 1)[0], item.get_text().split(' ', 3)[3]) for item in asignaturas]
return asignaturas
def lista_reservas_asignatura(titulacion, asignatura, grupo, fechaini, fechafin):
page = requests.post(ConsultaReservasAsignaturaPostUrl, data={'tTit':titulacion, 'tAsig':asignatura, 'cGrupo':grupo, 'calendarDate1':fechaini, 'calendarDate2':fechafin})
soup = BeautifulSoup(page.content, 'html.parser')
reservas_table = list(soup.find('table', {'class':'tablaDatos'}).find_all('tr'))[1:]
reservas = []
for item in reservas_table:
item = list(item.find_all('td'))
fecha = item[0].next.get_text()
[timeIni, timeFin] = item[4].get_text().split('-')
aula = item[5].next.get_text().split(' ', 1)
aulaCode = aula[0]
detallesAula = aula[1]
profesor = item[6].get_text()
grupo = item[8].get_text()
reservas.append({'fecha':fecha, 'horaIni':timeIni, 'horaFin':timeFin, 'codigo-aula':aulaCode, 'aula':detallesAula, 'profesor':profesor, 'grupo':grupo})
return reservas | {"/webscraping/asignatura.py": ["/config.py"], "/server.py": ["/webscraping/asignatura.py"]} |
76,861 | ahcode/UCOSRA-API-python | refs/heads/master | /server.py | # -*- coding: utf-8 -*-
from flask import Flask, request
from flask_restful import Resource, Api
from webscraping.asignatura import lista_titulaciones, lista_asignaturas, lista_reservas_asignatura
from datetime import datetime
app = Flask(__name__)
api = Api(app)
class Titulaciones(Resource):
def get(self):
return lista_titulaciones()
class Asignaturas(Resource):
def get(self):
args = request.args
if 'titulacion' not in args:
return "Error"
return lista_asignaturas(args['titulacion'])
class ReservasAsignatura(Resource):
def get(self):
args = request.args
        if ('titulacion' not in args) or ('asignatura' not in args):
return "Error"
else:
if 'grupo' in args: grupo = args['grupo']
else: grupo = "T"
if 'fechaini' in args: fechaini = args['fechaini']
else: fechaini = datetime.now().strftime("%d-%m-%Y")
if 'fechafin' in args: fechafin = args['fechafin']
else: fechafin = datetime.now().strftime("%d-%m-%Y")
return lista_reservas_asignatura(args['titulacion'], args['asignatura'], grupo, fechaini, fechafin)
api.add_resource(Titulaciones, '/titulaciones')
api.add_resource(Asignaturas, '/asignaturas')
api.add_resource(ReservasAsignatura, '/reservasasignatura')
if __name__ == '__main__':
app.run(port='5002') | {"/webscraping/asignatura.py": ["/config.py"], "/server.py": ["/webscraping/asignatura.py"]} |
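# Hedged client sketch (not part of the repo): querying the three resources
# registered above, assuming the server is running locally on port 5002; the
# '123'/'456' codes are placeholders that would come from /titulaciones.
import requests

base = 'http://localhost:5002'
print(requests.get(base + '/titulaciones').json())
print(requests.get(base + '/asignaturas', params={'titulacion': '123'}).json())
print(requests.get(base + '/reservasasignatura',
                   params={'titulacion': '123', 'asignatura': '456',
                           'fechaini': '01-01-2020', 'fechafin': '07-01-2020'}).json())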
76,863 | JiwonDev/2017-02-pygame-RPG | refs/heads/main | /ComponentBasic.py | # -*- coding:utf-8 -*-
'''
Created on 2016. 11. 29.
#-*-coding:utf-8-*-
@author: Jiwon
'''
import pygame as pg
class Component(pg.sprite.Sprite):
'''
classdocs
    Base component frame (interface)
'''
    _comType = None  # component type (parent)
    _comName = None  # name
    def __init__(self, comType, name):
        pg.sprite.Sprite.__init__(self)
        self._comType = comType
        self._comName = name
        self._comList = []  # per-instance list; a class-level list would be shared by all components
    def addComponent(self, components):  # add one component or a list of components
if (isinstance(components, list)):
for com in components:
self._addOne(com)
else:
self._addOne(components)
def _addOne(self, component):
assert (component.getType() != self._comType),\
str(self) + " _ " + str(component) + " 자기자신과 같은타입은 컴포넌트로 등록 할 수없습니다."
for com in self._comList:
assert (str(component) != str(com)),\
str(self) + " _ " + str(component) + "는 이미 존재하는 컴포넌트입니다."
self._comList.append(component)
    def removeComponent(self, component):  # remove a component
deleteCheck = False
for i in range(0, len(self._comList)):
if (str(component) == str(self._comList[i])):
deleteCheck = True
del self._comList[i]
break;
if (deleteCheck == False):
            print(str(self) + " _ " + str(component) + " does not exist, so it cannot be removed.")
def getList(self):
return self._comList
def getType(self):
return self._comType
def getName(self):
return self._comName
def __str__(self):
return "(" + self._comType + "):" + self._comName
| {"/Main.py": ["/Data.py", "/EventChecker.py"], "/EventChecker.py": ["/Data.py"]} |
76,864 | JiwonDev/2017-02-pygame-RPG | refs/heads/main | /Main.py | # -*- coding:utf-8 -*-
'''
Created on 2016. 12. 2.
@author: Jiwon
'''
# TODO: change state on collisions (gravity as well)
# TODO: attack, slide and throw motions
# TODO: build a shuriken component
# TODO: build enemies
# TODO: build maps
import os, time
import sys
from pygame.locals import *
import Component.Block
import Component.Player_Ninja
import Data
import EventChecker
import Map.Level
import pygame as pg
def write(location, message, size=50, color=Data.Color.black):
font = pg.font.Font(Data.Resource.font, size)
font = font.render(message, True, color)
rect = font.get_rect()
rect.x = location[0]
rect.y = location[1]
return font, rect
def showLoading(display):
    # show the loading screen
loadimage = pg.image.load(Data.Resource.load)
loadrect = loadimage.get_rect()
loadrect.x += 200
loadrect.y += 220
display.fill(Data.Color.white)
display.blit(loadimage, loadrect)
pg.display.update()
def runGame():
    ## 0. initialize pygame ##
    display = pg.display.set_mode(Data.window, HWSURFACE | DOUBLEBUF)  # display object
    pg.display.set_caption("I DON'T AVOID - 2015642028 김지원")
    input = EventChecker.InputEvent()  # event object
    clock = pg.time.Clock()  # FPS clock object
    ## 1. initialize the player, blocks, map, etc. ##
    player = Component.Player_Ninja.Player_Ninja("player1", showChoice(input, display))  # create the player
pg.mixer.music.load(Data.Resource.bgm)
    # show the loading screen
showLoading(display)
smokeimage = pg.image.load(Data.Resource.smoke).convert_alpha()
smokerect = smokeimage.get_rect()
winImage = pg.image.load(Data.Resource.win).convert_alpha()
    block_group = Component.Block.BlockGroup("level1")  # create the block group
    map = Map.Level.Level1(block_group)  # create the map (initializes the block group)
background = map.getBackground()
    selImage = []  # character selection images
selImage.append(pg.image.load(Data.Resource.select1).convert_alpha())
selImage.append(pg.image.load(Data.Resource.select2).convert_alpha())
selImage.append(pg.image.load(Data.Resource.select3).convert_alpha())
    ## 2. bundle everything into sprite groups ##
    blockList = block_group.getList()  # the blocks
character_layer = pg.sprite.RenderPlain(*[player])
block_layer = pg.sprite.RenderPlain(*blockList)
    # set the background position (camera)
background_start_x = -(background.get_size()[0] / 3)
backgroundRect = [background_start_x, 0]
    display.blit(background, backgroundRect)  # draw the background
player.updateCollide(block_layer)
smokeStart = False
smokeHigh = 0
pg.mixer.music.play(-1, 0)
    deadTime = 0
    ## 3. game main loop ##
while (True):
        input.check()  # check events
        # if the player died, respawn and reset the smoke
if (player.state.getName() == Data.Action.dead
                and input.getTime() - deadTime > 5000):  # respawn 5 seconds after death (TODO: proper save points?)
player.setPlayerLocation(400, 400)
player.state.setState(Data.Action.idle)
player.state.setFrame(Data.Action.idle)
pg.mixer.music.rewind()
smokeHigh = 0
smokeStart = False
deadTime = 0
if (input.isExit() or input.getKey() == K_ESCAPE):
break;
# Clear
if (player.realLocation[0] < -1700):
rect = winImage.get_rect()
rect.x += 100
rect.y += 100
display.blit(winImage, rect)
pg.display.update()
time.sleep(3)
break;
        # update the camera
cam_x, cam_y = updateCamera(player, blockList, block_layer)
        # draw the scrolling background
backgroundRect[0] -= cam_x / 10
        # backgroundRect[1] -= cam_y/100  # the image is too small to scroll vertically
display.blit(background, backgroundRect)
        # update
player.updateCollide(block_layer)
character_layer.update(input)
        # draw everything else
block_layer.draw(display)
character_layer.draw(display)
        # once the player falls below a certain height the smoke starts rising (touching it is lethal!)
if (player.realLocation[1] < -6500 and smokeStart == False):
smokeStart = True
        # check whether the smoke has started
if (smokeStart):
smokerect, smokeHigh = updateSmoke(player, smokeimage, smokerect, smokeHigh)
display.blit(smokeimage, smokerect)
display.blit(smokeimage, (smokerect.x - 300, smokerect.y))
display.blit(smokeimage, (smokerect.x + 400, smokerect.y))
            # the player dies if the smoke rises above player height - 50
if (smokerect.y < player.location[1] - 50):
player.state.dead()
            font, rect = write((10, 200), "The smoke rising from below is dangerous. Climb up quickly!", 40)
display.blit(font, rect)
if (deadTime == 0):
deadTime = input.getTime()
        # refresh the full screen and cap the FPS
pg.display.update()
clock.tick(Data.FPS)
def showChoice(input, display):
sel = []
sel.append(pg.image.load(Data.Resource.select1))
sel.append(pg.image.load(Data.Resource.select2))
sel.append(pg.image.load(Data.Resource.select3))
number = 0
clock = pg.time.Clock()
display.fill(Data.Color.white)
while (True):
input.check()
key = input.getKey()
if (input.isExit() or key == K_ESCAPE):
pg.quit()
sys.exit()
if (key == K_LEFT):
number = 1
elif (key == K_RIGHT):
number = 2
elif (key == K_RETURN):
break;
rect = sel[number].get_rect()
rect.x += -70
rect.y += 10
display.blit(sel[number], rect)
pg.display.update()
clock.tick(30)
    return number == 1
def updateSmoke(player, smokeimage, smokerect, up):
cam_y = 0
    new = [0, 0]  # new position
if (player.location[1] < Data.ground_y[0]):
cam_y = Data.ground_y[0] - player.location[1]
player.realLocation[1] += int(cam_y)
player.location[1] = Data.ground_y[0]
elif (player.location[1] > Data.ground_y[1]):
cam_y = Data.ground_y[1] - player.location[1]
player.realLocation[1] += int(cam_y)
player.location[1] = Data.ground_y[1]
new[1] = (130 * 64) - (player._startLocation[1] - player.realLocation[1])
smokerect.y = new[1] + up
up -= 0.2
return smokerect, up
def updateCamera(player, blockList, blockLayer):
cam_x = 0
cam_y = 0
    new = [0, 0]  # new position
    # x-axis camera
if (player.location[0] < Data.ground_x[0]):
cam_x = Data.ground_x[0] - player.location[0]
player.realLocation[0] += int(cam_x)
player.location[0] = Data.ground_x[0]
elif (player.location[0] > Data.ground_x[1]):
cam_x = Data.ground_x[1] - player.location[0]
player.realLocation[0] += int(cam_x)
player.location[0] = Data.ground_x[1]
if (player.location[1] < Data.ground_y[0]):
cam_y = Data.ground_y[0] - player.location[1]
player.realLocation[1] += int(cam_y)
player.location[1] = Data.ground_y[0]
elif (player.location[1] > Data.ground_y[1]):
cam_y = Data.ground_y[1] - player.location[1]
player.realLocation[1] += int(cam_y)
player.location[1] = Data.ground_y[1]
    # place the blocks along the x axis
oneTime = True
length = [0, 0]
for com in blockList:
new[0] = (com._location[1] * 64) - (player._startLocation[0] -\
player.realLocation[0]) + com._edit[0]
if (oneTime):
length[0] = new[0] - com.rect.x
oneTime = False
com.rect.x = new[0]
    # resolve x-axis collisions
collideList = pg.sprite.spritecollide(player.realRect, blockLayer, False)
    while (len(collideList) > 0):  # while still colliding with a block
        if (length[0] > 0):  # if the blocks shifted right
player.pushPlayer(1, 0)
        else:  # otherwise push the other way
player.pushPlayer(-1, 0)
collideList = pg.sprite.spritecollide(player.realRect, blockLayer, False)
    # place the blocks along the y axis
oneTime = True
for com in blockList:
new[1] = (com._location[0] * 64) - (player._startLocation[1] -\
player.realLocation[1]) + com._edit[1]
if (oneTime):
length[1] = new[1] - com.rect.y
oneTime = False
com.rect.y = new[1]
    # resolve y-axis collisions
collideList = pg.sprite.spritecollide(player.realRect, blockLayer, False)
    while (len(collideList) > 0):  # while still colliding with a block
        if (length[1] > 0):  # if the blocks shifted down
player.pushPlayer(0, 1)
        else:  # otherwise push the other way
player.pushPlayer(0, -1)
collideList = pg.sprite.spritecollide(player.realRect, blockLayer, False)
return (int(cam_x), int(cam_y))
def main():
    os.environ['SDL_VIDEO_WINDOW_POS'] = "%d,%d" % (100, 100)  # window position
    pg.init()  # initialize pygame
runGame()
pg.quit()
sys.exit()
if __name__ == '__main__':
main()
| {"/Main.py": ["/Data.py", "/EventChecker.py"], "/EventChecker.py": ["/Data.py"]} |
76,865 | JiwonDev/2017-02-pygame-RPG | refs/heads/main | /EventChecker.py | # -*- coding:utf-8 -*-
'''
Created on 2016. 11. 29.
@author: Jiwon
'''
import pygame as pg
from pygame import *
import Data as value
import time
class InputEvent(object):
'''
classdocs
    User input checking class.
    pygame.init() must be called before this class is used.
'''
K_ALT = K_LALT
K_CTRL = K_LCTRL
K_SHIFT = K_LSHIFT
_currentTime = None
    # keyboard state
_currentKey = None
_functionKey = {K_SHIFT: False, K_ALT: False, K_CTRL: False}
_currentPress = None
    # mouse state
_mousePos = [0, 0]
_mouseClick = False
_mouseVisible = True
    # exit event state
_exitEvent = False
    # user event state
_userEvent = []
    # list of keys accepted as input
valid_functionKey_list = set([K_LSHIFT, K_LCTRL, K_LALT])
valid_specialKey_list = set([K_INSERT, K_HOME, K_PAGEDOWN, K_PAGEUP, K_END, K_DELETE])
valid_key_list = set([K_ESCAPE, K_LEFT, K_RIGHT, K_UP, K_DOWN, K_a, K_s, K_d, K_f, K_SPACE, K_RETURN])
valid_key_list.update(valid_specialKey_list)
def __init__(self):
pass
def check(self):
self._currentTime = time.time() * 1000
for event in pg.event.get():
            # window close event
if (event.type == QUIT):
self._exitEvent = True
break;
            # check modifier key presses
pressKey = pg.key.get_pressed()
self._currentPress = pressKey
for fkey in self._functionKey.keys():
if (pressKey[fkey] == 0):
self._functionKey[fkey] = False
else:
self._functionKey[fkey] = True
            # update the regular key state
if (event.type == KEYDOWN and (event.key in self.valid_key_list)):
self._currentKey = event.key
            # release a regular key press
elif (event.type == KEYUP and event.key == self._currentKey):
self._currentKey = None
            # update the mouse click state
elif (event.type == MOUSEBUTTONUP):
self._mousePos = pg.mouse.get_pos()
self._mouseClick = True
            # user events
elif (event.type in value.Event.userEventList):
self._userEvent.append(event.type)
    def setMouseVisible(self, visible=True):
        # show or hide the mouse cursor
        self._mouseVisible = visible
        pg.mouse.set_visible(visible)
def getKey(self):
        # currently pressed key
return self._currentKey
def getFuncKey(self):
        # currently pressed modifier keys
return self._functionKey
def getShift(self):
return self._functionKey[self.K_SHIFT]
def getAlt(self):
return self._functionKey[self.K_ALT]
def getCtrl(self):
return self._functionKey[self.K_CTRL]
def getPos(self):
        # current mouse position
self._mousePos = pg.mouse.get_pos()
return self._mousePos
def getClick(self):
click = self._mouseClick
self._mouseClick = False
        # current mouse click state
return click
def removeValidKey(self, key):
        # remove a key from the valid-key list
try:
self.valid_key_list.remove(key)
        except KeyError:
            print("InputEvent - (removeValidKey) a key that does not exist was requested for deletion.")
    def getEvent(self):  # return the accumulated user-event list
event = self._userEvent
self._userEvent = []
return event
def getPress(self):
return self._currentPress
    def addValidKey(self, key):
        # add a key to the valid-key list
self.valid_key_list.add(key)
def getValidKeyList(self):
        # list of valid keys
return tuple(self.valid_key_list)
def getTime(self):
return self._currentTime
def isExit(self):
return self._exitEvent
| {"/Main.py": ["/Data.py", "/EventChecker.py"], "/EventChecker.py": ["/Data.py"]} |
76,866 | JiwonDev/2017-02-pygame-RPG | refs/heads/main | /Data.py | # -*- coding:utf-8 -*-
'''
Created on 2016. 11. 29.
@author: Jiwon
상수 데이터 파일
'''
import pygame
from pygame.locals import *
import os
# window
window_width = 1152
window_height = 864
window = [window_width, window_height]
# FPS
FPS = 30
# Key
showStatMessage = True
# Player basic state
# idle, *collide, dead, Immortal - collide occurs during another motion
# move, jump ,dash, fall, climb
# action, throw, attack, block, skill
# location[] , speed[] , speedRate[] , playerStat[]
# collideList[] , state , player(Component)
# ninjaImage - (58, 110)
player_HP = 100
player_SP = 100
player_moveSpeed = 8
player_runSpeed = 14
player_maxSpeed = 30
player_maxGravity = 20
player_jumpSpeed = -17  # jump height
ground_x = [300, 750]
ground_y = [280, 600]
player_rect = [60, 110]
player_dashRect = [30, 110]
gravity = 1
# playerSize = (40,80)
west, left = "left", "left"
east, right = "right", "right"
north, up = "up", "up"
south, down = "down", "down"
overlap = "overlap"
class Key:
LEFT = K_LEFT
RIGHT = K_RIGHT
UP = K_UP
DOWN = K_DOWN
JUMP = K_s
DASH = K_d
SHIFT = K_LSHIFT
ATTACK = K_1
THROW = K_2
def removeDirSuffix(sDir, suffixList):
for suffix in suffixList:
if (str(sDir).endswith(suffix)):
sDir = sDir[0:len(sDir) - len(suffix)]
break
return sDir
class Action:
dead = "dead"
idle = "idle"
attack = "attack"
climb = "climb"
run = "run"
move = "move"
dash = "dash"
throw = "throw"
jump = "jump"
jump_attack = "jumpattack"
jump_throw = "jumpthrow"
skill = "skill"
collide = "collide"
fall = "fall"
action = "action"
attack = "attack"
block = "block"
Immortal = "Immortal"
class Block:
bridge = "bridge"
cloud = "cloud"
grass = "grass"
plant = "plant"
temple = "temple"
ninja_all_frame = [Action.dead, Action.idle, Action.attack, Action.climb, Action.move, Action.dash, Action.throw,
Action.jump, Action.jump_attack, Action.jump_throw]
block_all = [Block.bridge, Block.cloud, Block.grass, Block.plant, Block.temple]
class Resource:
startDir = removeDirSuffix(os.getcwd(), ["\\Component"])
resDir = startDir + "\\resources"
bgm = resDir + "\\sound\\music\\bgm.mp3"
select1 = resDir + "\\graphic\\select1.png"
select2 = resDir + "\\graphic\\select2.png"
select3 = resDir + "\\graphic\\select3.png"
win = resDir + "\\graphic\\win.png"
font = resDir + "\\font.otf"
load = resDir + "\\graphic\\loading.png"
smoke = resDir + "\\graphic\\smoke.png"
background = resDir + "\\graphic\\background.png"
ninja_boy = resDir + "\\graphic\\player\\ninja_boy\\"
ninja_girl = resDir + "\\graphic\\player\\ninja_girl\\"
ninja_imgName = {}
ninja_imgCount = {}
ninja_imgType = {}
ninja_imgCount[Action.dead] = 3
ninja_imgName[Action.dead] = "Dead__"
ninja_imgName[Action.idle] = "Idle__"
ninja_imgName[Action.attack] = "Attack__"
ninja_imgName[Action.climb] = "Climb__"
ninja_imgName[Action.move] = "Run__"
ninja_imgName[Action.dash] = "Slide__"
ninja_imgName[Action.throw] = "Throw__"
ninja_imgName[Action.jump] = "Jump__"
ninja_imgName[Action.jump_attack] = "Jump_Attack__"
ninja_imgName[Action.jump_throw] = "Jump_Throw__"
for frame in ninja_all_frame:
ninja_imgType[frame] = ".png"
if (frame == Action.climb):
ninja_imgCount[frame] = 2
else:
ninja_imgCount[frame] = 10
block_imgName = {}
block_imgDir = {}
block_imgType = {}
block_imgDir[Block.grass] = resDir + "\\graphic\\block\\grass\\"
block_imgDir[Block.bridge] = resDir + "\\graphic\\block\\bridge\\"
block_imgDir[Block.cloud] = resDir + "\\graphic\\block\\cloud\\"
block_imgDir[Block.plant] = resDir + "\\graphic\\block\\plant\\"
block_imgDir[Block.temple] = resDir + "\\graphic\\block\\temple\\"
block_imgName[Block.bridge] = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
block_imgName[Block.cloud] = ["0", "1", "2", "3", "4"]
block_imgName[Block.grass] = ["0", "1", "2", "3", "4", "5", "6", "7", ]
block_imgName[Block.plant] = ["0", "1", "2"]
block_imgName[Block.temple] = ["0", "1", "2", "3", "4", "5", "6"]
for type in block_all:
block_imgType[type] = ".png"
class Event:
userEventList = []
resetPlayer = USEREVENT + 1
userEventList.append(resetPlayer)
class Color:
gameSky = (0, 87, 184)
white = (255, 255, 255)
black = (0, 0, 0)
red = (255, 0, 0)
green = (0, 128, 0)
blue = (0, 0, 255)
yellow = (255, 255, 0)
cyan = (0, 255, 255)
orange = (255, 165, 0)
gold = (255, 215, 0)
skyblue = (135, 206, 234)
pink = (255, 105, 180)
gray = (128, 128, 128)
class Type:
character = "characterType"
block = "blockType"
blockGroup = "blockGroupType"
| {"/Main.py": ["/Data.py", "/EventChecker.py"], "/EventChecker.py": ["/Data.py"]} |
76,867 | chance-murphy/pineapple-flask-restapi | refs/heads/master | /app.py | from flask import Flask, jsonify
from flask_cors import CORS
from flask_restful import Resource, Api
# from flask_jwt import JWT, jwt_required, JWTError
from endpoints.user import User, UserList, UserRegister
from endpoints.inventory import Inventory, InventoryProductList, Shopping
from endpoints.purchase_history import PurchaseHistory
# from db.database import create_database
# Create the application instance
app = Flask(__name__)
cors = CORS(app, resources={r"/*": {"origins": "*"}})
api = Api(app)
# Create a URL route in our application for "/"
@app.route('/')
def home():
"""
    This function just responds to the browser URL
    localhost:5000/
    :return: a plain-text banner string
    """
    return "Pineapple's Flask API"
api.add_resource(User, '/users/<string:name>')
api.add_resource(UserList, '/users')
api.add_resource(UserRegister, '/register')
api.add_resource(Inventory, '/product/<string:product>')
api.add_resource(InventoryProductList, '/products')
api.add_resource(PurchaseHistory, '/history/<string:name>')
api.add_resource(Shopping, '/shopping')
if __name__ == '__main__':
app.run(debug=True) | {"/app.py": ["/endpoints/user.py", "/endpoints/purchase_history.py"], "/endpoints/user.py": ["/models/user.py"]} |
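# Hedged client sketch (not part of the repo): registering a user against the
# /register resource above, assuming the app is running on localhost:5000.
import requests

resp = requests.post('http://localhost:5000/register',
                     json={'username': 'demo_user', 'password': 'demo_pass'})
print(resp.status_code, resp.json())  # 201 on success, 400 if the name is taken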
76,868 | chance-murphy/pineapple-flask-restapi | refs/heads/master | /models/user.py | import sqlite3
class UserModel:
def __init__(self, id, username, password, address, sex, shoe_size, shirt_size,pant_size_waist, pant_size_length):
self.id = id
self.username = username
self.password = password
self.address = address
self.sex = sex
self.shoe_size = shoe_size
self.shirt_size = shirt_size
self.pant_size_waist = pant_size_waist
self.pant_size_length = pant_size_length
@classmethod
def find_by_name(cls, name, db_path='./db/pineapplestore.db'):
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
query = 'SELECT * FROM user WHERE username=?;'
result = cursor.execute(query, (name,))
rows = result.fetchall()
        user = None
        if rows:
            for row in rows:
                user = UserModel(row[0], row[1], row[2], row[3], row[4],
                                 row[5], row[8], row[6], row[7])  # shirt_size is the last table column
        connection.close()
        return user
@classmethod
def find_by_id(cls, id, db_path='./db/pineapplestore.db'):
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
query = 'SELECT * FROM user WHERE id=?'
result = cursor.execute(query, (id,))
rows = result.fetchall()
        user = None
        if rows:
            for row in rows:
                user = UserModel(row[0], row[1], row[2], row[3], row[4],
                                 row[5], row[8], row[6], row[7])  # shirt_size is the last table column
        connection.close()
        return user
@classmethod
def insert_into_table(cls, username, password, db_path='./db/pineapplestore.db'):
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
query = 'INSERT INTO user VALUES(NULL, ?, ?,NULL,NULL,NULL,NULL,NULL,NULL)'
cursor.execute(query, (username, password))
connection.commit()
connection.close()
@classmethod
def find_all(cls, db_path='./db/pineapplestore.db'):
users = list()
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
query = 'SELECT * FROM user;'
result = cursor.execute(query)
rows = result.fetchall()
        if rows:
            for row in rows:
                users.append(UserModel(row[0], row[1], row[2], row[3], row[4],
                                       row[5], row[8], row[6], row[7]))  # shirt_size is the last table column
        connection.close()
        return users
@classmethod
    def delete_user(cls, name, db_path='./db/pineapplestore.db'):
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
user_id_query_for_purchase_his = 'SELECT id FROM user WHERE username=?;'
user_id = cursor.execute(user_id_query_for_purchase_his, (name,))
result_user_id = str(user_id.fetchone()[0])
purchase_history_deletion = 'DELETE FROM purchase_history WHERE user_id=?;'
        cursor.execute(purchase_history_deletion, (result_user_id,))  # must be a 1-tuple: a bare string would be read character by character
        user_to_delete = 'DELETE FROM user WHERE username=?;'
        cursor.execute(user_to_delete, (name,))
connection.commit()
connection.close()
def json(self):
return {
'id': self.id,
'username': self.username,
'address': self.address,
'sex': self.sex,
'shoe_size': self.shoe_size,
'shirt_size': self.shirt_size,
'pant_size_waist': self.pant_size_waist,
'pant_size_length': self.pant_size_length
# 'password': self.password
} | {"/app.py": ["/endpoints/user.py", "/endpoints/purchase_history.py"], "/endpoints/user.py": ["/models/user.py"]} |
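# Side note (illustrative, not from the repo): sqlite3 expects query parameters
# in a sequence, and a bare string is iterated character by character, which is
# why delete_user() above must pass (result_user_id,) rather than (result_user_id).
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE t(id TEXT)')
conn.execute('INSERT INTO t VALUES(?)', ('12',))      # correct: a 1-tuple
try:
    conn.execute('DELETE FROM t WHERE id=?', ('12'))  # wrong: a 2-character string
except sqlite3.ProgrammingError as err:
    print(err)  # Incorrect number of bindings supplied ...
conn.close()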
76,869 | chance-murphy/pineapple-flask-restapi | refs/heads/master | /endpoints/purchase_history.py | from models.purchase_history import PurchaseHistoryModel
from flask_restful import Resource, reqparse
class PurchaseHistory(Resource):
def get(self, name):
list_of_products = PurchaseHistoryModel.find_products_related_with_user_name(name)
if list_of_products:
return {
'product_history': [product.json() for product in list_of_products]
}, 200
else:
return {
                'message': 'User and related products not found in database!'
}, 404
def post(self, name, product):
pass | {"/app.py": ["/endpoints/user.py", "/endpoints/purchase_history.py"], "/endpoints/user.py": ["/models/user.py"]} |
76,870 | chance-murphy/pineapple-flask-restapi | refs/heads/master | /endpoints/user.py | from models.user import UserModel
from flask_restful import Resource, reqparse
from flask_jwt import jwt_required
class User(Resource):
@jwt_required()
def get(self, name):
users = UserModel.find_by_name(name)
if users:
return {'user': users.json()}, 200
return {'message': 'User not found!'}, 404
def delete(self, name):
user_to_delete = UserModel.delete_user(name)
return {'message': 'User {0} was successfully deleted from database!'.format(name)}
class UserList(Resource):
def get(self):
users = UserModel.find_all()
if users:
return {'users': [user.json() for user in users]}, 200
return {'message': 'No users found!'}, 404
class UserRegister(Resource):
def post(self):
parser = reqparse.RequestParser()
parser.add_argument('username',
type=str,
required=True,
help='This field is required!')
parser.add_argument('password',
type=str,
required=True,
help='This field is required!')
data_payload = parser.parse_args()
if UserModel.find_by_name(data_payload['username']):
return {'message': 'User with the same name already exists in database!'}, 400
else:
UserModel.insert_into_table(data_payload['username'],
data_payload['password'])
return {'message': 'User successfully added to the database!'}, 201 | {"/app.py": ["/endpoints/user.py", "/endpoints/purchase_history.py"], "/endpoints/user.py": ["/models/user.py"]} |
76,871 | chance-murphy/pineapple-flask-restapi | refs/heads/master | /db/database.py | import sqlite3
import csv
import sys
connection = sqlite3.connect("./db/pineapplestore.db")
cursor = connection.cursor()
create_user_table = '{}{}{}'.format(
'CREATE TABLE IF NOT EXISTS',
' user(id INTEGER PRIMARY KEY,',
' username text NOT NULL, password text NOT NULL, address text, sex text, shoe_size FLOAT, pant_size_waist INTEGER, pant_size_length INTEGER, shirt_size TEXT);'
)
cursor.execute(create_user_table)
create_history_table ='{}{}{}{}{}{}'.format(
'CREATE TABLE IF NOT EXISTS',
' purchase_history(id INTEGER PRIMARY KEY,',
' product text, user_id INTEGER NOT NULL,',
' product_id INTEGER NOT NULL,',
' FOREIGN KEY (user_id) REFERENCES user(id),',
' FOREIGN KEY (product_id) REFERENCES inventory(id));'
)
cursor.execute(create_history_table)
create_inventory_table = '{}{}{}{}'.format(
'CREATE TABLE IF NOT EXISTS',
' inventory(id INTEGER PRIMARY KEY, sku text, upc INTEGER,',
' rando text, product text, description text, price FLOAT,',
' size text, color text, amt INTEGER);'
)
cursor.execute(create_inventory_table)
cursor.execute('INSERT OR REPLACE INTO user VALUES(1, "hope_tambala", "qwert", "Ann Arbor", "Male", "12", "30","30","XL");')
cursor.execute('INSERT OR REPLACE INTO user VALUES(2, "chance_murphy", "qwaszx", "Ann Arbor", "Male", "12", "30","30","XL");')
cursor.execute('INSERT OR REPLACE INTO user VALUES(3, "jalin_parker", "zxasqw", "Ann Arbor", "Male", "12", "30","30","XL");')
cursor.execute('INSERT OR REPLACE INTO user VALUES(4, "kangning_chen", "asdfg", "Ann Arbor", "Male", "12", "30","30","XL");')
cursor.execute('INSERT OR REPLACE INTO user VALUES(5, "yunqi_qian", "qwerty", "Ann Arbor", "Male", "12", "30","30","XL");')
cursor.execute('INSERT OR REPLACE INTO user VALUES(6, "tayloir_thompson", "aqwerva", "Ann Arbor", "Male", "12", "30","30","XL");')
with open("./db/pineapple_inventory.csv", "rt") as f:
rows = csv.reader(f)
next(rows) # Skip the header row.
for row in rows:
query = "INSERT OR REPLACE INTO inventory VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
cursor.execute(query, row)
cursor.execute('INSERT OR REPLACE INTO purchase_history VALUES(1, "tshirt", 1, 1);')
connection.commit()
connection.close()
print('Database successfully created and populated with data!') | {"/app.py": ["/endpoints/user.py", "/endpoints/purchase_history.py"], "/endpoints/user.py": ["/models/user.py"]} |
76,872 | TimmannaC/spark-pytest | refs/heads/master | /pytest-cases/conftest.py | import pytest
import findspark
findspark.init()
from pyspark.sql import SparkSession
def pytest_addoption(parser):
parser.addoption(
"--lwm", action="store", default="", help=" Please add the lower water mark"
)
parser.addoption(
"--hwm", action="store", default="", help=" Please add the higher water mark"
)
@pytest.fixture(scope="session")
def cmdopt(request):
water_mark = {}
water_mark['lwm'] = request.config.getoption("--lwm")
water_mark['hwm'] = request.config.getoption("--hwm")
return water_mark
@pytest.fixture(scope="session")
def spark_session(request):
return SparkSession.builder.appName("pytest").enableHiveSupport().getOrCreate()
@pytest.fixture(scope="session")
def db_conf():
db_prop = {'db': "default", 'tb': ["test_1", "test_2"]}
return db_prop | {"/driver.py": ["/spark_lib/reader.py"]} |
76,873 | TimmannaC/spark-pytest | refs/heads/master | /spark_lib/reader.py |
def hive_reader(spark, query):
return spark.sql(query)
| {"/driver.py": ["/spark_lib/reader.py"]} |
76,874 | TimmannaC/spark-pytest | refs/heads/master | /driver.py | from pyspark.sql import SparkSession
from spark_lib.reader import hive_reader
import json
# spark-submit --deploy-mode client --master yarn --py-files spark_lib.zip driver.py
if __name__ == "__main__":
spark = SparkSession.builder.appName("test").enableHiveSupport().getOrCreate()
# spark.sparkContext.addPyFile("spark_lib.zip")
with(open("conf.json")) as conf_json:
json_conf = json.loads(conf_json.read())
df_1 = hive_reader(spark, json_conf['step-1a'])
df_1.show()
df_2 = hive_reader(spark, json_conf['step-2a'])
df_2.show()
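# Illustrative addition (not in the original): stop the session explicitly
# once both queries have been displayed.
spark.stop()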
| {"/driver.py": ["/spark_lib/reader.py"]} |
76,875 | TimmannaC/spark-pytest | refs/heads/master | /pytest-cases/pytest-driver.py |
"""
to run the pytest with parameters.
pytest -s --lwm "2019-11-01 23:59:59" --hwm "2019-12-01 23:59:59" pytest-driver.py
"""
def test_build_sql_query(spark_session, db_conf, cmdopt):
print(spark_session.version)
print(db_conf)
print(cmdopt)
def test_table_count(spark_session, db_conf, cmdopt):
db_name = db_conf['db']
tables_list = db_conf['tb']
for table in tables_list:
hive_query = "SELECT *FROM " + table + " WHERE insert_date BETWEEN " + cmdopt['lwm'] + "AND" + cmdopt['hwm']
print(hive_query)
hive_count = spark_session.read.option("query", hive_query).load().count()
print("HIVE table count is : " + hive_count)
if __name__ == "__main__":
print("Inside main !!")
| {"/driver.py": ["/spark_lib/reader.py"]} |
76,877 | zionist/confgen | refs/heads/master | /confgen/const/const.py | __author__ = 'slaviann'
TEMPLATES = (
"ddos.list",
"domains.list",
"domains_ssl.list",
"suspend.list"
)
| {"/confgen/const/__init__.py": ["/confgen/const/nginx_conf.py", "/confgen/const/const.py"]} |
76,878 | zionist/confgen | refs/heads/master | /confgen/const/domains.py | __author__ = 'slaviann'
DOMAINS_TEMPLATE = """
"""
| {"/confgen/const/__init__.py": ["/confgen/const/nginx_conf.py", "/confgen/const/const.py"]} |
76,879 | zionist/confgen | refs/heads/master | /confgen/__init__.py | __author__ = 'slaviann'
| {"/confgen/const/__init__.py": ["/confgen/const/nginx_conf.py", "/confgen/const/const.py"]} |
76,880 | zionist/confgen | refs/heads/master | /confgen/const/nginx_conf.py | __author__ = 'slaviann'
NGINX_CONF_TEMPLATE = """
user slaviann;
worker_processes 4;
pid /run/nginx.pid;
events {
worker_connections 768;
# multi_accept on;
}
http {
##
# Basic Settings
##
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
# server_tokens off;
# server_names_hash_bucket_size 64;
# server_name_in_redirect off;
include /etc/nginx/mime.types;
default_type application/octet-stream;
##
# SSL Settings
##
ssl_protocols TLSv1 TLSv1.1 TLSv1.2; # Dropping SSLv3, ref: POODLE
ssl_prefer_server_ciphers on;
##
# Logging Settings
##
access_log /var/log/nginx/access.log;
error_log /var/log/nginx/error.log;
##
# Virtual Host Configs
##
include /etc/nginx/conf.d/*.conf;
include /etc/nginx/sites-enabled/*;
}
"""
| {"/confgen/const/__init__.py": ["/confgen/const/nginx_conf.py", "/confgen/const/const.py"]} |
76,881 | zionist/confgen | refs/heads/master | /setup.py | from setuptools import setup, find_packages
setup( name='confgen',
version='0.1',
description='Simple nginx conf gen tool',
author='slaviann',
author_email='slaviann@gmail.com',
packages=find_packages(),
#install_requires=[
# 'dpkt-fix',
#],
scripts=['bin/confgen'],
)
| {"/confgen/const/__init__.py": ["/confgen/const/nginx_conf.py", "/confgen/const/const.py"]} |
76,882 | zionist/confgen | refs/heads/master | /confgen/const/__init__.py | __author__ = 'slaviann'
from confgen.const.nginx_conf import NGINX_CONF_TEMPLATE
from confgen.const.const import TEMPLATES
| {"/confgen/const/__init__.py": ["/confgen/const/nginx_conf.py", "/confgen/const/const.py"]} |
76,890 | jhugon/lariatPionAbs | refs/heads/master | /plotXsec.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
import math # explicit import for the slice-thickness calculation below
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
#cuts += "*( pWC > 100 && pWC < 1100 && (isMC || (firstTOF > 0 && firstTOF < 25)))" # pions
cuts += "*( pWC > 450 && pWC < 1100 && (isMC || (firstTOF > 28 && firstTOF < 55)))" # protons
cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
cuts = "*(iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
###
###
secTrkCuts = "*(trackStartDistToPrimTrkEnd < 2. || trackEndDistToPrimTrkEnd < 2.)"
#weightStr = "pzWeight"+cuts
weightStr = "1"+cuts
nData = 30860.0
logy = True
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
fileConfigs = [
{
#'fn': "piAbs_data_Pos_RunI_v03.root",
#'addFriend': ["friend", "friendTree_Pos_RunI_v03.root"],
'fn': "test_data_Pos_RunI_piAbsSelector.root",
'name': "RunI_Pos",
'title': "Run I Pos. Polarity",
'caption': "Run I Pos. Polarity",
'color': root.kBlack,
'isData': True,
},
{
#'fn': "piAbs_data_Pos_RunII_v03.root",
#'addFriend': ["friend", "friendTree_Pos_RunII_v03.root"],
'fn': "test_data_Pos_RunII_piAbsSelector.root",
'name': "RunII_Pos",
'title': "Run II Pos. Polarity",
'caption': "Run II Pos. Polarity",
'color': root.kGray+1,
'isData': True,
},
{
#'fn': "piAbs_pip_v5.root",
#'addFriend': ["friend", "friendTree_pip_v5.root"],
'fn': "test_pip_piAbsSelector.root",
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'color': root.kBlue-7,
#'scaleFactor': 1./35250*nData*0.428/(1.-0.086), #No Cuts
'scaleFactor': 1./35250*nData*0.428/(1.-0.086)*0.70, # pion/tpc tracks cuts
},
{
#'fn': "piAbs_p_v5.root",
#'addFriend': ["friend", "friendTree_p_v5.root"],
'fn': "test_p_piAbsSelector.root",
'name': "p",
'title': "proton MC",
'caption': "proton MC",
'color': root.kRed-4,
'scaleFactor': 1./35200*nData*0.162/(1.-0.086), #No Cuts
},
{
#'fn': "piAbs_ep_v5.root",
#'addFriend': ["friend", "friendTree_ep_v5.root"],
'fn': "test_ep_piAbsSelector.root",
'name': "ep",
'title': "e^{+} MC",
'caption': "e^{+} MC",
'color': root.kGreen,
#'scaleFactor': 1./35700*nData*0.301/(1.-0.086), #No Cuts
'scaleFactor': 1./35700*nData*0.301/(1.-0.086)*0.70, # pion/tpc tracks cuts
},
{
#'fn': "piAbs_mup_v5.root",
#'addFriend': ["friend", "friendTree_mup_v5.root"],
'fn': "test_mup_piAbsSelector.root",
'name': "mup",
'title': "#mu^{+} MC",
'caption': "#mu^{+} MC",
'color': root.kMagenta-4,
#'scaleFactor': 1./35200*nData*0.021/(1.-0.086), #No Cuts
'scaleFactor': 1./35200*nData*0.021/(1.-0.086)*0.70, # pion/tpc tracks cuts
},
#{
# 'fn': "piAbs_kp_v5.root",
# 'addFriend': ["friend", "friendTree_kp_v5.root"],
# #'fn': "test_kp_piAbsSelector.root",
# 'name': "kp",
# 'title': "K^{+} MC",
# 'caption': "K^{+} MC",
# 'color': root.kOrange-3,
# 'scaleFactor': 1./35700*nData*0.00057/(1.-0.086), #No Cuts
#},
#{
# #'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/piAbsSelector/lariat_PiAbsAndChEx_flat_gam_v4/anahist.root",
# #'addFriend': ["friend", "friendTree_gam_v4.root"],
# 'fn': "test_gam_piAbsSelector.root",
# 'name': "gam",
# 'title': "#gamma MC",
# 'caption': "#gamma MC",
# 'color': root.kOrange-3,
# 'scaleFactor': 2953., #AllWeightsCuts Proton
#},
]
histConfigs = [
{
'name': "Incident",
'title': "Incident",
'xtitle': "Reco Kinetic Energy [MeV]",
'ytitle': "Track Hits / MeV",
'binning': [100,0,1000],
'var': "primTrkKins",
'cuts': weightStr+cuts+"*primTrkInFids",
'normToBinWidth': True,
'logy': logy,
},
{
'name': "Interacting",
'title': "Interacting",
'xtitle': "Reco Kinetic Energy [MeV]",
'ytitle': "Track Hits / MeV",
'binning': [100,0,1000],
'var': "primTrkKinInteract",
'cuts': weightStr+cuts,
'normToBinWidth': True,
'logy': logy,
'color': root.kBlue,
},
]
plotManyHistsOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="RecoKin_")
kinHists = plotOneHistOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="XsecPlot_",writeImages=False)
print(kinHists)
for fileConfig in fileConfigs:
fileName = fileConfig["name"]
incident = kinHists['Incident'][fileName]
interacting = kinHists['Interacting'][fileName]
rebin = 5
interacting.Rebin(rebin)
incident.Rebin(rebin)
interacting.Scale(1./rebin)
incident.Scale(1./rebin)
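# Note (added): Rebin(5) sums groups of 5 bins, so the 1/rebin scaling
# restores the per-MeV normalization applied upstream via normToBinWidth.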
xsec = interacting.Clone(interacting.GetName()+"xsec")
xsec.Divide(incident)
#
density = 1.3954 # g / cm3 (liquid argon)
molarmass = 39.948 # g / mol (argon)
avogadro = 6.022140857e23 # particles / mol
numberdensity = density * avogadro / molarmass # particles / cm3
sliceThickness = 0.4/math.sin(60.*math.pi/180.) # cm
scaleFactorcm = 1./(numberdensity*sliceThickness) # cm2 / particles
scaleFactorBarn = 1e24 * scaleFactorcm # barn / particles
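# Thin-slab relation used here (added note): per energy bin,
# sigma = (N_interacting / N_incident) / (numberdensity * sliceThickness),
# with the 1e24 factor converting cm^2 to barns (1 barn = 1e-24 cm^2).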
#
xsec.Scale(scaleFactorBarn)
xsec.GetXaxis().SetTitle("Reco Kinetic Energy [MeV]")
xsec.GetYaxis().SetTitle("Total Cross Section [barn]")
xsec.GetXaxis().SetRangeUser(50,1000)
xsec.GetYaxis().SetRangeUser(0,3.5)
xsec.Draw()
#c.SetLogy(True)
drawStandardCaptions(c,"Super-preliminary",captionright1="#pi^{+} MC")
c.SaveAs("xsec_MC_{}.png".format(fileName))
c.SaveAs("xsec_MC_{}.pdf".format(fileName))
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,891 | jhugon/lariatPionAbs | refs/heads/master | /tofRooFit.py | #!/usr/bin/env python2
import ROOT as root
root.gROOT.SetBatch(True)
from helpers import *
def fitMass2(c,do_toy_data=False,plot_components=False,binned=True):
workspace = root.RooWorkspace("w")
mass = root.RooRealVar("mass","Mass [MeV]",0,2000.)
mass2 = root.RooRealVar("mass2","Mass Squared [MeV^{2}]",-2e5,3e6)
true_p = root.RooRealVar("reco_momo","True Momentum [MeV]",500,0,1500.)
reco_tof = root.RooRealVar("reco_tof","",0,100.)
observables = root.RooArgSet(mass2)
mass.setBins(20)
mass2.setBins(20)
true_p.setBins(30) # speeds up data-averaging projection
data = None
Ndata = 1.
if not do_toy_data:
infile = root.TFile("momentumTest.root")
intree = infile.Get("lowlevel/Mass Tree");
intree.Draw("reco_tof:reco_momo >> tofVmomo(150,0,1500,90,0,90)","","colz")
c.SaveAs("TOF_tofVmomo.pdf")
c.SaveAs("TOF_tofVmomo.png")
intree.Draw("mass:reco_momo >> massVmomo(150,0,1500,150,0,1500)","reco_tof < 75.","colz")
c.SaveAs("TOF_massVmomo.pdf")
c.SaveAs("TOF_massVmomo.png")
intree.Draw("mass2:reco_momo >> mass2Vmomo(150,0,1500,200,-2e3,3e6)","","colz")
c.SaveAs("TOF_mass2Vmomo.pdf")
c.SaveAs("TOF_mass2Vmomo.png")
intree.Draw("mass:reco_tof >> massVtof(100,0,100,150,0,1500)","reco_momo > 200","colz")
c.SaveAs("TOF_massVtof.pdf")
c.SaveAs("TOF_massVtof.png")
data = root.RooDataSet("data","data",root.RooArgSet(mass2,mass,true_p,reco_tof),root.RooFit.Import(intree))
data = data.reduce("reco_momo >200 && reco_tof < 75.")
#data = data.reduce("reco_momo >600 && reco_momo < 605")
#data = data.reduce("reco_momo >400 && reco_momo < 450")
#data = data.reduce("reco_momo >700 && reco_momo < 605")
Ndata = data.sumEntries()
if binned:
data = data.binnedClone()
d = root.RooRealVar("d","Distance",6.683)
sigma_p = root.RooRealVar("sigma_p","",50.)
sigma_dt = root.RooRealVar("sigma_dt","",0.5)
shift_dt2 = root.RooRealVar("shift_dt2","Shift of #Delta t Squared [ns^{2}]",74,-100,200)
coef1_p = root.RooRealVar("coef1_p","1st Polynomial Coefficient for Momentum",1.120,1.,1.50)
# Derived parameters
d2 = root.RooFormulaVar("d2","Distance^{2} [m^{2}]","pow(@0,2)",root.RooArgList(d))
true_p2 = root.RooFormulaVar("true_p2","True Momentum Squared [MeV^{2}]",
"pow(@0,2)",
root.RooArgList(true_p))
measured_p = root.RooFormulaVar("measured_p","Measured Momentum [MeV]",
"@0*@1",
root.RooArgList(true_p,coef1_p))
measured_p2 = root.RooFormulaVar("measured_p2","Measured Momentum Squared [MeV^{2}]",
"pow(@0,2)",
root.RooArgList(measured_p))
# Make true momentum distribution
true_p_norm1 = root.RooRealVar("true_p_norm1","",0.02)
true_p_mean1 = root.RooRealVar("true_p_mean1","",100.)
true_p_sigma1 = root.RooRealVar("true_p_sigma1","",20.)
true_p_norm2 = root.RooRealVar("true_p_norm2","",0.02)
true_p_mean2 = root.RooRealVar("true_p_mean2","",380.)
true_p_sigma2 = root.RooRealVar("true_p_sigma2","",40.)
true_p_norm3 = root.RooRealVar("true_p_norm3","",1.)
true_p_mean3 = root.RooRealVar("true_p_mean3","",660.)
true_p_sigma3 = root.RooRealVar("true_p_sigma3","",150.)
true_p_gaus1 = root.RooGaussian("true_p_gaus1","True Momentum Gaus 1",true_p,true_p_mean1,true_p_sigma1)
true_p_gaus2 = root.RooGaussian("true_p_gaus2","True Momentum Gaus 2",true_p,true_p_mean2,true_p_sigma2)
true_p_gaus3 = root.RooGaussian("true_p_gaus3","True Momentum Gaus 3",true_p,true_p_mean3,true_p_sigma3)
true_p_distribution = root.RooAddPdf("true_p_distribution","True Momentum Distribution",root.RooArgList(true_p_gaus1,true_p_gaus2,true_p_gaus3),root.RooArgList(true_p_norm1,true_p_norm2,true_p_norm3))
particleConfigs = [
#("electron","Electron",0.511,0.01*Ndata),
#("muon","Muon",105.658,0.03*Ndata),
("pion","Pion",139.57,0.03*Ndata),
("kaon","Kaon",493.677,0.007*Ndata),
("proton","Proton",938.27,0.8*Ndata),
#("Deuteron","Deuteron",1875.6,0.002*Ndata),
]
gaussians = []
gaussians2 = []
fractions = []
allVars = []
for particle_name, particle_title, particle_mass, particle_fraction in particleConfigs:
fraction = root.RooRealVar("N_"+particle_name,"Number of of "+particle_title,particle_fraction,0.,Ndata)
true_mass = root.RooRealVar("true_mass_"+particle_name,"True Mass of "+particle_title,particle_mass)
true_mass2 = root.RooFormulaVar("true_mass2_"+particle_name,"True Mass Squared [MeV^{2}]",
"pow(@0,2)",
root.RooArgList(true_mass))
true_dt2 = root.RooFormulaVar("true_dt2_"+particle_name,"True #Delta t Squared [ns^{2}]",
"@2/0.29979/0.29979*(1+@1/@0)",
root.RooArgList(true_p2,true_mass2,d2))
true_dt = root.RooFormulaVar("true_dt_"+particle_name,"True #Delta t [ns]",
"sqrt(@0)",
root.RooArgList(true_dt2))
variance_p = root.RooFormulaVar("variance_p_"+particle_name,"Variance of Momentum [MeV^{2}]",
"pow(@0,2)",
root.RooArgList(sigma_p))
variance_dt = root.RooFormulaVar("variance_dt_"+particle_name,"Variance of #Delta t [ns^{2}]",
"pow(@0,2)",
root.RooArgList(sigma_dt))
variance_mass = root.RooFormulaVar("variance_mass_"+particle_name,"Mass Variance [MeV^4]",
"@4/@2*pow((@0+@1)/@5,2) + @0/@1*@3",
root.RooArgList(true_mass2,true_p2,true_dt2,variance_p,variance_dt,true_mass))
sigma_mass = root.RooFormulaVar("sigma_mass_"+particle_name,"Mass Sigma [MeV^2]",
"sqrt(@0)",
root.RooArgList(variance_mass))
variance_mass2 = root.RooFormulaVar("variance_mass2_"+particle_name,"Mass Squared Variance [MeV^4]",
"4/@2*pow(@0+@1,2)*@5 + 4*pow(@0,2)/@1*@4",
root.RooArgList(true_mass2,true_p2,true_dt2,d2,variance_p,variance_dt))
sigma_mass2 = root.RooFormulaVar("sigma_mass2_"+particle_name,"Mass Squared Sigma [MeV^2]",
"sqrt(@0)",
root.RooArgList(variance_mass2))
mean_dt2 = root.RooFormulaVar("mean_dt2_"+particle_name,"Mean of #Delta t Squared [ns^{2}]",
"@0+@1",
root.RooArgList(true_dt2,shift_dt2));
mean_mass2 = root.RooFormulaVar("mean_mass2_"+particle_name,"Mean Mass Squared [MeV^2]",
"@0*(@1/@2*0.29979*0.29979-1)",
root.RooArgList(measured_p2,mean_dt2,d2))
mean_mass = root.RooFormulaVar("mean_mass_"+particle_name,"Mean Mass [MeV]",
"sqrt(@0)",
root.RooArgList(mean_mass2))
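# Kinematics note (added): with c = 0.29979 m/ns, the time-of-flight mass
# relation is m^2 = p^2*(c^2*dt^2/d^2 - 1); mean_mass2 above is exactly this,
# with the fitted shift_dt2 offset folded into mean_dt2.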
true_mass2.Print()
true_p2.Print()
true_dt2.Print()
d2.Print()
variance_p.Print()
variance_dt.Print()
variance_mass.Print()
sigma_mass.Print()
variance_mass2.Print()
sigma_mass2.Print()
mean_dt2.Print()
mean_mass2.Print()
mean_mass.Print()
gauss = root.RooGaussian("gauss_"+particle_name,particle_name,mass,mean_mass,sigma_mass);
gauss2 = root.RooGaussian("gauss2_"+particle_name,particle_name,mass2,mean_mass2,sigma_mass2);
gaussians.append(gauss)
gaussians2.append(gauss2)
fractions.append(fraction)
#wImport = getattr(workspace,"import")
#wImport(gauss)
#wImport(gauss2)
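# Keep Python references to everything created in this loop iteration so
# PyROOT does not garbage-collect the RooFit objects while they are in use: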
l = locals()
for k in l:
allVars.append(l[k])
model = root.RooAddPdf("model","ToF Mass Model",root.RooArgList(*gaussians),root.RooArgList(*fractions))
model2 = root.RooAddPdf("model2","ToF Mass Squared Model",root.RooArgList(*gaussians2),root.RooArgList(*fractions))
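# Note (added): giving RooAddPdf one coefficient per component makes these
# extended PDFs, so the N_<particle> yields are fitted directly rather than
# as fractions.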
model_mass_momentum = root.RooProdPdf("model_mass_momentum","ToF Mass Model x Momentum Model",
root.RooArgSet(true_p_distribution),
root.RooFit.Conditional(
root.RooArgSet(model),
root.RooArgSet(mass)))
model_mass2_momentum = root.RooProdPdf("model_mass2_momentum","ToF Mass^{2} Model x Momentum Model",
root.RooArgSet(model2),
root.RooFit.Conditional(
root.RooArgSet(true_p_distribution),
root.RooArgSet(true_p)))
toy_data = None
toy_data2 = None
if do_toy_data:
toy_data = model_mass_momentum.generate(root.RooArgSet(mass,true_p),5000.)
toy_data2 = model_mass2_momentum.generate(root.RooArgSet(mass2,true_p),5000.)
else:
model_mass2_momentum.fitTo(data,
root.RooFit.ConditionalObservables(root.RooArgSet(true_p)),
root.RooFit.NumCPU(4))
c.SetRightMargin(0.1)
gaus_graphs = []
gaus_titles = []
frame2 = mass2.frame(root.RooFit.Title(""))
if do_toy_data:
toy_data2.plotOn(frame2)
model2.plotOn(frame2,root.RooFit.ProjWData(toy_data2,True))
else:
data.plotOn(frame2)
model2.plotOn(frame2,root.RooFit.ProjWData(data,True))
gaus_graphs.append(frame2.getObject(int(frame2.numItems())-1))
gaus_titles.append("All Particles")
if plot_components:
for iGauss, gauss in enumerate(gaussians2):
if do_toy_data:
model2.plotOn(frame2,root.RooFit.Components(gauss.GetName()),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iGauss+1]),root.RooFit.ProjWData(toy_data2,True))
else:
model2.plotOn(frame2,root.RooFit.Components(gauss.GetName()),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iGauss+1]),root.RooFit.ProjWData(data,True))
gaus_graphs.append(frame2.getObject(int(frame2.numItems())-1))
gaus_titles.append(gauss.GetTitle())
frame2.Draw()
if plot_components:
leg = drawNormalLegend(gaus_graphs,gaus_titles,option="l",position=(0.55,0.7,0.85,0.89))
c.SaveAs("TOFFit2.png")
c.SaveAs("TOFFit2.pdf")
gaus_graphs = []
gaus_titles = []
frame2_zoom = mass2.frame(root.RooFit.Title(""),root.RooFit.Range(-2e5,2e5))
if do_toy_data:
toy_data2.plotOn(frame2_zoom)
model2.plotOn(frame2_zoom,root.RooFit.ProjWData(toy_data2,True))
else:
data.plotOn(frame2_zoom)
model2.plotOn(frame2_zoom,root.RooFit.ProjWData(data,True))
gaus_graphs.append(frame2_zoom.getObject(int(frame2_zoom.numItems())-1))
gaus_titles.append("All Particles")
if plot_components:
for iGauss, gauss in enumerate(gaussians2):
if do_toy_data:
model2.plotOn(frame2_zoom,root.RooFit.Components(gauss.GetName()),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iGauss+1]),root.RooFit.ProjWData(toy_data2,True))
else:
model2.plotOn(frame2_zoom,root.RooFit.Components(gauss.GetName()),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iGauss+1]),root.RooFit.ProjWData(data,True))
gaus_graphs.append(frame2_zoom.getObject(int(frame2_zoom.numItems())-1))
gaus_titles.append(gauss.GetTitle())
frame2_zoom.Draw()
if plot_components:
leg = drawNormalLegend(gaus_graphs,gaus_titles,option="l",position=(0.55,0.7,0.85,0.89))
c.SaveAs("TOFFit2_zoom.png")
c.SaveAs("TOFFit2_zoom.pdf")
#
# gaus_graphs = []
# gaus_titles = []
# frame = mass.frame(root.RooFit.Title(""))
# #frame.updateNormVars(root.RooArgSet(mass,true_p)) # makes RooFit contionalize on true_p
# if do_toy_data:
# toy_data.plotOn(frame)
# model.plotOn(frame,root.RooFit.ProjWData(toy_data,True))
# else:
# data.plotOn(frame)
# model.plotOn(frame,root.RooFit.ProjWData(data,True))
# gaus_graphs.append(frame.getObject(int(frame.numItems())-1))
# gaus_titles.append("All Particles")
# if plot_components:
# for iGauss, gauss in enumerate(gaussians):
# if do_toy_data:
# model.plotOn(frame,root.RooFit.Components(gauss.GetName()),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iGauss+1]),root.RooFit.ProjWData(toy_data,True))
# else:
# model.plotOn(frame,root.RooFit.Components(gauss.GetName()),root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iGauss+1]),root.RooFit.ProjWData(data,True))
# gaus_graphs.append(frame.getObject(int(frame.numItems())-1))
# gaus_titles.append(gauss.GetTitle())
# frame.Draw()
# if plot_components:
# leg = drawNormalLegend(gaus_graphs,gaus_titles,option="l",position=(0.55,0.7,0.85,0.89))
# c.SaveAs("TOFFit.png")
# c.SaveAs("TOFFit.pdf")
frame_p = true_p.frame()
if do_toy_data:
toy_data.plotOn(frame_p)
else:
data.plotOn(frame_p)
true_p_distribution.plotOn(frame_p)
frame_p.Draw()
c.SaveAs("TOFFit_p.png")
c.SaveAs("TOFFit_p.pdf")
if __name__ == "__main__":
c = root.TCanvas("c")
fitMass2(c)
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,892 | jhugon/lariatPionAbs | refs/heads/master | /plotInelastic.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
cuts += "*(primTrkEndInFid)"
cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
cuts += "*( iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
cuts += "*(trueEndProcess == 10 || trueEndProcess == 11 || trueEndProcess == 13 || trueEndProcess == 1)"
###
secTrkCuts = "*(trackStartDistToPrimTrkEnd < 2. || trackEndDistToPrimTrkEnd < 2.)"
weightStr = "1"+cuts
nData = 30860.0
logy = True
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
fileConfigs = [
{
#'fn': "piAbs_pip_v5.2.root",
#'addFriend': ["friend", "friendTree_pip_v5.root"],
'fn': "test_pip_piAbsSelector.root",
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'color': root.kBlue-7,
'scaleFactor': 1./35250*nData*0.428/(1.-0.086), #No Cuts
#'scaleFactor': 1./35250*nData*0.428/(1.-0.086)*0.51, # pion, tpc, match cuts
},
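# Normalization note (added): each scaleFactor follows the pattern
# nData * (beam-species fraction) / (N generated MC events * (1 - 0.086)),
# where 0.086 appears to be a common correction applied to every sample.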
# {
# #'fn': "piAbs_p_v5.2.root",
# #'addFriend': ["friend", "friendTree_p_v5.root"],
# 'fn': "test_p_piAbsSelector.root",
# 'name': "p",
# 'title': "proton MC",
# 'caption': "proton MC",
# 'color': root.kRed-4,
# 'scaleFactor': 1./35200*nData*0.162/(1.-0.086), #No Cuts
# #'scaleFactor': 1./35200*nData*0.162/(1.-0.086)*0.7216, #proton, tpc, matching
# },
# {
# #'fn': "piAbs_ep_v5.2.root",
# #'addFriend': ["friend", "friendTree_ep_v5.root"],
# 'fn': "test_ep_piAbsSelector.root",
# 'name': "ep",
# 'title': "e^{+} MC",
# 'caption': "e^{+} MC",
# 'color': root.kGreen,
# 'scaleFactor': 1./35700*nData*0.301/(1.-0.086), #No Cuts
# #'scaleFactor': 1./35700*nData*0.301/(1.-0.086)*0.35, # pion, tpc, match cuts
# },
# {
# #'fn': "piAbs_mup_v5.2.root",
# #'addFriend': ["friend", "friendTree_mup_v5.root"],
# 'fn': "test_mup_piAbsSelector.root",
# 'name': "mup",
# 'title': "#mu^{+} MC",
# 'caption': "#mu^{+} MC",
# 'color': root.kMagenta-4,
# 'scaleFactor': 1./35200*nData*0.021/(1.-0.086), #No Cuts
# #'scaleFactor': 1./35200*nData*0.021/(1.-0.086)*0.51, # pion, tpc, match cuts
# },
# {
# #'fn': "piAbs_kp_v5.2.root",
# #'addFriend': ["friend", "friendTree_kp_v5.root"],
# 'fn': "test_kp_piAbsSelector.root",
# 'name': "kp",
# 'title': "K^{+} MC",
# 'caption': "K^{+} MC",
# 'color': root.kOrange-3,
# 'scaleFactor': 1./35700*nData*0.00057/(1.-0.086), #No Cuts
# },
#{
# #'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/piAbsSelector/lariat_PiAbsAndChEx_flat_gam_v4/anahist.root",
# #'addFriend': ["friend", "friendTree_gam_v4.root"],
# 'fn': "test_gam_piAbsSelector.root",
# 'name': "gam",
# 'title': "#gamma MC",
# 'caption': "#gamma MC",
# 'color': root.kOrange-3,
# 'scaleFactor': 2953., #AllWeightsCuts Proton
#},
]
histConfigs = [
{
'title': "#pi^{#pm}",
'xtitle': "Number of daughter particles",
'ytitle': "Daughters / bin",
'binning': [10,0,10],
'var': "trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kBlue-7,
},
{
'title': "#pi^{0}",
'xtitle': "Number of daughter particles",
'ytitle': "Daughters / bin",
'binning': [10,0,10],
'var': "trueNSecondaryPiZeros",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
'color': root.kOrange-3,
},
{
'title': "p",
'xtitle': "Number of daughter particles",
'ytitle': "Daughters / bin",
'binning': [10,0,10],
'var': "trueNSecondaryProtons",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kRed-4,
},
{
'title': "#pi^{#pm} + p",
'xtitle': "Number of daughter particles",
'ytitle': "Daughters / bin",
'binning': [10,0,10],
'var': "trueNSecondaryProtons + trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kGreen,
},
]
plotManyHistsOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="Inelastic_nDaughters_")
histConfigs = [
{
'name': "trueNDaughters",
'xtitle': "N daughters (MC truth)",
'ytitle': "Events / bin",
'binning': [10,0,10],
'var': "trueNDaughters",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
# {
# 'name': "trueNSecondaryPiZeros",
# 'xtitle': "N #pi^{0} daughters (MC truth)",
# 'ytitle': "Events / bin",
# 'binning': [10,0,10],
# 'var': "trueNSecondaryPiZeros",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trueNSecondaryProtons",
# 'xtitle': "N proton daughters (MC truth)",
# 'ytitle': "Events / bin",
# 'binning': [10,0,10],
# 'var': "trueNSecondaryProtons",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trueNChargedSecondaries",
# 'xtitle': "N charged daughters (MC truth)",
# 'ytitle': "Events / bin",
# 'binning': [10,0,10],
# 'var': "trueNSecondaryProtons+trueNSecondaryChPions",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
{
'name': "nSecTrk",
'xtitle': "N secondary tracks",
'ytitle': "Events / bin",
'binning': [10,0,10],
'var': "nSecTrk",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "nSecTrkVtrueNSecondaryChPions",
'xtitle': "N #pi^{#pm} daughters (MC truth)",
'ytitle': "N secondary tracks",
'binning': [6,0,6,6,0,6],
'var': "nSecTrk:trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "nSecTrkLLRG0VtrueNSecondaryChPions",
'xtitle': "N #pi^{#pm} daughters (MC truth)",
'ytitle': "N secondary tracks with LLR > 0",
'binning': [6,0,6,6,0,6],
'var': "nSecTrkLLRG0:trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "nSecTrkLLRG100VtrueNSecondaryChPions",
'xtitle': "N #pi^{#pm} daughters (MC truth)",
'ytitle': "N secondary tracks with LLR > 100",
'binning': [6,0,6,6,0,6],
'var': "nSecTrkLLRG100:trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "nSecTrkLLRG200VtrueNSecondaryChPions",
'xtitle': "N #pi^{#pm} daughters (MC truth)",
'ytitle': "N secondary tracks with LLR > 200",
'binning': [6,0,6,6,0,6],
'var': "nSecTrkLLRG200:trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "nSecTrkLLRG400VtrueNSecondaryChPions",
'xtitle': "N #pi^{#pm} daughters (MC truth)",
'ytitle': "N secondary tracks with LLR > 400",
'binning': [6,0,6,6,0,6],
'var': "nSecTrkLLRG400:trueNSecondaryChPions",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="Inelastic_")
histConfigs = [
{
'title': "LLR > 0",
'name': "nSecTrkLLRG0",
'xtitle': "N secondary tracks",
'ytitle': "Tracks / bin",
'binning': [10,0,10],
'var': "nSecTrkLLRG0",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'title': "LLR > 100",
'name': "nSecTrkLLRG100",
'xtitle': "N secondary tracks",
'ytitle': "Tracks / bin",
'binning': [10,0,10],
'var': "nSecTrkLLRG100",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kBlue-7,
},
{
'title': "LLR > 200",
'name': "nSecTrkLLRG200",
'xtitle': "N secondary tracks",
'ytitle': "Tracks / bin",
'binning': [10,0,10],
'var': "nSecTrkLLRG200",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
'color': root.kOrange-3,
},
{
'title': "LLR > 400",
'name': "nSecTrkLLRG400",
'xtitle': "N secondary tracks",
'ytitle': "Tracks / bin",
'binning': [10,0,10],
'var': "nSecTrkLLRG400",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kRed-4,
},
{
'title': "PIDA < 8",
'name': "nSecTrkPIDAL8",
'xtitle': "N secondary tracks",
'ytitle': "Tracks / bin",
'binning': [10,0,10],
'var': "nSecTrkPIDAL8",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kGreen,
},
{
'title': "PIDA < 14",
'name': "nSecTrkPIDAL14",
'xtitle': "N secondary tracks",
'ytitle': "Tracks / bin",
'binning': [10,0,10],
'var': "nSecTrkPIDAL14",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
"color": root.kMagenta-4,
},
]
plotManyHistsOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="Inelastic_NLLR")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,893 | jhugon/lariatPionAbs | refs/heads/master | /plotCompareSmearing.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
import sys
if __name__ == "__main__":
cuts = ""
cuts += "*(nTracks == 1)"
cuts += "*( iBestMatch >= 0)" # primary Track found
cosmicCuts = cuts
cosmicCuts += "*((!isMC) || (trueHitCosmic1 && trueHitCosmic2) || (trueHitCosmic3 && trueHitCosmic4))"
cosmicCuts += "*((primTrkStartTheta > 27*pi/180.) && (primTrkStartTheta < 42*pi/180.))*(primTrkStartPhi > -57*pi/180. && primTrkStartPhi < 60*pi/180.)*(primTrkStartPhi < -15*pi/180. || primTrkStartPhi > 22*pi/180.)" # only angles that match MC
cosmicPhiGeq0Cuts = cosmicCuts + "*(primTrkStartPhi >= 0.)"
cosmicPhiLt0Cuts = cosmicCuts + "*(primTrkStartPhi < 0.)"
beamCuts = "*pzWeight"+cuts
beamPionCuts = beamCuts + "*((((!isMC) && pWC > 100 && pWC < 1100) || (isMC && trueStartMom > 100 && trueStartMom < 1100)) && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) < 5e4))" + "*(primTrkLength > 85.)"
beamProtonCuts = beamCuts + "*((((!isMC) && pWC > 1000 && pWC < 1100) || (isMC && trueStartMom > 1000 && trueStartMom < 1100)) && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) > 7e5))" + "*(primTrkLength < 60.)"
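# Note (added): pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) in the cuts
# above is the beamline mass squared, m^2 = p^2*(c^2*t^2/d^2 - 1), where
# 0.00201052122 = (c/d)^2 for c = 0.29979 m/ns and a TOF baseline of
# roughly d = 6.7 m.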
hitCuts = "*(primTrkXs > 3. && primTrkXs < 46. && primTrkYs < 18. && primTrkYs > -18. && primTrkZs > 3. && primTrkZs < 87.)"
cosmicHitCuts = hitCuts
beamHitCuts = hitCuts+"*(primTrkZs > 5. && primTrkZs < 10.)"
beamProtonHitCuts = hitCuts+"*(primTrkZs > 2. && primTrkZs < 6.)"
logy = True
scaleFactor = 0.066
c = root.TCanvas()
NMAX=1000000000
#NMAX=100
baseDir="/scratch/jhugon/"
baseDir=""
########################################################
## Beam Pions Definitions ##############################
########################################################
fileConfigs = [
#{
# 'fn': [baseDir+"cosmicBeamData_v2/cosmicAna_beam_Neg_RunII_current100_v02_all.root",
# baseDir+"cosmicBeamData_v2/cosmicAna_beam_Neg_RunII_current60_v02_all.root",
# baseDir+"cosmicBeamData_v2/cosmicAna_beam_Pos_RunII_current100_v02_all.root",
# baseDir+"cosmicBeamData_v2/cosmicAna_beam_Pos_RunII_current60_v02_all.root"],
# 'name': "BeamRunIIPiMuE",
# 'title': "Run II Beam #pi/#mu/e",
# 'caption': "Run II Beam #pi/#mu/e",
# 'color': root.kGray+2,
# 'isData': True,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
#{
# 'fn': [baseDir+"cosmicBeamData_v2/cosmicAna_beam_Pos_RunII_current100_v02_all.root",
# baseDir+"cosmicBeamData_v2/cosmicAna_beam_Pos_RunII_current60_v02_all.root"],
# 'name': "BeamRunIIPlusPiMuE",
# 'title': "Run II+ Beam #pi/#mu/e",
# 'caption': "Run II+ Beam #pi/#mu/e",
# 'color': root.kGray+2,
# 'isData': True,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
#{
# 'fn': baseDir+"cosmicBeamData_v2/cosmicAna_beam_Neg_RunII_current60_v02_all.root",
# 'name': "BeamRunIIM60A_PiMuE",
# 'title': "Run II Beam -60 A #pi/#mu/e",
# 'caption': "Run II Beam -60 A #pi/#mu/e",
# 'isData': True,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
#{
# 'fn': baseDir+"cosmicBeamData_v2/cosmicAna_beam_Neg_RunII_current100_v02_all.root",
# 'name': "BeamRunIIM100A_PiMuE",
# 'title': "Run II Beam -100 A #pi/#mu/e",
# 'caption': "Run II Beam -99 A #pi/#mu/e",
# 'isData': True,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
#{
# 'fn': baseDir+"cosmicBeamData_v2/cosmicAna_beam_Pos_RunII_current60_v02_all.root",
# 'addFriend': ["friend", baseDir+"cosmicBeamData_v2/friendTrees/cosmicAna_beam_Pos_RunII_current60_v02_all.root"],
# 'name': "BeamRunIIP60A_PiMuE",
# 'title': "Run II Beam +60 A #pi/#mu/e",
# 'caption': "Run II Beam +60 A #pi/#mu/e",
# 'isData': True,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
{
'fn': baseDir+"cosmicBeamData_v2/new/cosmicAna_beam_Pos_RunII_current100_v02_all.root",
'addFriend': ["friend", baseDir+"cosmicBeamData_v2/new/friendTrees/cosmicAna_beam_Pos_RunII_current100_v02_all.root"],
'name': "BeamRunIIP100A_PiMuE",
'title': "Run II Beam +100 A #pi/#mu/e",
'caption': "Run II Beam +100 A #pi/#mu/e",
'isData': True,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_pip_v6.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_pip_v6.root"],
'name': "BeamMC_pip",
'title': "Beam #pi MC",
'caption': "Beam #pi MC",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_pip_presmear10_v6.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_pip_presmear10_v6.root"],
'name': "BeamMC_pip_presmear10",
'title': "Beam #pi MC 10% Smearing",
'caption': "Beam #pi MC 10% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
#{
# 'fn': baseDir+"cosmicBeamMC/CosmicAna_lariat_PiAbsAndChEx_flat_pip_presmear15_v5.root",
# 'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_pip_presmear15_v5.root"],
# 'name': "BeamMC_pip_presmear15",
# 'title': "Beam #pi MC 15% Smearing",
# 'caption': "Beam #pi MC 15% Smearing",
# 'isData': False,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_pip_presmear20_v6.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_pip_presmear20_v6.root"],
'name': "BeamMC_pip_presmear20",
'title': "Beam #pi MC 20% Smearing",
'caption': "Beam #pi MC 20% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
#{
# 'fn': baseDir+"cosmicBeamMC/CosmicAna_lariat_PiAbsAndChEx_flat_pip_presmear25_v5.root",
# 'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_pip_presmear25_v5.root"],
# 'name': "BeamMC_pip_presmear25",
# 'title': "Beam #pi MC 25% Smearing",
# 'caption': "Beam #pi MC 25% Smearing",
# 'isData': False,
# 'isBeam': True,
# 'cuts': beamPionCuts + beamHitCuts,
#},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_pip_presmear30_v6.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_pip_presmear30_v6.root"],
'name': "BeamMC_pip_presmear30",
'title': "Beam #pi MC 30% Smearing",
'caption': "Beam #pi MC 30% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamPionCuts + beamHitCuts,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
m2SF = 1.
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,1.,2.5],
'var': "primTrkdEdxs*((1.02-1.)*isMC + 1.)",
'cuts': "1",
'normalize': True,
},
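# Note (added): the ((1.02-1.)*isMC + 1.) factor in the var above applies a
# 2% dE/dx scale to MC only (data is multiplied by 1); the cosmic blocks
# below use 1.05 and 0.91 in the same way.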
{
'name': "pWC",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Events / bin",
'binning': [40,100,1100],
'var': "(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkLength",
'xtitle': "Primary Track Length [cm]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "primTrkLength",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkKinInteract",
'xtitle': "Interaction Kinetic Energy [MeV]",
'ytitle': "Events / bin",
'binning': [50,0,800],
'var': "primTrkKinInteract",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkStartTheta",
'xtitle': "Primary TPC Track Start #theta [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "primTrkStartTheta*180/pi",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkStartPhi",
'xtitle': "Primary TPC Track Start #phi [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "primTrkStartPhi*180/pi",
'cuts': "1",
'normalize': True,
},
#{
# 'name': "beamlineMass",
# 'xtitle': "Beamline Mass Squared [1000#times (MeV^{2})]",
# 'ytitle': "Events / bin",
# 'binning': [100,-5e5*m2SF,2e6*m2SF],
# 'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)",
# 'cuts': "1",
# #'normalize': True,
# 'logy': True,
# 'drawvlines':[105.65**2*m2SF,139.6**2*m2SF,493.677**2*m2SF,938.272046**2*m2SF],
#},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_PiMuE_",nMax=NMAX)
histConfigs = [
{
'name': "primTrkdEdxsVbeamlineMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,300,1100,50,1.,2.5],
'var': "primTrkdEdxs*((1.02-1.)*isMC + 1.):(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
},
{
'name': "primTrkdEdxsVResRange",
'xtitle': "Residual Range [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,100,50,1.,2.5],
'var': "primTrkdEdxs*((1.02-1.)*isMC + 1.):primTrkResRanges",
'cuts': "1",
},
{
'name': "primTrkdEdxsVRangeSoFar",
'xtitle': "Track Distance from Start [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,100,50,1.,2.5],
'var': "primTrkdEdxs*((1.02-1.)*isMC + 1.):primTrkRangeSoFars",
'cuts': "1",
},
{
'name': "primTrkLengthVkinWCInTPC",
'xtitle': "Kinetic Energy at TPC Start [MeV]",
'ytitle': "Primary TPC Track Length [cm]",
'binning': [50,0,600,50,0,100],
'var': "primTrkLength:kinWCInTPC",
'cuts': "1",
},
{
'name': "primTrkStartThetaVPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Primary TPC Track #theta [deg]",
'binning': [90,-180,180,90,0,180],
'var': "primTrkStartTheta*180/pi:primTrkStartPhi*180/pi",
'cuts': "1",
},
#{
# 'name': "beamline_TOFVMom",
# 'xtitle': "Beamline Momentum [MeV/c]",
# 'ytitle': "Time Of Flight [ns]",
# 'binning': [100,0,2000,100,0,100],
# 'var': "firstTOF:pWC",
# 'cuts': "1",
# 'normalize': True,
#},
#{
# 'name': "beamline_TOFVMom",
# 'xtitle': "Beamline Momentum [MeV/c]",
# 'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'binning': [100,100,1100,50,1,3.5],
# 'var': "primTrkdEdxs:pWC",
# 'cuts': "1",
#},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_PiMuE_",nMax=NMAX)
########################################################
## Beam Protons Definitions ##############################
########################################################
fileConfigs = [
{
'fn': baseDir+"cosmicBeamData_v2/new/cosmicAna_beam_Pos_RunII_current100_v02_all.root",
'addFriend': ["friend", baseDir+"cosmicBeamData_v2/new/friendTrees/cosmicAna_beam_Pos_RunII_current100_v02_all.root"],
'name': "BeamRunIIP100A_Proton",
'title': "Run II Beam +100 A p",
'caption': "Run II Beam +100 A p",
'isData': True,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/CosmicAna_lariat_PiAbsAndChEx_flat_p_v5.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_v5.root"],
'name': "BeamMC_pip",
'title': "Beam p MC",
'caption': "Beam p MC",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
{
'fn': baseDir+"cosmicBeamMC/newv5/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear10_v5.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/newv5/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear10_v5.root"],
'name': "BeamMC_p_presmear10",
'title': "Beam p MC 10% Smearing",
'caption': "Beam p MC 10% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
#{
# 'fn': baseDir+"cosmicBeamMC/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear15_v5.root",
# 'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear15_v5.root"],
# 'name': "BeamMC_p_presmear15",
# 'title': "Beam p MC 15% Smearing",
# 'caption': "Beam p MC 15% Smearing",
# 'isData': False,
# 'isBeam': True,
# 'cuts': beamProtonCuts + beamHitCuts,
#},
{
'fn': baseDir+"cosmicBeamMC/newv5/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear20_v5.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/newv5/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear20_v5.root"],
'name': "BeamMC_p_presmear20",
'title': "Beam p MC 20% Smearing",
'caption': "Beam p MC 20% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
#{
# 'fn': baseDir+"cosmicBeamMC/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear25_v5.root",
# 'addFriend': ["friend", baseDir+"cosmicBeamMC/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear25_v5.root"],
# 'name': "BeamMC_p_presmear25",
# 'title': "Beam p MC 25% Smearing",
# 'caption': "Beam p MC 25% Smearing",
# 'isData': False,
# 'isBeam': True,
# 'cuts': beamProtonCuts + beamHitCuts,
#},
{
'fn': baseDir+"cosmicBeamMC/newv5/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear30_v5.root",
'addFriend': ["friend", baseDir+"cosmicBeamMC/newv5/friendTrees/CosmicAna_lariat_PiAbsAndChEx_flat_p_presmear30_v5.root"],
'name': "BeamMC_p_presmear30",
'title': "Beam p MC 30% Smearing",
'caption': "Beam p MC 30% Smearing",
'isData': False,
'isBeam': True,
'cuts': beamProtonCuts + beamHitCuts,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,0,10.],
'var': "primTrkdEdxs",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkdEdxs_zoom4",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [50,3,8.],
'var': "primTrkdEdxs",
'cuts': "1",
'normalize': True,
},
{
'name': "pWC",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Events / bin",
'binning': [40,0,2000],
'var': "(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkKinInteract",
'xtitle': "Interaction Kinetic Energy [MeV]",
'ytitle': "Events / bin",
'binning': [50,0,800],
'var': "primTrkKinInteractProton",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkStartTheta",
'xtitle': "Primary TPC Track Start #theta [deg]",
'ytitle': "Events / bin",
'binning': [180,0,180],
'var': "primTrkStartTheta*180/pi",
'cuts': "1",
'normalize': True,
},
{
'name': "primTrkStartPhi",
'xtitle': "Primary TPC Track Start #phi [deg]",
'ytitle': "Events / bin",
'binning': [180,-180,180],
'var': "primTrkStartPhi*180/pi",
'cuts': "1",
'normalize': True,
},
#{
# 'name': "beamlineMass",
# 'xtitle': "Beamline Mass Squared [1000#times (MeV^{2})]",
# 'ytitle': "Events / bin",
# 'binning': [100,-5e5*m2SF,2e6*m2SF],
# 'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)",
# 'cuts': "1",
# #'normalize': True,
# 'logy': True,
# 'drawvlines':[105.65**2*m2SF,139.6**2*m2SF,493.677**2*m2SF,938.272046**2*m2SF],
#},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_P_",nMax=NMAX)
histConfigs = [
{
'name': "primTrkdEdxsVbeamlineMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,300,1100,100,0.,10.],
'var': "primTrkdEdxs:(!isMC)*pWC+isMC*trueStartMom",
'cuts': "1",
},
{
'name': "beamline_TOFVMom",
'xtitle': "Beamline Momentum [MeV/c]",
'ytitle': "Time of Flight [ns]",
'binning': [100,100,1100,100,0,100],
'var': "firstTOF:pWC",
'cuts': "1",
},
{
'name': "primTrkdEdxsVResRange",
'xtitle': "Residual Range [cm]",
'ytitle': "Primary TPC Track dE/dx [MeV/cm]",
'binning': [50,0,100,50,1.,2.5],
'var': "primTrkdEdxs*((1.02-1.)*isMC + 1.):primTrkResRanges",
'cuts': "1",
},
{
'name': "primTrkLengthVkinWCInTPCProton",
'xtitle': "Kinetic Energy at TPC Start [MeV]",
'ytitle': "Primary TPC Track Length [cm]",
'binning': [50,0,600,50,0,100],
'var': "primTrkLength:kinWCInTPCProton",
'cuts': "1",
},
{
'name': "primTrkStartThetaVPhi",
'xtitle': "Primary TPC Track #phi [deg]",
'ytitle': "Primary TPC Track #theta [deg]",
'binning': [90,-180,180,90,0,180],
'var': "primTrkStartTheta*180/pi:primTrkStartPhi*180/pi",
'cuts': "1",
},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_P_",nMax=NMAX)
########################################################
## Cosmics Definitions #################################
########################################################
fileConfigs = [
{
'fn': [baseDir+"cosmicsManyRecos/Cosmics_RIIN100.root",
baseDir+"cosmicsManyRecos/Cosmics_RIIP100.root",
baseDir+"cosmicsManyRecos/Cosmics_RIIN60.root",
baseDir+"cosmicsManyRecos/Cosmics_RIIP60.root"],
'name': "CosmicsRunII",
'title': "Run II Cosmics",
'caption': "Run II Cosmics",
'isData': True,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_v04.root",
'name': "CosmicMC",
'title': "Cosmic MC",
'caption': "Cosmic MC",
'isData': False,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_smearing10_v01.root",
'name': "CosmicMC_presmear10perc",
'title': "Cosmic MC Pre-smear 10% ",
'caption': "Cosmic MC Pre-smear 10%",
'isData': False,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_smearing20_v01.root",
'name': "CosmicMC_presmear20perc",
'title': "Cosmic MC Pre-smear 20% ",
'caption': "Cosmic MC Pre-smear 20%",
'isData': False,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_smearing30_v01.root",
'name': "CosmicMC_presmear30perc",
'title': "Cosmic MC Pre-smear 30% ",
'caption': "Cosmic MC Pre-smear 30%",
'isData': False,
},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing40_v01.root",
# 'name': "CosmicMC_presmear40perc",
# 'title': "Cosmic MC Pre-smear 40% ",
# 'caption': "Cosmic MC Pre-smear 40%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing45_v01.root",
# 'name': "CosmicMC_presmear45perc",
# 'title': "Cosmic MC Pre-smear 45% ",
# 'caption': "Cosmic MC Pre-smear 45%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing50_v01.root",
# 'name': "CosmicMC_presmear50perc",
# 'title': "Cosmic MC Pre-smear 50% ",
# 'caption': "Cosmic MC Pre-smear 50%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing55_v01.root",
# 'name': "CosmicMC_presmear55perc",
# 'title': "Cosmic MC Pre-smear 55% ",
# 'caption': "Cosmic MC Pre-smear 55%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing60_v01.root",
# 'name': "CosmicMC_presmear60perc",
# 'title': "Cosmic MC Pre-smear 60% ",
# 'caption': "Cosmic MC Pre-smear 60%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing70_v01.root",
# 'name': "CosmicMC_presmear70perc",
# 'title': "Cosmic MC Pre-smear 70% ",
# 'caption': "Cosmic MC Pre-smear 70%",
# 'isData': False,
#},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,1.,3.5],
'var': "primTrkdEdxs*((1.05-1.)*isMC + 1.)",
'cuts': "1"+cosmicPhiGeq0Cuts,
'normalize': True,
'caption':"Cosmics #phi #geq 0",
},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_Cosmic_phiGeq0_",nMax=NMAX)
fileConfigs = [
{
'fn': [baseDir+"cosmicsManyRecos/Cosmics_RIIN100.root",
baseDir+"cosmicsManyRecos/Cosmics_RIIP100.root",
baseDir+"cosmicsManyRecos/Cosmics_RIIN60.root",
baseDir+"cosmicsManyRecos/Cosmics_RIIP60.root"],
'name': "CosmicsRunII",
'title': "Run II Cosmics",
'caption': "Run II Cosmics",
'isData': True,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_v04.root",
'name': "CosmicMC",
'title': "Cosmic MC",
'caption': "Cosmic MC",
'isData': False,
},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing10_v01.root",
# 'name': "CosmicMC_presmear10perc",
# 'title': "Cosmic MC Pre-smear 10% ",
# 'caption': "Cosmic MC Pre-smear 10%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing20_v01.root",
# 'name': "CosmicMC_presmear20perc",
# 'title': "Cosmic MC Pre-smear 20% ",
# 'caption': "Cosmic MC Pre-smear 20%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing30_v01.root",
# 'name': "CosmicMC_presmear30perc",
# 'title': "Cosmic MC Pre-smear 30% ",
# 'caption': "Cosmic MC Pre-smear 30%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing40_v01.root",
# 'name': "CosmicMC_presmear40perc",
# 'title': "Cosmic MC Pre-smear 40% ",
# 'caption': "Cosmic MC Pre-smear 40%",
# 'isData': False,
#},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing45_v01.root",
# 'name': "CosmicMC_presmear45perc",
# 'title': "Cosmic MC Pre-smear 45% ",
# 'caption': "Cosmic MC Pre-smear 45%",
# 'isData': False,
#},
{
'fn': baseDir+"cosmicMC/cosmicAna_smearing50_v01.root",
'name': "CosmicMC_presmear50perc",
'title': "Cosmic MC Pre-smear 50% ",
'caption': "Cosmic MC Pre-smear 50%",
'isData': False,
},
#{
# 'fn': baseDir+"cosmicMC/cosmicAna_smearing55_v01.root",
# 'name': "CosmicMC_presmear55perc",
# 'title': "Cosmic MC Pre-smear 55% ",
# 'caption': "Cosmic MC Pre-smear 55%",
# 'isData': False,
#},
{
'fn': baseDir+"cosmicMC/cosmicAna_smearing60_v01.root",
'name': "CosmicMC_presmear60perc",
'title': "Cosmic MC Pre-smear 60% ",
'caption': "Cosmic MC Pre-smear 60%",
'isData': False,
},
{
'fn': baseDir+"cosmicMC/cosmicAna_smearing70_v01.root",
'name': "CosmicMC_presmear70perc",
'title': "Cosmic MC Pre-smear 70% ",
'caption': "Cosmic MC Pre-smear 70%",
'isData': False,
},
]
for i in range(len(fileConfigs)):
fileConfigs[i]['color'] = COLORLIST[i]
histConfigs = [
{
'name': "primTrkdEdxs",
'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
'ytitle': "Hits / bin",
'binning': [100,1.,3.5],
'var': "primTrkdEdxs*((0.91-1.)*isMC + 1.)",
'cuts': "1"+cosmicPhiLt0Cuts,
'normalize': True,
'caption':"Cosmics #phi < 0",
},
]
plotManyFilesOnePlot(fileConfigs,histConfigs,c,"cosmicanalyzer/tree",outPrefix="CompareSmearing_Cosmic_phiLt0_",nMax=NMAX)
"""
Attaching file cosmicBeamMC/CosmicAna_pip_v6.root as _file0...
root [1] new TBrowser
(class TBrowser*)0x25a8520
root [2] ******************************************************************************
*Tree :tree : tree *
*Entries : 75516 : Total = 1475308613 bytes File Size = 1004441546 *
* : : Tree compression factor = 1.47 *
******************************************************************************
*Br 0 :isMC : isMC/O *
*Entries : 75516 : Total Size= 121846 bytes File Size = 46552 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.39 *
*............................................................................*
*Br 1 :runNumber : runNumber/i *
*Entries : 75516 : Total Size= 350950 bytes File Size = 53130 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.41 *
*............................................................................*
*Br 2 :subRunNumber : subRunNumber/i *
*Entries : 75516 : Total Size= 352480 bytes File Size = 54648 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.26 *
*............................................................................*
*Br 3 :eventNumber : eventNumber/i *
*Entries : 75516 : Total Size= 351970 bytes File Size = 169510 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.01 *
*............................................................................*
*Br 4 :nWCTracks : nWCTracks/i *
*Entries : 75516 : Total Size= 350950 bytes File Size = 51612 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.60 *
*............................................................................*
*Br 5 :xWC : xWC/F *
*Entries : 75516 : Total Size= 347890 bytes File Size = 50094 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.74 *
*............................................................................*
*Br 6 :yWC : yWC/F *
*Entries : 75516 : Total Size= 347890 bytes File Size = 50094 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.74 *
*............................................................................*
*Br 7 :thetaWC : thetaWC/F *
*Entries : 75516 : Total Size= 349930 bytes File Size = 52118 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.51 *
*............................................................................*
*Br 8 :phiWC : phiWC/F *
*Entries : 75516 : Total Size= 348910 bytes File Size = 51106 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.62 *
*............................................................................*
*Br 9 :pzWC : pzWC/F *
*Entries : 75516 : Total Size= 348400 bytes File Size = 337921 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 10 :pWC : pWC/F *
*Entries : 75516 : Total Size= 347890 bytes File Size = 337421 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 11 :eWC : eWC/F *
*Entries : 75516 : Total Size= 347890 bytes File Size = 335760 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.01 *
*............................................................................*
*Br 12 :kinWC : kinWC/F *
*Entries : 75516 : Total Size= 348910 bytes File Size = 338495 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 13 :kinWCInTPC : kinWCInTPC/F *
*Entries : 75516 : Total Size= 351460 bytes File Size = 341025 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 14 :eWCProton : eWCProton/F *
*Entries : 75516 : Total Size= 350950 bytes File Size = 327853 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.04 *
*............................................................................*
*Br 15 :kinWCProton : kinWCProton/F *
*Entries : 75516 : Total Size= 351970 bytes File Size = 341532 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 16 :kinWCInTPCProton : kinWCInTPCProton/F *
*Entries : 75516 : Total Size= 354520 bytes File Size = 344062 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 17 :yKinkWC : yKinkWC/F *
*Entries : 75516 : Total Size= 349930 bytes File Size = 52118 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.51 *
*............................................................................*
*Br 18 :nHitsWC : nHitsWC/i *
*Entries : 75516 : Total Size= 349930 bytes File Size = 50600 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.71 *
*............................................................................*
*Br 19 :xWC4Hit : xWC4Hit/F *
*Entries : 75516 : Total Size= 349930 bytes File Size = 52118 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.51 *
*............................................................................*
*Br 20 :yWC4Hit : yWC4Hit/F *
*Entries : 75516 : Total Size= 349930 bytes File Size = 52118 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.51 *
*............................................................................*
*Br 21 :zWC4Hit : zWC4Hit/F *
*Entries : 75516 : Total Size= 349930 bytes File Size = 52118 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.51 *
*............................................................................*
*Br 22 :nTOFs : nTOFs/i *
*Entries : 75516 : Total Size= 348910 bytes File Size = 49588 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.83 *
*............................................................................*
*Br 23 :TOFs : TOFs[nTOFs]/F *
*Entries : 75516 : Total Size= 352539 bytes File Size = 52624 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.50 *
*............................................................................*
*Br 24 :TOFTimeStamps : TOFTimeStamps[nTOFs]/i *
*Entries : 75516 : Total Size= 357122 bytes File Size = 57178 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.06 *
*............................................................................*
*Br 25 :firstTOF : firstTOF/F *
*Entries : 75516 : Total Size= 350440 bytes File Size = 52624 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.46 *
*............................................................................*
*Br 26 :triggerBits : triggerBits/i *
*Entries : 75516 : Total Size= 351970 bytes File Size = 52624 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.49 *
*............................................................................*
*Br 27 :triggerBEAMON : triggerBEAMON/O *
*Entries : 75516 : Total Size= 126436 bytes File Size = 51106 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.27 *
*............................................................................*
*Br 28 :triggerCOSMICON : triggerCOSMICON/O *
*Entries : 75516 : Total Size= 127456 bytes File Size = 52118 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.25 *
*............................................................................*
*Br 29 :triggerCOSMIC : triggerCOSMIC/O *
*Entries : 75516 : Total Size= 126436 bytes File Size = 51106 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.27 *
*............................................................................*
*Br 30 :triggerUSTOF : triggerUSTOF/O *
*Entries : 75516 : Total Size= 125926 bytes File Size = 50600 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.28 *
*............................................................................*
*Br 31 :triggerDSTOF : triggerDSTOF/O *
*Entries : 75516 : Total Size= 125926 bytes File Size = 50600 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.28 *
*............................................................................*
*Br 32 :triggerWCCOINC3OF4 : triggerWCCOINC3OF4/O *
*Entries : 75516 : Total Size= 128986 bytes File Size = 53636 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.21 *
*............................................................................*
*Br 33 :triggerMICHEL : triggerMICHEL/O *
*Entries : 75516 : Total Size= 126436 bytes File Size = 51106 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.27 *
*............................................................................*
*Br 34 :trueEndProcess : trueEndProcess/I *
*Entries : 75516 : Total Size= 353500 bytes File Size = 102367 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 3.35 *
*............................................................................*
*Br 35 :trueNDaughters : trueNDaughters/i *
*Entries : 75516 : Total Size= 353500 bytes File Size = 144757 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.37 *
*............................................................................*
*Br 36 :trueNSecondaryChPions : trueNSecondaryChPions/i *
*Entries : 75516 : Total Size= 357070 bytes File Size = 100318 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 3.45 *
*............................................................................*
*Br 37 :trueNSecondaryPiZeros : trueNSecondaryPiZeros/i *
*Entries : 75516 : Total Size= 357070 bytes File Size = 87432 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 3.96 *
*............................................................................*
*Br 38 :trueNSecondaryProtons : trueNSecondaryProtons/i *
*Entries : 75516 : Total Size= 357070 bytes File Size = 125093 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.77 *
*............................................................................*
*Br 39 :trueStartX : trueStartX/F *
*Entries : 75516 : Total Size= 351460 bytes File Size = 337471 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.01 *
*............................................................................*
*Br 40 :trueStartY : trueStartY/F *
*Entries : 75516 : Total Size= 351460 bytes File Size = 341015 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 41 :trueStartZ : trueStartZ/F *
*Entries : 75516 : Total Size= 351460 bytes File Size = 53636 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.36 *
*............................................................................*
*Br 42 :trueEndX : trueEndX/F *
*Entries : 75516 : Total Size= 350440 bytes File Size = 339206 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 43 :trueEndY : trueEndY/F *
*Entries : 75516 : Total Size= 350440 bytes File Size = 340014 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 44 :trueEndZ : trueEndZ/F *
*Entries : 75516 : Total Size= 350440 bytes File Size = 339956 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 45 :trueStartTheta : trueStartTheta/F *
*Entries : 75516 : Total Size= 353500 bytes File Size = 341427 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 46 :trueStartPhi : trueStartPhi/F *
*Entries : 75516 : Total Size= 352480 bytes File Size = 339564 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.01 *
*............................................................................*
*Br 47 :trueStartMom : trueStartMom/F *
*Entries : 75516 : Total Size= 352480 bytes File Size = 341975 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 48 :trueStartE : trueStartE/F *
*Entries : 75516 : Total Size= 351460 bytes File Size = 339301 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.01 *
*............................................................................*
*Br 49 :trueEndMom : trueEndMom/F *
*Entries : 75516 : Total Size= 351460 bytes File Size = 135192 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.52 *
*............................................................................*
*Br 50 :trueEndE : trueEndE/F *
*Entries : 75516 : Total Size= 350440 bytes File Size = 132777 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.56 *
*............................................................................*
*Br 51 :trueSecondToEndMom : trueSecondToEndMom/F *
*Entries : 75516 : Total Size= 355540 bytes File Size = 345074 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.00 *
*............................................................................*
*Br 52 :trueSecondToEndE : trueSecondToEndE/F *
*Entries : 75516 : Total Size= 354520 bytes File Size = 340549 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.01 *
*............................................................................*
*Br 53 :trueHitCosmic1 : trueHitCosmic1/O *
*Entries : 75516 : Total Size= 126946 bytes File Size = 51618 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.26 *
*............................................................................*
*Br 54 :trueHitCosmic2 : trueHitCosmic2/O *
*Entries : 75516 : Total Size= 126946 bytes File Size = 51710 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.25 *
*............................................................................*
*Br 55 :trueHitCosmic3 : trueHitCosmic3/O *
*Entries : 75516 : Total Size= 126946 bytes File Size = 51625 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.26 *
*............................................................................*
*Br 56 :trueHitCosmic4 : trueHitCosmic4/O *
*Entries : 75516 : Total Size= 126946 bytes File Size = 51809 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.25 *
*............................................................................*
*Br 57 :nTracks : nTracks/i *
*Entries : 75516 : Total Size= 349930 bytes File Size = 117066 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.90 *
*............................................................................*
*Br 58 :nTracksInFirstZ : nTracksInFirstZ[95]/i *
*Entries : 75516 : Total Size= 28799646 bytes File Size = 750046 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 38.37 *
*............................................................................*
*Br 59 :nTracksLengthLt : nTracksLengthLt[20]/i *
*Entries : 75516 : Total Size= 6093234 bytes File Size = 344164 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 17.67 *
*............................................................................*
*Br 60 :trackStartX : trackStartX[nTracks]/F *
*Entries : 75516 : Total Size= 885177 bytes File Size = 679830 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.29 *
*............................................................................*
*Br 61 :trackStartY : trackStartY[nTracks]/F *
*Entries : 75516 : Total Size= 885177 bytes File Size = 705548 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.24 *
*............................................................................*
*Br 62 :trackStartZ : trackStartZ[nTracks]/F *
*Entries : 75516 : Total Size= 885177 bytes File Size = 704228 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.24 *
*............................................................................*
*Br 63 :trackStartTheta : trackStartTheta[nTracks]/F *
*Entries : 75516 : Total Size= 887217 bytes File Size = 698109 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.26 *
*............................................................................*
*Br 64 :trackStartPhi : trackStartPhi[nTracks]/F *
*Entries : 75516 : Total Size= 886197 bytes File Size = 701348 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.25 *
*............................................................................*
*Br 65 :trackEndX : trackEndX[nTracks]/F *
*Entries : 75516 : Total Size= 884157 bytes File Size = 684812 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.28 *
*............................................................................*
*Br 66 :trackEndY : trackEndY[nTracks]/F *
*Entries : 75516 : Total Size= 884157 bytes File Size = 702761 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.24 *
*............................................................................*
*Br 67 :trackEndZ : trackEndZ[nTracks]/F *
*Entries : 75516 : Total Size= 884157 bytes File Size = 681102 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.28 *
*............................................................................*
*Br 68 :trackLength : trackLength[nTracks]/F *
*Entries : 75516 : Total Size= 885177 bytes File Size = 694236 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.26 *
*............................................................................*
*Br 69 :trackXFront : trackXFront[nTracks]/F *
*Entries : 75516 : Total Size= 885177 bytes File Size = 191958 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.56 *
*............................................................................*
*Br 70 :trackYFront : trackYFront[nTracks]/F *
*Entries : 75516 : Total Size= 885177 bytes File Size = 191958 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.56 *
*............................................................................*
*Br 71 :trackLLHPion : trackLLHPion[nTracks]/F *
*Entries : 75516 : Total Size= 885687 bytes File Size = 191999 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.56 *
*............................................................................*
*Br 72 :trackLLHProton : trackLLHProton[nTracks]/F *
*Entries : 75516 : Total Size= 886707 bytes File Size = 193014 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.54 *
*............................................................................*
*Br 73 :trackLLHMuon : trackLLHMuon[nTracks]/F *
*Entries : 75516 : Total Size= 885687 bytes File Size = 191999 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.56 *
*............................................................................*
*Br 74 :trackLLHKaon : trackLLHKaon[nTracks]/F *
*Entries : 75516 : Total Size= 885687 bytes File Size = 191999 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.56 *
*............................................................................*
*Br 75 :trackPIDA : trackPIDA[nTracks]/F *
*Entries : 75516 : Total Size= 884157 bytes File Size = 189183 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.62 *
*............................................................................*
*Br 76 :trackStartDistToPrimTrkEnd : trackStartDistToPrimTrkEnd[nTracks]/F *
*Entries : 75516 : Total Size= 892827 bytes File Size = 199191 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.43 *
*............................................................................*
*Br 77 :trackEndDistToPrimTrkEnd : trackEndDistToPrimTrkEnd[nTracks]/F *
*Entries : 75516 : Total Size= 891807 bytes File Size = 198039 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.45 *
*............................................................................*
*Br 78 :iBestMatch : iBestMatch/I *
*Entries : 75516 : Total Size= 351460 bytes File Size = 97404 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 3.50 *
*............................................................................*
*Br 79 :trackMatchDeltaX : trackMatchDeltaX[nTracks]/F *
*Entries : 75516 : Total Size= 887727 bytes File Size = 193952 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.52 *
*............................................................................*
*Br 80 :trackMatchDeltaY : trackMatchDeltaY[nTracks]/F *
*Entries : 75516 : Total Size= 887727 bytes File Size = 193952 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.52 *
*............................................................................*
*Br 81 :trackMatchDeltaR : trackMatchDeltaR[nTracks]/F *
*Entries : 75516 : Total Size= 887727 bytes File Size = 193952 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.52 *
*............................................................................*
*Br 82 :trackMatchDeltaAngle : trackMatchDeltaAngle[nTracks]/F *
*Entries : 75516 : Total Size= 889767 bytes File Size = 195961 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.49 *
*............................................................................*
*Br 83 :trackMatchLowestZ : trackMatchLowestZ[nTracks]/F *
*Entries : 75516 : Total Size= 888237 bytes File Size = 193213 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 4.54 *
*............................................................................*
*Br 84 :nMatchedTracks : nMatchedTracks/i *
*Entries : 75516 : Total Size= 353500 bytes File Size = 54142 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.34 *
*............................................................................*
*Br 85 :primTrkStartMomTrking : primTrkStartMomTrking/F *
*Entries : 75516 : Total Size= 357070 bytes File Size = 96848 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 3.58 *
*............................................................................*
*Br 86 :primTrkStartTheta : primTrkStartTheta/F *
*Entries : 75516 : Total Size= 355030 bytes File Size = 315810 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.09 *
*............................................................................*
*Br 87 :primTrkStartPhi : primTrkStartPhi/F *
*Entries : 75516 : Total Size= 354010 bytes File Size = 317050 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.08 *
*............................................................................*
*Br 88 :primTrkLength : primTrkLength/F *
*Entries : 75516 : Total Size= 352990 bytes File Size = 307805 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.11 *
*............................................................................*
*Br 89 :primTrkStartX : primTrkStartX/F *
*Entries : 75516 : Total Size= 352990 bytes File Size = 304554 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 90 :primTrkStartY : primTrkStartY/F *
*Entries : 75516 : Total Size= 352990 bytes File Size = 318981 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.07 *
*............................................................................*
*Br 91 :primTrkStartZ : primTrkStartZ/F *
*Entries : 75516 : Total Size= 352990 bytes File Size = 316337 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.08 *
*............................................................................*
*Br 92 :primTrkEndX : primTrkEndX/F *
*Entries : 75516 : Total Size= 351970 bytes File Size = 310059 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.10 *
*............................................................................*
*Br 93 :primTrkEndY : primTrkEndY/F *
*Entries : 75516 : Total Size= 351970 bytes File Size = 318259 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.07 *
*............................................................................*
*Br 94 :primTrkEndZ : primTrkEndZ/F *
*Entries : 75516 : Total Size= 351970 bytes File Size = 305591 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 95 :primTrkPitchC : primTrkPitchC/F *
*Entries : 75516 : Total Size= 352990 bytes File Size = 304407 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.13 *
*............................................................................*
*Br 96 :primTrkPitchCInduct : primTrkPitchCInduct/F *
*Entries : 75516 : Total Size= 356050 bytes File Size = 307208 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 97 :primTrkCaloRange : primTrkCaloRange/F *
*Entries : 75516 : Total Size= 354520 bytes File Size = 309890 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.11 *
*............................................................................*
*Br 98 :primTrkEndInFid : primTrkEndInFid/O *
*Entries : 75516 : Total Size= 127456 bytes File Size = 75498 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.55 *
*............................................................................*
*Br 99 :primTrkLLHPion : primTrkLLHPion/F *
*Entries : 75516 : Total Size= 353500 bytes File Size = 55660 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.16 *
*............................................................................*
*Br 100 :primTrkLLHProton : primTrkLLHProton/F *
*Entries : 75516 : Total Size= 354520 bytes File Size = 56672 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.07 *
*............................................................................*
*Br 101 :primTrkLLHMuon : primTrkLLHMuon/F *
*Entries : 75516 : Total Size= 353500 bytes File Size = 55660 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.16 *
*............................................................................*
*Br 102 :primTrkLLHKaon : primTrkLLHKaon/F *
*Entries : 75516 : Total Size= 353500 bytes File Size = 55660 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.16 *
*............................................................................*
*Br 103 :primTrkPIDA : primTrkPIDA/F *
*Entries : 75516 : Total Size= 351970 bytes File Size = 54142 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 6.31 *
*............................................................................*
*Br 104 :primTrkKinInteract : primTrkKinInteract/F *
*Entries : 75516 : Total Size= 355540 bytes File Size = 230930 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.49 *
*............................................................................*
*Br 105 :primTrkKinInteractProton : primTrkKinInteractProton/F *
*Entries : 75516 : Total Size= 358600 bytes File Size = 237470 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.47 *
*............................................................................*
*Br 106 :primTrkdEdxs : vector<float> *
*Entries : 75516 : Total Size= 25324686 bytes File Size = 22228923 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.14 *
*............................................................................*
*Br 107 :primTrkdQdxs : vector<float> *
*Entries : 75516 : Total Size= 25324686 bytes File Size = 21886083 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.16 *
*............................................................................*
*Br 108 :primTrkResRanges : vector<float> *
*Entries : 75516 : Total Size= 25328750 bytes File Size = 22640955 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 109 :primTrkRangeSoFars : vector<float> *
*Entries : 75516 : Total Size= 25330782 bytes File Size = 22665083 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 110 :primTrkPitches : vector<float> *
*Entries : 75516 : Total Size= 25326718 bytes File Size = 4272761 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 5.92 *
*............................................................................*
*Br 111 :primTrkIBackwards : vector<float> *
*Entries : 75516 : Total Size= 25329766 bytes File Size = 2356970 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 10.74 *
*............................................................................*
*Br 112 :primTrkXs : vector<float> *
*Entries : 75516 : Total Size= 25321638 bytes File Size = 21917104 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.15 *
*............................................................................*
*Br 113 :primTrkYs : vector<float> *
*Entries : 75516 : Total Size= 25321638 bytes File Size = 23157199 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.09 *
*............................................................................*
*Br 114 :primTrkZs : vector<float> *
*Entries : 75516 : Total Size= 25321638 bytes File Size = 22598130 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 115 :primTrkKins : vector<float> *
*Entries : 75516 : Total Size= 25323670 bytes File Size = 22204960 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.14 *
*............................................................................*
*Br 116 :primTrkKinsProton : vector<float> *
*Entries : 75516 : Total Size= 25329766 bytes File Size = 22811761 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.11 *
*............................................................................*
*Br 117 :primTrkdEdxsInduct : vector<float> *
*Entries : 75516 : Total Size= 25677210 bytes File Size = 21953213 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.17 *
*............................................................................*
*Br 118 :primTrkdQdxsInduct : vector<float> *
*Entries : 75516 : Total Size= 25677210 bytes File Size = 22477870 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.14 *
*............................................................................*
*Br 119 :primTrkResRangesInduct : vector<float> *
*Entries : 75516 : Total Size= 25681274 bytes File Size = 22967663 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 120 :primTrkPitchesInduct : vector<float> *
*Entries : 75516 : Total Size= 25679242 bytes File Size = 4311214 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 5.95 *
*............................................................................*
*Br 121 :primTrkIBackwardsInduct : vector<float> *
*Entries : 75516 : Total Size= 25682290 bytes File Size = 2384807 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 10.76 *
*............................................................................*
*Br 122 :primTrkXsInduct : vector<float> *
*Entries : 75516 : Total Size= 25674162 bytes File Size = 22228755 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.15 *
*............................................................................*
*Br 123 :primTrkYsInduct : vector<float> *
*Entries : 75516 : Total Size= 25674162 bytes File Size = 23484670 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.09 *
*............................................................................*
*Br 124 :primTrkZsInduct : vector<float> *
*Entries : 75516 : Total Size= 25674162 bytes File Size = 22919452 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 125 :primTrkTrueWiresInduct : vector<unsigned int> *
*Entries : 75516 : Total Size= 25681274 bytes File Size = 3044396 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 8.43 *
*............................................................................*
*Br 126 :primTrkTruedEdxs : vector<float> *
*Entries : 75516 : Total Size= 25328750 bytes File Size = 22230340 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.14 *
*............................................................................*
*Br 127 :primTrkTruedQdxs : vector<float> *
*Entries : 75516 : Total Size= 25328750 bytes File Size = 21664935 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.17 *
*............................................................................*
*Br 128 :primTrkTruedEs : vector<float> *
*Entries : 75516 : Total Size= 25326718 bytes File Size = 21849756 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.16 *
*............................................................................*
*Br 129 :primTrkTruedQs : vector<float> *
*Entries : 75516 : Total Size= 25326718 bytes File Size = 22065466 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.15 *
*............................................................................*
*Br 130 :primTrkTrueResRanges : vector<float> *
*Entries : 75516 : Total Size= 25332814 bytes File Size = 22644988 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 1.12 *
*............................................................................*
*Br 131 :primTrkTrueWires : vector<unsigned int> *
*Entries : 75516 : Total Size= 25328750 bytes File Size = 3056834 *
*Baskets : 1012 : Basket Size= 32000 bytes Compression= 8.28 *
*............................................................................*
*Br 132 :primTrkHitIsCollections : vector<bool> *
*Entries : 75516 : Total Size= 13283571 bytes File Size = 1894281 *
*Baskets : 508 : Basket Size= 32000 bytes Compression= 7.01 *
*............................................................................*
*Br 133 :primTrkHitWires : vector<unsigned int> *
*Entries : 75516 : Total Size= 49930584 bytes File Size = 18713428 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 2.67 *
*............................................................................*
*Br 134 :primTrkHitStartTimes : vector<int> *
*Entries : 75516 : Total Size= 49940099 bytes File Size = 20183361 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 2.47 *
*............................................................................*
*Br 135 :primTrkHitEndTimes : vector<int> *
*Entries : 75516 : Total Size= 49936293 bytes File Size = 21067973 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 2.37 *
*............................................................................*
*Br 136 :primTrkHitPeakTimes : vector<float> *
*Entries : 75516 : Total Size= 49938196 bytes File Size = 42888417 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 1.16 *
*............................................................................*
*Br 137 :primTrkHitSigPeakTimes : vector<float> *
*Entries : 75516 : Total Size= 49944022 bytes File Size = 43439631 *
*Baskets : 1900 : Basket Size= 32000 bytes Compression= 1.15 *
*............................................................................*
*Br 138 :primTrkHitRMSs : vector<float> *
*Entries : 75516 : Total Size= 49928681 bytes File Size = 41532996 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 1.20 *
*............................................................................*
*Br 139 :primTrkHitAmps : vector<float> *
*Entries : 75516 : Total Size= 49928681 bytes File Size = 43268220 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 1.15 *
*............................................................................*
*Br 140 :primTrkHitSigAmps : vector<float> *
*Entries : 75516 : Total Size= 49934390 bytes File Size = 38971113 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 1.28 *
*............................................................................*
*Br 141 :primTrkHitIntegrals : vector<float> *
*Entries : 75516 : Total Size= 49938196 bytes File Size = 44176360 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 1.13 *
*............................................................................*
*Br 142 :primTrkHitSigIntegrals : vector<float> *
*Entries : 75516 : Total Size= 49944022 bytes File Size = 43441505 *
*Baskets : 1900 : Basket Size= 32000 bytes Compression= 1.15 *
*............................................................................*
*Br 143 :primTrkHitMultiplicities : vector<int> *
*Entries : 75516 : Total Size= 49947830 bytes File Size = 1991848 *
*Baskets : 1900 : Basket Size= 32000 bytes Compression= 25.06 *
*............................................................................*
*Br 144 :primTrkHitChi2NDFs : vector<float> *
*Entries : 75516 : Total Size= 49936293 bytes File Size = 45677509 *
*Baskets : 1899 : Basket Size= 32000 bytes Compression= 1.09 *
*............................................................................*
*Br 145 :primMCdEdxs : vector<float> *
*Entries : 75516 : Total Size= 17070144 bytes File Size = 14754075 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.16 *
*............................................................................*
*Br 146 :primMCXs : vector<float> *
*Entries : 75516 : Total Size= 17067375 bytes File Size = 14984173 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.14 *
*............................................................................*
*Br 147 :primMCYs : vector<float> *
*Entries : 75516 : Total Size= 17067375 bytes File Size = 15621166 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.09 *
*............................................................................*
*Br 148 :primMCZs : vector<float> *
*Entries : 75516 : Total Size= 17067375 bytes File Size = 14062296 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.21 *
*............................................................................*
*Br 149 :primMClastXs : vector<float> *
*Entries : 75516 : Total Size= 17071067 bytes File Size = 14997880 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.14 *
*............................................................................*
*Br 150 :primMClastYs : vector<float> *
*Entries : 75516 : Total Size= 17071067 bytes File Size = 15632479 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.09 *
*............................................................................*
*Br 151 :primMClastZs : vector<float> *
*Entries : 75516 : Total Size= 17071067 bytes File Size = 13886720 *
*Baskets : 919 : Basket Size= 32000 bytes Compression= 1.23 *
*............................................................................*
*Br 152 :enterExitXp : enterExitXp/O *
*Entries : 75516 : Total Size= 125416 bytes File Size = 57788 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.99 *
*............................................................................*
*Br 153 :enterExitXm : enterExitXm/O *
*Entries : 75516 : Total Size= 125416 bytes File Size = 63501 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.81 *
*............................................................................*
*Br 154 :enterExitYp : enterExitYp/O *
*Entries : 75516 : Total Size= 125416 bytes File Size = 54513 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.11 *
*............................................................................*
*Br 155 :enterExitYm : enterExitYm/O *
*Entries : 75516 : Total Size= 125416 bytes File Size = 55012 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 2.09 *
*............................................................................*
*Br 156 :enterExitZp : enterExitZp/O *
*Entries : 75516 : Total Size= 125416 bytes File Size = 70948 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.62 *
*............................................................................*
*Br 157 :enterExitZm : enterExitZm/O *
*Entries : 75516 : Total Size= 125416 bytes File Size = 73162 *
*Baskets : 506 : Basket Size= 32000 bytes Compression= 1.57 *
*............................................................................*
root [2]
"""
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,894 | jhugon/lariatPionAbs | refs/heads/master | /plotEndProcess.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
import copy  # copy.deepcopy is used below; imported explicitly in case helpers does not re-export it
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
#cuts += "*(pWC < 500.)"
cuts += "*(primTrkEndInFid)"
cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
cuts += "*( iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
###
secTrkCuts = "*(trackStartDistToPrimTrkEnd < 2. || trackEndDistToPrimTrkEnd < 2.)"
weightStr = "1"+cuts
nData = 30860.0
logy = True
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
fileConfigs = [
{
#'fn': "piAbs_pip_v5.2.root",
#'addFriend': ["friend", "friendTree_pip_v5.root"],
'fn': "test_pip_piAbsSelector.root",
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'color': root.kBlue-7,
'scaleFactor': 1./35250*nData*0.428/(1.-0.086), #No Cuts
#'scaleFactor': 1./35250*nData*0.428/(1.-0.086)*0.51, # pion, tpc, match cuts
},
{
#'fn': "piAbs_p_v5.2.root",
#'addFriend': ["friend", "friendTree_p_v5.root"],
'fn': "test_p_piAbsSelector.root",
'name': "p",
'title': "proton MC",
'caption': "proton MC",
'color': root.kRed-4,
'scaleFactor': 1./35200*nData*0.162/(1.-0.086), #No Cuts
#'scaleFactor': 1./35200*nData*0.162/(1.-0.086)*0.7216, #proton, tpc, matching
},
{
#'fn': "piAbs_ep_v5.2.root",
#'addFriend': ["friend", "friendTree_ep_v5.root"],
'fn': "test_ep_piAbsSelector.root",
'name': "ep",
'title': "e^{+} MC",
'caption': "e^{+} MC",
'color': root.kGreen,
'scaleFactor': 1./35700*nData*0.301/(1.-0.086), #No Cuts
#'scaleFactor': 1./35700*nData*0.301/(1.-0.086)*0.35, # pion, tpc, match cuts
},
{
#'fn': "piAbs_mup_v5.2.root",
#'addFriend': ["friend", "friendTree_mup_v5.root"],
'fn': "test_mup_piAbsSelector.root",
'name': "mup",
'title': "#mu^{+} MC",
'caption': "#mu^{+} MC",
'color': root.kMagenta-4,
'scaleFactor': 1./35200*nData*0.021/(1.-0.086), #No Cuts
#'scaleFactor': 1./35200*nData*0.021/(1.-0.086)*0.51, # pion, tpc, match cuts
},
{
#'fn': "piAbs_kp_v5.2.root",
#'addFriend': ["friend", "friendTree_kp_v5.root"],
'fn': "test_kp_piAbsSelector.root",
'name': "kp",
'title': "K^{+} MC",
'caption': "K^{+} MC",
'color': root.kOrange-3,
'scaleFactor': 1./35700*nData*0.00057/(1.-0.086), #No Cuts
},
#{
# #'fn': "/pnfs/lariat/scratch/users/jhugon/v06_15_00/piAbsSelector/lariat_PiAbsAndChEx_flat_gam_v4/anahist.root",
# #'addFriend': ["friend", "friendTree_gam_v4.root"],
# 'fn': "test_gam_piAbsSelector.root",
# 'name': "gam",
# 'title': "#gamma MC",
# 'caption': "#gamma MC",
# 'color': root.kOrange-3,
# 'scaleFactor': 2953., #AllWeightsCuts Proton
#},
]
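# A note on the 'scaleFactor' values above: each is
# (1/N_generated) * nData * f_species / (1 - 0.086). The per-species
# factors 0.428 + 0.162 + 0.301 + 0.021 + 0.00057 ~= 0.913, i.e. roughly
# 1 - 0.086, so the division appears to renormalize the species fractions
# to sum to one (an inference from the numbers, not a documented fact).
# Worked example for the pi+ sample:
#   1./35250 * 30860.0 * 0.428 / (1. - 0.086) ~= 0.41 data events per MC event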
histConfigs = [
{
'name': "pWC",
'xtitle': "Momentum from WC [MeV/c]",
'ytitle': "Events / bin",
'binning': [100,0,2000],
'var': "pWC",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
#'printIntegral': True,
},
# {
# 'name': "trackPIDA",
# 'xtitle': "TPC Track PIDA",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,50],
# 'var': "trackPIDA",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "primTrkKins",
# 'xtitle': "Hit Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,1000],
# 'var': "primTrkKins",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# #'printIntegral': True,
# },
#{
# 'name': "primTrkdEdxLast3Hits",
# 'xtitle': "Hit dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,50],
# 'var': "(primTrkIBackwards < 3)*primTrkdEdxs-(primTrkIBackwards >= 3)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# #'printIntegral': True,
#},
#{
# 'name': "primTrkdEdxLast1cm",
# 'xtitle': "Hit dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,50],
# 'var': "(primTrkResRanges < 1.)*primTrkdEdxs-(primTrkResRanges >= 1.)",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# #'printIntegral': True,
#},
{
'name': "primTrkPIDA",
'xtitle': "Primary TPC Track PIDA",
'ytitle': "Events / bin",
'binning': [100,0,50],
'var': "primTrkPIDA",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trackStartDistToPrimTrkEnd",
'xtitle': "TPC Track Start Distance to Primary End [cm]",
'ytitle': "Tracks / bin",
'binning': [40,0,20],
'var': "trackStartDistToPrimTrkEnd",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trackEndDistToPrimTrkEnd",
'xtitle': "TPC Track End Distance to Primary End [cm]",
'ytitle': "Tracks / bin",
'binning': [40,0,20],
'var': "trackEndDistToPrimTrkEnd",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "secTrkLength",
'xtitle': "Secondary TPC Track Length [cm]",
'ytitle': "Tracks / bin",
'binning': [100,-10,100],
'var': "trackLength",
'cuts': weightStr+secTrkCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "secTrkCaloKin",
'xtitle': "Secondary Track Calo Estimate of KE [MeV]",
'ytitle': "Tracks / bin",
'binning': [50,0,2500],
'var': "trackCaloKin",
'cuts': weightStr+secTrkCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianLast3Hits",
'xtitle': "Median dE/dx of last 3 hits [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianLast3Hits",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianLast5Hits",
'xtitle': "Median dE/dx of last 5 hits [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianLast5Hits",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianLast7Hits",
'xtitle': "Median dE/dx of last 7 hits [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianLast7Hits",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL1",
'xtitle': "Median dE/dx of hits RR < 1 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL1",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL3",
'xtitle': "Median dE/dx of hits RR < 3 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL3",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL5",
'xtitle': "Median dE/dx of hits RR < 5 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL5",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL7",
'xtitle': "Median dE/dx of hits RR < 7 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL3",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL3G1",
'xtitle': "Median dE/dx of hits 1 cm < RR < 3 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL3G1",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL5G1",
'xtitle': "Median dE/dx of hits 1 cm < RR < 5 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL5G1",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkdEdxMedianRRL7G1",
'xtitle': "Median dE/dx of hits RR < 7 cm [MeV/cm]",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "primTrkdEdxMedianRRL7G1",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
]
cutList = [
"",
"*(trueEndProcess == 6)",
"*(trueEndProcess == 14)",
"*(trueEndProcess == 15)",
"*(trueEndProcess == 10 || trueEndProcess == 11 || trueEndProcess == 13 || trueEndProcess == 1)",
]
titles = [
"All",
"Decay",
"Stop",
"Leave World",
"Inelastic",
]
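# cutList, titles, and colors are consumed pairwise by zip() in the loop
# below, so index i of each list defines one overlay: e.g.
# "*(trueEndProcess == 6)" is drawn as "Decay". The numeric codes are
# assumed to be the truth end-process enumeration stored by the analyzer.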
colors = [root.kBlue-7, root.kRed-4, root.kGreen, root.kMagenta-4, root.kOrange-3,root.kGray+1]
for histConfig in histConfigs:
name = histConfig["name"]
hcs = []
for cut,title,color in zip(cutList,titles,colors[:len(cutList)]):
hc = copy.deepcopy(histConfig)
hc["cuts"] = histConfig["cuts"]+cut
hc["title"] = title
hc["color"] = color
hcs.append(hc)
plotManyHistsOnePlot(fileConfigs,hcs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix=name+"_")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,895 | jhugon/lariatPionAbs | refs/heads/master | /plotTrueTraj.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
cuts += "*( iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
###
secTrkCuts = "*(trackStartDistToPrimTrkEnd < 2. || trackEndDistToPrimTrkEnd < 2.)"
weightStr = "1"+cuts
nData = 30860.0
logy = True
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
pionFileConfig = {
#'fn': "piAbs_pip_v5.2.root",
#'addFriend': ["friend", "friendTree_pip_v5.root"],
'fn': "test_pip_piAbsSelector.root",
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'color': root.kBlue-7,
'scaleFactor': 1./35250*nData*0.428/(1.-0.086), #No Cuts
#'scaleFactor': 1./35250*nData*0.428/(1.-0.086)*0.51, # pion, tpc, match cuts
}
protonFileConfig = {
#'fn': "piAbs_p_v5.2.root",
#'addFriend': ["friend", "friendTree_p_v5.root"],
'fn': "test_p_piAbsSelector.root",
'name': "p",
'title': "proton MC",
'caption': "proton MC",
'color': root.kRed-4,
'scaleFactor': 1./35200*nData*0.162/(1.-0.086), #No Cuts
#'scaleFactor': 1./35200*nData*0.162/(1.-0.086)*0.7216, #proton, tpc, matching
}
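# The 2D entries below use ROOT's TTree::Draw "y:x" convention:
# "primTrkKins:primTrkKinsTrue" puts primTrkKins on the y axis versus
# primTrkKinsTrue on the x axis, matching each entry's ytitle/xtitle. The
# six-number 'binning' is assumed to unpack as [nx, xlo, xhi, ny, ylo, yhi]
# in the local helpers module, and 'profileXtoo' presumably asks helpers to
# overlay the profile versus x (both assumptions about helpers). For array
# branches the expression is evaluated hit by hit, so e.g.
#   "primTrkKins-primTrkKinsTrue:pWC"
# fills one (pWC, reco - true) point per track hit.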
histConfigs = [
{
'name': "primTrkKins",
'xtitle': "Hit Kinetic Energy [MeV]",
'ytitle': "Hits / bin",
'binning': [50,0,1000],
'var': "primTrkKins",
'cuts': weightStr,
'logy': logy,
},
{
'name': "primTrkKinsTrue",
'xtitle': "True Hit Kinetic Energy [MeV]",
'ytitle': "Hits / bin",
'binning': [50,0,1000],
'var': "primTrkKinsTrue",
'cuts': weightStr,
'logy': logy,
},
{
'name': "primTrkKinsTrueCuts",
'xtitle': "True Hit Kinetic Energy [MeV]",
'ytitle': "Hits / bin",
'binning': [50,0,1000],
'var': "primTrkKinsTrue",
'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0.)",
'logy': logy,
},
{
'name': "primTrkKinsVprimTrkKinsTrue",
'ytitle': "Reco Hit Kinetic Energy [MeV]",
'xtitle': "True Hit Kinetic Energy [MeV]",
'binning': [50,0,1000,50,0,1000],
'var': "primTrkKins:primTrkKinsTrue",
'cuts': weightStr,
'logz': True,
},
{
'name': "primTrkKinsVprimTrkKinsTrueCuts",
'ytitle': "Reco Hit Kinetic Energy [MeV]",
'xtitle': "True Hit Kinetic Energy [MeV]",
'binning': [50,0,1000,50,0,1000],
'var': "primTrkKins:primTrkKinsTrue",
'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0.)",
'logz': True,
},
{
'name': "primTrkKinErrVprimTrkDistToTrueTrajPoint",
'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
'xtitle': "Hit distance to true trajectory point [cm]",
'binning': [40,0,10,50,-100,100],
'var': "primTrkKins-primTrkKinsTrue:primTrkDistToTrueTrajPoint",
'cuts': weightStr,
'logz': True,
},
{
'name': "primTrkKinErrVpWC",
'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
'xtitle': "True Initial Momentum [MeV/c]",
'binning': [100,0,1500,100,20,60],
#'binning': [50,0,1500,50,-1e3,1e3],
'var': "primTrkKins-primTrkKinsTrue:pWC",
'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0.)",
#'logz': True,
'profileXtoo': True,
},
{
'name': "primTrkKinErrFirstHitVpWC",
'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
'xtitle': "True Initial Momentum [MeV/c]",
'binning': [100,0,1500,100,20,60],
#'binning': [50,0,1500,50,-1e3,1e3],
'var': "primTrkKins[0]-primTrkKinsTrue[0]:pWC",
'cuts': weightStr+"*(primTrkDistToTrueTraj[0] < 0.8 && primTrkKinsTrue[0] > 0.)",
#'logz': True,
'profileXtoo': True,
'captionleft1': "First track hit",
},
]
plotOneHistOnePlot([pionFileConfig],histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="TrueTraj_")
histConfigs = [
{
'name': "primTrkKinsProton",
'xtitle': "Hit Kinetic Energy [MeV]",
'ytitle': "Hits / bin",
'binning': [50,0,1000],
'var': "primTrkKinsProton",
'cuts': weightStr,
'logy': logy,
},
{
'name': "primTrkKinsTrue",
'xtitle': "True Hit Kinetic Energy [MeV]",
'ytitle': "Hits / bin",
'binning': [50,0,1000],
'var': "primTrkKinsTrue",
'cuts': weightStr,
'logy': logy,
},
{
'name': "primTrkKinsTrueCuts",
'xtitle': "True Hit Kinetic Energy [MeV]",
'ytitle': "Hits / bin",
'binning': [50,0,1000],
'var': "primTrkKinsTrue",
'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0.)",
'logy': logy,
},
{
'name': "primTrkKinsProtonVprimTrkKinsTrueCuts",
'ytitle': "Reco Hit Kinetic Energy [MeV]",
'xtitle': "True Hit Kinetic Energy [MeV]",
'binning': [100,0,500,100,0,500],
'var': "primTrkKinsProton:primTrkKinsTrue",
'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0.)",
#'logz': True,
},
{
'name': "primTrkKinProtonErrVpWC",
'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
'xtitle': "True Initial Momentum [MeV/c]",
'binning': [100,0,1500,100,30,180],
#'binning': [50,0,1500,50,0,250],
'var': "primTrkKinsProton-primTrkKinsTrue:pWC",
'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0.)",
#'logz': True,
'profileXtoo': True,
},
#{
# 'name': "primTrkKinProtonErrVpWCTrueKinL100",
# 'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
# 'xtitle': "True Initial Momentum [MeV/c]",
# 'binning': [50,0,1500,50,30,200],
# #'binning': [50,0,1500,50,0,250],
# 'var': "primTrkKinsProton-primTrkKinsTrue:pWC",
# 'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0. && primTrkKinsTrue < 100.)",
# #'logz': True,
# 'profileXtoo': True,
#},
#{
# 'name': "primTrkKinProtonErrVpWCTrueKinG100",
# 'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
# 'xtitle': "True Initial Momentum [MeV/c]",
# 'binning': [50,0,1500,50,30,200],
# #'binning': [50,0,1500,50,0,250],
# 'var': "primTrkKinsProton-primTrkKinsTrue:pWC",
# 'cuts': weightStr+"*(primTrkDistToTrueTraj < 0.8 && primTrkKinsTrue > 0. && primTrkKinsTrue > 100.)",
# #'logz': True,
# 'profileXtoo': True,
#},
{
'name': "primTrkKinProtonErrFirstHitVpWC",
'ytitle': "Reco - Truth Hit Kinetic Energy [MeV]",
'xtitle': "True Initial Momentum [MeV/c]",
'binning': [100,0,1500,100,30,180],
#'binning': [50,0,1500,50,-1e3,1e3],
'var': "primTrkKinsProton[0]-primTrkKinsTrue[0]:pWC",
'cuts': weightStr+"*(primTrkDistToTrueTraj[0] < 0.8 && primTrkKinsTrue[0] > 0.)",
#'logz': True,
'profileXtoo': True,
'captionleft1': "First track hit",
},
]
plotOneHistOnePlot([protonFileConfig],histConfigs,c,"PiAbsSelector/tree",nMax=NMAX,outPrefix="TrueTraj_")
#tree = root.TChain("PiAbsSelector/tree")
#tree.AddFile("test_pip_piAbsSelector.root")
#tree.Scan("primTrkKins:primTrkKinsTrue:primTrkDistToTrueTraj:primTrkDistToTrueTrajPoint:primTrkdEdxs:primTrkResRanges:primTrkZs")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,896 | jhugon/lariatPionAbs | refs/heads/master | /plotDataMC.py | #!/usr/bin/env python
import ROOT as root
from helpers import *
root.gROOT.SetBatch(True)
if __name__ == "__main__":
cuts = ""
#cuts += "*( pWC > 100 && pWC < 1100 && (isMC || (firstTOF > 0 && firstTOF < 25)))" # old pions
#cuts += "*( pWC > 100 && pWC < 1100 && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) < 5e4))" # pions
#cuts += "*( pWC > 450 && pWC < 1100 && (isMC || (firstTOF > 28 && firstTOF < 55)))" # old protons
#cuts += "*( pWC > 450 && pWC < 1100 && (isMC || pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.) > 7e5))" # protons
#cuts += "*(nTracksInFirstZ[2] >= 1 && nTracksInFirstZ[14] < 4 && nTracksLengthLt[5] < 3)" # tpc tracks
cuts += "*(primTrkStartZ < 2.)" # tpc tracks
cuts += "*( iBestMatch >= 0 && nMatchedTracks == 1)" # matching in analyzer
cuts += "*(primTrkEndInFid == 1)"
cuts += "*(primTrkEndX > 5.4 && primTrkEndX < 42.7)"
cuts += "*(primTrkEndY > -15. && primTrkEndY < 15.)"
cuts += "*(primTrkEndZ > 5. && primTrkEndZ < 85.)"
# matching debug
#cuts += "*(sqrt(pow(xWC-23.75,2)+pow(yWC-0.2,2)) < 11.93)" # wc track in flange
#cuts += "*(sqrt(pow(trackXFront-23.75,2)+pow(trackYFront-0.2,2)) < 11.93)" # TPC track in flange
#cuts += "*(trackMatchLowestZ < 2.)" # matching
#cuts += "*(fabs(trackMatchDeltaY) < 5.)" # matching
#cuts += "*((!isMC && (trackMatchDeltaX < 6. && trackMatchDeltaX > -4.)) || (isMC && (fabs(trackMatchDeltaX) < 5.)))" # matching
#cuts += "*(trackMatchDeltaAngle*180/pi < 10.)" # matching
###
###
secTrkCuts = "*(trackStartDistToPrimTrkEnd < 2.)"
weightStr = "pzWeight"+cuts
#weightStr = "1"+cuts
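# Unlike the pure-MC scripts, every entry here is weighted by the pzWeight
# branch (assumed to come from the friend trees attached via 'addFriend'
# below) times the boolean cut factors, so data and MC are compared after
# this per-event reweighting.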
#DataMC_pWC_NoCutsHist Run II +100A Integral: 224281.0
#DataMC_pWC_NoCutsHist Run II +60A Integral: 50672.0
nData = 224281.0
logy = False
c = root.TCanvas()
NMAX=10000000000
#NMAX=100
fileConfigs = [
{
'fn': "piAbs_v2/piAbsSelector_Pos_RunII_current100_v02_all.root",
'addFriend': ["friend", "piAbs_v2/friendTrees/friendTree_piAbsSelector_Pos_RunII_current100_v02_all.root"],
'name': "RunII_Pos_100",
'title': "Run II +100A",
'caption': "Run II +100A",
'color': root.kBlack,
'isData': True,
},
{
'fn': "piAbs_v2/piAbsSelector_Pos_RunII_current60_v02_all.root",
'addFriend': ["friend", "piAbs_v2/friendTrees/friendTree_piAbsSelector_Pos_RunII_current60_v02_all.root"],
'name': "RunII_Pos_60",
'title': "Run II +60A",
'caption': "Run II +60A",
'color': root.kGray+2,
'isData': True,
},
{
'fn': "piAbs_v2/piAbsSelector_Neg_RunII_current100_v02_all.root",
'addFriend': ["friend", "piAbs_v2/friendTrees/friendTree_piAbsSelector_Neg_RunII_current100_v02_all.root"],
'name': "RunII_Neg_100",
'title': "Run II -100A",
'caption': "Run II -100A",
'color': root.kGreen,
'isData': True,
},
{
'fn': "piAbs_v2/piAbsSelector_Neg_RunII_current60_v02_all.root",
'addFriend': ["friend", "piAbs_v2/friendTrees/friendTree_piAbsSelector_Neg_RunII_current60_v02_all.root"],
'name': "RunII_Neg_60",
'title': "Run II -60A",
'caption': "Run II -60A",
'color': root.kYellow+1,
'isData': True,
},
{
'fn': "billMC1/MC1_PDG_211.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_211.root"],
'name': "pip",
'title': "#pi^{+} MC",
'caption': "#pi^{+} MC",
'color': root.kBlue-7,
'scaleFactor': 1./25000*nData,
},
{
'fn': "billMC1/MC1_PDG_2212.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_2212.root"],
'name': "p",
'title': "proton MC",
'caption': "proton MC",
'color': root.kRed-4,
'scaleFactor': 1./10000*nData,
},
{
'fn': "billMC1/MC1_PDG_-11.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_-11.root"],
'name': "ep",
'title': "e^{+} MC",
'caption': "e^{+} MC",
'color': root.kGreen,
'scaleFactor': 1./10000*nData,
},
{
'fn': "billMC1/MC1_PDG_-13.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_-13.root"],
'name': "mup",
'title': "#mu^{+} MC",
'caption': "#mu^{+} MC",
'color': root.kMagenta-4,
'scaleFactor': 1./10000*nData,
},
{
'fn': "billMC1/MC1_PDG_321.root",
'addFriend': ["friend", "billMC1/friendTrees/friend_MC1_PDG_321.root"],
'name': "kp",
'title': "K^{+} MC",
'caption': "K^{+} MC",
'color': root.kOrange-3,
'scaleFactor': 1./10000*nData,
},
]
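# Normalization convention in fileConfigs: samples tagged 'isData' are drawn
# with unit weight, while each MC sample carries
# scaleFactor = nData / N_generated, pinning MC to the Run II +100A count
# quoted above. E.g. for the pi+ sample:
#   224281.0 / 25000 ~= 8.97 data events per generated MC event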
histConfigs = [
# {
# 'name': "xWC4Hit",
# 'xtitle': "X Position at WC4 [cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,50],
# 'var': "xWC4Hit",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "yWC4Hit",
# 'xtitle': "Y Position at WC4 [cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,-25,25],
# 'var': "yWC4Hit",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "zWC4Hit",
# 'xtitle': "Z Position at WC4 [cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,-97,-95],
# 'var': "zWC4Hit",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "xWC",
# 'xtitle': "X Position of WC track projection to TPC [cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,75],
# 'var': "xWC",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "yWC",
# 'xtitle': "Y Position of WC track projection to TPC [cm]",
# 'ytitle': "Events / bin",
# 'binning': [100,-50,50],
# 'var': "yWC",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "pzWC",
# 'xtitle': "Z Momentum from WC [MeV/c]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,2000],
# 'var': "pzWC",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# #'printIntegral': True,
# },
{
'name': "pWC",
'xtitle': "Momentum from WC [MeV/c]",
'ytitle': "Events / bin",
'binning': [40,300,1100],
'var': "pWC",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
{
'name': "pWC_NoCuts",
'xtitle': "Momentum from WC [MeV/c]",
'ytitle': "Events / bin",
'binning': [60,300,1500],
'var': "pWC",
'cuts': "pzWeight*(isMC || (firstTOF > -100))",
#'normalize': True,
'logy': logy,
'printIntegral': True,
},
# {
# 'name': "kinWC",
# 'xtitle': "Kinetic Energy at WC [MeV/c] (m=m_{#pi^{#pm}})",
# 'ytitle': "Events / bin",
# 'binning': [100,0,2000],
# 'var': "kinWC",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "kinWCInTPC",
# 'xtitle': "Kinetic Energy at TPC [MeV/c] (m=m_{#pi^{#pm}})",
# 'ytitle': "Events / bin",
# 'binning': [100,0,2000],
# 'var': "kinWCInTPC",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
{
'name': "phiWC",
'xtitle': "WC track #phi [deg]",
'ytitle': "Events / bin",
'binning': [360,-180,180],
'var': "phiWC*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "thetaWC",
'xtitle': "WC track #theta [deg]",
'ytitle': "Events / bin",
'binning': [40,0,10],
'var': "thetaWC*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "thetaxzWC",
'xtitle': "WC track #theta_{xz} [deg]",
'ytitle': "Events / bin",
'binning': [100,-10,10],
'var': "(atan(tan(thetaWC)*cos(phiWC)))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "thetayzWC",
'xtitle': "WC track #theta_{yz} [deg]",
'ytitle': "Events / bin",
'binning': [100,-5,5],
'var': "(asin(tan(thetaWC)*sin(phiWC)))*180/pi",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "sinthetayz",
'xtitle': "WC Track sin(#theta_{yz})",
'ytitle': "Tracks / bin",
'binning': [80,-0.1,0.1],
'var': "sin(thetaWC)*sin(phiWC)",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
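# Geometry of the angle variables above, taking z along the beam and the WC
# direction as (sin(theta)cos(phi), sin(theta)sin(phi), cos(theta)):
# tan(thetaWC)*cos(phiWC) = x/z, so its atan is the exact projected angle in
# the xz plane; tan(thetaWC)*sin(phiWC) = y/z, which the thetayzWC entry
# wraps in asin rather than atan (the two agree to first order at these
# few-degree angles); and sin(thetaWC)*sin(phiWC) is the y component of the
# unit direction vector.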
{
'name': "primTrkStartZ",
'xtitle': "Primary TPC Track Start Z [cm]",
'ytitle': "Events / bin",
'binning': [60,-3,3],
'var': "primTrkStartZ",
'cuts': weightStr,
#'normalize': True,
'logy': False,
},
{
'name': "primTrkStartZ_Logy",
'xtitle': "Primary TPC Track Start Z [cm]",
'ytitle': "Events / bin",
'binning': [60,-10,10],
'var': "primTrkStartZ",
'cuts': weightStr,
#'normalize': True,
'logy': True,
},
{
'name': "nTracks",
'xtitle': "Number of TPC Tracks / Event",
'ytitle': "Events / bin",
'binning': [31,0,30],
'var': "nTracks",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "nTracksInFirstZ2",
'xtitle': "Number of TPC Tracks in first 2 cm / Event",
'ytitle': "Events / bin",
'binning': [16,0,15],
'var': "nTracksInFirstZ[2]",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "nTracksInFirstZ14",
'xtitle': "Number of TPC Tracks in first 14 cm / Event",
'ytitle': "Events / bin",
'binning': [16,0,15],
'var': "nTracksInFirstZ[14]",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "nTracksLengthLt5",
'xtitle': "Number of TPC Tracks with length < 5 cm / Event",
'ytitle': "Events / bin",
'binning': [16,0,15],
'var': "nTracksLengthLt[5]",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
# {
# 'name': "nMatchedTracks",
# 'xtitle': "Number of TPC/WC Track Matches / Event",
# 'ytitle': "Events / bin",
# 'binning': [11,0,10],
# 'var': "nMatchedTracks",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': True,
# },
# {
# 'name': "trackMatchDeltaX",
# 'xtitle': "TPC / WC Track #Delta x at TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [40,-10,10],
# #'var': "trackMatchDeltaX[iBestMatch]",
# #'cuts': "(iBestMatch >= 0)*"+weightStr,
# 'var': "trackMatchDeltaX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackMatchDeltaY",
# 'xtitle': "TPC / WC Track #Delta y at TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [40,-10,10],
# #'var': "trackMatchDeltaY[iBestMatch]",
# #'cuts': "(iBestMatch >= 0)*"+weightStr,
# 'var': "trackMatchDeltaY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackMatchDeltaAngle",
# 'xtitle': "TPC / WC Track #Delta #alpha [deg]",
# 'ytitle': "TPC Tracks / bin",
# #'binning': [90,0,180],
# 'binning': [20,0,20],
# #'var': "trackMatchDeltaAngle[iBestMatch]*180/pi",
# #'cuts': "(iBestMatch >= 0)*"+weightStr,
# 'var': "trackMatchDeltaAngle*180/pi",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,0,50],
# 'var': "trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackYFront",
# 'xtitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [50,-50,50],
# 'var': "trackYFront",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackMatchLowestZ",
# 'xtitle': "TPC Track Start Z [cm]",
# 'ytitle': "TPC Tracks / bin",
# 'binning': [40,0,20],
# 'var': "trackMatchLowestZ",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "nTOFs",
# 'xtitle': "Number of TOF Objects",
# 'ytitle': "Events / bin",
# 'binning': [11,0,10],
# 'var': "nTOFs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "TOFs",
# 'xtitle': "TOF [ns]",
# 'ytitle': "TOFs / bin",
# 'binning': [100,0,100],
# 'var': "TOFs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "firstTOF",
# 'xtitle': "TOF [ns]",
# 'ytitle': "Events / bin",
# 'binning': [100,0,100],
# 'var': "firstTOF",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartX",
# 'xtitle': "TPC Track Start X [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-20,60],
# 'var': "trackStartX",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
# {
# 'name': "trackStartY",
# 'xtitle': "TPC Track Start Y [cm]",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-50,50],
# 'var': "trackStartY",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
# },
{
'name': "trackStartZ",
'xtitle': "TPC Track Start Z [cm]",
'ytitle': "Tracks / bin",
'binning': [20,-5,5],
'var': "trackStartZ",
'cuts': weightStr,
#'normalize': True,
'logy': False,
},
{
'name': "trackStartZ_Logy",
'xtitle': "TPC Track Start Z [cm]",
'ytitle': "Tracks / bin",
'binning': [30,-10,20],
'var': "trackStartZ",
'cuts': weightStr,
#'normalize': True,
'logy': True,
},
# #{
# # 'name': "trackEndX",
# # 'xtitle': "TPC Track End X [cm]",
# # 'ytitle': "Tracks / bin",
# # 'binning': [100,-20,60],
# # 'var': "trackEndX",
# # 'cuts': weightStr,
# # #'normalize': True,
# # 'logy': logy,
# #},
# #{
# # 'name': "trackEndY",
# # 'xtitle': "TPC Track End Y [cm]",
# # 'ytitle': "Tracks / bin",
# # 'binning': [100,-50,50],
# # 'var': "trackEndY",
# # 'cuts': weightStr,
# # #'normalize': True,
# # 'logy': logy,
# #},
# #{
# # 'name': "trackEndZ",
# # 'xtitle': "TPC Track End Z [cm]",
# # 'ytitle': "Tracks / bin",
# # 'binning': [100,-20,110],
# # 'var': "trackEndZ",
# # 'cuts': weightStr,
# # #'normalize': True,
# # 'logy': logy,
# #},
{
'name': "trackLength",
'xtitle': "TPC Track Length [cm]",
'ytitle': "Tracks / bin",
'binning': [100,-10,100],
'var': "trackLength",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "trackCaloKin",
# 'xtitle': "TPC Calo Estimate of KE [MeV]",
# 'ytitle': "Tracks / bin",
# 'binning': [50,0,2500],
# 'var': "trackCaloKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trackLLHPion",
# 'xtitle': "TPC Track Pion -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHPion",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trackLLHProton",
# 'xtitle': "TPC Track Proton -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHProton",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trackLLHMuon",
# 'xtitle': "TPC Track Muon -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHMuon",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "trackLLHKaon",
# 'xtitle': "TPC Track Kaon -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHKaon",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "trackPIDA",
'xtitle': "TPC Track PIDA",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "trackPIDA",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trackLLR",
'xtitle': "TPC Track Pion/Proton LLHR",
'ytitle': "Tracks / bin",
'binning': [100,-300,1000],
'var': "trackLLHPion-trackLLHProton",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trackLLRInt",
'xtitle': "TPC Track Pion/Proton LLHR",
'ytitle': "Tracks / bin",
'binning': [100,-300,1000],
'var': "primTrkLLHPion-primTrkLLHProton",
'cuts': weightStr,
#'logy': logy,
'normalize': True,
'integral': True
},
#{
# 'name': "primTrkLLHPion",
# 'xtitle': "Primary TPC Track Pion -logLH",
# 'ytitle': "Events / bin",
# 'binning': [100,0,5000],
# 'var': "-primTrkLLHPion",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkLLHProton",
# 'xtitle': "Primary TPC Track Proton -logLH",
# 'ytitle': "Events / bin",
# 'binning': [100,0,5000],
# 'var': "-primTrkLLHProton",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkLLHMuon",
# 'xtitle': "Primary TPC Track Muon -logLH",
# 'ytitle': "Events / bin",
# 'binning': [100,0,5000],
# 'var': "-primTrkLLHMuon",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkLLHKaon",
# 'xtitle': "Primary TPC Track Kaon -logLH",
# 'ytitle': "Events / bin",
# 'binning': [100,0,5000],
# 'var': "-primTrkLLHKaon",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkLLR",
'xtitle': "Primary TPC Track Pion/Proton LLHR",
'ytitle': "Tracks / bin",
'binning': [100,-300,1000],
'var': "primTrkLLHPion-primTrkLLHProton",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkLLRInt",
'xtitle': "Primary TPC Track Pion/Proton LLHR",
'ytitle': "Efficiency for Cut >= X",
'binning': [100,-300,1000],
'var': "primTrkLLHPion-primTrkLLHProton",
'cuts': weightStr,
#'logy': logy,
'normalize': True,
'integral': True
},
#{
# 'name': "primTrkLLRKP",
# 'xtitle': "Primary TPC Track Kaon/Proton LLHR",
# 'ytitle': "Tracks / bin",
# 'binning': [100,-300,1000],
# 'var': "primTrkLLHKaon-primTrkLLHProton",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkLLRKPInt",
# 'xtitle': "Primary TPC Track Kaon/Proton LLHR",
# 'ytitle': "Efficiency for Cut >= X",
# 'binning': [100,-300,1000],
# 'var': "primTrkLLHKaon-primTrkLLHProton",
# 'cuts': weightStr,
# #'logy': logy,
# 'normalize': True,
# 'integral': True
#},
{
'name': "primTrkPIDA",
'xtitle': "Primary TPC Track PIDA",
'ytitle': "Events / bin",
'binning': [100,0,50],
'var': "primTrkPIDA",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trackStartDistToPrimTrkEnd",
'xtitle': "TPC Track Start Distance to Primary End [cm]",
'ytitle': "Tracks / bin",
#'binning': [40,0,20],
'binning': [160,0,80],
'var': "trackStartDistToPrimTrkEnd",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "trackEndDistToPrimTrkEnd",
'xtitle': "TPC Track End Distance to Primary End [cm]",
'ytitle': "Tracks / bin",
#'binning': [40,0,20],
'binning': [160,0,80],
'var': "trackEndDistToPrimTrkEnd",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "secTrkLength",
'xtitle': "Secondary TPC Track Length [cm]",
'ytitle': "Tracks / bin",
'binning': [100,-10,100],
'var': "trackLength",
'cuts': weightStr+secTrkCuts,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "secTrkCaloKin",
# 'xtitle': "Secondary Track Calo Estimate of KE [MeV]",
# 'ytitle': "Tracks / bin",
# 'binning': [50,0,2500],
# 'var': "trackCaloKin",
# 'cuts': weightStr+secTrkCuts,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "secTrkLLHPion",
# 'xtitle': "Secondary TPC Track Pion -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHPion",
# 'cuts': weightStr+secTrkCuts,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "secTrkLLHProton",
# 'xtitle': "Secondary TPC Track Proton -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHProton",
# 'cuts': weightStr+secTrkCuts,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "secTrkLLHMuon",
# 'xtitle': "Secondary TPC Track Muon -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHMuon",
# 'cuts': weightStr+secTrkCuts,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "secTrkLLHKaon",
# 'xtitle': "Secondary TPC Track Kaon -logLH",
# 'ytitle': "Tracks / bin",
# 'binning': [100,0,5000],
# 'var': "-trackLLHKaon",
# 'cuts': weightStr+secTrkCuts,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "secTrkStartZ",
'xtitle': "Secondary TPC Track Start z [cm]",
'ytitle': "Tracks / bin",
'binning': [120,-10,110],
'var': "trackStartZ",
'cuts': weightStr+secTrkCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "secTrkLLR",
'xtitle': "Secondary TPC Track Pion/Proton LLHR",
'ytitle': "Tracks / bin",
'binning': [100,-300,1000],
'var': "trackLLHPion-trackLLHProton",
'cuts': weightStr+secTrkCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "secTrkLLRInt",
'xtitle': "Secondary TPC Track Pion/Proton LLHR",
'ytitle': "Tracks / bin",
'binning': [100,-300,1000],
'var': "primTrkLLHPion-primTrkLLHProton",
'cuts': weightStr+secTrkCuts,
#'logy': logy,
'normalize': True,
'integral': True
},
{
'name': "secTrkPIDA",
'xtitle': "Secondary TPC Track PIDA",
'ytitle': "Tracks / bin",
'binning': [100,0,50],
'var': "trackPIDA",
'cuts': weightStr+secTrkCuts,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkLength",
'xtitle': "Primary TPC Track Length [cm]",
'ytitle': "Events / bin",
'binning': [100,0,100],
'var': "primTrkLength",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "primTrkdEdxs",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,50],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkdEdxsFidCut",
# 'xtitle': "Primary TPC Track dE/dx [MeV/cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,50],
# 'var': "primTrkdEdxs",
# 'cuts': weightStr+"*primTrkInFids",
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkResRanges",
# 'xtitle': "Primary TPC Track Residual Range [cm]",
# 'ytitle': "Events / bin",
# 'binning': [200,0,100],
# 'var': "primTrkResRanges",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkEndKin",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKin",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
#{
# 'name': "primTrkEndKinFid",
# 'xtitle': "Primary TPC Track End Kinetic Energy [MeV]",
# 'ytitle': "Events / bin",
# 'binning': [50,0,1000],
# 'var': "primTrkEndKinFid",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
{
'name': "primTrkKins",
'xtitle': "Primary TPC Track Hit Kinetic Energy [MeV]",
'ytitle': "Events / bin",
'binning': [100,0,1000],
'var': "primTrkKins",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkKinInteract",
'xtitle': "Primary TPC Track Interaction Kinetic Energy [MeV]",
'ytitle': "Events / bin",
'binning': [100,0,1000],
'var': "primTrkKinInteract",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
{
'name': "primTrkZs",
'xtitle': "Primary TPC Track Hit Z coordinates [cm]",
'ytitle': "Events / bin",
'binning': [80,-10,10],
'var': "primTrkZs",
'cuts': weightStr,
#'normalize': True,
'logy': logy,
},
#{
# 'name': "trueEndProcess",
# 'xtitle': "trueEndProcess",
# 'ytitle': "Events / bin",
# 'binning': [17,0,17],
# 'var': "trueEndProcess",
# 'cuts': weightStr,
# #'normalize': True,
# 'logy': logy,
#},
]
#for i in reversed(range(len(histConfigs))):
# if histConfigs[i]['name'] != "pzWC":
# #if histConfigs[i]['name'] != "zWC4Hit":
# histConfigs.pop(i)
# plotManyFilesOnePlot(fileConfigs,histConfigs,c,"PiAbsSelectorTC/tree",outPrefix="DataMC_",nMax=NMAX)
fileConfigMCs = copy.deepcopy(fileConfigs)
fileConfigDatas = []
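# Split data from MC: iterate in reverse so pop() does not shift the
# indices of the entries not yet visited.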
for i in reversed(range(len(fileConfigMCs))):
if 'isData' in fileConfigMCs[i] and fileConfigMCs[i]['isData']:
fileConfigDatas.append(fileConfigMCs.pop(i))
DataMCStack(fileConfigDatas,fileConfigMCs,histConfigs,c,"PiAbsSelectorTC/tree",outPrefix="DataMC_",nMax=NMAX)
#DataMCCategoryStack(fileConfigDatas,fileConfigMCs,histConfigs,c,"PiAbsSelectorTC/tree",
# outPrefix="DataMC_",nMax=NMAX,
# catConfigs=TRUECATEGORYFEWERCONFIGS
# )
m2SF = 1e-3
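# Beamline mass-squared: m^2 = pWC^2*((c*t/L)^2 - 1) with t = firstTOF in ns,
# so 0.00201052122 is presumably (c/L)^2 for a TOF baseline of roughly 6.7 m
# (inferred from the constant, not stated here). m2SF rescales MeV^2 to the
# "1000 x MeV^2" axis units, and drawvlines marks the mu/pi/K/p mass-squared.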
histConfigs = [
{
'name': "beamlineMass_NoCuts",
'xtitle': "Beamline Mass Squared [1000#times (MeV^{2})]",
'ytitle': "Events / bin",
'binning': [100,-2e5*m2SF,2e5*m2SF],
'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)*1e-3",
'cuts': "(!isMC)",
#'normalize': True,
'logy': False,
'drawvlines':[105.65**2*m2SF,139.6**2*m2SF,493.677**2*m2SF,938.272046**2*m2SF],
},
{
'name': "beamlineMass_NoCuts_Logy",
'xtitle': "Beamline Mass Squared [1000#times (MeV^{2})]",
'ytitle': "Events / bin",
'binning': [100,-5e5*m2SF,2e6*m2SF],
'var': "pWC*pWC*(firstTOF*firstTOF*0.00201052122-1.)*1e-3",
'cuts': "(!isMC)",
#'normalize': True,
'logy': True,
'drawvlines':[105.65**2*m2SF,139.6**2*m2SF,493.677**2*m2SF,938.272046**2*m2SF],
},
]
plotManyFilesOnePlot([f for f in fileConfigs if ('isData' in f and f['isData'])],histConfigs,c,"PiAbsSelectorTC/tree",outPrefix="DataMC_",nMax=NMAX)
histConfigs = [
{
'name': "thetayzWCVthetaxzWC",
'xtitle': "WC track #theta_{xz} [deg]",
'ytitle': "WC track #theta_{yz} [deg]",
'binning': [40,-10,10,40,-10,10],
'var': "(asin(sin(thetaWC)*sin(phiWC)))*180/pi:(atan(tan(thetaWC)*cos(phiWC)))*180/pi",
'cuts': "",
#'normalize': True,
#'logy': logy,
},
#{
# 'name': "xWCVthetaxzWC",
# 'xtitle': "WC track #theta_{xz} [deg]",
# 'ytitle': "X of WC track projected to front of TPC [cm]",
# 'binning': [40,-10,10,40,0,75],
# 'var': "xWC:(atan(tan(thetaWC)*cos(phiWC)))*180/pi",
# 'cuts': "",
# #'normalize': True,
# #'logy': logy,
#},
#{
# 'name': "xWCVxWC4Hit",
# 'xtitle': "X of WC4 Hit [cm]",
# 'ytitle': "X of WC track projected to front of TPC [cm]",
# 'binning': [50,0,50,40,0,75],
# 'var': "xWC:xWC4Hit",
# 'cuts': "",
# #'normalize': True,
# #'logy': logy,
#},
# {
# 'name': "primTrkLLRVPIDA",
# 'xtitle': "Primary TPC Track PIDA",
# 'ytitle': "Primary TPC Track Pion/Proton LLHR",
# 'binning': [100,0,50,100,-300,1000],
# 'var': "primTrkLLHPion-primTrkLLHProton:primTrkPIDA",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkLLRVpWC",
# 'xtitle': "WC Momentum [MeV/c]",
# 'ytitle': "Primary TPC Track Pion/Proton LLHR",
# 'binning': [100,0,1500,100,-300,1000],
# 'var': "primTrkLLHPion-primTrkLLHProton:pWC",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkLLRKPVpWC",
# 'xtitle': "WC Momentum [MeV/c]",
# 'ytitle': "Primary TPC Track Kaon/Proton LLHR",
# 'binning': [100,0,1500,100,-300,1000],
# 'var': "primTrkLLHKaon-primTrkLLHProton:pWC",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkPIDAPVpWC",
# 'xtitle': "WC Momentum [MeV/c]",
# 'ytitle': "Primary TPC Track PIDA",
# 'binning': [100,0,1500,100,0,50],
# 'var': "primTrkPIDA:pWC",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkLengthPVpWC",
# 'xtitle': "WC Momentum [MeV/c]",
# 'ytitle': "Primary TPC Track Length [cm]",
# 'binning': [100,0,1500,100,0,100],
# 'var': "primTrkLength:pWC",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxVRange",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "primTrkdEdxVRangeFidCut",
# 'xtitle': "Primary Track Hit Residual Range [cm]",
# 'ytitle': "Primary Track Hit dE/dx [MeV/cm]",
# 'binning': [100,0,100,100,0,50],
# 'var': "primTrkdEdxs:primTrkResRanges",
# 'cuts': weightStr+"*primTrkInFids",
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "firstTOFVnTOFs",
# 'xtitle': "nTOFs",
# 'ytitle': "First TOF [ns]",
# 'binning': [11,0,10,100,0,50],
# 'var': "firstTOF:nTOFs",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "yWCVxWC",
# 'xtitle': "X Position of WC track projection to TPC [cm]",
# 'ytitle': "Y Position of WC track projection to TPC [cm]",
# 'binning': [40,0,40,40,-20,20],
# 'var': "yWC:xWC",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
# {
# 'name': "trackYFrontVtrackXFront",
# 'xtitle': "X of TPC Track Projection to TPC Front [cm]",
# 'ytitle': "Y of TPC Track Projection to TPC Front [cm]",
# 'binning': [40,0,40,40,-20,20],
# 'var': "trackYFront:trackXFront",
# 'cuts': weightStr,
# #'normalize': True,
# #'logz': True,
# },
{
'name': "trackLengthVtrackStartZ",
'ytitle': "TPC Track Length [cm]",
'xtitle': "TPC Track Start z [cm]",
'binning': [25,0,100,30,-10,110],
'var': "trackLength:trackStartZ",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "trackStartDistToPrimTrkEndVtrackStartZ",
'xtitle': "TPC Track Start z [cm]",
'ytitle': "TPC Track Start Distance to Primary End [cm]",
'binning': [25,0,100,20,0,80],
'var': "trackStartDistToPrimTrkEnd:trackStartZ",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
{
'name': "trackStartDistToPrimTrkEndVprimTrkEndZ",
'xtitle': "Primary TPC Track End z [cm]",
'ytitle': "TPC Track Start Distance to Primary End [cm]",
'binning': [25,0,100,20,0,80],
'var': "trackStartDistToPrimTrkEnd:primTrkEndZ",
'cuts': weightStr,
#'normalize': True,
#'logz': True,
},
]
plotOneHistOnePlot(fileConfigs,histConfigs,c,"PiAbsSelector/tree",outPrefix="DataMC_",nMax=NMAX)
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,897 | jhugon/lariatPionAbs | refs/heads/master | /fitCosmicHalo.py | #!/usr/bin/env python2
import ROOT as root
from ROOT import gStyle as gStyle
root.gROOT.SetBatch(True)
from helpers import *
# Explicit standard-library imports for names used below (uuid for unique
# clone names, re for run-number parsing, sys for the early exit, log10 for
# log-axis limits); helpers may already re-export some of these, but
# importing them here keeps the script self-contained.
import re
import sys
import uuid
from math import log10
def plotSlices(c,hist,savename,xlimits,xtitle,ytitle,xvarname,rebinX=1,rebinY=1,xunits=None,normalize=False):
print(hist)
if not hist:
return
hist = hist.Clone(uuid.uuid1().hex)
hist.RebinX(rebinX)
hist.RebinY(rebinY)
histAll = hist.ProjectionY("_pyAll",1,hist.GetNbinsX())
if normalize:
integral = histAll.Integral()
if integral != 0.:
histAll.Scale(1./integral)
ymax = histAll.GetMaximum()
histAll.SetLineColor(root.kBlack)
histAll.SetMarkerColor(root.kBlack)
labels = ["All"]
nBinsX = hist.GetNbinsX()
sliceHists = []
for iBin in range(1,nBinsX+1):
sliceHist = getXBinHist(hist,iBin)
if normalize:
integral = sliceHist.Integral()
if integral != 0.:
sliceHist.Scale(1./integral)
ymax = max(sliceHist.GetMaximum(),ymax)
sliceHist.SetLineColor(COLORLIST[iBin-1])
sliceHist.SetMarkerColor(COLORLIST[iBin-1])
sliceHists.append(sliceHist)
xlow = hist.GetXaxis().GetBinLowEdge(iBin)
xhigh = hist.GetXaxis().GetBinUpEdge(iBin)
if xunits:
labels.append("{0:.4g} {3} < {1} < {2:.4g} {3}".format(xlow,xvarname,xhigh,xunits))
else:
labels.append("{0:.4g} < {1} < {2:.4g}".format(xlow,xvarname,xhigh))
if c.GetLogy() == 1:
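# Heuristic headroom on a log axis: pad the top by a factor that grows
# with the number of decades spanned, so the slices and legend stay visible.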
ybound = ymax * 10**((log10(ymax)+1)*0.5)
axisHist = Hist2D(1,xlimits[0],xlimits[1],1,0.1,ybound)
else:
axisHist = Hist2D(1,xlimits[0],xlimits[1],1,0,ymax*1.1)
setHistTitles(axisHist,xtitle,ytitle)
axisHist.Draw()
for sliceHist in sliceHists:
sliceHist.Draw("histsame")
histAll.Draw("histsame")
leg = drawNormalLegend([histAll]+sliceHists,labels)
c.SaveAs(savename+".png")
c.SaveAs(savename+".pdf")
def getMaxAndFWHM(hist,xBin):
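# Mode and FWHM of one X-bin's Y-projection: scan outward from the maximum
# bin in both directions until the bin content first drops below half of
# the peak, and take the difference of those crossing edges as the FWHM.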
sliceHist = getXBinHist(hist,xBin)
nBins = sliceHist.GetNbinsX()
contentMax = sliceHist.GetMaximum()
halfContentMax = 0.5*contentMax
iMax = sliceHist.GetMaximumBin()
xMax = sliceHist.GetXaxis().GetBinCenter(iMax)
xHalfMaxAbove = float('nan')
xHalfMaxBelow = float('nan')
for iBin in range(iMax,nBins+2):
if sliceHist.GetBinContent(iBin) <= halfContentMax:
xHalfMaxAbove = sliceHist.GetXaxis().GetBinLowEdge(iBin)
break
for iBin in range(iMax,-1,-1):
if sliceHist.GetBinContent(iBin) <= halfContentMax:
xHalfMaxBelow = sliceHist.GetXaxis().GetBinUpEdge(iBin)
break
fwhm = xHalfMaxAbove-xHalfMaxBelow
return xMax, fwhm
def getFracMaxVals(hist,frac=0.5):
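# Same outward scan as getMaxAndFWHM, but on a 1D hist and for an arbitrary
# fraction of the peak; used below to pick data-driven fit windows.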
nBins = hist.GetNbinsX()
contentMax = hist.GetMaximum()
halfContentMax = frac*contentMax
iMax = hist.GetMaximumBin()
xMax = hist.GetXaxis().GetBinCenter(iMax)
xHalfMaxAbove = float('nan')
xHalfMaxBelow = float('nan')
for iBin in range(iMax,nBins+2):
if hist.GetBinContent(iBin) <= halfContentMax:
xHalfMaxAbove = hist.GetXaxis().GetBinLowEdge(iBin)
break
for iBin in range(iMax,-1,-1):
if hist.GetBinContent(iBin) <= halfContentMax:
xHalfMaxBelow = hist.GetXaxis().GetBinUpEdge(iBin)
break
return xHalfMaxBelow, xHalfMaxAbove
def makeGraphsModeAndFWHM(hist):
hist = hist.Clone(uuid.uuid1().hex)
graph = root.TGraph()
graphFWHM = root.TGraph()
for iBin in range(1,hist.GetNbinsX()+1):
yMax, fwhm = getMaxAndFWHM(hist,iBin)
x = hist.GetXaxis().GetBinCenter(iBin)
graph.SetPoint(iBin-1,x,yMax)
graphFWHM.SetPoint(iBin-1,x,fwhm)
return graph, graphFWHM
def fitLandaus(c,hist,postfix,caption,fitMin=1.6,fitMax=2.3,nLandaus=3,smearGauss=True,fixedLandauWidth=None,dQdx=False):
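# Model: a sum of nLandaus Landau PDFs, optionally each convolved (via FFT)
# with one shared Gaussian resolution term, fitted to hist in [fitMin,fitMax];
# returns best-fit values and errors for each Landau's MPV and width
# (plus the Gaussian sigma when smearGauss is True).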
if nLandaus <= 0:
raise ValueError("nLandaus must be > 0")
xTitle = "dE/dx [MeV/cm]"
if dQdx:
xTitle = "dQ/dx [ADC ns / cm]"
t = root.RooRealVar("t",xTitle,0.,10)
t.setBins(10000,"cache")
observables = root.RooArgSet(t)
data = root.RooDataHist("data_"+hist.GetName(),"Data Hist",root.RooArgList(t),hist)
##############
mg = root.RooRealVar("mg","mg",0)
sg = root.RooRealVar("sg","sg",0.1,0.01,2.)
gauss = root.RooGaussian("gauss","gauss",t,mg,sg)
landauParams = []
landaus = []
langauses = []
for iLandau in range(1,nLandaus+1):
iLandauStr = str(iLandau)
mpvl = root.RooRealVar("mpvl"+iLandauStr,"mpv landau "+iLandauStr,1.7,0,5)
wl = None
if fixedLandauWidth is None:
wl = root.RooRealVar("wl"+iLandauStr,"width landau "+iLandauStr,0.42,0.01,10)
else:
wl = root.RooRealVar("wl"+iLandauStr,"width landau "+iLandauStr,fixedLandauWidth)
ml = root.RooFormulaVar("ml"+iLandauStr,"first landau param "+iLandauStr,"@0+0.22278*@1",root.RooArgList(mpvl,wl))
landau = root.RooLandau("lx"+iLandauStr,"lx "+iLandauStr,t,ml,wl)
landauParams += [mpvl,wl,ml]
landaus.append(landau)
langaus = root.RooFFTConvPdf("langaus"+iLandauStr,"landau (X) gauss "+iLandauStr,t,landau,gauss)
langaus.setBufferFraction(0.2)
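# FFT convolution is cyclical at the edges of the observable range; the
# buffer fraction pads the range to suppress wrap-around artifacts.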
langauses.append(langaus)
ratioParams = []
for iRatio in range(1,nLandaus):
iRatioStr = str(iRatio)
ratio = root.RooRealVar("ratio"+iRatioStr,"ratio "+iRatioStr,0.18,0,1)
ratioParams.append(ratio)
model = landaus[0]
multiLandaus = None
multiLangaus = None
if nLandaus > 1:
multiLandaus = root.RooAddPdf("multiLandaus","multiLandaus",root.RooArgList(*landaus),root.RooArgList(*ratioParams))
multiLangaus = root.RooAddPdf("multiLangaus","multiLangaus",root.RooArgList(*langauses),root.RooArgList(*ratioParams))
model = multiLandaus
if smearGauss:
model = multiLangaus
##############
frame = t.frame(root.RooFit.Title(""))
data.plotOn(frame)
plotOnBaseArgs = [frame]
if not (fitMin is None or fitMax is None):
model.fitTo(data,root.RooFit.Range(fitMin,fitMax))
plotOnBaseArgs.append(root.RooFit.Range(fitMin,fitMax))
else:
model.fitTo(data)
model.plotOn(*plotOnBaseArgs)
for iLandau in range(2,nLandaus+1):
iLandauStr = str(iLandau)
plotOnArgs = plotOnBaseArgs + [root.RooFit.LineStyle(root.kDashed),root.RooFit.LineColor(COLORLIST[iLandau])]
if smearGauss:
plotOnArgs.append(root.RooFit.Components("langaus"+iLandauStr))
else:
plotOnArgs.append(root.RooFit.Components("lx"+iLandauStr))
model.plotOn(*plotOnArgs)
#root.gPad.SetLeftMargin(0.15)
#frame.GetYaxis().SetTitleOffset(1.4)
#frame.Draw("same")
#axisHist = root.TH2F("axisHist","",1,0,50,1,0,1000)
##axisHist = root.TH2F("axisHist","",1,-1,1,1,1000,1300)
#axisHist.Draw()
#frame.Draw("same")
frame.Draw()
frame.SetTitle(caption)
c.SaveAs("roofit_landau_{}.png".format(postfix))
bestFits = []
errs = []
for iLandau in range(nLandaus):
for iParam in range(2):
param = landauParams[iParam+iLandau*3]
bestFits.append(param.getVal())
errs.append(param.getError())
if smearGauss:
bestFits.append(sg.getVal())
errs.append(sg.getError())
return bestFits, errs
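# Example (a sketch with a hypothetical histogram h): fit one smeared Landau
# to a dE/dx core over a fixed window and read back the MPV:
#   bestFits, errs = fitLandaus(c, h, "example", "dE/dx core",
#                               fitMin=1.6, fitMax=2.3, nLandaus=1)
#   mpv = bestFits[0]  # layout: [mpv, width]*nLandaus (+ sigma if smeared)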
def fitSlicesLandaus(c,hist,fileprefix,nJump=1,nLandaus=1,smearGauss=False,fracMax=None):
xaxis = hist.GetXaxis()
xTitle = xaxis.GetTitle()
yaxis = hist.GetYaxis()
yTitle = yaxis.GetTitle()
mpvlGraph = root.TGraphErrors()
wlGraph = root.TGraphErrors()
sgGraph = root.TGraphErrors()
fwhmGraph = root.TGraphErrors()
iPoint=0
for i in range(hist.GetNbinsX()//nJump):
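# Group nJump consecutive X bins per fit; any leftover bins past the last
# full group (GetNbinsX() % nJump of them) are skipped.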
firstBin = i*nJump+1
lastBin = (i+1)*(nJump)
lastBin = min(lastBin,hist.GetNbinsX())
histAll = hist.ProjectionY("_pyAll",firstBin,lastBin)
if histAll.GetEntries() < 10:
continue
postfix = "_"+fileprefix+"bins{}".format(i)
xMin = xaxis.GetBinLowEdge(firstBin)
xMax = xaxis.GetBinUpEdge(lastBin)
caption = "{} from {} to {}".format(xTitle,xMin,xMax)
xMiddle = 0.5*(xMax+xMin)
xError = 0.5*(xMax-xMin)
startFit = None
endFit = None
if not (fracMax is None):
startFit, endFit = getFracMaxVals(histAll,fracMax)
bestFits,errors = fitLandaus(c,histAll,postfix,caption,fitMin=startFit,fitMax=endFit,nLandaus=1,smearGauss=smearGauss)
#if errors[0] > 0.5 or errors[1] > 0.5:
# continue
mpvlGraph.SetPoint(iPoint,xMiddle,bestFits[0])
wlGraph.SetPoint(iPoint,xMiddle,bestFits[1])
mpvlGraph.SetPointError(iPoint,xError,errors[0])
wlGraph.SetPointError(iPoint,xError,errors[1])
iPoint += 1
graphs = [mpvlGraph,wlGraph]
labels = ["Landau MPV", "Landau Width"]
#graphs = [mpvlGraph,sgGraph]
#labels = ["Landau MPV", "Gaussian #sigma"]
for i, graph in enumerate(graphs):
graph.SetLineColor(COLORLIST[i])
graph.SetMarkerColor(COLORLIST[i])
pad1 = root.TPad("pad1"+hist.GetName(),"",0.02,0.50,0.98,0.98,0)
pad2 = root.TPad("pad2"+hist.GetName(),"",0.02,0.01,0.98,0.49,0)
c.cd()
c.Clear()
pad1.Draw()
pad2.Draw()
pad1.cd()
axis1 = drawGraphs(pad1,[mpvlGraph],xTitle,"Landau MPV [MeV/cm]",yStartZero=False)
pad2.cd()
#axis2 = drawGraphs(pad2,[sgGraph],xTitle,"Gaussian #sigma [MeV/cm]")
axis2 = drawGraphs(pad2,[wlGraph],xTitle,"Landau Width [MeV/cm]")
#leg = drawNormalLegend(graphs,labels,option="lep",position=[0.2,0.50,0.6,0.70])
c.cd()
c.SaveAs("SliceFitParams_"+fileprefix+".png")
c.SaveAs("SliceFitParams_"+fileprefix+".pdf")
return mpvlGraph,wlGraph
def fitGaussCore(c,hist,postfix,caption,fitMin=1.4,fitMax=2.4):
xMin = hist.GetXaxis().GetBinLowEdge(1)
xMax = hist.GetXaxis().GetBinUpEdge(hist.GetNbinsX())
t = root.RooRealVar("t","dE/dx [MeV/cm]",xMin,xMax)
observables = root.RooArgSet(t)
data = root.RooDataHist("data_"+hist.GetName(),"Data Hist",root.RooArgList(t),hist)
##############
mg = root.RooRealVar("mg","mg",1.7,0.,5.)
sg = root.RooRealVar("sg","sg",0.1,0.01,2.)
gauss = root.RooGaussian("gauss","gauss",t,mg,sg)
model = gauss
##############
fitResult = model.fitTo(data,root.RooFit.Save(),root.RooFit.Range(fitMin,fitMax))
frame = t.frame(root.RooFit.Title(""))
data.plotOn(frame)
model.plotOn(frame,root.RooFit.Range(fitMin,fitMax))
#root.gPad.SetLeftMargin(0.15)
#frame.GetYaxis().SetTitleOffset(1.4)
#frame.Draw("same")
#axisHist = root.TH2F("axisHist","",1,0,50,1,0,1000)
##axisHist = root.TH2F("axisHist","",1,-1,1,1,1000,1300)
#axisHist.Draw()
#frame.Draw("same")
frame.Draw()
frame.SetTitle(caption)
c.SaveAs("roofit_gauss_{}.png".format(postfix))
c.SaveAs("roofit_gauss_{}.pdf".format(postfix))
fwhm = calcFWHM(model,t,1.,4.,0.01)
return (mg.getVal(),float('nan'),sg.getVal()), (mg.getError(),float('nan'),sg.getError()), fwhm
def fitLandauCore(c,hist,postfix,caption,fitMin=1.6,fitMax=2.3,fixedLandauWidth=None,dQdx=False):
xMin = hist.GetXaxis().GetBinLowEdge(1)
xMax = hist.GetXaxis().GetBinUpEdge(hist.GetNbinsX())
if not dQdx:
xMax = min(xMax,5.)
xTitle = "dE/dx [MeV/cm]"
if dQdx:
xTitle = "dQ/dx [ADC ns / cm]"
t = root.RooRealVar("t",xTitle,xMin,xMax)
observables = root.RooArgSet(t)
data = root.RooDataHist("data_"+hist.GetName(),"Data Hist",root.RooArgList(t),hist)
mpvl = None
wl = None
ml = None
mg = None
sg = None
##############
if dQdx:
mpvl = root.RooRealVar("mpvl","mpv landau",0.5*(fitMin+fitMax),0,xMax*1.5)
if fixedLandauWidth is None:
wl = root.RooRealVar("wl","width landau",0.5*(fitMax-fitMin),0.01*(fitMax-fitMin),2*(fitMax-fitMin))
else:
wl = root.RooRealVar("wl","width landau",fixedLandauWidth)
ml = root.RooFormulaVar("ml","first landau param","@0+0.22278*@1",root.RooArgList(mpvl,wl))
mg = root.RooRealVar("mg","mg",0)
sg = root.RooRealVar("sg","sg",0.5*(fitMax-fitMin),0.01*(fitMax-fitMin),2*(fitMax-fitMin))
else:
mpvl = root.RooRealVar("mpvl","mpv landau",1.7,0,5)
if fixedLandauWidth is None:
wl = root.RooRealVar("wl","width landau",0.42,0.01,10)
else:
wl = root.RooRealVar("wl","width landau",fixedLandauWidth)
ml = root.RooFormulaVar("ml","first landau param","@0+0.22278*@1",root.RooArgList(mpvl,wl))
mg = root.RooRealVar("mg","mg",0)
sg = root.RooRealVar("sg","sg",0.1,0.01,2.)
t.Print()
mpvl.Print()
wl.Print()
ml.Print()
mg.Print()
sg.Print()
landau = root.RooLandau("lx","lx",t,ml,wl)
gauss = root.RooGaussian("gauss","gauss",t,mg,sg)
t.setBins(10000,"cache")
langaus = root.RooFFTConvPdf("langaus","landau (X) gauss",t,landau,gauss)
langaus.setBufferFraction(0.4)
model = langaus
##############
fitResult = model.fitTo(data,root.RooFit.Save(),root.RooFit.Range(fitMin,fitMax))
fwhm = None
if dQdx:
fwhm = calcFWHM(model,t,0.5*fitMin,fitMax*1.5,(fitMax-fitMin)/200.)
else:
fwhm = calcFWHM(model,t,1.,4.,0.01)
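# Per-slice fit plots are switched off here; flip the flag below to True
# to save a roofit_landau_<postfix> frame for each fit.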
if False:
frame = t.frame(root.RooFit.Title("landau (x) gauss convolution"))
data.plotOn(frame)
model.plotOn(frame,root.RooFit.Range(fitMin,fitMax))
frame.Draw()
frame.SetTitle(caption)
c.SaveAs("roofit_landau_{}.png".format(postfix))
c.SaveAs("roofit_landau_{}.pdf".format(postfix))
return (mpvl.getVal(),wl.getVal(),sg.getVal()), (mpvl.getError(),wl.getError(),sg.getError()), fwhm
def fitSlicesLandauCore(c,hist,fileprefix,nJump=1,fracMax=0.4,fixedLandauWidth=0.12,dQdx=False):
xaxis = hist.GetXaxis()
xTitle = xaxis.GetTitle()
yaxis = hist.GetYaxis()
yTitle = yaxis.GetTitle()
mpvlGraph = root.TGraphErrors()
wlGraph = root.TGraphErrors()
sgGraph = root.TGraphErrors()
fwhmGraph = root.TGraphErrors()
iPoint=0
for i in range(hist.GetNbinsX()//nJump):
firstBin = i*nJump+1
lastBin = (i+1)*(nJump)
lastBin = min(lastBin,hist.GetNbinsX())
histAll = hist.ProjectionY("_pyAll",firstBin,lastBin)
if histAll.GetEntries() < 10:
continue
postfix = "_"+fileprefix+"bins{}".format(i)
xMin = xaxis.GetBinLowEdge(firstBin)
xMax = xaxis.GetBinUpEdge(lastBin)
caption = "{} from {} to {}".format(xTitle,xMin,xMax)
xMiddle = 0.5*(xMax+xMin)
xError = 0.5*(xMax-xMin)
if dQdx:
histAllRebin = histAll.Clone(histAll.GetName()+"_rebin")
histAllRebin.Rebin(2)
startFit, endFit = getFracMaxVals(histAllRebin,fracMax)
else:
startFit, endFit = getFracMaxVals(histAll,fracMax)
(mpvl,wl,sg),(mpvlErr,wlErr,sgErr), fwhm = fitLandauCore(c,histAll,postfix,caption,startFit,endFit,fixedLandauWidth=fixedLandauWidth,dQdx=dQdx)
if (not dQdx) and (mpvlErr > 0.5 or wlErr > 0.5 or sgErr > 0.5):
continue
if dQdx and mpvl > 4000 :
continue
mpvlGraph.SetPoint(iPoint,xMiddle,mpvl)
wlGraph.SetPoint(iPoint,xMiddle,wl)
sgGraph.SetPoint(iPoint,xMiddle,sg)
fwhmGraph.SetPoint(iPoint,xMiddle,fwhm)
mpvlGraph.SetPointError(iPoint,xError,mpvlErr)
wlGraph.SetPointError(iPoint,xError,wlErr)
sgGraph.SetPointError(iPoint,xError,sgErr)
iPoint += 1
graphs = [mpvlGraph,wlGraph,sgGraph,fwhmGraph]
labels = ["Landau MPV", "Landau Width", "Gaussian #sigma","FWHM"]
#graphs = [mpvlGraph,sgGraph]
#labels = ["Landau MPV", "Gaussian #sigma"]
for i, graph in enumerate(graphs):
graph.SetLineColor(COLORLIST[i])
graph.SetMarkerColor(COLORLIST[i])
pad1 = root.TPad("pad1"+hist.GetName(),"",0.02,0.50,0.98,0.98,0)
pad2 = root.TPad("pad2"+hist.GetName(),"",0.02,0.01,0.98,0.49,0)
c.cd()
c.Clear()
pad1.Draw()
pad2.Draw()
pad1.cd()
axis1 = drawGraphs(pad1,[mpvlGraph],xTitle,"Landau MPV [MeV/cm]",yStartZero=False)
pad2.cd()
axis2 = drawGraphs(pad2,[sgGraph],xTitle,"Gaussian #sigma [MeV/cm]")
#leg = drawNormalLegend(graphs,labels,option="lep",position=[0.2,0.50,0.6,0.70])
c.cd()
c.SaveAs(fileprefix+".png")
c.SaveAs(fileprefix+".pdf")
return mpvlGraph,wlGraph,sgGraph
def fitSlicesLandauCore3D(c,hist,fileprefix,nJump=1,fracMax=0.4,fixedLandauWidth=0.12,dQdx=False):
xaxis = hist.GetXaxis()
xTitle = xaxis.GetTitle()
yaxis = hist.GetYaxis()
yTitle = yaxis.GetTitle()
zaxis = hist.GetZaxis()
zTitle = zaxis.GetTitle()
binning = [xaxis.GetNbins(),xaxis.GetXmin(),xaxis.GetXmax(),
yaxis.GetNbins(),yaxis.GetXmin(),yaxis.GetXmax()
]
zBinning = [zaxis.GetNbins(),zaxis.GetXmin(),zaxis.GetXmax()]
mpvlHist = Hist2D(*binning)
wlHist = Hist2D(*binning)
sgHist = Hist2D(*binning)
mpvlErrorHist = Hist2D(*binning)
wlErrorHist = Hist2D(*binning)
sgErrorHist = Hist2D(*binning)
fwhmHist = Hist2D(*binning)
minMPV = 1e9
minWL = 1e9
minSG = 1e9
maxMPV = -1e9
maxWL = -1e9
maxSG = -1e9
for iBinX in range(1,xaxis.GetNbins()+1):
for iBinY in range(1,yaxis.GetNbins()+1):
postfix = "_"+fileprefix+"bins{}_{}".format(iBinX,iBinY)
xMin = xaxis.GetBinLowEdge(iBinX)
xMax = xaxis.GetBinUpEdge(iBinX)
yMin = yaxis.GetBinLowEdge(iBinY)
yMax = yaxis.GetBinUpEdge(iBinY)
caption = "{} in [{},{}), {} in [{},{})".format(xTitle,xMin,xMax,yTitle,yMin,yMax)
histForFit = Hist(*zBinning)
histForFit.GetXaxis().SetTitle(zTitle)
for iBinZ in range(1,zaxis.GetNbins()+1):
histForFit.SetBinContent(iBinZ,hist.GetBinContent(iBinX,iBinY,iBinZ))
if histForFit.Integral(1,zaxis.GetNbins()+1) < 10:
continue
if dQdx:
histForFit.Rebin(2)
startFit, endFit = getFracMaxVals(histForFit,fracMax)
(mpvl,wl,sg),(mpvlErr,wlErr,sgErr), fwhm = fitLandauCore(c,histForFit,postfix,caption,startFit,endFit,fixedLandauWidth=fixedLandauWidth,dQdx=dQdx)
if (mpvlErr/mpvl > 0.02 or wlErr/wl > 0.2 or sgErr/sg > 0.2):
continue
mpvlHist.SetBinContent(iBinX,iBinY,mpvl)
wlHist.SetBinContent(iBinX,iBinY,wl)
sgHist.SetBinContent(iBinX,iBinY,sg)
fwhmHist.SetBinContent(iBinX,iBinY,fwhm)
mpvlErrorHist.SetBinContent(iBinX,iBinY,mpvlErr/mpvl)
wlErrorHist.SetBinContent(iBinX,iBinY,wlErr/wl)
sgErrorHist.SetBinContent(iBinX,iBinY,sgErr/sg)
minMPV = min(mpvl,minMPV)
minWL = min(wl,minWL)
minSG = min(sg,minSG)
maxMPV = max(mpvl,maxMPV)
maxWL = max(wl,maxWL)
maxSG = max(sg,maxSG)
if maxMPV > minMPV:
mpvlHist.GetZaxis().SetRangeUser(minMPV,maxMPV)
if maxWL > minWL:
wlHist.GetZaxis().SetRangeUser(minWL,maxWL)
if maxSG > minSG:
sgHist.GetZaxis().SetRangeUser(minSG,maxSG)
graphs = [mpvlHist,wlHist,sgHist,mpvlErrorHist,wlErrorHist,sgErrorHist,fwhmHist]
labels = ["Best-Fit Landau MPV", "Best-Fit Landau Width", "Best-Fit Gaussian #sigma",
"Relative Error Landau MPV", "Relative Error Landau Width", "Relative Error Gaussian #sigma",
"FWHM"]
names = ["bfMPV", "bfWL", "bfSigma",
"relerrMPV", "relerrWL", "relerrSigma",
"FWHM"]
setupCOLZFrame(c)
for graph,label,name in zip(graphs,labels,names):
graph.Draw("colz")
print(xTitle,yTitle)
setHistTitles(graph,xTitle,yTitle)
drawStandardCaptions(c,label)
c.SaveAs(fileprefix+name+".png")
c.SaveAs(fileprefix+name+".pdf")
setupCOLZFrame(c,True)
return mpvlHist,wlHist,sgHist
def compareGraphs(c,outfilePrefix,graphsList,histIndex,xTitle,yTitle,legendTitles,yStartZero=False):
c.Clear()
for iColor, graphs in enumerate(graphsList):
graphs[histIndex].SetMarkerColor(COLORLIST[iColor])
graphs[histIndex].SetLineColor(COLORLIST[iColor])
axisHist = drawGraphs(c,[x[histIndex] for x in graphsList],xTitle,yTitle,yStartZero=yStartZero,freeTopSpace=0.4)
leg = drawNormalLegend([x[histIndex] for x in graphsList],legendTitles,option="ep")
c.SaveAs(outfilePrefix+".png")
c.SaveAs(outfilePrefix+".pdf")
c.Clear()
if __name__ == "__main__":
c = root.TCanvas("c")
fCosmics = root.TFile("cosmics_hists.root")
fCosmics.ls()
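# Driver: pull 3D (x, y, dE/dx-or-dQ/dx) histograms from cosmics_hists.root,
# coarsen them with Rebin3D, and run per-(x,y)-cell Landau-core fits; each
# call below writes one set of best-fit / relative-error / FWHM maps.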
hist3D1 = fCosmics.Get("primTrkdEdxsVHitWireAndHitY_phiLt0_RunIINocrct")
hist3D1 = hist3D1.Clone("hist3D1")
hist3D1.Rebin3D(20,1,1)
fitSlicesLandauCore3D(c,hist3D1,"Fit3D_dEdxVWireAndY_phiLt0_RunIINocrct_manyY")
hist3D2 = fCosmics.Get("primTrkdEdxsVHitWireAndHitY_phiLt0_RunIINocrct")
hist3D2 = hist3D2.Clone("hist3D2")
hist3D2.Rebin3D(5,5,1)
fitSlicesLandauCore3D(c,hist3D2,"Fit3D_dEdxVWireAndY_phiLt0_RunIINocrct_manyWire")
hist3D3 = fCosmics.Get("primTrkdEdxsVHitWireAndHitY_phiGeq0_RunIINocrct")
hist3D3 = hist3D3.Clone("hist3D3")
hist3D3.Rebin3D(20,1,1)
fitSlicesLandauCore3D(c,hist3D3,"Fit3D_dEdxVWireAndY_phiGeq0_RunIINocrct_manyY")
hist3D4 = fCosmics.Get("primTrkdEdxsVHitWireAndHitY_phiGeq0_RunIINocrct")
hist3D4 = hist3D4.Clone("hist3D4")
hist3D4.Rebin3D(5,5,1)
fitSlicesLandauCore3D(c,hist3D4,"Fit3D_dEdxVWireAndY_phiGeq0_RunIINocrct_manyWire")
hist3D5 = fCosmics.Get("primTrkdQdxsVHitWireAndHitY_phiLt0_RunIINocrct")
hist3D5.Rebin3D(10,2,1)
fitSlicesLandauCore3D(c,hist3D5,"Fit3D_dQdxVWireAndY_phiLt0_RunIINocrct",dQdx=True)
hist3D6 = fCosmics.Get("primTrkdQdxsVrunAndHitX_phiLt0_RunIINocrct")
hist3D6.Rebin3D(10,2,1)
fitSlicesLandauCore3D(c,hist3D6,"Fit3D_dQdxVrunAndX_phiLt0_RunIINocrct",dQdx=True)
hist3D7 = fCosmics.Get("primTrkdEdxsVHitWireAndHitY_phiLt0_RunII")
hist3D7 = hist3D7.Clone("hist3D7")
hist3D7.Rebin3D(20,1,1)
fitSlicesLandauCore3D(c,hist3D7,"Fit3D_dEdxVWireAndY_phiLt0_RunII_manyY")
hist3D8 = fCosmics.Get("primTrkdEdxsVHitWireAndHitY_phiLt0_RunII")
hist3D8 = hist3D8.Clone("hist3D8")
hist3D8.Rebin3D(5,5,1)
fitSlicesLandauCore3D(c,hist3D8,"Fit3D_dEdxVWireAndY_phiLt0_RunII_manyWire")
hist3D100 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiLt0_RunIINocrct")
hist3D100 = hist3D100.Clone("hist3D100")
hist3D100.Rebin3D(5,5,1)
fitSlicesLandauCore3D(c,hist3D100,"Fit3D_dEdxVZAndY_phiLt0_RunIINocrct")
hist3D101 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiLt0_RunII")
hist3D101 = hist3D101.Clone("hist3D101")
hist3D101.Rebin3D(5,5,1)
fitSlicesLandauCore3D(c,hist3D101,"Fit3D_dEdxVZAndY_phiLt0_RunII")
hist3D102 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiGeq0_RunII")
hist3D102 = hist3D102.Clone("hist3D102")
hist3D102.Rebin3D(5,5,1)
fitSlicesLandauCore3D(c,hist3D102,"Fit3D_dEdxVZAndY_phiGeq0_RunII")
hist3D103 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiLt0_RunII")
hist3D103 = hist3D103.Clone("hist3D103")
hist3D103.Rebin3D(2,10,1)
fitSlicesLandauCore3D(c,hist3D103,"Fit3D_dEdxVZAndY_phiLt0_RunII_moreZ")
hist3D104 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiGeq0_RunII")
hist3D104 = hist3D104.Clone("hist3D104")
hist3D104.Rebin3D(2,10,1)
fitSlicesLandauCore3D(c,hist3D104,"Fit3D_dEdxVZAndY_phiGeq0_RunII_moreZ")
hist3D105 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiLt0_RunII")
hist3D105 = hist3D105.Clone("hist3D105")
hist3D105.Rebin3D(10,2,1)
fitSlicesLandauCore3D(c,hist3D105,"Fit3D_dEdxVZAndY_phiLt0_RunII_moreY")
hist3D106 = fCosmics.Get("primTrkdEdxsVHitZAndHitY_phiGeq0_RunII")
hist3D106 = hist3D106.Clone("hist3D106")
hist3D106.Rebin3D(10,2,1)
fitSlicesLandauCore3D(c,hist3D106,"Fit3D_dEdxVZAndY_phiGeq0_RunII_moreY")
sys.exit(0)
nameLists = []
paramLists = []
errorLists = []
paramGausLists = []
errorGausLists = []
fwhmLists = []
for key in fCosmics.GetListOfKeys():
name = key.GetName()
if "primTrkdEdxs_zoom3_phiGeq0" in name:
hist = key.ReadObj()
hist.Rebin(2)
startFit, endFit = getFracMaxVals(hist,0.4)
#####
params, errs, fwhm = fitLandauCore(c,hist,name,name,startFit,endFit,fixedLandauWidth=0.12)
#params, errs, fwhm = fitLandauCore(c,hist,name,name,1.,4.)
#params, errs, fwhm = fitLandauCore(c,hist,name,name,1.4,2.)
nameLists.append(name)
paramLists.append(params)
errorLists.append(errs)
fwhmLists.append(fwhm)
#xMin,xMax = getHistFracMaxVals(hist,0.25)
#params, errs = fitGaussCore(c,hist,name,name,xMin,xMax)
#params, errs, fwhm = fitGaussCore(c,hist,name,name,startFit,endFit)
#paramGausLists.append(params)
#errorGausLists.append(errs)
elif "primTrkdEdxs_zoom3_phiLt0" in name:
pass
elif "primTrkdEdxs_zoom3" in name:
pass
elif "primTrkdQdxs_phiLt0" in name:
hist = key.ReadObj()
hist.Print()
startFit, endFit = getFracMaxVals(hist,0.5)
params, errs, fwhm = fitLandauCore(c,hist,name,name,startFit,endFit,fixedLandauWidth=180,dQdx=True)
elif "primTrkdQdxs_phiGeq0" in name:
hist = key.ReadObj()
hist.Print()
startFit, endFit = getFracMaxVals(hist,0.5)
params, errs, fwhm = fitLandauCore(c,hist,name,name,startFit,endFit,fixedLandauWidth=280,dQdx=True)
elif "primTrkdQdxs" in name:
pass
dataParamsErrs = []
dataFWHMs = []
dataLabels = []
mcSmearingVals = []
mcParams = []
mcErrs = []
fwhmVals = []
for name, params, errors, fwhm in zip(nameLists,paramLists,errorLists,fwhmLists):
printStr = "{:55} ".format(name)
for i in range(len(params)):
printStr += "{:6.3f} +/- {:8.3g} ".format(params[i],errors[i])
printStr += "FWHM: {:6.3f} ".format(fwhm)
print(printStr)
if "RunII" in name:
print("name",name)
dataParamsErrs.append((params,errors))
dataFWHMs.append(fwhm)
match = re.search(r"RunIIP([0-9]+)",name)
if match:
current = match.group(1)
dataLabels.append("Run II + {} Data".format(current))
else:
dataLabels.append("Run II Data")
else:
match = re.match(r".*_presmear(\d+)perc$",name)
if match:
mcParams.append(params)
mcErrs.append(errors)
fwhmVals.append(fwhm)
mcSmearingVals.append(float(match.group(1)))
else:
mcParams.append(params)
mcErrs.append(errors)
fwhmVals.append(fwhm)
mcSmearingVals.append(0.)
try:
import numpy
from matplotlib import pyplot as mpl
mcParams = numpy.array(mcParams)
mcErrs = numpy.array(mcErrs)
fig, ax = mpl.subplots(figsize=(7,7))
for dataLabel, dataParamsErr in zip(dataLabels,dataParamsErrs):
ax.axhspan(dataParamsErr[0][2]-dataParamsErr[1][2],dataParamsErr[0][2]+dataParamsErr[1][2],facecolor='k',edgecolor='k',alpha=0.3)
ax.axhline(dataParamsErr[0][2],c='k')
ax.errorbar(mcSmearingVals,mcParams[:,2],yerr=mcErrs[:,2],fmt=".b")
#ax.set_xlim(-10,50)
ax.set_xlabel("MC Smearing [%]")
ax.set_ylabel("Gaussian $\sigma$ Parameter [MeV/cm]")
for dataLabel, dataParamsErr in zip(dataLabels,dataParamsErrs):
ax.annotate(dataLabel,(45,dataParamsErr[0][2]+0.5*dataParamsErr[1][2]),ha='right',va='center')
fig.savefig("Cosmic_Gaus_Widths.png")
fig.savefig("Cosmic_Gaus_Widths.pdf")
fig, ax = mpl.subplots(figsize=(7,7))
for dataLabel, dataParamsErr in zip(dataLabels,dataParamsErrs):
ax.axhspan(dataParamsErr[0][0]-dataParamsErr[1][0],dataParamsErr[0][0]+dataParamsErr[1][0],facecolor='k',edgecolor='k',alpha=0.3)
ax.axhline(dataParamsErr[0][0],c='k')
ax.errorbar(mcSmearingVals,mcParams[:,0],yerr=mcErrs[:,0],fmt=".b")
#ax.set_xlim(-10,50)
ax.set_xlabel("MC Smearing [%]")
ax.set_ylabel("Landau MPV Parameter [MeV/cm]")
for dataLabel, dataParamsErr in zip(dataLabels,dataParamsErrs):
ax.annotate(dataLabel,(45,dataParamsErr[0][0]+0.5*dataParamsErr[1][0]),ha='right',va='center')
fig.savefig("Cosmic_Gaus_MPV.png")
fig.savefig("Cosmic_Gaus_MPV.pdf")
fig, ax = mpl.subplots(figsize=(7,7))
for dataLabel, dataFWHM in zip(dataLabels,dataFWHMs):
ax.axhline(dataFWHM,c='k',lw=2)
ax.errorbar(mcSmearingVals,fwhmVals,fmt="ob")
#ax.set_xlim(-10,50)
ax.set_xlabel("MC Smearing [%]")
ax.set_ylabel("Full Width Half Max of Fit PDF [MeV/cm]")
for dataLabel, dataFWHM in zip(dataLabels,dataFWHMs):
ax.annotate(dataLabel,(45,dataFWHM),ha='right',va='bottom')
fig.savefig("Cosmic_FWHM.png")
fig.savefig("Cosmic_FWHM.pdf")
except ImportError:
pass
# for logy,xmax,outext,ytitle in [(False,4,"","Normalized--Hits"),(True,50,"_logy","Hits/bin")]:
# c.SetLogy(logy)
#
# plotSlices(c,fCosmics.Get("primTrkdEdxVwire_RunIIP60"),"SlicesWireRunIIP60_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"wire",rebinX=1,xunits="",normalize=not logy)
# plotSlices(c,fCosmics.Get("primTrkdEdxVwire_RunIIP100"),"SlicesWireRunIIP100_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"wire",rebinX=1,xunits="",normalize=not logy)
#
# plotSlices(c,fCosmics.Get("primTrkdEdxsVx_RunII"),"SlicesXRunII_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"x",rebinX=5,xunits="cm",normalize=not logy)
# plotSlices(c,fCosmics.Get("primTrkdEdxsVx_CosmicMC"),"SlicesXCosmicMC"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"x",rebinX=5,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVy_RunII"),"SlicesYRunII_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"y",rebinX=10,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVy_CosmicMC"),"SlicesYCosmicMC"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"y",rebinX=10,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVz_RunII"),"SlicesZRunII_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"z",rebinX=10,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVz_CosmicMC"),"SlicesZ_CosmicMC"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"z",rebinX=10,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVyFromCenter_RunII"),"SlicesYFromCenterRunII_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"|y|",rebinX=8,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVyFromCenter_CosmicMC"),"SlicesYFromCenterCosmicMC"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"|y|",rebinX=8,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVzFromCenter_RunII"),"SlicesZFromCenterRunII_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"z",rebinX=4,xunits="cm",normalize=not logy)
#plotSlices(c,fCosmics.Get("primTrkdEdxsVzFromCenter_CosmicMC"),"SlicesZFromCenter_CosmicMC"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"|z-45cm|",rebinX=8,xunits="cm",normalize=not logy)
#
# plotSlices(c,fCosmics.Get("primTrkdEdxsVrun_RunII"),"SlicesRunRunII_Cosmics"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"Run",rebinX=2,normalize=not logy)
##############################################
# c.SetLogy(False)
# graphConfigs = [
# (fCosmics.Get("primTrkdEdxsVrun_RunII"),"Slices_modefwhm_run_cosmics","Run Number","Mode & FWHM of dE/dx [MeV/cm]"),
# (fCosmics.Get("primTrkdEdxsVx_RunII"),"Slices_modefwhm_x_cosmics","Hit x [cm]","Mode & FWHM of dE/dx [MeV/cm]"),
# (fCosmics.Get("primTrkdEdxsVy_RunII"),"Slices_modefwhm_y_cosmics","Hit y [cm]","Mode & FWHM of dE/dx [MeV/cm]"),
# (fCosmics.Get("primTrkdEdxsVz_RunII"),"Slices_modefwhm_z_cosmics","Hit z [cm]","Mode & FWHM of dE/dx [MeV/cm]"),
# ]
#
# for hist, savename, xtitle, ytitle, in graphConfigs:
# graphMode,graphFWHM = makeGraphsModeAndFWHM(hist)
# axisHist = makeStdAxisHist([graphMode,graphFWHM],ylim=[0,4])
# axisHist.Draw()
# graphMode.Draw("P")
# graphFWHM.Draw("P")
# graphFWHM.SetMarkerColor(COLORLIST[0])
# graphFWHM.SetLineColor(COLORLIST[0])
# setHistTitles(axisHist,xtitle,ytitle)
# c.SaveAs(savename+".png")
# c.SaveAs(savename+".pdf")
#fitSlicesLandaus(c,fCosmics.Get("primTrkdEdxsVy_RunII"))
#fitSlicesLandaus(c,fCosmics.Get("primTrkdEdxsVy_CosmicMC"))
hist = fCosmics.Get("primTrkdQdxsVrun_RunIINocrct")
#graphsdQdxRuns = fitSlicesLandauCore(c,hist,"Run_1_",dQdx=True,fixedLandauWidth=None)
graphsdQdxRuns = fitSlicesLandauCore(c,hist,"dQdxRun_10_",nJump=10,dQdx=True,fixedLandauWidth=None)
hist = fCosmics.Get("primTrkdQdxsVrun_phiGeq0_RunIINocrct")
#graphsdQdxRuns_phiGeq0 = fitSlicesLandauCore(c,hist,"dQdxRun_phiGeq0_1_",dQdx=True,fixedLandauWidth=None)
graphsdQdxRuns_phiGeq0 = fitSlicesLandauCore(c,hist,"dQdxRun_phiGeq0_10_",nJump=10,dQdx=True,fixedLandauWidth=None)
hist = fCosmics.Get("primTrkdQdxsVrun_phiLt0_RunIINocrct")
#graphsdQdxRuns_phiLt0 = fitSlicesLandauCore(c,hist,"dQdxRun_phiLt0_1_",dQdx=True,fixedLandauWidth=None)
graphsdQdxRuns_phiLt0 = fitSlicesLandauCore(c,hist,"dQdxRun_phiLt0_10_",nJump=10,dQdx=True,fixedLandauWidth=None)
graphsdQdxRunsList = [graphsdQdxRuns,graphsdQdxRuns_phiGeq0,graphsdQdxRuns_phiLt0]
compareGraphs(c,"ComparedQdxRuns_MPV",graphsdQdxRunsList,0,"Run Number","Landau MPV [ADC ns / cm]",["All","#phi #geq 0","#phi < 0"])
compareGraphs(c,"ComparedQdxRuns_Sigma",graphsdQdxRunsList,2,"Run Number","Gaussian Sigma [ADC ns / cm]",["All","#phi #geq 0","#phi < 0"])
hist = fCosmics.Get("primTrkdQdxVwire_RunIINocrct")
#graphsdQdxWires = fitSlicesLandauCore(c,hist,"dQdxWire_1_",dQdx=True,fixedLandauWidth=None)
graphsdQdxWires = fitSlicesLandauCore(c,hist,"dQdxWire_8_",nJump=8,dQdx=True,fixedLandauWidth=None)
hist = fCosmics.Get("primTrkdQdxVwire_phiGeq0_RunIINocrct")
#graphsdQdxWires_phiGeq0 = fitSlicesLandauCore(c,hist,"dQdxWire_phiGeq0_1_",dQdx=True,fixedLandauWidth=None)
graphsdQdxWires_phiGeq0 = fitSlicesLandauCore(c,hist,"dQdxWire_phiGeq0_8_",nJump=8,dQdx=True,fixedLandauWidth=None)
hist = fCosmics.Get("primTrkdQdxVwire_phiLt0_RunIINocrct")
#graphsdQdxWires_phiLt0 = fitSlicesLandauCore(c,hist,"dQdxWire_phiLt0_1_",dQdx=True,fixedLandauWidth=None)
graphsdQdxWires_phiLt0 = fitSlicesLandauCore(c,hist,"dQdxWire_phiLt0_8_",nJump=8,dQdx=True,fixedLandauWidth=None)
graphsdQdxWiresList = [graphsdQdxWires,graphsdQdxWires_phiGeq0,graphsdQdxWires_phiLt0]
compareGraphs(c,"ComparedQdxWires_MPV",graphsdQdxWiresList,0,"Wire Number","Landau MPV [ADC ns / cm]",["All","#phi #geq 0","#phi < 0"])
compareGraphs(c,"ComparedQdxWires_Sigma",graphsdQdxWiresList,2,"Wire Number","Gaussian Sigma [ADC ns / cm]",["All","#phi #geq 0","#phi < 0"])
#################################################
hist = fCosmics.Get("primTrkdEdxsVrun_RunIINocrct")
#graphsRuns = fitSlicesLandauCore(c,hist,"Run_1_")
graphsRuns = fitSlicesLandauCore(c,hist,"Run_10_",nJump=10)
hist = fCosmics.Get("primTrkdEdxsVrun_phiGeq0_RunIINocrct")
#graphsRuns_phiGeq0 = fitSlicesLandauCore(c,hist,"Run_phiGeq0_1_")
graphsRuns_phiGeq0 = fitSlicesLandauCore(c,hist,"Run_phiGeq0_10_",nJump=10)
hist = fCosmics.Get("primTrkdEdxsVrun_phiLt0_RunIINocrct")
#graphsRuns_phiLt0 = fitSlicesLandauCore(c,hist,"Run_phiLt0_1_")
graphsRuns_phiLt0 = fitSlicesLandauCore(c,hist,"Run_phiLt0_10_",nJump=10)
graphsRunsList = [graphsRuns,graphsRuns_phiGeq0,graphsRuns_phiLt0]
compareGraphs(c,"CompareRuns_MPV",graphsRunsList,0,"Run Number","Landau MPV [MeV/cm]",["All","#phi #geq 0","#phi < 0"])
compareGraphs(c,"CompareRuns_Sigma",graphsRunsList,2,"Run Number","Gaussian Sigma [MeV/cm]",["All","#phi #geq 0","#phi < 0"])
hist = fCosmics.Get("primTrkdEdxVwire_RunIINocrct")
#graphsWires = fitSlicesLandauCore(c,hist,"Wire_1_")
graphsWires = fitSlicesLandauCore(c,hist,"Wire_8_",nJump=8)
hist = fCosmics.Get("primTrkdEdxVwire_phiGeq0_RunIINocrct")
#graphsWires_phiGeq0 = fitSlicesLandauCore(c,hist,"Wire_phiGeq0_1_")
graphsWires_phiGeq0 = fitSlicesLandauCore(c,hist,"Wire_phiGeq0_8_",nJump=8)
hist = fCosmics.Get("primTrkdEdxVwire_phiLt0_RunIINocrct")
#graphsWires_phiLt0 = fitSlicesLandauCore(c,hist,"Wire_phiLt0_1_")
graphsWires_phiLt0 = fitSlicesLandauCore(c,hist,"Wire_phiLt0_8_",nJump=8)
graphsWiresList = [graphsWires,graphsWires_phiGeq0,graphsWires_phiLt0]
compareGraphs(c,"CompareWires_MPV",graphsWiresList,0,"Wire Number","Landau MPV [MeV/cm]",["All","#phi #geq 0","#phi < 0"])
compareGraphs(c,"CompareWires_Sigma",graphsWiresList,2,"Wire Number","Gaussian Sigma [MeV/cm]",["All","#phi #geq 0","#phi < 0"])
hist = fCosmics.Get("primTrkdEdxsVx_phiLt0_RunIINocrct")
graphsX_phiLt0 = fitSlicesLandauCore(c,hist,"X_phiLt0_1_")
#graphsX_phiLt0 = fitSlicesLandauCore(c,hist,"X_phiLt0_2_",nJump=2)
hist = fCosmics.Get("primTrkdEdxsVy_phiLt0_RunIINocrct")
graphsY_phiLt0 = fitSlicesLandauCore(c,hist,"Y_phiLt0_1_")
#graphsY_phiLt0 = fitSlicesLandauCore(c,hist,"Y_phiLt0_2_",nJump=2)
hist = fCosmics.Get("primTrkdEdxsVz_phiLt0_RunIINocrct")
graphsZ_phiLt0 = fitSlicesLandauCore(c,hist,"Z_phiLt0_1_")
#graphsZ_phiLt0 = fitSlicesLandauCore(c,hist,"Z_phiLt0_5_",nJump=5)
# hist = fCosmics.Get("primTrkdQdxVwire_RunII")
# graphsdQdxWires = fitSlicesLandauCore(c,hist,"dQdxWire_8_",nJump=8)
#
# hist = fCosmics.Get("primTrkdQdxVwire_phiGeq0_RunII")
# graphsdQdxWires_phiGeq0 = fitSlicesLandauCore(c,hist,"dQdxWire_phiGeq0_8_",nJump=8)
#
# hist = fCosmics.Get("primTrkdQdxVwire_phiLt0_RunII")
# graphsdQdxWires_phiLt0 = fitSlicesLandauCore(c,hist,"dQdxWire_phiLt0_8_",nJump=8)
#
# graphsdQdxWiresList = [graphsdQdxWires,graphsdQdxWires_phiGeq0,graphsdQdxWires_phiLt0]
# compareGraphs(c,"ComparedQdxWires_MPV",graphsdQdxWiresList,0,"Wire Number","dQ/dx Landau MPV [MeV/cm]",["All","#phi #geq 0","#phi < 0"])
# compareGraphs(c,"ComparedQdxWires_Sigma",graphsdQdxWiresList,2,"Wire Number","dQ/dx Gaussian Sigma [MeV/cm]",["All","#phi #geq 0","#phi < 0"])
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,898 | jhugon/lariatPionAbs | refs/heads/master | /lookAtMonicaLifetime.py | #!/usr/bin/env python
import re
import ROOT
def getLifetimeGraphs(scaleFactor=1.):
graph = ROOT.TGraphAsymmErrors()
iPoint = 0
with open("ZoomLifetime_Run2_v05_01_01.txt") as f:
for line in f:
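# Each data line is expected to carry 13 whitespace-separated numbers;
# only the first five are used: first run, last run, lifetime value,
# and its low/high errors.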
reString = r"([-+.0-9]+)\s+"*13
match = re.match(reString,line)
if match:
firstRun = int(match.group(1))
lastRun = int(match.group(2))
value = float(match.group(3))*scaleFactor
errLow = float(match.group(4))*scaleFactor
errHigh = float(match.group(5))*scaleFactor
middleRun = 0.5*(firstRun+lastRun)
graph.SetPoint(iPoint,middleRun,value)
graph.SetPointEYhigh(iPoint,errHigh)
graph.SetPointEYlow(iPoint,errLow)
graph.SetPointEXhigh(iPoint,0.5*(lastRun-firstRun))
graph.SetPointEXlow(iPoint,0.5*(lastRun-firstRun))
iPoint+=1
return graph
if __name__ == "__main__":
from helpers import *
graph = getLifetimeGraphs()
c = ROOT.TCanvas()
axisHist = drawGraphs(c,[graph],"Run Number","Electron Lifetime",ylims=[0,2000])
c.SaveAs("MonicaElectonLifetime.png")
c.SaveAs("MonicaElectonLifetime.pdf")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,899 | jhugon/lariatPionAbs | refs/heads/master | /slicesIso.py | #!/usr/bin/env python2
import ROOT as root
from ROOT import gStyle as gStyle
root.gROOT.SetBatch(True)
from helpers import *
from fitCosmicHalo import *
if __name__ == "__main__":
c = root.TCanvas("c")
f = root.TFile("unifiso_hists.root")
f.ls()
#for logy,xmax,outext,ytitle in [(False,4,"","Normalized--Hits")]:#,(True,10,"_logy","Hits/bin")]:
# c.SetLogy(logy)
# plotSlices(c,f.Get("primTrkdEdxsVtrueStartTheta_UniformIsoMuon"),"Slices_trueStartTheta_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"#theta",rebinX=5,rebinY=10,xunits="deg",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVtrueStartThetaY_UniformIsoMuon"),"Slices_trueStartThetaY_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"#theta_{y}",rebinX=5,rebinY=10,xunits="deg",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVtrueStartThetaX_UniformIsoMuon"),"Slices_trueStartThetaX_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"#theta_{x}",rebinX=5,rebinY=10,xunits="deg",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVtrueStartPhi_UniformIsoMuon"),"Slices_trueStartPhi_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"#phi",rebinX=5,rebinY=5,xunits="deg",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVtrueStartPhiZX_UniformIsoMuon"),"Slices_trueStartPhiZX_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"#phi_{zx}",rebinX=5,rebinY=10,xunits="deg",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVtrueStartPhiZY_UniformIsoMuon"),"Slices_trueStartPhiZY_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"#phi_{xy}",rebinX=5,rebinY=10,xunits="deg",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVx_UniformIsoMuon"),"Slices_x_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"x",rebinX=5,rebinY=10,xunits="cm",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVy_UniformIsoMuon"),"Slices_y_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"y",rebinX=5,rebinY=10,xunits="cm",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVz_UniformIsoMuon"),"Slices_z_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"z",rebinX=5,rebinY=10,xunits="cm",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsVprimTrkPitches_UniformIsoMuon"),"Slices_primTrkPitches_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"pitch",rebinY=20,xunits="cm",normalize=not logy)
# plotSlices(c,f.Get("primTrkdEdxsV1OprimTrkPitches_UniformIsoMuon"),"Slices_1OprimTrkPitches_UniformIsoMuon"+outext,[0,xmax],"dE/dx [MeV/cm]",ytitle,"pitch^{-1}",rebinX=10,rebinY=20,xunits="cm^{-1}",normalize=not logy)
##############################################
c.SetLogy(False)
# graphConfigs = [
# (f.Get("primTrkdEdxsVtrueStartPhiZY_UniformIsoMuon"),"Slices_trueStartPhiZY_fit","#phi_{zy}","Fit of dE/dx [MeV/cm]"),
# (f.Get("primTrkdEdxsVtrueStartPhiZX_UniformIsoMuon"),"Slices_trueStartPhiZX_fit","#phi_{zx}","Fit of dE/dx [MeV/cm]"),
# ]
#
# for hist, savename, xtitle, ytitle, in graphConfigs:
# graphMode,graphFWHM = makeGraphsModeAndFWHM(hist)
# axisHist = makeStdAxisHist([graphMode,graphFWHM],ylim=[0,4])
# axisHist.Draw()
# graphMode.Draw("P")
# graphFWHM.Draw("P")
# graphFWHM.SetMarkerColor(COLORLIST[0])
# graphFWHM.SetLineColor(COLORLIST[0])
# setHistTitles(axisHist,xtitle,ytitle)
# c.SaveAs(savename+".png")
# c.SaveAs(savename+".pdf")
fitSlicesLandauCore(c,f.Get("primTrkdEdxsVtrueStartPhiZY_UniformIsoMuon").Rebin2D(5,20,"newPhiZy"),"Fits_trueStartPhiZY_UniformIsoMuon_")
fitSlicesLandauCore(c,f.Get("primTrkdEdxsVtrueStartPhiZX_UniformIsoMuon").Rebin2D(5,1,"newPhiZx"),"Fits_trueStartPhiZX_UniformIsoMuon_")
fitSlicesLandauCore(c,f.Get("primTrkdEdxsVtrueStartPhi_UniformIsoMuon").Rebin2D(5,1,"newPhiXY"),"Fits_trueStartPhi_UniformIsoMuon_")
fitSlicesLandauCore(c,f.Get("primTrkdEdxsVtrueStartCosThetaX_UniformIsoMuon").Rebin2D(5,2,"newCosThX"),"Fits_trueStartCosThetaX_UniformIsoMuon_")
fitSlicesLandauCore(c,f.Get("primTrkdEdxsVtrueStartCosThetaY_UniformIsoMuon").Rebin2D(5,2,"newCosThY"),"Fits_trueStartCosThetaY_UniformIsoMuon_")
fitSlicesLandauCore(c,f.Get("primTrkdEdxsVtrueStartCosTheta_UniformIsoMuon").Rebin2D(5,2,"newCosTh"),"Fits_trueStartCosTheta_UniformIsoMuon_")
# c.SetLogx(True)
# fitSlicesLandauCore(c,f.Get("primTrkdEdxsVprimTrkPitches_UniformIsoMuon").Rebin2D(1,10,"newPitches"),"Fits_primTrkPitches_UniformIsoMuon_")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,900 | jhugon/lariatPionAbs | refs/heads/master | /bethe.py | #!/usr/bin/env python2
#import ROOT as root
#from ROOT import gStyle as gStyle
#root.gROOT.SetBatch(True)
import math
import numpy
from matplotlib import pyplot as plt
MUONMASS = 105.6583715 # MeV/c^2
ELECTRONMASS = 0.510998928 # MeV/c^2
PIONMASS = 139.57018 # MeV/c^2
PROTONMASS = 938.272046 # MeV/c^2
KAONMASS = 493.677 # MeV/c^2
class MuonTable(object):
def __init__(self,fn="muE_liquid_argon.txt"):
self.rho = 1.396 #g/cm^3
self.Ts = []
self.dEdxs = []
self.ranges = []
with open(fn) as infile:
for line in infile:
if line[0] == '#':
continue
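# Fixed-width parse, assuming the PDG-style muon table layout of
# 10-character columns: column 0 is kinetic energy T [MeV], column 7 is
# dE/dx [MeV cm^2/g] (converted to MeV/cm via *rho), and column 8 is the
# CSDA range [g/cm^2] (converted to cm via /rho).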
T = line[10*0+1:10*0+11]
dEdx = line[10*7+1:10*7+11]
r = line[10*8+1:10*8+11]
T = float(T)
dEdx = float(dEdx)*self.rho
r = float(r)/self.rho # CSDA range in the table is assumed to be in g/cm^2, so divide by rho to get cm
self.Ts.append(T)
self.dEdxs.append(dEdx)
self.ranges.append(r)
def dEdx(self,ke):
"""
Given a kinetic energy in MeV
returns mean dEdx in MeV/cm
"""
return numpy.interp(ke,self.Ts,self.dEdxs,left=float('nan'),right=float('nan'))
def rangeCSDA(self,ke):
"""
Given a kinetic energy in MeV
returns CSDA range in cm
"""
return numpy.interp(ke,self.Ts,self.ranges,left=float('nan'),right=float('nan'))
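# Minimal usage sketch for MuonTable (not called anywhere): assumes the
# tabulated file "muE_liquid_argon.txt" is present; the energy below is
# illustrative only. numpy.interp returns nan outside the tabulated range.
def _demo_muon_table():
mt = MuonTable()
ke = 300.0 # kinetic energy in MeV
print "dE/dx at {} MeV: {} MeV/cm".format(ke, mt.dEdx(ke))
print "CSDA range at {} MeV: {} cm".format(ke, mt.rangeCSDA(ke))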
class Bethe(object):
def __init__(self):
"""
Setup everything for liquid argon
"""
self.K = 0.307075 # MeV/mol cm^2
self.Z = 18
self.A = 39.948 #g/mol
self.rho = 1.396 #g/cm^3
self.z = 1
self.hbarw = 22.85*1e-6 # MeV
self.I = 188.0*1e-6 # MeV
self.mec2 = 0.510999 # MeV/c^2 (electron rest mass)
self.j = 0.200
self.a = 0.19559
self.k = 3.
self.x0 = 0.2000
self.x1 = 3.0000
self.Cbar = 5.2146
self.delta0 = 0.
if False: # silicon
self.Z = 14
self.A = 28.0855
self.rho = 2.329
self.hbarw = 31.05*1e-6 # MeV
self.I = 173.0*1e-6 # MeV
self.a = 0.14921
self.k = 3.2546
self.x0 = 0.2015
self.x1 = 2.8716
self.Cbar = 4.4355
self.delta0 = 0.14
def stoppingPower(self, l, momentum, Mparticle):
"""
average -dE/dx in MeV cm^2 / g
"""
energy, gamma, beta = self.getEnergyGammaBeta(momentum,Mparticle)
Wmax = self.Wmax(beta,gamma,Mparticle)
delta = self.delta(momentum,Mparticle)
term1 = 0.5*math.log((2*self.mec2*beta**2*gamma**2*Wmax)/(self.I**2))
term2 = - beta**2
term3 = - 0.5 * delta
result = term1 + term2 + term3
result *= self.K*(self.z)**2 * self.Z / self.A / (beta**2) # now MeV cm^2 /g
return result
def mean(self, l, momentum, Mparticle):
"""
average energy deposited in MeV for l in cm
"""
result = self.stoppingPower(l,momentum, Mparticle) # in MeV cm^2 / g
result *= self.rho # now in MeV/cm
result *= l # now in MeV
return result
def mpv(self, l, momentum, Mparticle):
"""
Most probable energy deposition in MeV
"""
energy, gamma, beta = self.getEnergyGammaBeta(momentum,Mparticle)
xi = self.xi(beta,l)
delta = self.delta(momentum,Mparticle)
term1 = math.log(2*self.mec2*beta**2*gamma**2/self.I)
term2 = math.log(xi/self.I)
result = term1 + term2 + self.j - beta**2 - delta
result *= xi
#result *= 1/(l*self.rho)# now in MeV cm^2 / g
return result
def width(self, l, momentum, Mparticle):
"""
Landau width in MeV
beta = v/c
l in cm
"""
energy, gamma, beta = self.getEnergyGammaBeta(momentum,Mparticle)
return 4*self.xi(beta,l)
def Wmax(self,beta,gamma,Mparticle):
"""
Mparticle in MeV/c^2
"""
num = 2*self.mec2*beta**2*gamma**2
denom = 1 + 2*gamma*self.mec2/Mparticle + (self.mec2/Mparticle)**2
result = num/denom
return result
def xi(self,beta,l):
"""
beta = v/c
l in cm
Result in MeV
"""
x = self.rho * l
result = 0.5*self.K*self.Z/self.A*(self.z)**2*x/beta**2
return result
def delta(self,momentum,Mparticle):
"""
delta(beta*Gamma)
Argument is beta * gamma
"""
x = math.log10(momentum/Mparticle)
if x >= self.x1:
return 2*math.log(10)*x - self.Cbar
if x < self.x1 and x >= self.x0:
return 2*math.log(10)*x - self.Cbar + self.a*(self.x1-x)**self.k
if x < self.x0:
return self.delta0*10**(2*(x-self.x0))
raise Exception("Shouldn't have gotten past if statments")
def getEnergyGammaBeta(self,momentum,mass):
energy = math.sqrt(momentum**2+mass**2)
gamma = energy / mass
beta = momentum / energy
return energy, gamma, beta
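# Minimal sketch (not called anywhere) of the most-probable dE/dx for a 1 cm
# muon segment; the momenta are illustrative assumptions. width() is the
# Landau width 4*xi in MeV, so width/l is in MeV/cm like the MPV.
def _demo_bethe_mpv():
b = Bethe()
l = 1.0 # segment length in cm
for p in (300., 500., 1000.): # momenta in MeV/c
mpv = b.mpv(l, p, MUONMASS) # most probable energy deposit in MeV
w = b.width(l, p, MUONMASS)
print "p = {:6.0f} MeV/c  MPV/l = {:.3f} MeV/cm  width/l = {:.3f} MeV/cm".format(p, mpv/l, w/l)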
if __name__ == "__main__":
fig, ax = plt.subplots()
b = Bethe()
mt = MuonTable()
wire_spacing = 0.4 #cm
l = 1.0
fig.text(0.7,0.91,"Liquid Argon, $\ell$ = {:.2f} cm".format(l),ha='right',va='bottom')
print b.mean(1,357,MUONMASS), b.mpv(1,357,MUONMASS), b.width(1,357,MUONMASS)
print b.mean(1,500,PROTONMASS), b.mpv(1,500,PROTONMASS), b.width(1,500,PROTONMASS)
masses = [MUONMASS, PIONMASS, KAONMASS, PROTONMASS]
labels = [r"$\mu^\pm$",r"$\pi^\pm$","$K^\pm$","p"]
colors = ["b","g","k","r"]
ax.cla()
for mass,label,color in zip(masses,labels,colors):
momentas = numpy.linspace(30,1000,200)
energies = numpy.sqrt(momentas**2+mass**2)
kes = energies - mass
means = numpy.array([b.mean(l,m,mass) for m in momentas])
mpvs = numpy.array([b.mpv(l,m,mass) for m in momentas])
widths = numpy.array([b.width(l,m,mass) for m in momentas])
distlows = mpvs-0.5*widths
disthighs = mpvs+0.5*widths
tableVals = mt.dEdx(kes)*l
ax.fill_between(momentas,distlows/l,disthighs/l,edgecolor="",facecolor=color,alpha=0.4)
ax.plot(momentas,means/l,color+"--")
ax.plot(momentas,mpvs/l,color+"-",label=label)
ax.legend(loc="best")
ax.set_xlabel("Momentum [MeV/c]")
ax.set_ylabel("$\Delta/x$ [MeV/cm]")
ax.set_xlim(0,1000)
ax.set_ylim(0,30)
fig.savefig("BetheMomentum.png")
fig.savefig("BetheMomentum.pdf")
ax.set_ylim(1,5)
fig.savefig("BetheMomentum_Zoom.png")
fig.savefig("BetheMomentum_Zoom.pdf")
ax.cla()
for mass,label,color in zip(masses,labels,colors):
momentas = numpy.linspace(30,2000,400)
energies = numpy.sqrt(momentas**2+mass**2)
kes = energies - mass
means = numpy.array([b.mean(l,m,mass) for m in momentas])
mpvs = numpy.array([b.mpv(l,m,mass) for m in momentas])
widths = numpy.array([b.width(l,m,mass) for m in momentas])
distlows = mpvs-0.5*widths
disthighs = mpvs+0.5*widths
tableVals = mt.dEdx(kes)*l
ax.fill_between(kes,distlows/l,disthighs/l,edgecolor="",facecolor=color,alpha=0.4)
ax.plot(kes,means/l,color+"--")
ax.plot(kes,mpvs/l,color+"-",label=label)
ax.legend(loc="best")
ax.set_xlabel("Kinetic Energy [MeV]")
ax.set_ylabel("$\Delta/x$ [MeV/cm]")
ax.set_xlim(0,500)
ax.set_ylim(0,30)
fig.savefig("BetheKE.png")
fig.savefig("BetheKE.pdf")
ax.set_ylim(1,5)
fig.savefig("BetheKE_Zoom.png")
fig.savefig("BetheKE_Zoom.pdf")
m = 800.
mass = PIONMASS
color = 'b'
fig, ax = plt.subplots()
fig.suptitle(r"$\pi^\pm$ on Liquid Argon, p = {0:.0f} MeV/c, KE = {1:.0f} MeV".format(m,numpy.sqrt(m**2+mass**2)-mass))
lengths = numpy.logspace(-2,1)
mpvs = numpy.array([b.mpv(l,m,mass) for l in lengths])
widths = numpy.array([b.width(l,m,mass) for l in lengths])
distlows = mpvs-0.5*widths
disthighs = mpvs+0.5*widths
ax.fill_between(lengths,distlows/lengths,disthighs/lengths,edgecolor="",facecolor=color,alpha=0.4)
ax.plot(lengths,mpvs/lengths,color+"-",label=r"$\pi^\pm$")
ax.set_xlabel("$\ell$ [cm]")
ax.set_ylabel("$\Delta/x$ [MeV/cm]")
ax.set_xlim(0.4,5)
ax.set_ylim(1.,2.4)
fig.savefig("BetheL.png")
fig.savefig("BetheL.pdf")
ax.cla()
fig.text(0.15,0.94,"Wire Spacing = {:.1f} cm".format(wire_spacing),ha='left',va='top')
#angles = numpy.linspace(0,90,300)
angles = numpy.logspace(-2,2,100)
lengths = wire_spacing/numpy.sin(angles*numpy.pi/180.)
mpvs = numpy.array([b.mpv(l,m,mass) for l in lengths])
widths = numpy.array([b.width(l,m,mass) for l in lengths])
distlows = mpvs-0.5*widths
disthighs = mpvs+0.5*widths
ax.fill_between(angles,distlows/lengths,disthighs/lengths,edgecolor="",facecolor=color,alpha=0.4)
ax.plot(angles,(mpvs/lengths),color+"-",label=r"$\pi^\pm$")
ax.set_xlabel(r"$\theta$ w.r.t wire direction in wire plane (0 is $\|\|$ to wire) [deg]")
ax.set_ylabel("$\Delta/x$ [MeV/cm]")
ax.set_xlim(0.,90.)
ax.set_ylim(1.,2.4)
fig.savefig("BetheAngle.png")
fig.savefig("BetheAngle.pdf")
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |
76,901 | jhugon/lariatPionAbs | refs/heads/master | /sliceUpDataset.py | #!/usr/bin/env python
import random
import sys
import re
import os
import time
import subprocess
import string
class FakeSAM(object):
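"""
Stand-in for SAM used for offline testing: generates a random set of runs
in [first, last] with a random file count per run, and mimics the count()
interface of TalkToSAM so MakeSubDatasets.run can be exercised without
talking to the real database.
"""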
def __init__(self,first=8000,last=10227):
self.data = self.genRandomNFiles(self.genRandomNums(first,last))
def genRandomNums(self,first,last):
assert(first < last)
assert((last - first) > 1000)
result = set()
for i in range(20):
center = random.randint(first+10,last-10)
available = xrange(center - 10, center + 10)
samples = random.sample(available,10)
for sample in samples:
if not (sample in result):
result.add(sample)
result = list(result)
result.sort()
return result
def genRandomNFiles(self,runList):
result = []
for run in runList:
result.append([run,random.randint(5,500)])
return result
def count(self,firstRun,lastRun):
result = 0
print "Runs: {} {}".format(firstRun,lastRun)
for iRun in range(len(self.data)):
run = self.data[iRun][0]
if run >= firstRun and run < lastRun:
nfiles = self.data[iRun][1]
result += nfiles
print "nFiles: {}".format(result)
return result
def __str__(self):
result = "FakeSAM\n"
for datum in self.data:
result += "Run: {0:6} Files: {1:5}\n".format(*datum)
return result
class TalkToSAM(object):
def __init__(self,basedefname,pause_time=120,nTries=5):
"""
basedefname is the definition name to start from
pause_time is number of seconds to pause after running a samweb command
nTries is the number of tries to run a samweb command before raising an exception
"""
self.basedefname = basedefname
self.pause_time = pause_time
self.nTries = nTries
def call(self,commandlist):
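"""
Run a samweb command with retries: try up to self.nTries times, sleep
self.pause_time seconds after every attempt (success or failure), and
return the command's stdout, or None if every attempt failed.
"""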
result = None
pause_time = self.pause_time
for iTry in range(self.nTries):
try:
result = subprocess.check_output(commandlist)
except Exception as e:
print "Error: '{}' running: check_output on {}".format(e,commandlist)
else: # if no exception
break
finally: #always
time.sleep(pause_time)
#pause_time *= 2
return result
def count(self,firstRun,lastRun,onlyPrint=False):
command = ["samweb","count-files","defname:", self.basedefname, "and", "run_number", ">=", str(firstRun), "and", "run_number", "<", str(lastRun)]
print " ".join(command)
sys.stdout.flush()
result = None
if not onlyPrint:
result = self.call(command)
if result is None:
raise Exception("Couldn't count files")
result = int(result)
print "nFiles: {}".format(result)
sys.stdout.flush()
return result
def createDefinition(self,sub_name,firstRun,lastRun,prefix="",suffix="_v1",onlyPrint=False,stripVersion=False):
match = re.match(r"(.+)_v\d+",self.basedefname)
new_basedefname = self.basedefname
if match:
new_basedefname = match.group(1)
newname = "{}{}_{}{}".format(prefix,new_basedefname,sub_name,suffix)
command = ["samweb","create-definition",newname,"defname:", self.basedefname, "and", "run_number", ">=", str(firstRun), "and", "run_number", "<", str(lastRun)]
print " ".join(command)
if not onlyPrint:
call_result = self.call(command)
if call_result is None:
raise Exception("Couldn't create definition")
class MakeSubDatasets(object):
def __init__(self,first,last,nFilesPerSet=5000,nFilesPerSetError=100):
assert(first < last)
self.firstRuns = []
self.lastRuns = []
self.nFiles = []
self.first = first
self.last = last
self.nFilesPerSet = nFilesPerSet
self.nFilesPerSetError = nFilesPerSetError
def __str__(self):
result = "MakeSubDatasets(first={},last={},nFilesPerSet={},nFilesPerSetError={})\n".format(self.first,self.last,self.nFilesPerSet,self.nFilesPerSetError)
for f,l,n in zip(self.firstRuns,self.lastRuns,self.nFiles):
runsStr = "{}-{}".format(f,l)
result += " Runs {:12}: {:5} files\n".format(runsStr,n)
return result
def run(self,countFunc):
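"""
Greedily partition [self.first, self.last) into consecutive run ranges of
roughly nFilesPerSet files each, using countFunc(firstRun, lastRun) to
query file counts; whatever remains at the end becomes the final range.
"""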
totalCount = countFunc(self.first,self.last)
if totalCount == 0:
raise Exception("No files found between:",self.first,self.last)
while True:
firstRun = self.first
if len(self.lastRuns) > 0:
firstRun = self.lastRuns[-1]
if firstRun == self.last:
break
print "\nWorking on dataset starting at {}".format(firstRun)
sys.stdout.flush()
allcount = countFunc(firstRun,self.last)
if allcount <= (self.nFilesPerSet - self.nFilesPerSetError):
self.firstRuns.append(firstRun)
self.lastRuns.append(self.last)
self.nFiles.append(allcount)
break
thislast, thiscount = self.binomial(countFunc,firstRun,firstRun,self.last)
assert(countFunc(firstRun,thislast) == thiscount)
if thiscount == 0:
break
self.firstRuns.append(firstRun)
self.lastRuns.append(thislast)
self.nFiles.append(thiscount)
setsTotalCount = 0
for n in self.nFiles:
setsTotalCount += n
assert(setsTotalCount == totalCount)
def binomial(self,countFunc,firstRun,lastmin,lastmax):
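"""
A bisection search on the exclusive upper run bound: narrow
[lastmin, lastmax] until the file count of [firstRun, lastmid) is within
nFilesPerSetError of nFilesPerSet, or the interval collapses; returns
(lastRun, count) for the chosen boundary.
"""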
lastmid = int(0.5*(lastmin + lastmax))
count = countFunc(firstRun,lastmid)
difference = count - self.nFilesPerSet
#print "binomial: firstRun: {} lastmin: {} lastmid {} lastmax {} countmid {}".format(firstRun,lastmin,lastmid,lastmax,count)
if abs(difference) <= self.nFilesPerSetError:
return lastmid, count
elif lastmax == lastmin: # give up, step is too big
return lastmax, count
elif difference > 0:
return self.binomial(countFunc,firstRun,lastmin,lastmid-1)
else:
return self.binomial(countFunc,firstRun,lastmid,lastmax+1)
def printDefinitions(self,tts):
print ""
print "New Definitions:"
for f,l,n,a in zip(self.firstRuns,self.lastRuns,self.nFiles,string.ascii_lowercase[:len(self.firstRuns)]):
tts.createDefinition(a,f,l,onlyPrint=True)
sys.stdout.flush()
if __name__ == "__main__":
sam = TalkToSAM("Lovely1_Neg_RunII_jhugon_current60_secondary64_v1",pause_time=60,nTries=2)
#sam.count(9000,12000)
#sam.createDefinition("test1",9000,12000)
#fakesam = FakeSAM()
msd = MakeSubDatasets(8000,10227)
msd.run(sam.count)
#msd.run(fakesam.count)
print msd
msd.printDefinitions(sam)
| {"/slicesIso.py": ["/fitCosmicHalo.py"], "/plotCosmics.py": ["/lookAtMonicaLifetime.py"]} |