file_name large_stringlengths 4 140 | prefix large_stringlengths 0 12.1k | suffix large_stringlengths 0 12k | middle large_stringlengths 0 7.51k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
lib.rs |
fn default_layer(v: &i32, branch_factor: u16) -> u8 {
let mut layer = 0;
let mut v = *v;
if branch_factor == 16 {
while v != 0 && v & 0xf == 0 {
v >>= 4;
layer += 1
}
} else {
while v != 0 && v % branch_factor as i32 == 0 {
v /= branch_factor as i32;
layer += 1;
}
}
return layer;
}
impl<'a> Mast<'a> {
pub fn newInMemory() -> Mast<'a> {
return Mast {
size: 0,
height: 0,
root_link: Link::MutableNode(Node::new(default_branch_factor as usize), None),
branch_factor: default_branch_factor,
grow_after_size: default_branch_factor as u64,
shrink_below_size: 1,
key_order: default_order,
key_layer: default_layer,
_a: std::marker::PhantomData,
// store: InMemoryNodeStore::new(),
};
}
fn insert(&mut self, key: i32, value: i32) -> Result<InsertResult, MastError> {
let key_layer = (self.key_layer)(&key, self.branch_factor);
let target_layer = std::cmp::min(key_layer, self.height);
let distance = self.height - target_layer;
let root = load_mut(&mut self.root_link)?;
let res = root.insert(key, value, distance, self.key_order)?;
match res {
InsertResult::Inserted => self.size += 1,
_ => return Ok(res),
};
if self.size > self.grow_after_size
&& root.can_grow(self.height, self.key_layer, self.branch_factor)
{
self.root_link = root
.grow(self.height, self.key_layer, self.branch_factor)
.unwrap();
self.height += 1;
self.shrink_below_size *= self.branch_factor as u64;
self.grow_after_size *= self.branch_factor as u64;
};
Ok(res)
}
fn get(&self, key: &i32) -> Result<Option<&i32>, MastError> {
let mut distance =
self.height - std::cmp::min((self.key_layer)(key, self.branch_factor), self.height);
if distance < 0 { panic!("goo") };
let mut node = load(&self.root_link)?;
loop {
let (equal, i) = get_index_for_key(key, &node.key, self.key_order);
if distance == 0 {
if equal {
return Ok(Some(&node.value[i]));
} else {
return Ok(None);
}
} else {
distance -= 1
}
match node.link[i] {
None => return Ok(None),
Some(ref link) => node = load(link)?,
}
}
}
}
fn load(link: &Link) -> Result<&Node, MastError> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref node, _) => Ok(node),
Link::SharedNode(ref rc) => Ok(rc),
Link::Stored(_) => unimplemented!("Link::Stored"),
}
}
fn load_mut(link: &mut Link) -> Result<&mut Node, MastError> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref mut node, _) => Ok(node),
Link::SharedNode(ref mut rc) => {
let mutable = Rc::make_mut(rc).to_owned();
*link = Link::MutableNode(mutable, Some(rc.clone()));
if let Link::MutableNode(ref mut scopey, _) = link {
Ok(scopey)
} else {
panic!("asdf")
}
}
Link::Stored(_) => unimplemented!("Link::Stored"),
}
}
// struct NodeAndSlot<'a>(&'a mut Node<'a>, usize);
/*
struct FindOptions<'a> {
mast: &'a mut Mast<'a>,
target_layer: u8,
current_height: u8,
create_missing_nodes: bool,
node_path: Vec<&'a mut Node>,
link_path: Vec<usize>,
}
*/
impl Node {
fn new(branch_factor: usize) -> Node {
let mut link = Vec::with_capacity(branch_factor + 1);
link.push(None);
Node {
key: Vec::with_capacity(branch_factor),
value: Vec::with_capacity(branch_factor),
link,
dirty: false,
}
}
/*
fn follow(
&'a mut self,
index: usize,
create_ok: bool,
m: &'a mut Mast<'a>,
) -> std::result::Result<&'a mut Node<'a>, std::io::Error> {
if let Some(ref mut links) = self.link {
return Ok(m.load(&mut links[index])?);
} else if !create_ok {
return Ok(self);
}
return Ok(&mut Node::empty());
}*/
fn insert(
&mut self,
key: i32,
value: i32,
distance: u8,
key_order: fn(&i32, &i32) -> i8,
) -> Result<InsertResult, MastError> {
let (equal, i) = get_index_for_key(&key, &self.key, key_order);
if distance != 0 {
let mut z = self.link.get_mut(i).unwrap();
let child = match &mut z {
Some(ref mut link) => load_mut(link)?,
None => {
*z = Some(Link::MutableNode(Node::new(self.key.capacity()), None));
match &mut z {
Some(ref mut link) => load_mut(link)?,
None => panic!("can't load just-set link"),
}
}
};
let res = child.insert(key, value, distance - 1, key_order)?;
match res {
InsertResult::NoChange => (),
_ => self.dirty = true,
};
return Ok(res);
}
if equal {
if value == self.value[i] {
return Ok(InsertResult::NoChange);
}
self.value[i] = value;
self.dirty = true;
return Ok(InsertResult::Updated);
}
let (left_link, right_link) = match self.link.get_mut(i).unwrap() {
Some(ref mut link) => {
let child = load_mut(link)?;
split(child, &key, key_order)?
}
None => (None, None),
};
self.key.insert(i, key);
self.value.insert(i, value);
self.link[i] = right_link;
self.link.insert(i, left_link);
self.dirty = true;
return Ok(InsertResult::Inserted);
}
fn can_grow(
&self,
current_height: u8,
key_layer: fn(&i32, u16) -> u8,
branch_factor: u16,
) -> bool {
for key in &self.key {
if key_layer(key, branch_factor) > current_height {
return true;
}
}
return false;
}
fn grow(
&mut self,
current_height: u8,
key_layer: fn(&i32, u16) -> u8,
branch_factor: u16,
) -> Option<Link> {
let mut new_parent = Node::new(self.key.capacity());
if !self.is_empty() {
for i in 0..self.key.len() {
let key = &self.key[i];
let layer = key_layer(key, branch_factor);
if layer <= current_height {
continue;
}
let new_left = self.extract(i);
new_parent.key.push(self.key[0]);
new_parent.value.push(self.value[0]);
new_parent.link.insert(new_parent.link.len() - 1, new_left);
}
}
let new_right = self.extract(self.key.len());
*new_parent.link.last_mut().unwrap() = new_right;
return new_parent.to_link();
}
fn extract(&mut self, end: usize) -> Option<Link> {
let mut node = Node::new(self.key.capacity());
node.key = self.key.drain(..end).collect();
node.key.reserve(self.key.capacity());
node.value = self.value.drain(..end).collect();
node.value.reserve(self.key.capacity());
node.link = self.link.drain(..=end).collect();
node.link.reserve(self.key.capacity() + 1);
self.link.insert(0, None);
return node.to_link();
}
fn to_link(self) -> Option<Link> {
if self.is_empty() {
return None;
}
return Some(Link::MutableNode(self, None));
}
fn is_empty(&self) -> bool {
return self.key.len() == 0
&& self.value.len() == 0
&& self.link.len() == 1
&& self.link[0].is_none();
}
}
#[derive | {
if *a < *b {
return -1;
} else if *a > *b {
return 1;
} else {
return 0;
}
} | identifier_body | |
MOS6502.py | 0):
self.bitwidth = bitwidth = 8 # 1 byte - 8 bits
self.regs = {'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.base_address = baseAddress # unused for now
self.pc_size = 2 # 2 bytes for PC - 16 bits
self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def get_memory(self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
|
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub | """Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1 | identifier_body |
MOS6502.py | 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def get_memory(self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
"""Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub_op):
"""Update, as needed, the C or V bits in the emulated flags register"""
of_cond = self.create_overflow_condition(old_dst, old_src, new_val, sub_op)
cf_cond = self.create_carry_condition(new_val, sub_op)
valid_flags = {'C': cf_cond is True,
'Z': new_val & 0xFF == 0,
'V': of_cond is True,
'N': ((new_val & 0x80) != 0)}
for flag in flags:
self.set_flag(flag, valid_flags[flag])
def get_flag(self, flag_name):
"""Return the current emulated 8-bit status (flags) register"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrctrl_upt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow | random_line_split | ||
MOS6502.py | 0):
self.bitwidth = bitwidth = 8 # 1 byte - 8 bits
self.regs = {'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.base_address = baseAddress # unused for now
self.pc_size = 2 # 2 bytes for PC - 16 bits
self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def | (self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
"""Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, | get_memory | identifier_name |
MOS6502.py | self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def get_memory(self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
"""Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub_op):
"""Update, as needed, the C or V bits in the emulated flags register"""
of_cond = self.create_overflow_condition(old_dst, old_src, new_val, sub_op)
cf_cond = self.create_carry_condition(new_val, sub_op)
valid_flags = {'C': cf_cond is True,
'Z': new_val & 0xFF == 0,
'V': of_cond is True,
'N': ((new_val & 0x80) != 0)}
for flag in flags:
| self.set_flag(flag, valid_flags[flag]) | conditional_block | |
icip_train_val.py | fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature(feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} .. | columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day30 | .".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_ | conditional_block |
icip_train_val.py | from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.model_selection import train_test_split
random_seed = 2020
num_class = 50
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
all_popularity_filepath = "/home/wangkai/ICIP/feature/label/popularity_TRAIN_20337.csv"
cluster_center_filepath = "/home/wangkai/ICIP/feature/label/cluster_center.csv"
cluser_label_filepath = "/home/wangkai/ICIP/feature/label/cluster_label_20337.csv"
# random
train_popularity_filepath = "/home/wangkai/ICIP/feature/label/train_label_random.csv"
validate_popularity_filepath = "/home/wangkai/ICIP/feature/label/validate_label_random.csv"
# # postdate
train_popularity_filepath="/home/wangkai/ICIP/feature/label/train_label_datetaken.csv"
validate_popularity_filepath="/home/wangkai/ICIP/feature/label/validate_label_datetaken.csv"
number_columns = ["PhotoCount", "MeanViews", "Contacts", "GroupsCount", "NumSets", "GroupsAvgPictures",
"GroupsAvgMembers", "Ispro", "HasStats", "AvgGroupsMemb", "AvgGroupPhotos", "NumGroups"] # 12
text_columns = ["Tags", "Title", "Description"] # 3
first_columns = ["FlickrId", "UserId"] # 2
train_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/train/train_feature_20337.csv",
"fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature(feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} ...".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending | from scipy import stats
from scipy.stats import spearmanr
from sklearn.cluster import KMeans | random_line_split | |
icip_train_val.py | fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
| feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} ...".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day30 | df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature( | identifier_body |
icip_train_val.py | fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature(feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} ...".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(tr | cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day3 | ain, validate):
| identifier_name |
offset.rs | Mut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn as_ref(&self) -> &OffsetMut<'p, 'v, A> {
// SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
}
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, 'v>;
type Persist = Offset<'p, 'v>;
unsafe fn dealloc<T: ?Sized + Pointee>(&self, metadata: T::Metadata) {
match self.kind() {
Kind::Offset(_) => {},
Kind::Ptr(heap_ptr) => heap_ptr.dealloc::<T>(metadata),
}
}
unsafe fn try_get_dirty_unchecked<T: ?Sized + Pointee>(&self, metadata: T::Metadata) -> Result<&T, Self::Persist> {
match self.kind() {
Kind::Ptr(ptr) => {
todo!()
},
Kind::Offset(offset) => Err(offset),
}
}
}
impl<'p,'v> Default for OffsetMut<'p, 'v> {
fn default() -> Self {
Offset::dangling().into()
}
}
#[derive(Debug, Default)]
pub struct ShallowDumper<'p, 'v> {
marker: PhantomData<OffsetMut<'p, 'v>>,
written: Vec<u8>,
initial_offset: usize,
}
impl<'p, 'v> Saver for ShallowDumper<'p, 'v> {
type SrcPtr = OffsetMut<'p, 'v>;
type DstPtr = Offset<'p, 'v>;
type Error = !;
fn try_save_raw<R, T: ?Sized + ValidateBlob>(&self,
ptr: &Offset<'p, 'v>,
_metadata: T::Metadata,
_f: impl FnOnce(ValidBlob<T>, &<Self::SrcPtr as Ptr>::BlobZone) -> R,
) -> Result<Result<<Self::DstPtr as Ptr>::Persist, R>,
Self::Error>
{
Ok(Ok(*ptr))
}
fn finish_save<T>(&mut self, value_poll: &T) -> Result<Offset<'p, 'v>, Self::Error>
where T: EncodeBlob
{
let offset = self.initial_offset
.checked_add(self.written.len())
.and_then(Offset::new)
.expect("overflow");
let written = mem::replace(&mut self.written, vec![]);
self.written = value_poll.encode_blob(written).into_ok();
Ok(offset)
}
}
impl<'p, 'v> ShallowDumper<'p, 'v> {
pub fn new(initial_offset: usize) -> Self {
Self {
marker: PhantomData,
written: vec![],
initial_offset,
}
}
pub fn from_buf(buf: impl Into<Vec<u8>>) -> Self {
Self {
marker: PhantomData,
initial_offset: 0,
written: buf.into(),
}
}
pub fn | save | identifier_name | |
offset.rs | 1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
}
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn as_ref(&self) -> &OffsetMut<'p, 'v, A> {
// SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else |
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, | {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
} | conditional_block |
offset.rs | b1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
}
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)] | // SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
}
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, ' | fn as_ref(&self) -> &OffsetMut<'p, 'v, A> { | random_line_split |
offset.rs |
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn as_ref(&self) -> &OffsetMut<'p, 'v, A> {
// SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
}
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for Offset | {
let raw = u64::from_le_bytes(blob.as_bytes().try_into().unwrap());
if raw & 0b1 == 0b1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
} | identifier_body | |
glyph.rs | ,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub struct QuadAtlasGlyph {
pub atlas_index: usize,
pub uv_bot: f32,
pub uv_left: f32,
pub uv_width: f32,
pub uv_height: f32,
pub top: i16,
pub left: i16,
pub width: i16,
pub height: i16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub enum AtlasGlyph {
Grid(GridAtlasGlyph),
Quad(QuadAtlasGlyph),
}
/// Naïve glyph cache.
///
/// Currently only keyed by `char`, and thus not possible to hold different
/// representations of the same code point.
pub struct GlyphCache {
/// Cache of buffered glyphs.
pub cache: HashMap<GlyphKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Cache of buffered cursor glyphs.
pub cursor_cache: HashMap<CursorKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Rasterizer for loading new glyphs.
rasterizer: Rasterizer,
/// Regular font.
pub font_key: FontKey,
/// Bold font.
pub bold_key: FontKey,
/// Italic font.
pub italic_key: FontKey,
/// Bold italic font.
pub bold_italic_key: FontKey,
/// Font size.
pub font_size: crossfont::Size,
/// Glyph offset.
glyph_offset: Delta<i8>,
/// Font metrics.
pub metrics: crossfont::Metrics,
/// Cell size
pub cell_size: Vec2<i32>,
}
impl GlyphCache {
pub fn new<L>(
mut rasterizer: Rasterizer,
config: &Config,
font: &Font,
loader: &mut L,
) -> Result<GlyphCache, crossfont::Error>
where
L: LoadGlyph,
{
let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
// Need to load at least one glyph for the face before calling metrics.
// The glyph requested here ('m' at the time of writing) has no special
// meaning.
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
let metrics = rasterizer.metrics(regular, font.size)?;
let (cell_width, cell_height) = Self::compute_cell_size(config, &metrics);
let cell_size = Vec2::new(cell_width as i32, cell_height as i32);
let mut cache = Self {
cache: HashMap::default(),
cursor_cache: HashMap::default(),
rasterizer,
font_size: font.size,
font_key: regular,
bold_key: bold,
italic_key: italic,
bold_italic_key: bold_italic,
glyph_offset: font.glyph_offset,
metrics,
cell_size,
};
cache.clear_cache_with_common_glyphs(loader, config);
Ok(cache)
}
/// Computes font keys for (Regular, Bold, Italic, Bold Italic).
fn compute_font_keys(
font: &Font,
rasterizer: &mut Rasterizer,
) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
let size = font.size;
// Load regular font.
let regular_desc = Self::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(rasterizer, ®ular_desc, size)?;
// Helper to load a description if it is not the `regular_desc`.
let mut load_or_regular = |desc: FontDesc| {
if desc == regular_desc { | else {
rasterizer.load_font(&desc, size).unwrap_or_else(|_| regular)
}
};
// Load bold font.
let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
let bold = load_or_regular(bold_desc);
// Load italic font.
let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
let italic = load_or_regular(italic_desc);
// Load bold italic font.
let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
let bold_italic = load_or_regular(bold_italic_desc);
Ok((regular, bold, italic, bold_italic))
}
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(&Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
fn rasterize_glyph(
glyph_key: GlyphKey,
rasterizer: &mut Rasterizer,
glyph_offset: Delta<i8>,
metrics: &crossfont::Metrics,
) -> RasterizedGlyph {
let mut rasterized =
rasterizer.get_glyph(glyph_key.key).unwrap_or_else(|_| Default::default());
rasterized.left += i32::from(glyph_offset.x);
rasterized.top += i32::from(glyph_offset.y);
rasterized.top -= metrics.descent as i32;
RasterizedGlyph { wide: glyph_key.wide, zero_width: glyph_key.zero_width, rasterized }
}
pub fn get<L>(&mut self, glyph_key: GlyphKey, loader: &mut L) -> &AtlasGlyph
where
L: LoadGlyph,
{
let glyph_offset = self.glyph_offset;
let rasterizer = &mut self.rasterizer;
let metrics = &self.metrics;
self.cache.entry(glyph_key).or_insert_with(|| {
let rasterized = Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
loader.load_glyph(&rasterized)
})
}
/// Clear currently cached data in both GL and the registry.
pub fn clear_glyph_cache<L: LoadGlyph>(&mut self, config: &Config, loader: &mut L) {
let (cell_width, cell_height) = Self::compute_cell_size(config, &self.metrics);
self.cell_size = Vec2::new(cell_width as i32, cell_height as i32);
self.cache = HashMap::default();
self.cursor_cache = HashMap::default();
self.clear_cache_with_common_glyphs(loader, config);
}
pub fn update_font_size<L: LoadGlyph>(
&mut self,
config: &Config,
font: &Font,
dpr: f64,
loader: &mut L,
) -> Result<(), crossfont::Error> {
// Update dpi scaling.
self.rasterizer.update_dpr(dpr as f32);
// Recompute font keys.
let (regular, bold, italic, bold_italic) =
Self::compute_font_keys(font, &mut self.rasterizer)?;
self.rasterizer.get_glyph(crossfont::GlyphKey {
font_key: regular,
c: 'm',
size: font.size,
})?;
let metrics = self.rasterizer.metrics(regular, font.size)?;
info!("Font size changed to {:?} with DPR of {}", font.size, dpr);
self.font_size = font.size;
self.font_key = regular;
self.bold_key = bold;
self.italic_key = italic;
self.bold_italic_key = bold_italic;
self.metrics = metrics;
self.clear_glyph_cache(config, loader);
Ok(())
}
pub fn font_metrics(&self) -> crossfont::Metrics {
self.metrics
}
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
fn clear_cache_with_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L, config: &Config) {
let glyph_offset = self.glyph_offset;
let metrics = &self.metrics;
let font_size = self.font_size;
let rasterizer = &mut self.rasterizer;
let cell_size = self.cell_size;
let mut atlas_cell_size = self.cell_size;
let mut atlas_cell_offset = Vec2 { x: 0, y: 0 };
type Glyphs = Vec<(GlyphKey, RasterizedGlyph)>;
let glyphs: Glyphs = [self.font_key, self.bold_key, self.italic_key, self.bold_italic_key]
.iter()
.flat_map(|font| {
(32u8..=126u8)
.map(|c| {
let glyph_key = GlyphKey {
wide: false,
zero_width: false,
key: crossfont::Glyph |
regular
} | conditional_block |
glyph.rs | {
pub rasterized: crossfont::RasterizedGlyph,
pub wide: bool,
pub zero_width: bool,
}
/// `LoadGlyph` allows for copying a rasterized glyph into graphics memory.
pub trait LoadGlyph {
/// Load the rasterized glyph into GPU memory.
fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> AtlasGlyph;
/// Clear any state accumulated from previous loaded glyphs.
///
/// This can, for instance, be used to reset the texture Atlas.
fn clear(&mut self, cell_size: Vec2<i32>, cell_offset: Vec2<i32>);
}
#[derive(Copy, Debug, Clone)]
pub struct GridAtlasGlyph {
pub atlas_index: usize,
pub line: u16,
pub column: u16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub struct QuadAtlasGlyph {
pub atlas_index: usize,
pub uv_bot: f32,
pub uv_left: f32,
pub uv_width: f32,
pub uv_height: f32,
pub top: i16,
pub left: i16,
pub width: i16,
pub height: i16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub enum AtlasGlyph {
Grid(GridAtlasGlyph),
Quad(QuadAtlasGlyph),
}
/// Naïve glyph cache.
///
/// Currently only keyed by `char`, and thus not possible to hold different
/// representations of the same code point.
pub struct GlyphCache {
/// Cache of buffered glyphs.
pub cache: HashMap<GlyphKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Cache of buffered cursor glyphs.
pub cursor_cache: HashMap<CursorKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Rasterizer for loading new glyphs.
rasterizer: Rasterizer,
/// Regular font.
pub font_key: FontKey,
/// Bold font.
pub bold_key: FontKey,
/// Italic font.
pub italic_key: FontKey,
/// Bold italic font.
pub bold_italic_key: FontKey,
/// Font size.
pub font_size: crossfont::Size,
/// Glyph offset.
glyph_offset: Delta<i8>,
/// Font metrics.
pub metrics: crossfont::Metrics,
/// Cell size
pub cell_size: Vec2<i32>,
}
impl GlyphCache {
pub fn new<L>(
mut rasterizer: Rasterizer,
config: &Config,
font: &Font,
loader: &mut L,
) -> Result<GlyphCache, crossfont::Error>
where
L: LoadGlyph,
{
let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
// Need to load at least one glyph for the face before calling metrics.
// The glyph requested here ('m' at the time of writing) has no special
// meaning.
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
let metrics = rasterizer.metrics(regular, font.size)?;
let (cell_width, cell_height) = Self::compute_cell_size(config, &metrics);
let cell_size = Vec2::new(cell_width as i32, cell_height as i32);
let mut cache = Self {
cache: HashMap::default(),
cursor_cache: HashMap::default(),
rasterizer,
font_size: font.size,
font_key: regular,
bold_key: bold,
italic_key: italic,
bold_italic_key: bold_italic,
glyph_offset: font.glyph_offset,
metrics,
cell_size,
};
cache.clear_cache_with_common_glyphs(loader, config);
Ok(cache)
}
/// Computes font keys for (Regular, Bold, Italic, Bold Italic).
fn compute_font_keys(
font: &Font,
rasterizer: &mut Rasterizer,
) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
let size = font.size;
// Load regular font.
let regular_desc = Self::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(rasterizer, ®ular_desc, size)?;
// Helper to load a description if it is not the `regular_desc`.
let mut load_or_regular = |desc: FontDesc| {
if desc == regular_desc {
regular
} else {
rasterizer.load_font(&desc, size).unwrap_or_else(|_| regular)
}
};
// Load bold font.
let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
let bold = load_or_regular(bold_desc);
// Load italic font.
let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
let italic = load_or_regular(italic_desc);
// Load bold italic font.
let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
let bold_italic = load_or_regular(bold_italic_desc);
Ok((regular, bold, italic, bold_italic))
}
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(&Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
fn rasterize_glyph(
glyph_key: GlyphKey,
rasterizer: &mut Rasterizer,
glyph_offset: Delta<i8>,
metrics: &crossfont::Metrics,
) -> RasterizedGlyph {
let mut rasterized =
rasterizer.get_glyph(glyph_key.key).unwrap_or_else(|_| Default::default());
rasterized.left += i32::from(glyph_offset.x);
rasterized.top += i32::from(glyph_offset.y);
rasterized.top -= metrics.descent as i32;
RasterizedGlyph { wide: glyph_key.wide, zero_width: glyph_key.zero_width, rasterized }
}
pub fn get<L>(&mut self, glyph_key: GlyphKey, loader: &mut L) -> &AtlasGlyph
where
L: LoadGlyph,
{
let glyph_offset = self.glyph_offset;
let rasterizer = &mut self.rasterizer;
let metrics = &self.metrics;
self.cache.entry(glyph_key).or_insert_with(|| {
let rasterized = Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
loader.load_glyph(&rasterized)
})
}
/// Clear currently cached data in both GL and the registry.
pub fn clear_glyph_cache<L: LoadGlyph>(&mut self, config: &Config, loader: &mut L) {
let (cell_width, cell_height) = Self::compute_cell_size(config, &self.metrics);
self.cell_size = Vec2::new(cell_width as i32, cell_height as i32);
self.cache = HashMap::default();
self.cursor_cache = HashMap::default();
self.clear_cache_with_common_glyphs(loader, config);
}
pub fn update_font_size<L: LoadGlyph>(
&mut self,
config: &Config,
font: &Font,
dpr: f64,
loader: &mut L,
) -> Result<(), crossfont::Error> {
// Update dpi scaling.
self.rasterizer.update_dpr(dpr as f32);
// Recompute font keys.
let (regular, bold, italic, bold_italic) =
Self::compute_font_keys(font, &mut self.rasterizer)?;
self.rasterizer.get_glyph(crossfont::GlyphKey {
font_key: regular,
c: 'm',
size: font.size,
})?;
let metrics = self.rasterizer.metrics(regular, font.size)?;
info!("Font size changed to {:?} with DPR of {}", font.size, dpr);
self.font_size = font.size;
self.font_key = regular;
self.bold_key = bold;
self.italic_key = italic;
self.bold_italic_key = bold_italic;
self.metrics = metrics;
self.clear_glyph_cache(config, loader);
Ok(())
}
pub fn font_metrics(&self) -> crossfont::Metrics {
self.metrics
}
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
fn clear_cache_with_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L, config: &Config) {
let glyph_offset = self.glyph_offset | RasterizedGlyph | identifier_name | |
glyph.rs | , Clone)]
pub struct QuadAtlasGlyph {
pub atlas_index: usize,
pub uv_bot: f32,
pub uv_left: f32,
pub uv_width: f32,
pub uv_height: f32,
pub top: i16,
pub left: i16,
pub width: i16,
pub height: i16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub enum AtlasGlyph {
Grid(GridAtlasGlyph),
Quad(QuadAtlasGlyph),
}
/// Naïve glyph cache.
///
/// Currently only keyed by `char`, and thus not possible to hold different
/// representations of the same code point.
pub struct GlyphCache {
/// Cache of buffered glyphs.
pub cache: HashMap<GlyphKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Cache of buffered cursor glyphs.
pub cursor_cache: HashMap<CursorKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Rasterizer for loading new glyphs.
rasterizer: Rasterizer,
/// Regular font.
pub font_key: FontKey,
/// Bold font.
pub bold_key: FontKey,
/// Italic font.
pub italic_key: FontKey,
/// Bold italic font.
pub bold_italic_key: FontKey,
/// Font size.
pub font_size: crossfont::Size,
/// Glyph offset.
glyph_offset: Delta<i8>,
/// Font metrics.
pub metrics: crossfont::Metrics,
/// Cell size
pub cell_size: Vec2<i32>,
}
impl GlyphCache {
pub fn new<L>(
mut rasterizer: Rasterizer,
config: &Config,
font: &Font,
loader: &mut L,
) -> Result<GlyphCache, crossfont::Error>
where
L: LoadGlyph,
{
let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
// Need to load at least one glyph for the face before calling metrics.
// The glyph requested here ('m' at the time of writing) has no special
// meaning.
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
let metrics = rasterizer.metrics(regular, font.size)?;
let (cell_width, cell_height) = Self::compute_cell_size(config, &metrics);
let cell_size = Vec2::new(cell_width as i32, cell_height as i32);
let mut cache = Self {
cache: HashMap::default(),
cursor_cache: HashMap::default(),
rasterizer,
font_size: font.size,
font_key: regular,
bold_key: bold,
italic_key: italic,
bold_italic_key: bold_italic,
glyph_offset: font.glyph_offset,
metrics,
cell_size,
};
cache.clear_cache_with_common_glyphs(loader, config);
Ok(cache)
}
/// Computes font keys for (Regular, Bold, Italic, Bold Italic).
fn compute_font_keys(
font: &Font,
rasterizer: &mut Rasterizer,
) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
let size = font.size;
// Load regular font.
let regular_desc = Self::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(rasterizer, ®ular_desc, size)?;
// Helper to load a description if it is not the `regular_desc`.
let mut load_or_regular = |desc: FontDesc| {
if desc == regular_desc {
regular
} else {
rasterizer.load_font(&desc, size).unwrap_or_else(|_| regular)
}
};
// Load bold font.
let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
let bold = load_or_regular(bold_desc);
// Load italic font.
let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
let italic = load_or_regular(italic_desc);
// Load bold italic font.
let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
let bold_italic = load_or_regular(bold_italic_desc);
Ok((regular, bold, italic, bold_italic))
}
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(&Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
fn rasterize_glyph(
glyph_key: GlyphKey,
rasterizer: &mut Rasterizer,
glyph_offset: Delta<i8>,
metrics: &crossfont::Metrics,
) -> RasterizedGlyph {
let mut rasterized =
rasterizer.get_glyph(glyph_key.key).unwrap_or_else(|_| Default::default());
rasterized.left += i32::from(glyph_offset.x);
rasterized.top += i32::from(glyph_offset.y);
rasterized.top -= metrics.descent as i32;
RasterizedGlyph { wide: glyph_key.wide, zero_width: glyph_key.zero_width, rasterized }
}
pub fn get<L>(&mut self, glyph_key: GlyphKey, loader: &mut L) -> &AtlasGlyph
where
L: LoadGlyph,
{
let glyph_offset = self.glyph_offset;
let rasterizer = &mut self.rasterizer;
let metrics = &self.metrics;
self.cache.entry(glyph_key).or_insert_with(|| {
let rasterized = Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
loader.load_glyph(&rasterized)
})
}
/// Clear currently cached data in both GL and the registry.
pub fn clear_glyph_cache<L: LoadGlyph>(&mut self, config: &Config, loader: &mut L) {
let (cell_width, cell_height) = Self::compute_cell_size(config, &self.metrics);
self.cell_size = Vec2::new(cell_width as i32, cell_height as i32);
self.cache = HashMap::default();
self.cursor_cache = HashMap::default();
self.clear_cache_with_common_glyphs(loader, config);
}
pub fn update_font_size<L: LoadGlyph>(
&mut self,
config: &Config,
font: &Font,
dpr: f64,
loader: &mut L,
) -> Result<(), crossfont::Error> {
// Update dpi scaling.
self.rasterizer.update_dpr(dpr as f32);
// Recompute font keys.
let (regular, bold, italic, bold_italic) =
Self::compute_font_keys(font, &mut self.rasterizer)?;
self.rasterizer.get_glyph(crossfont::GlyphKey {
font_key: regular,
c: 'm',
size: font.size,
})?;
let metrics = self.rasterizer.metrics(regular, font.size)?;
info!("Font size changed to {:?} with DPR of {}", font.size, dpr);
self.font_size = font.size;
self.font_key = regular;
self.bold_key = bold;
self.italic_key = italic;
self.bold_italic_key = bold_italic;
self.metrics = metrics;
self.clear_glyph_cache(config, loader);
Ok(())
}
pub fn font_metrics(&self) -> crossfont::Metrics {
self.metrics
}
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
fn clear_cache_with_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L, config: &Config) {
let glyph_offset = self.glyph_offset;
let metrics = &self.metrics;
let font_size = self.font_size;
let rasterizer = &mut self.rasterizer;
let cell_size = self.cell_size;
let mut atlas_cell_size = self.cell_size;
let mut atlas_cell_offset = Vec2 { x: 0, y: 0 };
type Glyphs = Vec<(GlyphKey, RasterizedGlyph)>;
let glyphs: Glyphs = [self.font_key, self.bold_key, self.italic_key, self.bold_italic_key]
.iter()
.flat_map(|font| {
(32u8..=126u8)
.map(|c| {
let glyph_key = GlyphKey {
wide: false,
zero_width: false, | key: crossfont::GlyphKey {
font_key: *font,
c: c as char, | random_line_split | |
main.go |
}
deps := &getpan.DependencyList{
Dependencies: make([]*getpan.Dependency, 0),
}
d1, _ := getpan.DependencyFromString("Parse::LocalDistribution", "")
d2, _ := getpan.DependencyFromString("JSON::XS", "")
deps.AddDependency(d1)
deps.AddDependency(d2)
if err := deps.Resolve(); err != nil {
log.Error("Error resolving dependencies: %s", err)
os.Exit(1)
return
}
_, err := deps.Install()
if err != nil {
log.Error("Error installing dependencies: %s", err)
os.Exit(2)
return
}
log.Info(" - Installed %d modules", deps.UniqueInstalled())
log.Info("SmartPAN initialisation complete")
return
}
if config.TestDeps {
perldeps := gopan.TestPerlDeps()
perldeps.Dump()
if !perldeps.Ok {
log.Error("Required perl dependencies are missing")
os.Exit(1)
return
}
}
if len(args) > 0 && args[0] == "import" {
if len(args) < 4 {
log.Error("Invalid arguments, expecting: smartpan import FILE AUTHORID INDEX")
return
}
fname := args[1]
log.Info("Importing module from %s", fname)
log.Info("Author ID: %s", args[2])
log.Info("Index : %s", args[3])
extraParams := map[string]string{
"importinto": args[3],
"authorid": args[2],
"newindex": "",
"cpanmirror": "",
"importurl": "",
"fromdir": "",
}
if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") {
log.Info("URL: %s", fname)
extraParams["importurl"] = fname
request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams)
if err != nil {
log.Error("Create request error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
} else {
fname = strings.TrimPrefix(fname, "file://")
log.Info("File: %s", fname)
if _, err := os.Stat(fname); err != nil {
log.Error("File not found: %s", err.Error())
return
}
request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname)
if err != nil {
log.Error("Create upload error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
}
return
}
config.CurrentRelease = CurrentRelease
var wg sync.WaitGroup
load_index = func(index string, file string) {
indexes[index] = gopan.LoadIndex(file)
}
wg.Add(1)
go func() {
defer wg.Done()
indexes = make(map[string]map[string]*gopan.Source)
// Load CPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil {
config.HasCPANIndex = true
config.CPANIndexDate = fi.ModTime().String()
config.CPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Loaded"
}()
}
// Load BackPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil {
config.HasBackPANIndex = true
config.BackPANIndexDate = fi.ModTime().String()
config.BackPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Loaded"
}()
}
// Load our secondary indexes
for _, idx := range config.Indexes {
wg.Add(1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
}
mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]* | {
log.Error("Error loading sources: %s", err)
os.Exit(1)
return
} | conditional_block | |
main.go | mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s", err.Error())
return
}
var r Releases
if err = json.Unmarshal(b, &r); err != nil {
log.Error("Error unmarshalling JSON: %s", err.Error())
return
}
log.Info("Current release: %s", config.CurrentRelease)
rel := strings.TrimPrefix(r[0].TagName, "v")
log.Info("Latest release: %s", rel)
config.LatestRelease = rel
config.UpdateURL = r[0].URL
if config.CurrentRelease < rel {
config.CanUpdate = true
log.Info("Your version of SmartPAN can be updated.")
}
}()
// Create our Gotcha application
var app = gotcha.Create(Asset)
app.Config.Listen = config.Bind
summary = &Summary{0, 0, 0, 0}
app.On(events.BeforeHandler, func(session *http.Session, next func()) {
session.Stash["summary"] = summary
session.Stash["config"] = config
next()
})
// Get the router
r := app.Router
// Create some routes
r.Get("/", search)
r.Post("/", search)
r.Get("/help", help)
r.Get("/settings", settings)
r.Get("/browse", browse)
r.Get("/import", import1)
r.Post("/import", import1)
r.Get("/import/(?P<jobid>[^/]+)", import2)
r.Get("/import/(?P<jobid>[^/]+)/stream", importstream)
r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex)
// Serve static content (but really use a CDN)
r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}"))
r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}"))
// JSON endpoints
r.Get("/where/(?P<module>[^/]+)/?", where)
r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where)
// Put these last so they only match /{repo} if nothing else matches
r.Get("/(?P<repo>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex)
r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download)
r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse)
// Start our application
app.Start()
<-make(chan int)
}
func getindex(session *http.Session) {
idx := session.Stash["index"]
switch idx {
case "CPAN":
go func() {
config.CPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/cpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.CPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.CPANStatus = "Downloaded"
config.HasCPANIndex = true
config.CPANIndexDate = time.Now().String()
config.CPANStatus = "Loading"
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Indexing"
update_indexes()
config.CPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
case "BackPAN":
go func() {
config.BackPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/backpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.BackPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.BackPANStatus = "Downloaded"
config.HasBackPANIndex = true
config.BackPANIndexDate = time.Now().String()
config.BackPANStatus = "Loading"
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Indexing"
update_indexes()
config.BackPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
}
session.RenderNotFound()
}
func | help | identifier_name | |
main.go | 1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
}
mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s", err.Error())
return
}
var r Releases
if err = json.Unmarshal(b, &r); err != nil {
log.Error("Error unmarshalling JSON: %s", err.Error())
return
}
log.Info("Current release: %s", config.CurrentRelease)
rel := strings.TrimPrefix(r[0].TagName, "v")
log.Info("Latest release: %s", rel)
config.LatestRelease = rel
config.UpdateURL = r[0].URL
if config.CurrentRelease < rel {
config.CanUpdate = true
log.Info("Your version of SmartPAN can be updated.")
}
}()
// Create our Gotcha application
var app = gotcha.Create(Asset)
app.Config.Listen = config.Bind
summary = &Summary{0, 0, 0, 0}
app.On(events.BeforeHandler, func(session *http.Session, next func()) {
session.Stash["summary"] = summary
session.Stash["config"] = config
next()
})
// Get the router
r := app.Router
// Create some routes
r.Get("/", search)
r.Post("/", search)
r.Get("/help", help)
r.Get("/settings", settings)
r.Get("/browse", browse)
r.Get("/import", import1)
r.Post("/import", import1)
r.Get("/import/(?P<jobid>[^/]+)", import2)
r.Get("/import/(?P<jobid>[^/]+)/stream", importstream)
r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex)
// Serve static content (but really use a CDN)
r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}"))
r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}"))
// JSON endpoints
r.Get("/where/(?P<module>[^/]+)/?", where)
r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where)
// Put these last so they only match /{repo} if nothing else matches
r.Get("/(?P<repo>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex)
r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download)
r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse)
// Start our application
app.Start()
<-make(chan int)
}
func getindex(session *http.Session) | {
idx := session.Stash["index"]
switch idx {
case "CPAN":
go func() {
config.CPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/cpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading CPAN index: "+err.Error())) | identifier_body | |
main.go | args[2],
"newindex": "",
"cpanmirror": "",
"importurl": "",
"fromdir": "",
}
if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") {
log.Info("URL: %s", fname)
extraParams["importurl"] = fname
request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams)
if err != nil {
log.Error("Create request error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
} else {
fname = strings.TrimPrefix(fname, "file://")
log.Info("File: %s", fname)
if _, err := os.Stat(fname); err != nil {
log.Error("File not found: %s", err.Error())
return
}
request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname)
if err != nil {
log.Error("Create upload error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
}
return
}
config.CurrentRelease = CurrentRelease
var wg sync.WaitGroup
load_index = func(index string, file string) {
indexes[index] = gopan.LoadIndex(file)
}
wg.Add(1)
go func() {
defer wg.Done()
indexes = make(map[string]map[string]*gopan.Source)
// Load CPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil {
config.HasCPANIndex = true
config.CPANIndexDate = fi.ModTime().String()
config.CPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Loaded"
}()
}
// Load BackPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil {
config.HasBackPANIndex = true
config.BackPANIndexDate = fi.ModTime().String()
config.BackPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Loaded"
}()
}
// Load our secondary indexes
for _, idx := range config.Indexes {
wg.Add(1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author) | mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s | }
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
} | random_line_split |
sourcemap.rs | 898b45172a637.js",
"e2ac0bea41202dc9.js",
"f01d9f3c7b2b2717.js",
"f15772354efa5ecf.js",
"f17ec9517a3339d9.js",
"fa5b398eeef697a6.js",
"fa9eaf58f51d6926.js",
"faa4a026e1e86145.js",
"fada2c7bbfabe14a.js",
"fb8db7a71f3755fc.js",
"fbde237f11796df9.js",
"fd5ea844fcc07d3d.js",
"6c5f0dd83c417a5a.js",
"78eb22badc114b6f.js",
"7afd38d79e6795a8.js",
"80950061e291542b.js",
"8a0fc8ea31727188.module.js",
"af97a3752e579223.js",
"bbffb851469a3f0e.js",
"bc302492d441d561.js",
"be2fd5888f434cbd.js",
"f3260491590325af.js",
];
#[testing::fixture("../swc_ecma_parser/tests/test262-parser/pass/*.js")]
fn identity(entry: PathBuf) {
let file_name = entry
.file_name()
.unwrap()
.to_str()
.expect("to_str() failed")
.to_string();
let input = read_to_string(&entry).unwrap();
let ignore = IGNORED_PASS_TESTS.contains(&&*file_name);
if ignore {
return;
}
let is_module = file_name.contains("module");
let msg = format!(
"\n\n========== Running codegen test {}\nSource:\n{}\n",
file_name, input
);
let mut wr = vec![];
::testing::run_test(false, |cm, handler| {
let fm = cm.load_file(&entry).expect("failed to load file");
eprintln!(
"{}\nPos: {:?} ~ {:?} (L{})",
msg,
fm.start_pos,
fm.end_pos,
fm.count_lines()
);
let (expected_code, expected_map, visualizer_url_for_expected) =
match get_expected(&fm.src, is_module) {
Some(v) => v,
None => return Ok(()),
};
println!("Expected code:\n{}", expected_code);
let expected_tokens = print_source_map(&expected_map);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(
Syntax::default(),
Default::default(),
(&*fm).into(),
Some(&comments),
);
let mut parser: Parser<Lexer> = Parser::new_from(lexer);
let mut src_map = vec![];
{
let mut wr = Box::new(swc_ecma_codegen::text_writer::JsWriter::new(
cm.clone(),
"\n",
&mut wr,
Some(&mut src_map),
)) as Box<dyn WriteJs>;
wr = Box::new(swc_ecma_codegen::text_writer::omit_trailing_semi(wr));
let mut emitter = Emitter {
cfg: swc_ecma_codegen::Config {
minify: true,
target: EsVersion::Es5,
ascii_only: true,
..Default::default()
},
cm: cm.clone(),
wr,
comments: None,
};
// Parse source
if is_module {
emitter
.emit_module(
&parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
} else {
emitter
.emit_script(
&parser
.parse_script()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
}
}
let actual_code = String::from_utf8(wr).unwrap();
let actual_map = cm.build_source_map_with_config(&src_map, None, SourceMapConfigImpl);
let visualizer_url_for_actual = visualizer_url(&actual_code, &actual_map);
let actual_tokens = print_source_map(&actual_map);
let common_tokens = actual_tokens
.iter()
.filter(|a| expected_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<FxHashSet<_>>();
let actual_tokens_diff = actual_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
let expected_tokens_diff = expected_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
eprintln!("---- Actual -----");
for s in actual_tokens_diff {
eprintln!("{}", s);
}
eprintln!("---- Expected -----");
for s in expected_tokens_diff {
eprintln!("{}", s);
}
dbg!(&src_map);
if actual_code != expected_code {
// Generated code is different
// We can't ensure that identical sourcemap will mean identical code
eprintln!("Actual code:\n{}", actual_code);
eprintln!("Expected code:\n{}", expected_code);
return Ok(());
}
eprintln!(
"----- Visualizer -----\nExpected: {}\nActual: {}",
visualizer_url_for_expected, visualizer_url_for_actual
);
assert_eq_same_map(&expected_map, &actual_map);
Ok(())
})
.expect("failed to run test");
}
fn get_expected(code: &str, is_module: bool) -> Option<(String, SourceMap, String)> {
let output = exec_node_js(
include_str!("./srcmap.mjs"),
JsExecOptions {
cache: true,
module: true,
args: vec![
code.to_string(),
if is_module {
"module".into()
} else {
"script".into()
},
],
},
)
.ok()?;
let v = serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(&output).unwrap();
let code = v.get("code").unwrap().as_str().unwrap();
let map = v.get("map").unwrap().as_str().unwrap();
let map = SourceMap::from_slice(map.as_bytes()).expect("invalid sourcemap");
let visualizer_url = visualizer_url(code, &map);
Some((code.to_string(), map, visualizer_url))
}
fn print_source_map(map: &SourceMap) -> Vec<String> {
let mut v = map
.tokens()
.map(|t| {
format!(
"Token: {}:{} => {}:{}",
t.get_src_line(),
t.get_src_col(),
t.get_dst_line(),
t.get_dst_col()
)
})
.collect::<Vec<_>>();
v.sort();
v
}
fn assert_eq_same_map(expected: &SourceMap, actual: &SourceMap) {
for expected_token in expected.tokens() {
let actual_token = actual
.lookup_token(expected_token.get_dst_line(), expected_token.get_dst_col())
.unwrap_or_else(|| panic!("token not found: {:?}", expected_token));
if expected_token.get_src_line() == 0 && expected_token.get_src_col() == 0 {
continue;
}
assert_eq!(
expected_token.get_src_line(),
actual_token.get_src_line(),
"line mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
assert_eq!(
expected_token.get_src_col(),
actual_token.get_src_col(),
"col mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
}
}
/// Creates a url for https://evanw.github.io/source-map-visualization/
fn visualizer_url(code: &str, map: &SourceMap) -> String {
let map = {
let mut buf = vec![];
map.to_writer(&mut buf).unwrap();
String::from_utf8(buf).unwrap()
};
let code_len = format!("{}\0", code.len());
let map_len = format!("{}\0", map.len());
let hash = base64::encode(format!("{}{}{}{}", code_len, code, map_len, map));
format!("https://evanw.github.io/source-map-visualization/#{}", hash)
}
struct SourceMapConfigImpl;
impl SourceMapGenConfig for SourceMapConfigImpl {
fn file_name_to_source(&self, f: &swc_common::FileName) -> String {
f.to_string()
}
fn inline_sources_content(&self, _: &swc_common::FileName) -> bool | {
true
} | identifier_body | |
sourcemap.rs | 09637.js",
"c85bc4de504befc7.js",
"c8689b6da6fd227a.js",
"cda499c521ff60c7.js",
"d4b898b45172a637.js",
"e2ac0bea41202dc9.js",
"f01d9f3c7b2b2717.js",
"f15772354efa5ecf.js",
"f17ec9517a3339d9.js",
"fa5b398eeef697a6.js",
"fa9eaf58f51d6926.js",
"faa4a026e1e86145.js",
"fada2c7bbfabe14a.js",
"fb8db7a71f3755fc.js",
"fbde237f11796df9.js",
"fd5ea844fcc07d3d.js",
"6c5f0dd83c417a5a.js",
"78eb22badc114b6f.js",
"7afd38d79e6795a8.js",
"80950061e291542b.js",
"8a0fc8ea31727188.module.js",
"af97a3752e579223.js",
"bbffb851469a3f0e.js",
"bc302492d441d561.js",
"be2fd5888f434cbd.js",
"f3260491590325af.js",
];
#[testing::fixture("../swc_ecma_parser/tests/test262-parser/pass/*.js")]
fn identity(entry: PathBuf) {
let file_name = entry
.file_name()
.unwrap()
.to_str()
.expect("to_str() failed")
.to_string();
let input = read_to_string(&entry).unwrap();
let ignore = IGNORED_PASS_TESTS.contains(&&*file_name);
if ignore {
return;
}
let is_module = file_name.contains("module");
let msg = format!(
"\n\n========== Running codegen test {}\nSource:\n{}\n",
file_name, input
);
let mut wr = vec![];
::testing::run_test(false, |cm, handler| {
let fm = cm.load_file(&entry).expect("failed to load file");
eprintln!(
"{}\nPos: {:?} ~ {:?} (L{})",
msg,
fm.start_pos,
fm.end_pos,
fm.count_lines()
);
let (expected_code, expected_map, visualizer_url_for_expected) =
match get_expected(&fm.src, is_module) {
Some(v) => v,
None => return Ok(()),
};
println!("Expected code:\n{}", expected_code);
let expected_tokens = print_source_map(&expected_map);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(
Syntax::default(),
Default::default(),
(&*fm).into(),
Some(&comments),
);
let mut parser: Parser<Lexer> = Parser::new_from(lexer);
let mut src_map = vec![];
{
let mut wr = Box::new(swc_ecma_codegen::text_writer::JsWriter::new(
cm.clone(),
"\n",
&mut wr,
Some(&mut src_map),
)) as Box<dyn WriteJs>;
wr = Box::new(swc_ecma_codegen::text_writer::omit_trailing_semi(wr));
let mut emitter = Emitter {
cfg: swc_ecma_codegen::Config {
minify: true,
target: EsVersion::Es5,
ascii_only: true,
..Default::default()
},
cm: cm.clone(),
wr,
comments: None,
};
// Parse source
if is_module {
emitter
.emit_module(
&parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
} else {
emitter
.emit_script(
&parser
.parse_script()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
}
}
let actual_code = String::from_utf8(wr).unwrap();
let actual_map = cm.build_source_map_with_config(&src_map, None, SourceMapConfigImpl);
let visualizer_url_for_actual = visualizer_url(&actual_code, &actual_map);
let actual_tokens = print_source_map(&actual_map);
let common_tokens = actual_tokens
.iter()
.filter(|a| expected_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<FxHashSet<_>>();
let actual_tokens_diff = actual_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
let expected_tokens_diff = expected_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
eprintln!("---- Actual -----");
for s in actual_tokens_diff {
eprintln!("{}", s);
}
eprintln!("---- Expected -----");
for s in expected_tokens_diff {
eprintln!("{}", s);
}
dbg!(&src_map);
if actual_code != expected_code {
// Generated code is different
// We can't ensure that identical sourcemap will mean identical code
eprintln!("Actual code:\n{}", actual_code);
eprintln!("Expected code:\n{}", expected_code);
return Ok(());
}
eprintln!(
"----- Visualizer -----\nExpected: {}\nActual: {}",
visualizer_url_for_expected, visualizer_url_for_actual
);
assert_eq_same_map(&expected_map, &actual_map);
Ok(())
})
.expect("failed to run test");
}
fn get_expected(code: &str, is_module: bool) -> Option<(String, SourceMap, String)> {
let output = exec_node_js(
include_str!("./srcmap.mjs"),
JsExecOptions {
cache: true,
module: true,
args: vec![
code.to_string(),
if is_module {
"module".into()
} else {
"script".into()
},
],
},
)
.ok()?;
let v = serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(&output).unwrap();
let code = v.get("code").unwrap().as_str().unwrap();
let map = v.get("map").unwrap().as_str().unwrap();
let map = SourceMap::from_slice(map.as_bytes()).expect("invalid sourcemap");
let visualizer_url = visualizer_url(code, &map);
Some((code.to_string(), map, visualizer_url))
}
fn print_source_map(map: &SourceMap) -> Vec<String> {
let mut v = map
.tokens()
.map(|t| {
format!(
"Token: {}:{} => {}:{}",
t.get_src_line(),
t.get_src_col(),
t.get_dst_line(),
t.get_dst_col()
)
})
.collect::<Vec<_>>();
v.sort();
v
}
fn assert_eq_same_map(expected: &SourceMap, actual: &SourceMap) {
for expected_token in expected.tokens() {
let actual_token = actual
.lookup_token(expected_token.get_dst_line(), expected_token.get_dst_col())
.unwrap_or_else(|| panic!("token not found: {:?}", expected_token));
if expected_token.get_src_line() == 0 && expected_token.get_src_col() == 0 {
continue;
}
assert_eq!(
expected_token.get_src_line(),
actual_token.get_src_line(),
"line mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
assert_eq!(
expected_token.get_src_col(),
actual_token.get_src_col(),
"col mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
}
}
/// Creates a url for https://evanw.github.io/source-map-visualization/
fn visualizer_url(code: &str, map: &SourceMap) -> String {
let map = {
let mut buf = vec![];
map.to_writer(&mut buf).unwrap();
String::from_utf8(buf).unwrap()
};
let code_len = format!("{}\0", code.len());
let map_len = format!("{}\0", map.len());
let hash = base64::encode(format!("{}{}{}{}", code_len, code, map_len, map));
format!("https://evanw.github.io/source-map-visualization/#{}", hash)
}
struct | SourceMapConfigImpl | identifier_name | |
sourcemap.rs | 6fedbf6759.js",
"2dc0ded5a1bff643.js",
"547fa50af16beca7.js",
"547fa50af16beca7.js",
"8c8a7a2941fb6d64.js",
"9e98dbfde77e3dfe.js",
"d9eb39b11bc766f4.js",
"f9888fa1a1e366e7.js",
"78cf02220fb0937c.js",
// TODO(kdy1): Non-ascii char count
"58cb05d17f7ec010.js",
"4d2c7020de650d40.js",
"dafb7abe5b9b44f5.js",
// Our one is better
"1efde9ddd9d6e6ce.module.js",
"d010d377bcfd5565.js",
"ce0aaec02d5d4465.js",
"edd1f39f90576180.js",
"290fdc5a2f826ead.js",
"e71a91c61343cdb1.js",
"409f30dc7efe75d5.js",
"03608b6e222ae700.js",
"e54c1a2fc15cd4b8.js",
"e08e181172bad2b1.js",
"cc793d44a11617e7.js",
"54e70df597a4f9a3.js",
"efef19e06f58fdd9.js",
"e0fc2148b455a6be.js",
"10857a84ed2962f1.js",
"d7c7ff252e84e81d.js",
"0aa6aab640155051.js",
"c80d9415dde647cd.js",
"09e84f25af85b836.js",
"ce8c443eb361e1a2.js",
"affd557fd820e1f2.js",
"ec99a663d6f3983d.js",
"01fd8e8a0a42307b.js",
"e01c7172cf204b92.js",
"12d5bedf1812952a.js",
"df20c9b7a7d534cb.js",
"c767fa4d683aa3ce.js",
"bf8ffad512a5f568.js",
"c8513472857eae9c.js",
"b86b0122e80c330e.js",
"aa7e721756949024.js",
"a830df7cf2e74c9f.js",
"845631d1a33b3409.js",
"066b76285ce79182.js",
"fe2d3b945530c806.js",
"bd28a7d19ac0d50b.js",
"06c7efc128ce74a0.js",
"075c7204d0b0af60.js",
"0827a8316cca777a.js",
"b9a0cb6df76a73d2.js",
"bf210a4f0cf9e352.js",
"6edc155d463535cb.js",
"b8f8dfc41df97add.js",
"b549d045fc8e93bf.js",
"e42f306327c0f578.js",
"9a9cb616daadf90a.js",
"d2ae1c7b6e55143f.js",
"a445a478b4ce0c58.js",
"0d137e8a97ffe083.js",
"b7a6a807ae6db312.js",
"bb8b546cf9db5996.js",
"50ac15a08f7c812f.js",
"a2cb5a14559c6a50.js",
"bbff5671643cc2ea.js",
"c2f12d66ce17d5ab.js",
"13045bfdda0434e0.js",
"10d6486502949e74.js",
"119e9dce4feae643.js",
"1223609b0f7a2129.js",
"177fef3d002eb873.js",
"19ffea7e9e887e08.js",
"1c6c67fcd71f2d08.js",
"1cdce2d337e64b4f.js",
"1f039e0eeb1bc271.js",
"227118dffd2c9935.js",
"250ced8c8e83b389.js",
"a2798917405b080b.js",
"ad6bf12aa7eda975.js",
"24fa28a37061a18f.js",
"252bb992a448270e.js",
"285648c16156804f.js",
"2d10fed2af94fbd1.js",
"3097f73926c93640.js",
"30aee1020fc69090.js",
"312f85fecc352681.js",
"317532451c2ce8ff.js", | "38284ea2d9914d86.js",
"3b57183c81070eec.js",
"3bbd75d597d54fe6.js",
"3c1e2ada0ac2b8e3.js",
"3e1a6f702041b599.js",
"3e3a99768a4a1502.js",
"3e69c5cc1a7ac103.js",
"3eac36e29398cdc5.js",
"3ff52d86c77678bd.js",
"43023cd549deee77.js",
"44af28febe2288cc.js",
"478ede4cfe7906d5.js",
"4869454dd215468e.js",
"48b6f8ce65d3b3ee.js",
"4c71e11fbbc56349.js",
"4d833cbc56caaaf9.js",
"4e7c58761e24d77c.js",
"4e7c58761e24d77c.js",
"5641ad33abcd1752.js",
"587400d1c019785a.js",
"58ed6ffb30191684.js",
"5b8d2b991d2c1f5b.js | "32b635a9667a9fb1.js",
"36224cf8215ad8e4.js",
"37e4a6eca1ece7e5.js", | random_line_split |
cloudevents.go | 重均衡,默认:10s
// 该值必须在broker配置`group.min.session.timeout.ms`与`group.max.session.timeout.ms`之间
Timeout time.Duration `mapstructure:"timeout"`
} `mapstructure:"session"`
Heartbeat struct {
// kafka协调者预期的心跳间隔,用于确保消费者session处于活跃状态,值必须小于session.timeout,默认:3s
// 一般建议设置为session.timeout的3分之一
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"heartbeat"`
Rebalance struct {
// topic分区分配给消费者的策略,支持:range, roundrobin, sticky,默认:range
// range: 标识使用范围分区分配策略的策略
// roundrobin: 标识使用循环分区分配策略的策略
// sticky: 标识使用粘性分区分配策略的策略
Strategy string `mapstructure:"strategy"`
// 重均衡开始后,消费者加入群组的最大允许时间,默认:60s
Timeout time.Duration `mapstructure:"timeout"`
Retry struct {
// 最大重试次数,默认:4
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"rebalance"`
} `mapstructure:"group"`
// 读取分区失败的重试
Retry struct {
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 控制每个请求所拉取数据的大小,单位bytes
Fetch struct {
// 必须等待的最小消息大小,不要设置为0,等同于jvm `fetch.min.bytes`,默认:1
Min int32 `mapstructure:"min"`
// 每请求从broker获取的消息大小,默认:1MB
// 尽量大于你消息的大部分大小,否则还要做额外的切割,等同于jvm `fetch.message.max.bytes`
Default int32 `mapstructure:"default"`
// 每请求可最大获取的消息大小,值为0表示不限制,等同于jvm `fetch.message.max.bytes`,默认:0
Max int32 `mapstructure:"max"`
} `mapstructure:"fetch"`
// broker在等待消息达到 Consumer.Fetch.Min 大小的最大时间,不要设置为0,默认:250ms
// 建议在 100-500ms,等同于jvm `fetch.wait.max.ms`
MaxWaitTime time.Duration `mapstructure:"max_wait_time"`
// 消费者为用户处理消息所需的最长时间,如果写入消息通道所需的时间超过此时间,则该分区将停止获取更多消息,直到可以再次继续。
// 由于消息通道已缓冲,因此实际宽限时间为 (MaxProcessingTime * ChannelBufferSize),默认:100ms
MaxProcessingTime time.Duration `mapstructure:"max_processing_time"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 失败的消息是否记录,默认:false
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 控制如何提交消费offset
Offsets struct {
AutoCommit struct {
// 是否自动更新,默认:true
Enable bool `mapstructure:"enable"`
// 自动更新频率,默认:1s
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"auto_commit"`
// OffsetNewest=-1 代表访问 commit 位置的下一条消息
// OffsetOldest=-2 消费者可以访问到的 topic 里的最早的消息
Initial int64 `mapstructure:"initial"`
Retention time.Duration `mapstructure:"retention"`
// 提交offset失败的重试
Retry struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush.Bytes = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
| if s.Producer.Retry.Max != 0 {
c.Producer.R | conditional_block | |
cloudevents.go | :"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush.Bytes = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
if s.Producer.Retry.Max != 0 {
c.Producer.Retry.Max = s.Producer.Retry.Max
}
if s.Producer.Retry.Backoff.Seconds() != 0 {
c.Producer.Retry.Backoff = s.Producer.Retry.Backoff
}
// consumer
if s.Consumer.Group.Session.Timeout.Seconds() != 0 {
c.Consumer.Group.Session.Timeout = s.Consumer.Group.Session.Timeout
}
if s.Consumer.Group.Heartbeat.Interval.Seconds() != 0 {
c.Consumer.Group.Heartbeat.Interval = s.Consumer.Group.Heartbeat.Interval
}
if s.Consumer.Group.Rebalance.Strategy != "" {
switch s.Consumer.Group.Rebalance.Strategy {
case sarama.RangeBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
case sarama.RoundRobinBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
case sarama.StickyBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
}
}
if s.Consumer.Group.Rebalance.Timeout.Seconds() != 0 {
c.Consumer.Group.Rebalance.Timeout = s.Consumer.Group.Rebalance.Timeout
}
if s.Consumer.Group.Rebalance.Retry.Max != 0 {
c.Consumer.Group.Rebalance.Retry.Max = s.Consumer.Group.Rebalance.Retry.Max
}
if s.Consumer.Group.Rebalance.Retry.Backoff.Seconds() != 0 {
c.Consumer.Group.Rebalance.Retry.Backoff = s.Consumer.Group.Rebalance.Retry.Backoff
}
if s.Consumer.Retry.Backoff.Seconds() != 0 {
c.Consumer.Retry.Backoff = s.Consumer.Retry.Backoff
}
if s.Consumer.Fetch.Min != 0 {
c.Consumer.Fetch.Min = s.Consumer.Fetch.Min
}
if s.Consumer.Fetch.Max != 0 {
c.Consumer.Fetch.Max = s.Consumer.Fetch.Max
}
if s.Consumer.Fetch.Default != 0 {
c.Consumer.Fetch.Default = s.Consumer.Fetch.Default
}
if s.Consumer.MaxWaitTime.Seconds() != 0 {
c.Consumer.MaxWaitTime = s.Consumer.MaxWaitTime
}
if s.Consumer.MaxProcessingTime.Seconds() != 0 {
c.Consumer.MaxProcessingTime = s.Consumer.MaxProcessingTime
}
if s.Consumer.Return.Errors {
c.Consumer.Return.Errors = true
}
if s.Consumer.Offsets.AutoCommit.Enable {
c.Consumer.Offsets.AutoCommit.Enable = true
c.Consumer.Offsets.AutoCommit.Interval = s.Consumer.Offsets.AutoCommit.Interval
}
if s.Consumer.Offsets.Initial != 0 {
c.Consumer.Offsets.Initial = s.Consumer.Offsets.Initial
}
if s.Consumer.Offsets.Retention.Seconds() != 0 {
c.Consumer.Offsets.Retention = s.Consumer.Offsets.Retention
}
if s.Consumer.Offsets.Retry.Max != 0 {
c.Consumer.Offsets.Retry.Max = s.Consumer.Offsets.Retry.Max
}
if s.Consumer.IsolationLevel != 0 {
c.Consumer.IsolationLevel = sarama.IsolationLevel(s.Consumer.IsolationLevel)
}
if s.ClientID != "" {
c.ClientID = s.ClientID
}
if s.RackID != "" {
c.RackID = s.RackID
}
if s.ChannelBufferSize != 0 {
c.ChannelBufferSize = s.ChannelBufferSize
}
if s.Version != "" {
ver, err := sarama.ParseKafkaVersion(s.Version)
if err != nil {
// 解析版本错误则指定版本1.0.0
c.Version = sarama.V1_0_0_0
} else {
c.Version = ver
}
}
return c
}
// InitCloudEvents 初始化 cloudevents 数据实例
func (c *LocalConfig) InitCloudEvents() error {
if c.CloudEvents == nil || c.CloudEvents.Protocol == "" {
return nil
}
switch c.CloudEvents.Protocol {
case CloudEventsProtocolKafkaSarama:
default:
return fmt.Errorf("not support cloudevents protocol %v", c.CloudEvents.Protocol)
}
saramaConfig := c.CloudEvents.KafkaSarama.Config.Parse()
sender, err := kafkaSarama.NewSender(c.CloudEvents.KafkaSarama.Brokers,
saramaConfig,
c.CloudEvents.KafkaSarama.Topic)
if err != nil {
return err
}
client, err := cloudevents.NewClient(sender, cloudevents.WithTimeNow(), cloudevents.WithUUIDs())
if err != nil {
return err
}
c.eventClient = client
return nil
}
// GetCloudEvents 用于获取 cloudevents 连接客户端
func (c *LocalConfig) GetCloudEvents() (eventclient.Client, error) {
if c.eventClient == nil {
return nil, fmt.Errorf("cloudevents client is nil")
}
return c.eventClient, nil
}
| identifier_body | ||
cloudevents.go | struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush.Bytes = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
if s.Producer.Retry.Max != 0 {
c.Producer.Retry.Max = s.Producer.Retry.Max
}
if s.Producer.Retry.Backoff.Seconds() != 0 {
c.Producer.Retry.Backoff = s.Producer.Retry.Backoff
}
// consumer
if s.Consumer.Group.Session.Timeout.Seconds() != 0 {
c.Consumer.Group.Session.Timeout = s.Consumer.Group.Session.Timeout
}
if s.Consumer.Group.Heartbeat.Interval.Seconds() != 0 {
c.Consumer.Group.Heartbeat.Interval = s.Consumer.Group.Heartbeat.Interval
}
if s.Consumer.Group.Rebalance.Strategy != "" {
switch s.Consumer.Group.Rebalance.Strategy {
case sarama.RangeBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
case sarama.RoundRobinBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
case sarama.StickyBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
}
}
if s.Consumer.Group.Rebalance.Timeout.Seconds() != 0 {
c.Consumer.Group.Rebalance.Timeout = s.Consumer.Group.Rebalance.Timeout
}
if s.Consumer.Group.Rebalance.Retry.Max != 0 {
c.Consumer.Group.Rebalance.Retry.Max = s.Consumer.Group.Rebalance.Retry.Max
}
if s.Consumer.Group.Rebalance.Retry.Backoff.Seconds() != 0 {
c.Consumer.Group.Rebalance.Retry.Backoff = s.Consumer.Group.Rebalance.Retry.Backoff
}
if s.Consumer.Retry.Backoff.Seconds() != 0 {
c.Consumer.Retry.Backoff = s.Consumer.Retry.Backoff
}
if s.Consumer.Fetch.Min != 0 {
c.Consumer.Fetch.Min = s.Consumer.Fetch.Min
}
if s.Consumer.Fetch.Max != 0 {
c.Consumer.Fetch.Max = s.Consumer.Fetch.Max
}
if s.Consumer.Fetch.Default != 0 {
c.Consumer.Fetch.Default = s.Consumer.Fetch.Default
}
if s.Consumer.MaxWaitTime.Seconds() != 0 {
c.Consumer.MaxWaitTime = s.Consumer.MaxWaitTime
}
if s.Consumer.MaxProcessingTime.Seconds() != 0 {
c.Consumer.MaxProcessingTime = s.Consumer.MaxProcessingTime
}
if s.Consumer.Return.Errors {
c.Consumer.Return.Errors = true
}
if s.Consumer.Offsets.AutoCommit.Enable {
c.Consumer.Offsets.AutoCommit.Enable = true
c.Consumer.Offsets.AutoCommit.Interval = s.Consumer.Offsets.AutoCommit.Interval
}
if s.Consumer.Offsets.Initial != 0 {
c.Consumer.Offsets.Initial = s.Consumer.Offsets.Initial
}
if s.Consumer.Offsets.Retention.Seconds() != 0 {
c.Consumer.Offsets.Retention = s.Consumer.Offsets.Retention
}
if s.Consumer.Offsets.Retry.Max != 0 {
c.Consumer.Offsets.Retry.Max = s.Consumer.Offsets.Retry.Max
}
if s.Consumer.IsolationLevel != 0 {
c.Consumer.IsolationLevel = sarama.IsolationLevel(s.Consumer.IsolationLevel)
}
if s.ClientID != "" {
c.ClientID = s.ClientID
}
if s.RackID != "" {
c.RackID = s.RackID
}
if s.ChannelBufferSize != 0 {
c.ChannelBufferSize = s.ChannelBufferSize
}
if s.Version != "" {
ver, err := sarama.ParseKafkaVersion(s.Version)
if err != nil {
// 解析版本错误则指定版本1.0.0
c.Version = sarama.V1_0_0_0
} else {
c.Version = ver
}
}
return c
}
// InitCloudEvents 初始化 cloudevents 数据实例
func (c *LocalConfig) InitCloudEvents() error {
if c.CloudEvents == nil || c.CloudEvents.Protocol == "" {
return nil
}
switch c.CloudEvents.Protocol {
case CloudEventsProtocolKafkaSarama:
default:
return fmt.Errorf("not support cloudevents protocol %v", c.CloudEvents.Protocol)
}
saramaConfig := c.CloudEvents.KafkaSarama.Config.Parse()
sender, err := kafkaSarama.NewSender(c.CloudEvents.KafkaSarama.Brokers, | saramaConfig,
c.CloudEvents.KafkaSarama.Topic)
if err != nil {
return err
} | random_line_split | |
cloudevents.go | // 重试失败之间等待间隔,等同于jvm的:retry.backoff.ms,默认值:100ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"producer"`
// 消费者相关配置
Consumer struct {
Group struct {
Session struct {
// 当broker端未收到消费者的心跳包,超过该时间间隔,则broker认为该消费者离线,将进行重均衡,默认:10s
// 该值必须在broker配置`group.min.session.timeout.ms`与`group.max.session.timeout.ms`之间
Timeout time.Duration `mapstructure:"timeout"`
} `mapstructure:"session"`
Heartbeat struct {
// kafka协调者预期的心跳间隔,用于确保消费者session处于活跃状态,值必须小于session.timeout,默认:3s
// 一般建议设置为session.timeout的3分之一
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"heartbeat"`
Rebalance struct {
// topic分区分配给消费者的策略,支持:range, roundrobin, sticky,默认:range
// range: 标识使用范围分区分配策略的策略
// roundrobin: 标识使用循环分区分配策略的策略
// sticky: 标识使用粘性分区分配策略的策略
Strategy string `mapstructure:"strategy"`
// 重均衡开始后,消费者加入群组的最大允许时间,默认:60s
Timeout time.Duration `mapstructure:"timeout"`
Retry struct {
// 最大重试次数,默认:4
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"rebalance"`
} `mapstructure:"group"`
// 读取分区失败的重试
Retry struct {
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 控制每个请求所拉取数据的大小,单位bytes
Fetch struct {
// 必须等待的最小消息大小,不要设置为0,等同于jvm `fetch.min.bytes`,默认:1
Min int32 `mapstructure:"min"`
// 每请求从broker获取的消息大小,默认:1MB
// 尽量大于你消息的大部分大小,否则还要做额外的切割,等同于jvm `fetch.message.max.bytes`
Default int32 `mapstructure:"default"`
// 每请求可最大获取的消息大小,值为0表示不限制,等同于jvm `fetch.message.max.bytes`,默认:0
Max int32 `mapstructure:"max"`
} `mapstructure:"fetch"`
// broker在等待消息达到 Consumer.Fetch.Min 大小的最大时间,不要设置为0,默认:250ms
// 建议在 100-500ms,等同于jvm `fetch.wait.max.ms`
MaxWaitTime time.Duration `mapstructure:"max_wait_time"`
// 消费者为用户处理消息所需的最长时间,如果写入消息通道所需的时间超过此时间,则该分区将停止获取更多消息,直到可以再次继续。
// 由于消息通道已缓冲,因此实际宽限时间为 (MaxProcessingTime * ChannelBufferSize),默认:100ms
MaxProcessingTime time.Duration `mapstructure:"max_processing_time"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 失败的消息是否记录,默认:false
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 控制如何提交消费offset
Offsets struct {
AutoCommit struct {
// 是否自动更新,默认:true
Enable bool `mapstructure:"enable"`
// 自动更新频率,默认:1s
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"auto_commit"`
// OffsetNewest=-1 代表访问 commit 位置的下一条消息
// OffsetOldest=-2 消费者可以访问到的 topic 里的最早的消息
Initial int64 `mapstructure:"initial"`
Retention time.Duration `mapstructure:"retention"`
// 提交offset失败的重试
Retry struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush | .Byte | identifier_name | |
smf.go | s.createBarsUntil(pos, change.AbsPos, num, denom)
}
num, denom = change.Num, change.Denom
b := s.AddBar(change.AbsPos, num, denom)
pos = b.EndPos()
}
s.createBarsUntil(pos, s.lastPos, num, denom)
s.RenumberBars()
}
func (s *Song) findBar(pos uint64) (bar *Bar) {
for _, b := range s.Bars {
if pos >= b.AbsPos {
bar = b
}
}
return
}
func (s *Song) NoOfContentTracks() (no uint16) {
for _, tr := range s.Tracks {
if tr.WithContent {
no++
}
}
return
}
func (s *Song) finishScan() (err error) {
sort.Sort(s.scannedTempoChanges)
sort.Sort(s.scannedTimeSig)
if len(s.scannedTimeSig) > 0 && s.scannedTimeSig[0].AbsPos == 0 {
var rest TimeSigs
if len(s.scannedTimeSig) > 1 {
rest = s.scannedTimeSig[1:]
}
s.createBars([2]uint8{s.scannedTimeSig[0].Num, s.scannedTimeSig[0].Denom}, rest)
} else {
s.createBars([2]uint8{4, 4}, s.scannedTimeSig)
}
for _, msg := range s.scannedMessages {
b := s.findBar(msg.AbsPos)
if b == nil {
return fmt.Errorf("can't find bar for message: %v at position %v", msg.Message, msg.AbsPos)
}
b.SetMessageByRelTicks(msg.AbsPos-b.AbsPos, msg.TrackNo, msg.Message)
b.SortPositions()
}
return nil
}
func (s *Song) LastTrack() *Track {
return s.Tracks[len(s.Tracks)-1]
}
func (s *Song) scanMessage(p *reader.Position, msg midi.Message) {
if p.AbsoluteTicks > s.lastPos {
s.lastPos = p.AbsoluteTicks
}
if msg == meta.EndOfTrack {
s.AddTrack(false, -1)
return
}
t := s.LastTrack()
switch m := msg.(type) {
case meta.Copyright:
s.CopyRight = m.Text()
case meta.TrackSequenceName:
t.Name = m.Text()
case meta.Instrument:
t.Instrument = m.Text()
case meta.TimeSig:
ts := &TimeSig{
AbsPos: p.AbsoluteTicks,
Num: m.Numerator,
Denom: m.Denominator,
}
s.scannedTimeSig = append(s.scannedTimeSig, ts)
case meta.Tempo:
tc := &TempoChange{
AbsPos: p.AbsoluteTicks,
TempoBPM: m.FractionalBPM(),
}
s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
default:
if msg != nil {
tm := &TrackMessage{}
tm.Message = msg
tm.TrackNo = t.No
tm.AbsPos = p.AbsoluteTicks
s.scannedMessages = append(s.scannedMessages, tm)
t.WithContent = true
if chMsg, is := msg.(channel.Message); is {
if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
}
if t.Channel < 0 {
t.Channel = int8(chMsg.Channel())
}
}
}
}
}
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
b := &Bar{
AbsPos: pos,
Song: s,
TimeSig: [2]uint8{num, denom},
}
s.Bars = append(s.Bars, b)
return b
}
func (s *Song) Save(file string) error {
return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
timesig := [2]uint8{4, 4}
var pos uint64
for _, b := range s.Bars {
if b.TimeSig != timesig {
delta := uint32(b.AbsPos - pos)
w.SetDelta(delta)
w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1]))
timesig = b.TimeSig
pos = b.AbsPos
}
}
return nil
}
func (s *Song) | (w *writer.SMF) error {
tempo := float32(120.0)
var pos uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
if p.Tempo != 0 && p.Tempo != tempo {
absPos := p.AbsTicks()
delta := uint32(absPos - pos)
w.SetDelta(delta)
w.Write(meta.Tempo(p.Tempo))
tempo = p.Tempo
pos = absPos
}
}
}
return nil
}
func (s *Song) writeSMF(w *writer.SMF) (err error) {
err = s.writeTimeSigTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
err = s.writeTempoTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
for _, tr := range s.Tracks {
if tr.WithContent {
var lastTick uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
ticks := p.AbsTicks()
for _, m := range p.Messages {
if m.TrackNo == tr.No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
func KeyToNote(key uint8) string {
nt := key % 12
oct := key / 12
notes := map[uint8]string{
0: "C",
1: "C#",
2: "D",
3: "D#",
4: "E",
5: "F",
6: "F#",
7: "G",
8: "G#",
9: "A",
10: "A#",
11: "B",
}
return fmt.Sprintf("%s%v", notes[nt], oct)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
}
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction | writeTempoTrack | identifier_name |
smf.go | .No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
func KeyToNote(key uint8) string {
nt := key % 12
oct := key / 12
notes := map[uint8]string{
0: "C",
1: "C#",
2: "D",
3: "D#",
4: "E",
5: "F",
6: "F#",
7: "G",
8: "G#",
9: "A",
10: "A#",
11: "B",
}
return fmt.Sprintf("%s%v", notes[nt], oct)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
}
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction[1]
}
beat := fmt.Sprintf("%0.4f", float64(p.Beat)+float64(1)+frac)
fmt.Fprintf(&bf, "| %s | %s | %s | %s | ", p.Comment, p.Mark, tempo, beat)
for _, t := range s.Tracks {
if t.WithContent {
var printed bool
for _, m := range p.Messages {
if m.TrackNo == t.No {
fmt.Fprintf(&bf, " %s | ", ShowMessage(m.Message))
printed = true
}
}
if !printed {
fmt.Fprintf(&bf, " | ")
}
}
}
fmt.Fprintf(&bf, "\n")
}
}
return bf.String()
}
func (s *Song) RenumberBars() {
for i := range s.Bars {
s.Bars[i].No = uint16(i)
}
}
func (s *Song) RenumberTracks() {
for i := range s.Tracks {
s.Tracks[i].No = uint16(i)
}
}
type TrackMessage struct {
TrackNo uint16
AbsPos uint64
Message midi.Message
Position *Position
}
type Positions []*Position
func (p Positions) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p Positions) Len() int {
return len(p)
}
func (p Positions) Less(a, b int) bool {
if p[a].Bar.No < p[b].Bar.No {
return true
}
if p[a].Bar.No > p[b].Bar.No {
return false
}
if p[a].Beat < p[b].Beat {
return true
}
if p[a].Beat > p[b].Beat {
return false
}
var frac_a float64
var frac_b float64
if p[a].Fraction[1] > 0 {
frac_a = float64(p[a].Fraction[0]) / float64(p[a].Fraction[1])
}
if p[b].Fraction[1] > 0 {
frac_b = float64(p[b].Fraction[0]) / float64(p[b].Fraction[1])
}
return frac_a < frac_b
}
type Bar struct {
Song *Song
No uint16
TimeSig [2]uint8
Positions Positions
AbsPos uint64
}
func (b *Bar) EndPos() uint64 {
return b.AbsPos + b.Length()
}
func (b *Bar) Length() uint64 {
l := float64(b.Song.ticksPerQN*4*uint32(b.TimeSig[0])) / float64(b.TimeSig[1])
return uint64(math.Round(l))
}
func (b *Bar) SetMessageByRelTicks(ticks uint64, trackNo uint16, msg midi.Message) {
beat := uint8(ticks / uint64(b.Song.ticksPerQN))
ticksRest := ticks % uint64(b.Song.ticksPerQN)
var pos *Position
for _, p := range b.Positions {
if p.Beat == beat && p.WithinFraction(ticksRest) {
pos = p
break
}
}
if pos == nil {
pos = b.AddPosition()
pos.Beat = beat
pos.Fraction[0] = float64(ticksRest)
pos.Fraction[1] = float64(b.Song.ticksPerQN)
}
pos.SetMessage(trackNo, msg)
}
func (b *Bar) AddPosition() *Position {
p := &Position{
Bar: b,
}
b.Positions = append(b.Positions, p)
return p
}
func (b *Bar) Columns() []string {
//cols := make([]string)
return nil
}
func (b *Bar) SortPositions() {
sort.Sort(b.Positions)
}
type Position struct {
Bar *Bar
Comment string
Mark string
Beat uint8
Tempo float32
Fraction [2]float64
Messages []*TrackMessage
}
/*
WithinFraction determines, if the given ticks are within the fraction of the position.
The given ticks must be less than a quarternote (Songs ticks per quarternote).
The fraction is a fraction of a quarternote. So we first have to check, to which fraction
of the qn the given ticks correspond and then to check, if the difference between this fraction
and the fraction of the Position lies within the tolerance
*/
func (p *Position) WithinFraction(ticks uint64) bool {
//tolerance := float64(0.0000001)
tolerance := float64(0.001)
fracTicks := float64(ticks) / float64(p.Bar.Song.ticksPerQN)
if fracTicks >= 1 {
panic("must not happen, we are on the wrong beat")
}
fracPos := p.Fraction[0] / p.Fraction[1]
//fmt.Printf("\nwithin fraction %v vs %v (ticks: %v perQN: %v)\n", fracPos, fracTicks, ticks, p.Bar.Song.ticksPerQN)
return math.Abs(fracPos-fracTicks) < tolerance
}
func (p *Position) AbsTicks() uint64 {
beatTicks := p.Bar.Song.ticksPerQN * uint32(p.Beat)
fracTicks := math.Round((float64(p.Bar.Song.ticksPerQN) * p.Fraction[0]) / p.Fraction[1])
return p.Bar.AbsPos + uint64(beatTicks) + uint64(fracTicks)
}
func (p *Position) AddMessage(track uint16, msg midi.Message) {
tm := &TrackMessage{
TrackNo: track,
Message: msg,
Position: p,
}
p.Messages = append(p.Messages, tm)
}
func (p *Position) GetMessage(track uint16) *TrackMessage {
for _, m := range p.Messages {
if m.TrackNo == track {
return m
}
} |
return nil
}
| random_line_split | |
smf.go | .createBarsUntil(pos, change.AbsPos, num, denom)
}
num, denom = change.Num, change.Denom
b := s.AddBar(change.AbsPos, num, denom)
pos = b.EndPos()
}
s.createBarsUntil(pos, s.lastPos, num, denom)
s.RenumberBars()
}
func (s *Song) findBar(pos uint64) (bar *Bar) {
for _, b := range s.Bars {
if pos >= b.AbsPos {
bar = b
}
}
return
}
func (s *Song) NoOfContentTracks() (no uint16) {
for _, tr := range s.Tracks {
if tr.WithContent {
no++
}
}
return
}
func (s *Song) finishScan() (err error) {
sort.Sort(s.scannedTempoChanges)
sort.Sort(s.scannedTimeSig)
if len(s.scannedTimeSig) > 0 && s.scannedTimeSig[0].AbsPos == 0 {
var rest TimeSigs
if len(s.scannedTimeSig) > 1 {
rest = s.scannedTimeSig[1:]
}
s.createBars([2]uint8{s.scannedTimeSig[0].Num, s.scannedTimeSig[0].Denom}, rest)
} else {
s.createBars([2]uint8{4, 4}, s.scannedTimeSig)
}
for _, msg := range s.scannedMessages {
b := s.findBar(msg.AbsPos)
if b == nil {
return fmt.Errorf("can't find bar for message: %v at position %v", msg.Message, msg.AbsPos)
}
b.SetMessageByRelTicks(msg.AbsPos-b.AbsPos, msg.TrackNo, msg.Message)
b.SortPositions()
}
return nil
}
func (s *Song) LastTrack() *Track {
return s.Tracks[len(s.Tracks)-1]
}
func (s *Song) scanMessage(p *reader.Position, msg midi.Message) {
if p.AbsoluteTicks > s.lastPos {
s.lastPos = p.AbsoluteTicks
}
if msg == meta.EndOfTrack {
s.AddTrack(false, -1)
return
}
t := s.LastTrack()
switch m := msg.(type) {
case meta.Copyright:
s.CopyRight = m.Text()
case meta.TrackSequenceName:
t.Name = m.Text()
case meta.Instrument:
t.Instrument = m.Text()
case meta.TimeSig:
ts := &TimeSig{
AbsPos: p.AbsoluteTicks,
Num: m.Numerator,
Denom: m.Denominator,
}
s.scannedTimeSig = append(s.scannedTimeSig, ts)
case meta.Tempo:
tc := &TempoChange{
AbsPos: p.AbsoluteTicks,
TempoBPM: m.FractionalBPM(),
}
s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
default:
if msg != nil {
tm := &TrackMessage{}
tm.Message = msg
tm.TrackNo = t.No
tm.AbsPos = p.AbsoluteTicks
s.scannedMessages = append(s.scannedMessages, tm)
t.WithContent = true
if chMsg, is := msg.(channel.Message); is {
if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
}
if t.Channel < 0 {
t.Channel = int8(chMsg.Channel())
}
}
}
}
}
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
b := &Bar{
AbsPos: pos,
Song: s,
TimeSig: [2]uint8{num, denom},
}
s.Bars = append(s.Bars, b)
return b
}
func (s *Song) Save(file string) error {
return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
timesig := [2]uint8{4, 4}
var pos uint64
for _, b := range s.Bars {
if b.TimeSig != timesig {
delta := uint32(b.AbsPos - pos)
w.SetDelta(delta)
w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1]))
timesig = b.TimeSig
pos = b.AbsPos
}
}
return nil
}
func (s *Song) writeTempoTrack(w *writer.SMF) error {
tempo := float32(120.0)
var pos uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
if p.Tempo != 0 && p.Tempo != tempo {
absPos := p.AbsTicks()
delta := uint32(absPos - pos)
w.SetDelta(delta)
w.Write(meta.Tempo(p.Tempo))
tempo = p.Tempo
pos = absPos
}
}
}
return nil
}
func (s *Song) writeSMF(w *writer.SMF) (err error) {
err = s.writeTimeSigTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
err = s.writeTempoTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
for _, tr := range s.Tracks {
if tr.WithContent {
var lastTick uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
ticks := p.AbsTicks()
for _, m := range p.Messages {
if m.TrackNo == tr.No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
func KeyToNote(key uint8) string {
nt := key % 12
oct := key / 12
notes := map[uint8]string{
0: "C",
1: "C#",
2: "D",
3: "D#",
4: "E",
5: "F",
6: "F#",
7: "G",
8: "G#",
9: "A",
10: "A#",
11: "B",
}
return fmt.Sprintf("%s%v", notes[nt], oct)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent |
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction | {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
} | conditional_block |
smf.go | Ticks,
TempoBPM: m.FractionalBPM(),
}
s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
default:
if msg != nil {
tm := &TrackMessage{}
tm.Message = msg
tm.TrackNo = t.No
tm.AbsPos = p.AbsoluteTicks
s.scannedMessages = append(s.scannedMessages, tm)
t.WithContent = true
if chMsg, is := msg.(channel.Message); is {
if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
}
if t.Channel < 0 {
t.Channel = int8(chMsg.Channel())
}
}
}
}
}
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
b := &Bar{
AbsPos: pos,
Song: s,
TimeSig: [2]uint8{num, denom},
}
s.Bars = append(s.Bars, b)
return b
}
func (s *Song) Save(file string) error {
return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
timesig := [2]uint8{4, 4}
var pos uint64
for _, b := range s.Bars {
if b.TimeSig != timesig {
delta := uint32(b.AbsPos - pos)
w.SetDelta(delta)
w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1]))
timesig = b.TimeSig
pos = b.AbsPos
}
}
return nil
}
func (s *Song) writeTempoTrack(w *writer.SMF) error {
tempo := float32(120.0)
var pos uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
if p.Tempo != 0 && p.Tempo != tempo {
absPos := p.AbsTicks()
delta := uint32(absPos - pos)
w.SetDelta(delta)
w.Write(meta.Tempo(p.Tempo))
tempo = p.Tempo
pos = absPos
}
}
}
return nil
}
func (s *Song) writeSMF(w *writer.SMF) (err error) {
err = s.writeTimeSigTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
err = s.writeTempoTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
for _, tr := range s.Tracks {
if tr.WithContent {
var lastTick uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
ticks := p.AbsTicks()
for _, m := range p.Messages {
if m.TrackNo == tr.No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
func KeyToNote(key uint8) string {
nt := key % 12
oct := key / 12
notes := map[uint8]string{
0: "C",
1: "C#",
2: "D",
3: "D#",
4: "E",
5: "F",
6: "F#",
7: "G",
8: "G#",
9: "A",
10: "A#",
11: "B",
}
return fmt.Sprintf("%s%v", notes[nt], oct)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
}
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction[1]
}
beat := fmt.Sprintf("%0.4f", float64(p.Beat)+float64(1)+frac)
fmt.Fprintf(&bf, "| %s | %s | %s | %s | ", p.Comment, p.Mark, tempo, beat)
for _, t := range s.Tracks {
if t.WithContent {
var printed bool
for _, m := range p.Messages {
if m.TrackNo == t.No {
fmt.Fprintf(&bf, " %s | ", ShowMessage(m.Message))
printed = true
}
}
if !printed {
fmt.Fprintf(&bf, " | ")
}
}
}
fmt.Fprintf(&bf, "\n")
}
}
return bf.String()
}
func (s *Song) RenumberBars() {
for i := range s.Bars {
s.Bars[i].No = uint16(i)
}
}
func (s *Song) RenumberTracks() {
for i := range s.Tracks {
s.Tracks[i].No = uint16(i)
}
}
type TrackMessage struct {
TrackNo uint16
AbsPos uint64
Message midi.Message
Position *Position
}
type Positions []*Position
func (p Positions) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p Positions) Len() int {
return len(p)
}
func (p Positions) Less(a, b int) bool {
if p[a].Bar.No < p[b].Bar.No {
return true
}
if p[a].Bar.No > p[b].Bar.No {
return false
}
if p[a].Beat < p[b].Beat {
return true
}
if p[a].Beat > p[b].Beat {
return false
}
var frac_a float64
var frac_b float64
if p[a].Fraction[1] > 0 {
frac_a = float64(p[a].Fraction[0]) / float64(p[a].Fraction[1])
}
if p[b].Fraction[1] > 0 {
frac_b = float64(p[b].Fraction[0]) / float64(p[b].Fraction[1])
}
return frac_a < frac_b
}
type Bar struct {
Song *Song
No uint16
TimeSig [2]uint8
Positions Positions
AbsPos uint64
}
func (b *Bar) EndPos() uint64 {
return b.AbsPos + b.Length()
}
func (b *Bar) Length() uint64 | {
l := float64(b.Song.ticksPerQN*4*uint32(b.TimeSig[0])) / float64(b.TimeSig[1])
return uint64(math.Round(l))
} | identifier_body | |
audio.rs | : {:#?}", &input_info);
// Construct the input stream parameters.
let latency = input_info.default_low_input_latency;
let input_params = pa::StreamParameters::<f32>::new(def_input, CHANNELS, INTERLEAVED, latency);
let def_output = pa.default_output_device()?;
let output_info = pa.device_info(def_output)?;
println!("Default output device info: {:#?}", &output_info);
// Construct the output stream parameters.
let latency = output_info.default_low_output_latency;
let output_params =
pa::StreamParameters::<f32>::new(def_output, CHANNELS, INTERLEAVED, latency);
// Check that the stream format is supported.
pa.is_duplex_format_supported(input_params, output_params, sample_rate as f64)?;
// Construct the settings with which we'll open our duplex stream.
let settings =
pa::DuplexStreamSettings::new(input_params, output_params, sample_rate as f64, FRAMES);
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// We'll use this function to wait for read/write availability.
fn wait_for_stream<F>(f: F, name: &str) -> u32
where
F: Fn() -> Result<pa::StreamAvailable, pa::error::Error>,
{
loop {
match f() {
Ok(available) => match available {
pa::StreamAvailable::Frames(frames) => return frames as u32,
pa::StreamAvailable::InputOverflowed => println!("Input stream has overflowed"),
pa::StreamAvailable::OutputUnderflowed => {
println!("Output stream has underflowed")
}
},
Err(err) => panic!(
"An error occurred while waiting for the {} stream: {}",
name, err
),
}
}
};
// Now start the main read/write loop! In this example, we pass
// the input buffer directly to the output buffer, so watch out
// for feedback.
loop {
// How many frames are available on the input stream?
let in_frames = wait_for_stream(|| stream.read_available(), "Read");
// If there are frames available, let's take them and add them
// to our buffer.
if in_frames > 0 {
let input_samples = stream.read(in_frames)?;
for samp in input_samples {
rx_sender.send(*samp).unwrap();
}
}
// How many frames are available for writing on the output stream?
let out_frames = wait_for_stream(|| stream.write_available(), "Write");
// If there are frames available for writing and we have some
// to write, then write!
if out_frames > 0 {
// If we have more than enough frames for writing, take
// them from the start of the buffer. Otherwise if we
// have less, just take what we can for now.
let write_frames = out_frames;
let n_write_samples = write_frames as usize * CHANNELS as usize;
let mut flag = false;
stream.write(write_frames, |output| {
for i in 0..n_write_samples {
if let Some(samp) = modulator.next() {
output[i] = samp;
} else {
println!("Tx samples finished. Exiting");
flag = true;
break;
}
}
})?;
if flag {
break;
}
}
}
Ok(())
}
pub fn start_audio<'c>(
tx_receiver: Receiver<Complex<f32>>,
config: &'c Config,
) -> Result<(std::thread::JoinHandle<()>, AudioSampleStream<'c>), Error> {
// For the microphone
let (rx_sender, rx_receiver) = std::sync::mpsc::channel::<f32>();
let sample_rate = config.audio.sample_rate;
let config_c = config.clone();
let handle = std::thread::spawn(move || {
// Use the modulator to go from baseband to carrier frequency
let modulator = Modulate::new(tx_receiver.iter(), sample_rate, &config_c);
run(modulator, rx_sender, sample_rate).unwrap();
});
return Ok((
handle,
AudioSampleStream::new(rx_receiver, sample_rate as f32, config),
));
}
| /// Stream of samples from an audio device
pub struct AudioSampleStream<'c> {
channel: Receiver<f32>,
demod: Demodulate<'c>,
}
impl<'c> AudioSampleStream<'c> {
fn new(channel: Receiver<f32>, sample_rate: f32, config: &'c Config) -> Self {
Self {
channel,
demod: Demodulate::new(sample_rate, config),
}
}
}
impl<'c> InputSampleStream for AudioSampleStream<'c> {}
impl<'c> Iterator for AudioSampleStream<'c> {
type Item = Complex<f32>;
fn next(&mut self) -> Option<Complex<f32>> {
loop {
let in_samp = if let Ok(samp) = self.channel.recv() {
samp
} else {
return None;
};
let out = self.demod.push(in_samp);
if out.is_some() {
return out;
}
}
}
}
/// Upconvert signal from baseband to carrier frequency
struct Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
config: &'c Config,
/// Our source of baseband samples
src: T,
/// Number of carrier samples to skip per baseband sample
to_skip: u64,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// If true, input is done and we'll always return None
done: bool,
/// The two samples we are currently in between sending
cur_samps: (Complex<f32>, Complex<f32>),
/// The current sample we are sending
cur_ewma: Complex<f32>,
}
impl<'c, T> Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
fn new(src: T, sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = 1; //to_skip.round() as u64;
Self {
config,
src,
to_skip,
sample_rate,
num_samps: 0,
done: false,
cur_samps: (Complex::zero(), Complex::zero()),
cur_ewma: Complex::zero(),
}
}
}
impl<'c, T> Iterator for Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
type Item = f32;
fn next(&mut self) -> Option<f32> {
if self.done {
return None;
}
// See if we need to update the current sample
if self.num_samps % self.to_skip == 0 {
if let Some(x) = self.src.next() {
self.cur_samps = (self.cur_samps.1, x);
} else {
self.done = true;
return None;
}
}
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.num_samps += 1;
// Low-pass filter
// Frequency-domain sinc
//let f = (self.num_samps % self.to_skip) as f32 / self.to_skip as f32;
//let samp = prev * (1. - f) + cur * f;
// EWMA (equivalent to RC-filter)
// let alpha = 1. / (1. + self.to_skip as f32);
// self.cur_ewma = alpha * self.cur_samps.1 + (1. - alpha) * self.cur_ewma;
// let samp = self.cur_ewma;
let samp = self.cur_samps.1;
Some(e.re * samp.re + e.im * samp.im)
}
}
/// Convert to baseband from carrier frequency
struct Demodulate<'c> {
config: &'c Config,
/// Number of carrier samples to skip per baseband sample
to_skip: usize,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// Average of the sample so far
samp_avg: Complex<f32>,
}
impl<'c> Demodulate<'c> {
fn new(sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round | random_line_split | |
audio.rs | : {:#?}", &input_info);
// Construct the input stream parameters.
let latency = input_info.default_low_input_latency;
let input_params = pa::StreamParameters::<f32>::new(def_input, CHANNELS, INTERLEAVED, latency);
let def_output = pa.default_output_device()?;
let output_info = pa.device_info(def_output)?;
println!("Default output device info: {:#?}", &output_info);
// Construct the output stream parameters.
let latency = output_info.default_low_output_latency;
let output_params =
pa::StreamParameters::<f32>::new(def_output, CHANNELS, INTERLEAVED, latency);
// Check that the stream format is supported.
pa.is_duplex_format_supported(input_params, output_params, sample_rate as f64)?;
// Construct the settings with which we'll open our duplex stream.
let settings =
pa::DuplexStreamSettings::new(input_params, output_params, sample_rate as f64, FRAMES);
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// We'll use this function to wait for read/write availability.
fn wait_for_stream<F>(f: F, name: &str) -> u32
where
F: Fn() -> Result<pa::StreamAvailable, pa::error::Error>,
{
loop {
match f() {
Ok(available) => match available {
pa::StreamAvailable::Frames(frames) => return frames as u32,
pa::StreamAvailable::InputOverflowed => println!("Input stream has overflowed"),
pa::StreamAvailable::OutputUnderflowed => {
println!("Output stream has underflowed")
}
},
Err(err) => panic!(
"An error occurred while waiting for the {} stream: {}",
name, err
),
}
}
};
// Now start the main read/write loop! In this example, we pass
// the input buffer directly to the output buffer, so watch out
// for feedback.
loop {
// How many frames are available on the input stream?
let in_frames = wait_for_stream(|| stream.read_available(), "Read");
// If there are frames available, let's take them and add them
// to our buffer.
if in_frames > 0 {
let input_samples = stream.read(in_frames)?;
for samp in input_samples {
rx_sender.send(*samp).unwrap();
}
}
// How many frames are available for writing on the output stream?
let out_frames = wait_for_stream(|| stream.write_available(), "Write");
// If there are frames available for writing and we have some
// to write, then write!
if out_frames > 0 {
// If we have more than enough frames for writing, take
// them from the start of the buffer. Otherwise if we
// have less, just take what we can for now.
let write_frames = out_frames;
let n_write_samples = write_frames as usize * CHANNELS as usize;
let mut flag = false;
stream.write(write_frames, |output| {
for i in 0..n_write_samples {
if let Some(samp) = modulator.next() {
output[i] = samp;
} else {
println!("Tx samples finished. Exiting");
flag = true;
break;
}
}
})?;
if flag |
}
}
Ok(())
}
pub fn start_audio<'c>(
tx_receiver: Receiver<Complex<f32>>,
config: &'c Config,
) -> Result<(std::thread::JoinHandle<()>, AudioSampleStream<'c>), Error> {
// For the microphone
let (rx_sender, rx_receiver) = std::sync::mpsc::channel::<f32>();
let sample_rate = config.audio.sample_rate;
let config_c = config.clone();
let handle = std::thread::spawn(move || {
// Use the modulator to go from baseband to carrier frequency
let modulator = Modulate::new(tx_receiver.iter(), sample_rate, &config_c);
run(modulator, rx_sender, sample_rate).unwrap();
});
return Ok((
handle,
AudioSampleStream::new(rx_receiver, sample_rate as f32, config),
));
}
/// Stream of samples from an audio device
pub struct AudioSampleStream<'c> {
channel: Receiver<f32>,
demod: Demodulate<'c>,
}
impl<'c> AudioSampleStream<'c> {
fn new(channel: Receiver<f32>, sample_rate: f32, config: &'c Config) -> Self {
Self {
channel,
demod: Demodulate::new(sample_rate, config),
}
}
}
impl<'c> InputSampleStream for AudioSampleStream<'c> {}
impl<'c> Iterator for AudioSampleStream<'c> {
type Item = Complex<f32>;
fn next(&mut self) -> Option<Complex<f32>> {
loop {
let in_samp = if let Ok(samp) = self.channel.recv() {
samp
} else {
return None;
};
let out = self.demod.push(in_samp);
if out.is_some() {
return out;
}
}
}
}
/// Upconvert signal from baseband to carrier frequency
struct Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
config: &'c Config,
/// Our source of baseband samples
src: T,
/// Number of carrier samples to skip per baseband sample
to_skip: u64,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// If true, input is done and we'll always return None
done: bool,
/// The two samples we are currently in between sending
cur_samps: (Complex<f32>, Complex<f32>),
/// The current sample we are sending
cur_ewma: Complex<f32>,
}
impl<'c, T> Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
fn new(src: T, sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = 1; //to_skip.round() as u64;
Self {
config,
src,
to_skip,
sample_rate,
num_samps: 0,
done: false,
cur_samps: (Complex::zero(), Complex::zero()),
cur_ewma: Complex::zero(),
}
}
}
impl<'c, T> Iterator for Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
type Item = f32;
fn next(&mut self) -> Option<f32> {
if self.done {
return None;
}
// See if we need to update the current sample
if self.num_samps % self.to_skip == 0 {
if let Some(x) = self.src.next() {
self.cur_samps = (self.cur_samps.1, x);
} else {
self.done = true;
return None;
}
}
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.num_samps += 1;
// Low-pass filter
// Frequency-domain sinc
//let f = (self.num_samps % self.to_skip) as f32 / self.to_skip as f32;
//let samp = prev * (1. - f) + cur * f;
// EWMA (equivalent to RC-filter)
// let alpha = 1. / (1. + self.to_skip as f32);
// self.cur_ewma = alpha * self.cur_samps.1 + (1. - alpha) * self.cur_ewma;
// let samp = self.cur_ewma;
let samp = self.cur_samps.1;
Some(e.re * samp.re + e.im * samp.im)
}
}
/// Convert to baseband from carrier frequency
struct Demodulate<'c> {
config: &'c Config,
/// Number of carrier samples to skip per baseband sample
to_skip: usize,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// Average of the sample so far
samp_avg: Complex<f32>,
}
impl<'c> Demodulate<'c> {
fn new(sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to | {
break;
} | conditional_block |
audio.rs | : {:#?}", &input_info);
// Construct the input stream parameters.
let latency = input_info.default_low_input_latency;
let input_params = pa::StreamParameters::<f32>::new(def_input, CHANNELS, INTERLEAVED, latency);
let def_output = pa.default_output_device()?;
let output_info = pa.device_info(def_output)?;
println!("Default output device info: {:#?}", &output_info);
// Construct the output stream parameters.
let latency = output_info.default_low_output_latency;
let output_params =
pa::StreamParameters::<f32>::new(def_output, CHANNELS, INTERLEAVED, latency);
// Check that the stream format is supported.
pa.is_duplex_format_supported(input_params, output_params, sample_rate as f64)?;
// Construct the settings with which we'll open our duplex stream.
let settings =
pa::DuplexStreamSettings::new(input_params, output_params, sample_rate as f64, FRAMES);
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// We'll use this function to wait for read/write availability.
fn wait_for_stream<F>(f: F, name: &str) -> u32
where
F: Fn() -> Result<pa::StreamAvailable, pa::error::Error>,
{
loop {
match f() {
Ok(available) => match available {
pa::StreamAvailable::Frames(frames) => return frames as u32,
pa::StreamAvailable::InputOverflowed => println!("Input stream has overflowed"),
pa::StreamAvailable::OutputUnderflowed => {
println!("Output stream has underflowed")
}
},
Err(err) => panic!(
"An error occurred while waiting for the {} stream: {}",
name, err
),
}
}
};
// Now start the main read/write loop! In this example, we pass
// the input buffer directly to the output buffer, so watch out
// for feedback.
loop {
// How many frames are available on the input stream?
let in_frames = wait_for_stream(|| stream.read_available(), "Read");
// If there are frames available, let's take them and add them
// to our buffer.
if in_frames > 0 {
let input_samples = stream.read(in_frames)?;
for samp in input_samples {
rx_sender.send(*samp).unwrap();
}
}
// How many frames are available for writing on the output stream?
let out_frames = wait_for_stream(|| stream.write_available(), "Write");
// If there are frames available for writing and we have some
// to write, then write!
if out_frames > 0 {
// If we have more than enough frames for writing, take
// them from the start of the buffer. Otherwise if we
// have less, just take what we can for now.
let write_frames = out_frames;
let n_write_samples = write_frames as usize * CHANNELS as usize;
let mut flag = false;
stream.write(write_frames, |output| {
for i in 0..n_write_samples {
if let Some(samp) = modulator.next() {
output[i] = samp;
} else {
println!("Tx samples finished. Exiting");
flag = true;
break;
}
}
})?;
if flag {
break;
}
}
}
Ok(())
}
pub fn start_audio<'c>(
tx_receiver: Receiver<Complex<f32>>,
config: &'c Config,
) -> Result<(std::thread::JoinHandle<()>, AudioSampleStream<'c>), Error> {
// For the microphone
let (rx_sender, rx_receiver) = std::sync::mpsc::channel::<f32>();
let sample_rate = config.audio.sample_rate;
let config_c = config.clone();
let handle = std::thread::spawn(move || {
// Use the modulator to go from baseband to carrier frequency
let modulator = Modulate::new(tx_receiver.iter(), sample_rate, &config_c);
run(modulator, rx_sender, sample_rate).unwrap();
});
return Ok((
handle,
AudioSampleStream::new(rx_receiver, sample_rate as f32, config),
));
}
/// Stream of samples from an audio device
pub struct AudioSampleStream<'c> {
channel: Receiver<f32>,
demod: Demodulate<'c>,
}
impl<'c> AudioSampleStream<'c> {
fn new(channel: Receiver<f32>, sample_rate: f32, config: &'c Config) -> Self {
Self {
channel,
demod: Demodulate::new(sample_rate, config),
}
}
}
impl<'c> InputSampleStream for AudioSampleStream<'c> {}
impl<'c> Iterator for AudioSampleStream<'c> {
type Item = Complex<f32>;
fn next(&mut self) -> Option<Complex<f32>> {
loop {
let in_samp = if let Ok(samp) = self.channel.recv() {
samp
} else {
return None;
};
let out = self.demod.push(in_samp);
if out.is_some() {
return out;
}
}
}
}
/// Upconvert signal from baseband to carrier frequency
struct Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
config: &'c Config,
/// Our source of baseband samples
src: T,
/// Number of carrier samples to skip per baseband sample
to_skip: u64,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// If true, input is done and we'll always return None
done: bool,
/// The two samples we are currently in between sending
cur_samps: (Complex<f32>, Complex<f32>),
/// The current sample we are sending
cur_ewma: Complex<f32>,
}
impl<'c, T> Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
fn new(src: T, sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = 1; //to_skip.round() as u64;
Self {
config,
src,
to_skip,
sample_rate,
num_samps: 0,
done: false,
cur_samps: (Complex::zero(), Complex::zero()),
cur_ewma: Complex::zero(),
}
}
}
impl<'c, T> Iterator for Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
type Item = f32;
fn next(&mut self) -> Option<f32> {
if self.done {
return None;
}
// See if we need to update the current sample
if self.num_samps % self.to_skip == 0 {
if let Some(x) = self.src.next() {
self.cur_samps = (self.cur_samps.1, x);
} else {
self.done = true;
return None;
}
}
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.num_samps += 1;
// Low-pass filter
// Frequency-domain sinc
//let f = (self.num_samps % self.to_skip) as f32 / self.to_skip as f32;
//let samp = prev * (1. - f) + cur * f;
// EWMA (equivalent to RC-filter)
// let alpha = 1. / (1. + self.to_skip as f32);
// self.cur_ewma = alpha * self.cur_samps.1 + (1. - alpha) * self.cur_ewma;
// let samp = self.cur_ewma;
let samp = self.cur_samps.1;
Some(e.re * samp.re + e.im * samp.im)
}
}
/// Convert to baseband from carrier frequency
struct | <'c> {
config: &'c Config,
/// Number of carrier samples to skip per baseband sample
to_skip: usize,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// Average of the sample so far
samp_avg: Complex<f32>,
}
impl<'c> Demodulate<'c> {
fn new(sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip | Demodulate | identifier_name |
tests.rs | 02\xf0hi"[..],
Error::Literal { len: 4, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read,
// src is too short to read the full literal.
testerrored!(
err_lit_big2b,
&b"\x02\xf0hi\x00\x00\x00"[..],
Error::Literal {
len: 105, // because 105 == 'h' as u8 + 1
src_len: 4,
dst_len: 2,
}
);
// A copy 1 operation that stops at the tag byte. This fails because there's
// no byte to read for the copy offset.
testerrored!(
err_copy1,
&b"\x02\x00a\x01"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A copy 2 operation that stops at the tag byte and another copy 2 operation
// that stops after the first byte in the offset.
testerrored!(
err_copy2a,
&b"\x11\x00a\x3e"[..],
Error::CopyRead { len: 2, src_len: 0 }
);
testerrored!(
err_copy2b,
&b"\x11\x00a\x3e\x01"[..],
Error::CopyRead { len: 2, src_len: 1 }
);
// Same as copy 2, but for copy 4.
testerrored!(
err_copy3a,
&b"\x11\x00a\x3f"[..],
Error::CopyRead { len: 4, src_len: 0 }
);
testerrored!(
err_copy3b,
&b"\x11\x00a\x3f\x00"[..],
Error::CopyRead { len: 4, src_len: 1 }
);
testerrored!(
err_copy3c,
&b"\x11\x00a\x3f\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 2 }
);
testerrored!(
err_copy3d,
&b"\x11\x00a\x3f\x00\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 3 }
);
// A copy operation whose offset is zero.
testerrored!(
err_copy_offset_zero,
&b"\x11\x00a\x01\x00"[..],
Error::Offset { offset: 0, dst_pos: 1 }
);
// A copy operation whose offset is too big.
testerrored!(
err_copy_offset_big,
&b"\x11\x00a\x01\xFF"[..],
Error::Offset { offset: 255, dst_pos: 1 }
);
// A copy operation whose length is too big.
testerrored!(
err_copy_len_big,
&b"\x05\x00a\x1d\x01"[..],
Error::CopyWrite { len: 11, dst_len: 4 }
);
// Selected random inputs pulled from quickcheck failure witnesses.
testtrip!(
random1,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 0, 0, 6, 0, 0, 3, 1, 0,
0, 0, 7, 0, 0, 1, 3, 0, 0, 0, 8, 0, 0, 2, 3, 0, 0, 0, 9, 0, 0, 1, 4,
0, 0, 1, 0, 0, 3, 0, 0, 1, 0, 1, 0, 0, 0, 10, 0, 0, 0, 0, 2, 4, 0, 0,
2, 0, 0, 3, 0, 1, 0, 0, 1, 5, 0, 0, 6, 0, 0, 0, 0, 11, 0, 0, 1, 6, 0,
0, 1, 7, 0, 0, 0, 12, 0, 0, 3, 2, 0, 0, 0, 13, 0, 0, 2, 5, 0, 0, 0, 3,
3, 0, 0, 0, 1, 8, 0, 0, 1, 0, 1, 0, 0, 0, 4, 1, 0, 0, 0, 0, 14, 0, 0,
0, 1, 9, 0, 0, 0, 1, 10, 0, 0, 0, 0, 1, 11, 0, 0, 0, 1, 0, 2, 0, 0, 0,
1, 1, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 2, 6, 0,
0, 0, 0, 0, 1, 12, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
);
testtrip!(
random2,
&[
10, 2, 14, 13, 0, 8, 2, 10, 2, 14, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0,
]
);
testtrip!(
random3,
&[0, 0, 0, 4, 1, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,]
);
testtrip!(
random4,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 4, 0, 0, 2, 1, 0, 0, 0, 4, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
);
// QuickCheck properties for testing that random data roundtrips.
// These properties tend to produce the inputs for the "random" tests above.
#[test]
fn qc_roundtrip() {
fn p(bytes: Vec<u8>) -> bool {
depress(&press(&bytes)) == bytes
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn qc_roundtrip_stream() {
fn p(bytes: Vec<u8>) -> TestResult {
if bytes.is_empty() | {
return TestResult::discard();
} | conditional_block | |
tests.rs | Error::CopyRead { len: 4, src_len: 2 }
);
testerrored!(
err_copy3d,
&b"\x11\x00a\x3f\x00\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 3 }
);
// A copy operation whose offset is zero.
testerrored!(
err_copy_offset_zero,
&b"\x11\x00a\x01\x00"[..],
Error::Offset { offset: 0, dst_pos: 1 }
);
// A copy operation whose offset is too big.
testerrored!(
err_copy_offset_big,
&b"\x11\x00a\x01\xFF"[..],
Error::Offset { offset: 255, dst_pos: 1 }
);
// A copy operation whose length is too big.
testerrored!(
err_copy_len_big,
&b"\x05\x00a\x1d\x01"[..],
Error::CopyWrite { len: 11, dst_len: 4 }
);
// Selected random inputs pulled from quickcheck failure witnesses.
testtrip!(
random1,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 0, 0, 6, 0, 0, 3, 1, 0,
0, 0, 7, 0, 0, 1, 3, 0, 0, 0, 8, 0, 0, 2, 3, 0, 0, 0, 9, 0, 0, 1, 4,
0, 0, 1, 0, 0, 3, 0, 0, 1, 0, 1, 0, 0, 0, 10, 0, 0, 0, 0, 2, 4, 0, 0,
2, 0, 0, 3, 0, 1, 0, 0, 1, 5, 0, 0, 6, 0, 0, 0, 0, 11, 0, 0, 1, 6, 0,
0, 1, 7, 0, 0, 0, 12, 0, 0, 3, 2, 0, 0, 0, 13, 0, 0, 2, 5, 0, 0, 0, 3,
3, 0, 0, 0, 1, 8, 0, 0, 1, 0, 1, 0, 0, 0, 4, 1, 0, 0, 0, 0, 14, 0, 0,
0, 1, 9, 0, 0, 0, 1, 10, 0, 0, 0, 0, 1, 11, 0, 0, 0, 1, 0, 2, 0, 0, 0,
1, 1, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 2, 6, 0,
0, 0, 0, 0, 1, 12, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
);
testtrip!(
random2,
&[
10, 2, 14, 13, 0, 8, 2, 10, 2, 14, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0,
]
);
testtrip!(
random3,
&[0, 0, 0, 4, 1, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,]
);
testtrip!(
random4,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 4, 0, 0, 2, 1, 0, 0, 0, 4, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
);
// QuickCheck properties for testing that random data roundtrips.
// These properties tend to produce the inputs for the "random" tests above.
#[test]
fn qc_roundtrip() {
fn p(bytes: Vec<u8>) -> bool {
depress(&press(&bytes)) == bytes
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn qc_roundtrip_stream() {
fn p(bytes: Vec<u8>) -> TestResult {
if bytes.is_empty() {
return TestResult::discard();
}
TestResult::from_bool(
read_frame_depress(&write_frame_press(&bytes)) == bytes,
)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn test_short_input() {
// Regression test for https://github.com/BurntSushi/rust-snappy/issues/42
use snap::read;
use std::io::Read;
let err =
read::FrameDecoder::new(&b"123"[..]).read_to_end(&mut Vec::new());
assert_eq!(err.unwrap_err().kind(), std::io::ErrorKind::UnexpectedEof);
}
#[test]
#[cfg(feature = "cpp")]
fn qc_cmpcpp() {
fn p(bytes: Vec<u8>) -> bool {
press(&bytes) == press_cpp(&bytes)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(10_000)
.quickcheck(p as fn(_) -> _);
}
// Regression tests.
// See: https://github.com/BurntSushi/rust-snappy/issues/3
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow1,
&b"\x11\x00\x00\xfc\xfe\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64, src_len: 0, dst_len: 16 }
);
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow2,
&b"\x11\x00\x00\xfc\xff\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64 + 1, src_len: 0, dst_len: 16 }
);
// Helper functions.
fn press(bytes: &[u8]) -> Vec<u8> {
Encoder::new().compress_vec(bytes).unwrap()
}
fn | depress | identifier_name | |
tests.rs | _bytes!("../data/alice29.txt"));
testtrip!(data_txt2, include_bytes!("../data/asyoulik.txt"));
testtrip!(data_txt3, include_bytes!("../data/lcet10.txt"));
testtrip!(data_txt4, include_bytes!("../data/plrabn12.txt"));
testtrip!(data_pb, include_bytes!("../data/geo.protodata"));
testtrip!(data_gaviota, include_bytes!("../data/kppkn.gtb"));
testtrip!(data_golden, include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt"));
// Do it again, with the Snappy frame format.
// Roundtrip the golden data, starting with the compressed bytes.
#[test]
fn data_golden_rev() |
// Miscellaneous tests.
#[test]
fn small_copy() {
use std::iter::repeat;
for i in 0..32 {
let inner: String = repeat('b').take(i).collect();
roundtrip!(format!("aaaa{}aaaabbbb", inner).into_bytes());
}
}
#[test]
fn small_regular() {
let mut i = 1;
while i < 20_000 {
let mut buf = vec![0; i];
for (j, x) in buf.iter_mut().enumerate() {
*x = (j % 10) as u8 + b'a';
}
roundtrip!(buf);
i += 23;
}
}
// Test that triggered an out of bounds write.
#[test]
fn decompress_copy_close_to_end_1() {
let buf = [
27,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010110_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26,
];
assert_eq!(decompressed, &*depress(&buf));
}
#[test]
fn decompress_copy_close_to_end_2() {
let buf = [
28,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010111_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27,
];
assert_eq!(decompressed, &*depress(&buf));
}
// The `read::FrameEncoder` code uses different code paths depending on buffer
// size, so let's test both. Also, very small buffers are a good stress test.
#[test]
fn read_frame_encoder_big_and_little_buffers() {
use snap::read;
use std::io::{BufReader, Read};
let bytes = &include_bytes!("../data/html")[..];
let mut big =
BufReader::with_capacity(1_000_000, read::FrameEncoder::new(bytes));
let mut big_out = vec![];
big.read_to_end(&mut big_out).unwrap();
// 5 bytes is small enough to break up headers, etc.
let mut little =
BufReader::with_capacity(5, read::FrameEncoder::new(bytes));
let mut little_out = vec![];
little.read_to_end(&mut little_out).unwrap();
assert_eq!(big_out, little_out);
}
// Tests decompression on malformed data.
// An empty buffer.
testerrored!(err_empty, &b""[..], Error::Empty);
// Decompress fewer bytes than the header reports.
testerrored!(
err_header_mismatch,
&b"\x05\x00a"[..],
Error::HeaderMismatch { expected_len: 5, got_len: 1 }
);
// An invalid varint (final byte has continuation bit set).
testerrored!(err_varint1, &b"\xFF"[..], Error::Header, true);
// A varint that overflows u64.
testerrored!(
err_varint2,
&b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00"[..],
Error::Header,
true
);
// A varint that fits in u64 but overflows u32.
testerrored!(
err_varint3,
&b"\x80\x80\x80\x80\x10"[..],
Error::TooBig { given: 4294967296, max: 4294967295 },
true
);
// A literal whose length is too small.
// Since the literal length is 1, 'h' is read as a literal and 'i' is
// interpreted as a copy 1 operation missing its offset byte.
testerrored!(
err_lit,
&b"\x02\x00hi"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A literal whose length is too big.
testerrored!(
err_lit_big1,
&b"\x02\xechi"[..],
Error::Literal { len: 60, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read, and
// src is too short to read that byte.
testerrored!(
err_lit_big2a,
&b"\x02\xf0hi"[..],
Error::Literal { len: 4, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read,
// src is too short to read the full literal.
testerrored!(
err_lit_big2b,
&b"\x02\xf0hi\x00\x00\x00"[..],
Error::Literal {
len: 105, // because 105 == 'h' as u8 + 1
src_len: 4,
dst_len: 2,
}
);
// A copy 1 operation that stops at the tag byte. This fails because there's
// no byte to read for the copy offset.
testerrored!(
err_copy1,
&b"\x02\x00a\x01"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A copy 2 operation that stops at the tag byte and another copy 2 operation
// that stops after the first byte in the offset.
testerrored!(
err_copy2a,
&b"\x11\x00a\x3e"[..],
Error::CopyRead { len: 2, src_len: 0 }
);
testerrored!(
err_copy2b,
&b"\x11\x00a\x3e\x01"[..],
Error::CopyRead { len: 2, src_len: 1 }
);
// Same as copy 2, but for copy 4.
testerrored!(
err_copy3a,
&b"\x11\x00a\x3f"[..],
Error::Copy | {
let data = include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt.rawsnappy");
let data = &data[..];
assert_eq!(data, &*press(&depress(data)));
} | identifier_body |
tests.rs | };
match Decoder::new().decompress(d, &mut buf) {
Err(ref err) if err == &$err => {}
Err(ref err) => panic!(
"expected decompression to fail with {:?}, \
but got {:?}",
$err, err
),
Ok(n) => {
panic!(
"\nexpected decompression to fail, but did not!
original (len == {:?})
----------------------
{:?}
decompressed (len == {:?})
--------------------------
{:?}
",
d.len(),
d,
n,
buf
);
}
}
}};
}
// testtrip is a macro that defines a test that compresses the input, then
// decompresses the result and compares it with the original input. If they are
// not equal, then the test fails. This test is performed both on the raw
// Snappy format and the framed Snappy format.
//
// If tests are compiled with the cpp feature, then this also tests that the
// C++ library compresses to the same bytes that the Rust library does.
macro_rules! testtrip {
($name:ident, $data:expr) => {
mod $name {
#[test]
fn roundtrip_raw() {
use super::{depress, press};
roundtrip!($data);
}
#[test]
fn roundtrip_frame() {
use super::{read_frame_depress, write_frame_press};
let d = &$data[..];
assert_eq!(d, &*read_frame_depress(&write_frame_press(d)));
}
#[test]
fn read_and_write_frame_encoder_match() {
use super::{read_frame_press, write_frame_press};
let d = &$data[..];
assert_eq!(read_frame_press(d), write_frame_press(d));
}
#[test]
#[cfg(feature = "cpp")]
fn cmpcpp() {
use super::{press, press_cpp};
let data = &$data[..];
let rust = press(data);
let cpp = press_cpp(data);
if rust == cpp {
return;
}
panic!(
"\ncompression results are not equal!
original (len == {:?})
----------------------
{:?}
rust (len == {:?})
------------------
{:?}
cpp (len == {:?})
-----------------
{:?}
",
data.len(),
data,
rust.len(),
rust,
cpp.len(),
cpp
);
}
}
};
}
// testcorrupt is a macro that defines a test that decompresses the input,
// and if the result is anything other than the error given, the test fails.
macro_rules! testerrored {
($name:ident, $data:expr, $err:expr) => {
testerrored!($name, $data, $err, false);
};
($name:ident, $data:expr, $err:expr, $bad_header:expr) => {
#[test]
fn $name() {
errored!($data, $err, $bad_header);
}
};
}
// Simple test cases.
testtrip!(empty, &[]);
testtrip!(one_zero, &[0]);
// Roundtrip all of the benchmark data.
testtrip!(data_html, include_bytes!("../data/html"));
testtrip!(data_urls, include_bytes!("../data/urls.10K"));
testtrip!(data_jpg, include_bytes!("../data/fireworks.jpeg"));
testtrip!(data_pdf, include_bytes!("../data/paper-100k.pdf"));
testtrip!(data_html4, include_bytes!("../data/html_x_4"));
testtrip!(data_txt1, include_bytes!("../data/alice29.txt"));
testtrip!(data_txt2, include_bytes!("../data/asyoulik.txt"));
testtrip!(data_txt3, include_bytes!("../data/lcet10.txt"));
testtrip!(data_txt4, include_bytes!("../data/plrabn12.txt"));
testtrip!(data_pb, include_bytes!("../data/geo.protodata"));
testtrip!(data_gaviota, include_bytes!("../data/kppkn.gtb"));
testtrip!(data_golden, include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt"));
// Do it again, with the Snappy frame format.
// Roundtrip the golden data, starting with the compressed bytes.
#[test]
fn data_golden_rev() {
let data = include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt.rawsnappy");
let data = &data[..];
assert_eq!(data, &*press(&depress(data)));
}
// Miscellaneous tests.
#[test]
fn small_copy() {
use std::iter::repeat;
for i in 0..32 {
let inner: String = repeat('b').take(i).collect();
roundtrip!(format!("aaaa{}aaaabbbb", inner).into_bytes());
}
}
#[test]
fn small_regular() {
let mut i = 1;
while i < 20_000 {
let mut buf = vec![0; i];
for (j, x) in buf.iter_mut().enumerate() {
*x = (j % 10) as u8 + b'a';
}
roundtrip!(buf);
i += 23;
}
}
// Test that triggered an out of bounds write.
#[test]
fn decompress_copy_close_to_end_1() {
let buf = [
27,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010110_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26,
];
assert_eq!(decompressed, &*depress(&buf));
}
#[test]
fn decompress_copy_close_to_end_2() {
let buf = [
28,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010111_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27,
];
assert_eq!(decompressed, &*depress(&buf));
}
// The `read::FrameEncoder` code uses different code paths depending on buffer
// size, so let's test both. Also, very small buffers are a good stress test.
#[test]
fn read_frame_encoder_big_and_little_buffers() {
use snap::read;
use std::io::{BufReader, Read};
let bytes = &include_bytes!("../data/html")[..];
let mut big =
BufReader::with_capacity(1_000_000, read::FrameEncoder::new(bytes));
let mut big_out = vec![];
big.read_to_end(&mut big_out).unwrap();
// 5 bytes is small enough to break up headers, etc.
let mut little =
BufReader::with_capacity(5, read::FrameEncoder::new(bytes));
let mut little_out = vec![];
little.read_to_end(&mut little_out).unwrap();
assert_eq!(big_out, little_out);
}
// Tests decompression on malformed data.
// An empty buffer.
testerrored!(err_empty, &b""[..], Error::Empty);
// Decompress fewer bytes than the header reports.
| vec![0; decompress_len(d).unwrap()] | random_line_split | |
learn.rs | ReceiverStream;
use optic_diff_engine::streams;
use optic_diff_engine::{analyze_undocumented_bodies, EndpointCommand, SpecCommand};
use optic_diff_engine::{
BodyAnalysisLocation, HttpInteraction, SpecChunkEvent, SpecEvent, SpecIdGenerator,
SpecProjection, TrailObservationsResult,
};
pub const SUBCOMMAND_NAME: &'static str = "learn";
pub fn create_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Learns about possible changes to the spec based on interactions or diffs")
.arg(
Arg::with_name("undocumented-bodies")
.long("undocumented-bodies")
.takes_value(false)
.help("Learn shapes of undocumented bodies from interactions piped to stdin"),
)
.arg(
Arg::with_name("shape-diffs")
.long("shape-diffs")
.takes_value(false)
.help("Learn updated shapes from shape diffs piped to stdin"),
)
.group(
ArgGroup::with_name("subject")
.args(&["undocumented-bodies", "shape-diffs"])
.multiple(false)
.required(true),
)
}
pub async fn main<'a>(
command_matches: &'a ArgMatches<'a>,
spec_chunks: Vec<SpecChunkEvent>,
input_queue_size: usize,
) {
let spec_events = events_from_chunks(spec_chunks).await;
if command_matches.is_present("undocumented-bodies") {
let stdin = stdin();
let interaction_lines = streams::http_interaction::json_lines(stdin);
let sink = stdout();
learn_undocumented_bodies(spec_events, input_queue_size, interaction_lines, sink).await;
} else if command_matches.is_present("shape-diffs") {
todo!("shape diffs learning is yet to be implemented");
} else {
unreachable!("subject is required");
}
}
async fn learn_undocumented_bodies<S: 'static + AsyncWrite + Unpin + Send>(
spec_events: Vec<SpecEvent>,
input_queue_size: usize,
interaction_lines: impl Stream<Item = Result<String, std::io::Error>>,
sink: S,
) {
let spec_projection = Arc::new(SpecProjection::from(spec_events));
let (analysis_sender, analysis_receiver) = mpsc::channel(32);
let analyzing_bodies = async move {
let analyze_results = interaction_lines
.map(Ok)
.try_for_each_concurrent(input_queue_size, |interaction_json_result| {
let projection = spec_projection.clone();
let analysis_sender = analysis_sender.clone();
let analyze_task = tokio::spawn(async move {
let analyze_comp = tokio::task::spawn_blocking(move || {
let interaction_json =
interaction_json_result.expect("can rad interaction json line form stdin");
let interaction: HttpInteraction =
serde_json::from_str(&interaction_json).expect("could not parse interaction json");
analyze_undocumented_bodies(&projection, interaction)
});
match analyze_comp.await {
Ok(results) => {
for result in results {
analysis_sender
.send(result)
.await
.expect("could not send analysis result to aggregation channel")
}
}
Err(err) => {
// ignore a single interaction not being able to deserialize
eprintln!("interaction ignored: {}", err);
}
}
});
analyze_task
})
.await;
analyze_results
};
let aggregating_results = tokio::spawn(async move {
let mut analysiss = ReceiverStream::new(analysis_receiver);
let mut id_generator = IdGenerator::default();
let mut observations_by_body_location = HashMap::new();
while let Some(analysis) = analysiss.next().await {
let existing_observations = observations_by_body_location
.entry(analysis.body_location)
.or_insert_with(|| TrailObservationsResult::default());
existing_observations.union(analysis.trail_observations);
}
let mut endpoints_by_endpoint = HashMap::new();
for (body_location, observations) in observations_by_body_location {
let (root_shape_id, body_commands) = observations.into_commands(&mut id_generator);
let endpoint_body = EndpointBody::new(&body_location, root_shape_id, body_commands);
let (path_id, method) = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => (path_id, method),
BodyAnalysisLocation::Response {
path_id, method, ..
} => (path_id, method),
};
let endpoint_bodies = endpoints_by_endpoint
.entry((path_id, method))
.or_insert_with_key(|(path_id, method)| {
EndpointBodies::new(path_id.clone(), method.clone())
});
endpoint_bodies.push(endpoint_body);
}
streams::write_to_json_lines(sink, endpoints_by_endpoint.values())
.await
.expect("could not write endpoint bodies to stdout");
});
try_join!(analyzing_bodies, aggregating_results).expect("essential worker task panicked");
}
#[derive(Debug, Default)]
struct IdGenerator;
impl SpecIdGenerator for IdGenerator {
fn generate_id(&mut self, prefix: &str) -> String {
// NanoID @ 10 chars:
// - URL-safe,
// - 17 years for a 1% chance of at least one global collision assuming
// writing 1000 ids per hour (https://zelark.github.io/nano-id-cc/)
format!("{}{}", prefix, nanoid!(10))
}
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodies {
path_id: String,
method: String,
requests: Vec<EndpointRequestBody>,
responses: Vec<EndpointResponseBody>,
}
impl EndpointBodies {
pub fn new(path_id: String, method: String) -> Self {
Self {
path_id,
method,
requests: vec![],
responses: vec![],
}
}
pub fn push(&mut self, endpoint: EndpointBody) {
match endpoint {
EndpointBody::Request(endpoint_request) => {
self.requests.push(endpoint_request);
}
EndpointBody::Response(endpoint_response) => {
self.responses.push(endpoint_response);
}
}
}
}
#[derive(Debug)]
enum EndpointBody {
Request(EndpointRequestBody),
Response(EndpointResponseBody),
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointRequestBody {
commands: Vec<SpecCommand>,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointResponseBody {
commands: Vec<SpecCommand>,
status_code: u16,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodyDescriptor {
content_type: String,
root_shape_id: String,
}
impl EndpointBody {
fn new(
body_location: &BodyAnalysisLocation,
root_shape_id: Option<String>,
body_commands: impl IntoIterator<Item = SpecCommand>,
) -> Self {
let body_descriptor = match root_shape_id {
Some(root_shape_id) => Some(EndpointBodyDescriptor {
content_type: body_location
.content_type()
.expect("root shape id implies a content type to be present")
.clone(),
root_shape_id,
}),
None => None,
};
let mut body = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => EndpointBody::Request(EndpointRequestBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
}),
BodyAnalysisLocation::Response {
status_code,
path_id,
method,
..
} => EndpointBody::Response(EndpointResponseBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
status_code: *status_code,
}),
};
body.append_endpoint_commands();
body
}
fn append_endpoint_commands(&mut self) {
let mut ids = IdGenerator::default();
match self {
EndpointBody::Request(request_body) => {
let request_id = ids.request();
request_body
.commands
.push(SpecCommand::from(EndpointCommand::add_request(
request_id.clone(),
request_body.path_id.clone(),
request_body.method.clone(),
)));
if let Some(body_descriptor) = &request_body.body_descriptor |
}
EndpointBody::Response(response_body) => {
let response_id = ids.response();
response_body.commands.push(SpecCommand::from(
EndpointCommand::add_response_by_path_and_method(
response_id.clone(),
response_body.path_id.clone(),
response_body.method.clone(),
response_body.status_code.clone(),
),
));
if let Some(body_descriptor | {
request_body
.commands
.push(SpecCommand::from(EndpointCommand::set_request_body_shape(
request_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
} | conditional_block |
learn.rs | ReceiverStream;
use optic_diff_engine::streams;
use optic_diff_engine::{analyze_undocumented_bodies, EndpointCommand, SpecCommand};
use optic_diff_engine::{
BodyAnalysisLocation, HttpInteraction, SpecChunkEvent, SpecEvent, SpecIdGenerator,
SpecProjection, TrailObservationsResult,
};
pub const SUBCOMMAND_NAME: &'static str = "learn";
pub fn create_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Learns about possible changes to the spec based on interactions or diffs")
.arg(
Arg::with_name("undocumented-bodies")
.long("undocumented-bodies")
.takes_value(false)
.help("Learn shapes of undocumented bodies from interactions piped to stdin"),
)
.arg(
Arg::with_name("shape-diffs")
.long("shape-diffs")
.takes_value(false)
.help("Learn updated shapes from shape diffs piped to stdin"),
)
.group(
ArgGroup::with_name("subject")
.args(&["undocumented-bodies", "shape-diffs"])
.multiple(false)
.required(true),
)
}
pub async fn main<'a>(
command_matches: &'a ArgMatches<'a>,
spec_chunks: Vec<SpecChunkEvent>,
input_queue_size: usize,
) {
let spec_events = events_from_chunks(spec_chunks).await;
if command_matches.is_present("undocumented-bodies") {
let stdin = stdin();
let interaction_lines = streams::http_interaction::json_lines(stdin);
let sink = stdout();
learn_undocumented_bodies(spec_events, input_queue_size, interaction_lines, sink).await;
} else if command_matches.is_present("shape-diffs") {
todo!("shape diffs learning is yet to be implemented");
} else {
unreachable!("subject is required");
}
}
async fn learn_undocumented_bodies<S: 'static + AsyncWrite + Unpin + Send>(
spec_events: Vec<SpecEvent>,
input_queue_size: usize,
interaction_lines: impl Stream<Item = Result<String, std::io::Error>>,
sink: S,
) {
let spec_projection = Arc::new(SpecProjection::from(spec_events));
let (analysis_sender, analysis_receiver) = mpsc::channel(32);
let analyzing_bodies = async move {
let analyze_results = interaction_lines
.map(Ok)
.try_for_each_concurrent(input_queue_size, |interaction_json_result| {
let projection = spec_projection.clone();
let analysis_sender = analysis_sender.clone();
let analyze_task = tokio::spawn(async move {
let analyze_comp = tokio::task::spawn_blocking(move || {
let interaction_json =
interaction_json_result.expect("can rad interaction json line form stdin");
let interaction: HttpInteraction =
serde_json::from_str(&interaction_json).expect("could not parse interaction json");
analyze_undocumented_bodies(&projection, interaction)
});
match analyze_comp.await {
Ok(results) => {
for result in results {
analysis_sender
.send(result)
.await
.expect("could not send analysis result to aggregation channel")
}
}
Err(err) => {
// ignore a single interaction not being able to deserialize
eprintln!("interaction ignored: {}", err);
}
}
});
analyze_task
})
.await;
analyze_results
};
let aggregating_results = tokio::spawn(async move {
let mut analysiss = ReceiverStream::new(analysis_receiver);
let mut id_generator = IdGenerator::default();
let mut observations_by_body_location = HashMap::new();
while let Some(analysis) = analysiss.next().await {
let existing_observations = observations_by_body_location
.entry(analysis.body_location)
.or_insert_with(|| TrailObservationsResult::default());
existing_observations.union(analysis.trail_observations);
}
let mut endpoints_by_endpoint = HashMap::new();
for (body_location, observations) in observations_by_body_location {
let (root_shape_id, body_commands) = observations.into_commands(&mut id_generator);
let endpoint_body = EndpointBody::new(&body_location, root_shape_id, body_commands);
let (path_id, method) = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => (path_id, method),
BodyAnalysisLocation::Response {
path_id, method, ..
} => (path_id, method),
};
let endpoint_bodies = endpoints_by_endpoint
.entry((path_id, method))
.or_insert_with_key(|(path_id, method)| {
EndpointBodies::new(path_id.clone(), method.clone())
});
endpoint_bodies.push(endpoint_body);
}
streams::write_to_json_lines(sink, endpoints_by_endpoint.values())
.await
.expect("could not write endpoint bodies to stdout");
});
try_join!(analyzing_bodies, aggregating_results).expect("essential worker task panicked");
}
#[derive(Debug, Default)]
struct IdGenerator;
impl SpecIdGenerator for IdGenerator {
fn generate_id(&mut self, prefix: &str) -> String {
// NanoID @ 10 chars:
// - URL-safe,
// - 17 years for a 1% chance of at least one global collision assuming
// writing 1000 ids per hour (https://zelark.github.io/nano-id-cc/)
format!("{}{}", prefix, nanoid!(10))
}
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodies {
path_id: String,
method: String,
requests: Vec<EndpointRequestBody>,
responses: Vec<EndpointResponseBody>,
}
impl EndpointBodies {
pub fn new(path_id: String, method: String) -> Self {
Self {
path_id,
method,
requests: vec![],
responses: vec![],
}
}
pub fn push(&mut self, endpoint: EndpointBody) {
match endpoint {
EndpointBody::Request(endpoint_request) => {
self.requests.push(endpoint_request);
}
EndpointBody::Response(endpoint_response) => {
self.responses.push(endpoint_response);
}
}
}
}
#[derive(Debug)]
enum EndpointBody {
Request(EndpointRequestBody),
Response(EndpointResponseBody),
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointRequestBody {
commands: Vec<SpecCommand>,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointResponseBody {
commands: Vec<SpecCommand>,
status_code: u16,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodyDescriptor {
content_type: String,
root_shape_id: String,
}
impl EndpointBody {
fn new(
body_location: &BodyAnalysisLocation,
root_shape_id: Option<String>,
body_commands: impl IntoIterator<Item = SpecCommand>,
) -> Self {
let body_descriptor = match root_shape_id {
Some(root_shape_id) => Some(EndpointBodyDescriptor {
content_type: body_location
.content_type()
.expect("root shape id implies a content type to be present")
.clone(),
root_shape_id,
}),
None => None,
};
let mut body = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => EndpointBody::Request(EndpointRequestBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
}),
BodyAnalysisLocation::Response {
status_code,
path_id,
method,
..
} => EndpointBody::Response(EndpointResponseBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
status_code: *status_code,
}),
};
body.append_endpoint_commands();
body
}
fn append_endpoint_commands(&mut self) {
let mut ids = IdGenerator::default();
match self {
EndpointBody::Request(request_body) => {
let request_id = ids.request();
request_body
.commands
.push(SpecCommand::from(EndpointCommand::add_request(
request_id.clone(),
request_body.path_id.clone(),
request_body.method.clone(),
)));
if let Some(body_descriptor) = &request_body.body_descriptor { | body_descriptor.content_type.clone(),
false,
)));
}
}
EndpointBody::Response(response_body) => {
let response_id = ids.response();
response_body.commands.push(SpecCommand::from(
EndpointCommand::add_response_by_path_and_method(
response_id.clone(),
response_body.path_id.clone(),
response_body.method.clone(),
response_body.status_code.clone(),
),
));
if let Some(body_descriptor) = | request_body
.commands
.push(SpecCommand::from(EndpointCommand::set_request_body_shape(
request_id,
body_descriptor.root_shape_id.clone(), | random_line_split |
learn.rs | ReceiverStream;
use optic_diff_engine::streams;
use optic_diff_engine::{analyze_undocumented_bodies, EndpointCommand, SpecCommand};
use optic_diff_engine::{
BodyAnalysisLocation, HttpInteraction, SpecChunkEvent, SpecEvent, SpecIdGenerator,
SpecProjection, TrailObservationsResult,
};
pub const SUBCOMMAND_NAME: &'static str = "learn";
pub fn create_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Learns about possible changes to the spec based on interactions or diffs")
.arg(
Arg::with_name("undocumented-bodies")
.long("undocumented-bodies")
.takes_value(false)
.help("Learn shapes of undocumented bodies from interactions piped to stdin"),
)
.arg(
Arg::with_name("shape-diffs")
.long("shape-diffs")
.takes_value(false)
.help("Learn updated shapes from shape diffs piped to stdin"),
)
.group(
ArgGroup::with_name("subject")
.args(&["undocumented-bodies", "shape-diffs"])
.multiple(false)
.required(true),
)
}
pub async fn main<'a>(
command_matches: &'a ArgMatches<'a>,
spec_chunks: Vec<SpecChunkEvent>,
input_queue_size: usize,
) {
let spec_events = events_from_chunks(spec_chunks).await;
if command_matches.is_present("undocumented-bodies") {
let stdin = stdin();
let interaction_lines = streams::http_interaction::json_lines(stdin);
let sink = stdout();
learn_undocumented_bodies(spec_events, input_queue_size, interaction_lines, sink).await;
} else if command_matches.is_present("shape-diffs") {
todo!("shape diffs learning is yet to be implemented");
} else {
unreachable!("subject is required");
}
}
async fn learn_undocumented_bodies<S: 'static + AsyncWrite + Unpin + Send>(
spec_events: Vec<SpecEvent>,
input_queue_size: usize,
interaction_lines: impl Stream<Item = Result<String, std::io::Error>>,
sink: S,
) {
let spec_projection = Arc::new(SpecProjection::from(spec_events));
let (analysis_sender, analysis_receiver) = mpsc::channel(32);
let analyzing_bodies = async move {
let analyze_results = interaction_lines
.map(Ok)
.try_for_each_concurrent(input_queue_size, |interaction_json_result| {
let projection = spec_projection.clone();
let analysis_sender = analysis_sender.clone();
let analyze_task = tokio::spawn(async move {
let analyze_comp = tokio::task::spawn_blocking(move || {
let interaction_json =
interaction_json_result.expect("can rad interaction json line form stdin");
let interaction: HttpInteraction =
serde_json::from_str(&interaction_json).expect("could not parse interaction json");
analyze_undocumented_bodies(&projection, interaction)
});
match analyze_comp.await {
Ok(results) => {
for result in results {
analysis_sender
.send(result)
.await
.expect("could not send analysis result to aggregation channel")
}
}
Err(err) => {
// ignore a single interaction not being able to deserialize
eprintln!("interaction ignored: {}", err);
}
}
});
analyze_task
})
.await;
analyze_results
};
let aggregating_results = tokio::spawn(async move {
let mut analysiss = ReceiverStream::new(analysis_receiver);
let mut id_generator = IdGenerator::default();
let mut observations_by_body_location = HashMap::new();
while let Some(analysis) = analysiss.next().await {
let existing_observations = observations_by_body_location
.entry(analysis.body_location)
.or_insert_with(|| TrailObservationsResult::default());
existing_observations.union(analysis.trail_observations);
}
let mut endpoints_by_endpoint = HashMap::new();
for (body_location, observations) in observations_by_body_location {
let (root_shape_id, body_commands) = observations.into_commands(&mut id_generator);
let endpoint_body = EndpointBody::new(&body_location, root_shape_id, body_commands);
let (path_id, method) = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => (path_id, method),
BodyAnalysisLocation::Response {
path_id, method, ..
} => (path_id, method),
};
let endpoint_bodies = endpoints_by_endpoint
.entry((path_id, method))
.or_insert_with_key(|(path_id, method)| {
EndpointBodies::new(path_id.clone(), method.clone())
});
endpoint_bodies.push(endpoint_body);
}
streams::write_to_json_lines(sink, endpoints_by_endpoint.values())
.await
.expect("could not write endpoint bodies to stdout");
});
try_join!(analyzing_bodies, aggregating_results).expect("essential worker task panicked");
}
#[derive(Debug, Default)]
struct IdGenerator;
impl SpecIdGenerator for IdGenerator {
fn generate_id(&mut self, prefix: &str) -> String {
// NanoID @ 10 chars:
// - URL-safe,
// - 17 years for a 1% chance of at least one global collision assuming
// writing 1000 ids per hour (https://zelark.github.io/nano-id-cc/)
format!("{}{}", prefix, nanoid!(10))
}
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodies {
path_id: String,
method: String,
requests: Vec<EndpointRequestBody>,
responses: Vec<EndpointResponseBody>,
}
impl EndpointBodies {
pub fn new(path_id: String, method: String) -> Self {
Self {
path_id,
method,
requests: vec![],
responses: vec![],
}
}
pub fn push(&mut self, endpoint: EndpointBody) {
match endpoint {
EndpointBody::Request(endpoint_request) => {
self.requests.push(endpoint_request);
}
EndpointBody::Response(endpoint_response) => {
self.responses.push(endpoint_response);
}
}
}
}
#[derive(Debug)]
enum EndpointBody {
Request(EndpointRequestBody),
Response(EndpointResponseBody),
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointRequestBody {
commands: Vec<SpecCommand>,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct | {
commands: Vec<SpecCommand>,
status_code: u16,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodyDescriptor {
content_type: String,
root_shape_id: String,
}
impl EndpointBody {
fn new(
body_location: &BodyAnalysisLocation,
root_shape_id: Option<String>,
body_commands: impl IntoIterator<Item = SpecCommand>,
) -> Self {
let body_descriptor = match root_shape_id {
Some(root_shape_id) => Some(EndpointBodyDescriptor {
content_type: body_location
.content_type()
.expect("root shape id implies a content type to be present")
.clone(),
root_shape_id,
}),
None => None,
};
let mut body = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => EndpointBody::Request(EndpointRequestBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
}),
BodyAnalysisLocation::Response {
status_code,
path_id,
method,
..
} => EndpointBody::Response(EndpointResponseBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
status_code: *status_code,
}),
};
body.append_endpoint_commands();
body
}
fn append_endpoint_commands(&mut self) {
let mut ids = IdGenerator::default();
match self {
EndpointBody::Request(request_body) => {
let request_id = ids.request();
request_body
.commands
.push(SpecCommand::from(EndpointCommand::add_request(
request_id.clone(),
request_body.path_id.clone(),
request_body.method.clone(),
)));
if let Some(body_descriptor) = &request_body.body_descriptor {
request_body
.commands
.push(SpecCommand::from(EndpointCommand::set_request_body_shape(
request_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
}
}
EndpointBody::Response(response_body) => {
let response_id = ids.response();
response_body.commands.push(SpecCommand::from(
EndpointCommand::add_response_by_path_and_method(
response_id.clone(),
response_body.path_id.clone(),
response_body.method.clone(),
response_body.status_code.clone(),
),
));
if let Some(body_descriptor) | EndpointResponseBody | identifier_name |
provision_spec.go | .AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec)
}
k8sutil.RemoveDuplicateEnvVars(&podSpec)
podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) provisionOSDContainer(osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: "devices", MountPath: "/dev"},
{Name: "udev", MountPath: "/run/udev"},
copyBinariesMount,
mon.CephSecretVolumeMount(),
}...)
if controller.LoopDevicesAllowed() {
envVars = append(envVars, v1.EnvVar{Name: "CEPH_VOLUME_ALLOW_LOOP_DEVICES", Value: "true"})
}
// If the OSD runs on PVC
if osdProps.onPVC() {
volumeMounts = append(volumeMounts, getPvcOSDBridgeMount(osdProps.pvc.ClaimName))
// The device list is read by the Rook CLI via environment variables so let's add them
configuredDevices := []config.ConfiguredDevice{
{
ID: fmt.Sprintf("/mnt/%s", osdProps.pvc.ClaimName),
StoreConfig: config.NewStoreConfig(),
},
}
if osdProps.onPVCWithMetadata() | {
volumeMounts = append(volumeMounts, getPvcMetadataOSDBridgeMount(osdProps.metadataPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/srv/%s", osdProps.metadataPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
} | conditional_block | |
provision_spec.go | VCWalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec)
}
k8sutil.RemoveDuplicateEnvVars(&podSpec)
podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) provisionOSDContainer(osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) | marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: " | {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
} | identifier_body |
provision_spec.go | VCWalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec) | podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) provisionOSDContainer(osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: "devices | }
k8sutil.RemoveDuplicateEnvVars(&podSpec)
| random_line_split |
provision_spec.go | WalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec)
}
k8sutil.RemoveDuplicateEnvVars(&podSpec)
podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) | (osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: " | provisionOSDContainer | identifier_name |
lib.rs | make_tuple ($first, $next), $($rest,)*)
);
(
$ty:ident < $($ty_params:ty),+ >;
limited {$($limited:ident: $limited_from:expr => $limited_to:expr),+}
limited_min {$($limited_min:ident: $limited_min_from:expr => $limited_min_to:expr),*}
unlimited {$($unlimited:ident: $unlimited_from:expr => $unlimited_to:expr),*}
) => (
{
use core::iter::repeat;
use crate::Limited;
{
print!("checking below limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
let expected: $ty<$($ty_params),+> = $ty {
$($limited: $limited_from.into(),)+
$($limited_min: $limited_min_from.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
assert!(!c.is_valid());
assert_relative_eq!(clamped, expected);
}
println!("ok")
}
{
print!("checking within limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
assert!(c.is_valid());
assert_relative_eq!(clamped, c);
}
println!("ok")
}
{
print!("checking above limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
let expected: $ty<$($ty_params),+> = $ty {
$($limited: $limited_to.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
assert!(!c.is_valid());
assert_relative_eq!(clamped, expected);
}
println!("ok")
}
}
);
}
#[macro_use]
mod macros;
pub mod blend;
#[cfg(feature = "std")]
pub mod gradient;
#[cfg(feature = "named")]
pub mod named;
#[cfg(feature = "random")]
mod random_sampling;
mod alpha;
mod hsl;
mod hsv;
mod hwb;
mod lab;
mod lch;
pub mod luma;
pub mod rgb;
mod xyz;
mod yxy;
mod hues;
pub mod chromatic_adaptation;
mod color_difference;
mod component;
pub mod convert;
pub mod encoding;
mod equality;
mod relative_contrast;
pub mod white_point;
pub mod float;
#[doc(hidden)]
pub mod matrix;
fn clamp<T: PartialOrd>(v: T, min: T, max: T) -> T {
if v < min {
min
} else if v > max {
max
} else {
v
}
}
/// A trait for clamping and checking if colors are within their ranges.
pub trait Limited {
/// Check if the color's components are within the expected ranges.
fn is_valid(&self) -> bool;
/// Return a new color where the components has been clamped to the nearest
/// valid values.
fn clamp(&self) -> Self;
/// Clamp the color's components to the nearest valid values.
fn clamp_self(&mut self);
}
/// A trait for linear color interpolation.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Mix};
///
/// let a = LinSrgb::new(0.0, 0.5, 1.0);
/// let b = LinSrgb::new(1.0, 0.5, 0.0);
///
/// assert_relative_eq!(a.mix(&b, 0.0), a);
/// assert_relative_eq!(a.mix(&b, 0.5), LinSrgb::new(0.5, 0.5, 0.5));
/// assert_relative_eq!(a.mix(&b, 1.0), b);
/// ```
pub trait Mix {
/// The type of the mixing factor.
type Scalar: Float;
/// Mix the color with an other color, by `factor`.
///
/// `factor` sould be between `0.0` and `1.0`, where `0.0` will result in
/// the same color as `self` and `1.0` will result in the same color as
/// `other`.
fn mix(&self, other: &Self, factor: Self::Scalar) -> Self;
}
/// The `Shade` trait allows a color to be lightened or darkened.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Shade};
///
/// let a = LinSrgb::new(0.4, 0.4, 0.4);
/// let b = LinSrgb::new(0.6, 0.6, 0.6);
///
/// assert_relative_eq!(a.lighten(0.1), b.darken(0.1));
/// ```
pub trait Shade: Sized {
/// The type of the lighten/darken amount.
type Scalar: Float;
/// Lighten the color by `amount`.
fn lighten(&self, amount: Self::Scalar) -> Self;
/// Darken the color by `amount`.
fn | darken | identifier_name | |
lib.rs | //! use palette::{Srgb, Pixel};
//!
//! // This works for any (even non-RGB) color type that can have the
//! // buffer element type as component.
//! let color_buffer: &mut [Srgb<u8>] = Pixel::from_raw_slice_mut(&mut image_buffer);
//! ```
//!
//! * If you are getting your colors from the GPU, in a game or other graphical
//! application, or if they are otherwise generated by the application, then
//! chances are that they are already linear. Still, make sure to check that
//! they are not being encoded somewhere.
//!
//! When working with other colors:
//!
//! * For HSL, HSV, HWB: Check if they are based on any other color space than
//! sRGB, such as Adobe or Apple RGB.
//!
//! * For any of the CIE color spaces, check for a specification of white point
//! and light source. These are necessary for converting to RGB and other
//! colors, that depend on perception and "viewing devices". Common defaults
//! are the D65 light source and the sRGB white point. The Palette defaults
//! should take you far.
//!
//! ## 2. Processing
//!
//! When your color has been decoded into some Palette type, it's ready for
//! processing. This includes things like blending, hue shifting, darkening and
//! conversion to other formats. Just make sure that your non-linear RGB is
//! made linear first (`my_srgb.into_linear()`), to make the operations
//! available.
//!
//! Different color spaced have different capabilities, pros and cons. You may
//! have to experiment a bit (or look at the example programs) to find out what
//! gives the desired result.
//!
//! ## 3. Encoding
//!
//! When the desired processing is done, it's time to encode the colors back
//! into some image format. The same rules applies as for the decoding, but the
//! process reversed.
//!
//! # Working with Raw Data
//!
//! Oftentimes, pixel data is stored in a raw buffer such as a `[u8; 3]`. The
//! [`Pixel`](crate::encoding::pixel::Pixel) trait allows for easy interoperation between
//! Palette colors and other crates or systems. `from_raw` can be used to
//! convert into a Palette color, `into_format` converts from `Srgb<u8>` to
//! `Srgb<f32>`, and finally `into_raw` to convert from a Palette color back to
//! a `[u8;3]`.
//!
//! ```rust
//! use approx::assert_relative_eq;
//! use palette::{Srgb, Pixel};
//!
//! let buffer = [255, 0, 255];
//! let raw = Srgb::from_raw(&buffer);
//! assert_eq!(raw, &Srgb::<u8>::new(255u8, 0, 255));
//!
//! let raw_float: Srgb<f32> = raw.into_format();
//! assert_relative_eq!(raw_float, Srgb::new(1.0, 0.0, 1.0));
//!
//! let raw: [u8; 3] = Srgb::into_raw(raw_float.into_format());
//! assert_eq!(raw, buffer);
//! ```
// Keep the standard library when running tests, too
#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
#![doc(html_root_url = "https://docs.rs/palette/0.5.0/palette/")]
#![warn(missing_docs)]
#[cfg(any(feature = "std", test))]
extern crate core;
#[cfg_attr(test, macro_use)]
extern crate approx;
#[macro_use]
extern crate palette_derive;
#[cfg(feature = "phf")]
extern crate phf;
#[cfg(feature = "serializing")]
#[macro_use]
extern crate serde;
#[cfg(all(test, feature = "serializing"))]
extern crate serde_json;
use float::Float;
use luma::Luma;
pub use alpha::{Alpha, WithAlpha};
pub use blend::Blend;
#[cfg(feature = "std")]
pub use gradient::Gradient;
pub use hsl::{Hsl, Hsla};
pub use hsv::{Hsv, Hsva};
pub use hwb::{Hwb, Hwba};
pub use lab::{Lab, Laba};
pub use lch::{Lch, Lcha};
pub use luma::{GammaLuma, GammaLumaa, LinLuma, LinLumaa, SrgbLuma, SrgbLumaa};
pub use rgb::{GammaSrgb, GammaSrgba, LinSrgb, LinSrgba, Packed, RgbChannels, Srgb, Srgba};
pub use xyz::{Xyz, Xyza};
pub use yxy::{Yxy, Yxya};
pub use color_difference::ColorDifference;
pub use component::*;
pub use convert::{FromColor, IntoColor};
pub use encoding::pixel::Pixel;
pub use hues::{LabHue, RgbHue};
pub use matrix::Mat3;
pub use relative_contrast::{contrast_ratio, RelativeContrast};
//Helper macro for checking ranges and clamping.
#[cfg(test)]
macro_rules! assert_ranges {
(@make_tuple $first:pat, $next:ident,) => (($first, $next));
(@make_tuple $first:pat, $next:ident, $($rest:ident,)*) => (
assert_ranges!(@make_tuple ($first, $next), $($rest,)*)
);
(
$ty:ident < $($ty_params:ty),+ >;
limited {$($limited:ident: $limited_from:expr => $limited_to:expr),+}
limited_min {$($limited_min:ident: $limited_min_from:expr => $limited_min_to:expr),*}
unlimited {$($unlimited:ident: $unlimited_from:expr => $unlimited_to:expr),*}
) => (
{
use core::iter::repeat;
use crate::Limited;
{
print!("checking below limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
let expected: $ty<$($ty_params),+> = $ty {
$($limited: $limited_from.into(),)+
$($limited_min: $limited_min_from.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
assert!(!c.is_valid());
assert_relative_eq!(clamped, expected);
}
println!("ok")
}
{
print!("checking within limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
assert!(c.is_valid());
assert_relative_eq!(clamped, c);
}
| println!("ok")
} | random_line_split | |
lib.rs | to = $limited_min_to;
let diff = to - from;
let $limited_min = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
let expected: $ty<$($ty_params),+> = $ty {
$($limited: $limited_to.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
assert!(!c.is_valid());
assert_relative_eq!(clamped, expected);
}
println!("ok")
}
}
);
}
#[macro_use]
mod macros;
pub mod blend;
#[cfg(feature = "std")]
pub mod gradient;
#[cfg(feature = "named")]
pub mod named;
#[cfg(feature = "random")]
mod random_sampling;
mod alpha;
mod hsl;
mod hsv;
mod hwb;
mod lab;
mod lch;
pub mod luma;
pub mod rgb;
mod xyz;
mod yxy;
mod hues;
pub mod chromatic_adaptation;
mod color_difference;
mod component;
pub mod convert;
pub mod encoding;
mod equality;
mod relative_contrast;
pub mod white_point;
pub mod float;
#[doc(hidden)]
pub mod matrix;
fn clamp<T: PartialOrd>(v: T, min: T, max: T) -> T {
if v < min {
min
} else if v > max {
max
} else {
v
}
}
/// A trait for clamping and checking if colors are within their ranges.
pub trait Limited {
/// Check if the color's components are within the expected ranges.
fn is_valid(&self) -> bool;
/// Return a new color where the components has been clamped to the nearest
/// valid values.
fn clamp(&self) -> Self;
/// Clamp the color's components to the nearest valid values.
fn clamp_self(&mut self);
}
/// A trait for linear color interpolation.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Mix};
///
/// let a = LinSrgb::new(0.0, 0.5, 1.0);
/// let b = LinSrgb::new(1.0, 0.5, 0.0);
///
/// assert_relative_eq!(a.mix(&b, 0.0), a);
/// assert_relative_eq!(a.mix(&b, 0.5), LinSrgb::new(0.5, 0.5, 0.5));
/// assert_relative_eq!(a.mix(&b, 1.0), b);
/// ```
pub trait Mix {
/// The type of the mixing factor.
type Scalar: Float;
/// Mix the color with an other color, by `factor`.
///
/// `factor` sould be between `0.0` and `1.0`, where `0.0` will result in
/// the same color as `self` and `1.0` will result in the same color as
/// `other`.
fn mix(&self, other: &Self, factor: Self::Scalar) -> Self;
}
/// The `Shade` trait allows a color to be lightened or darkened.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Shade};
///
/// let a = LinSrgb::new(0.4, 0.4, 0.4);
/// let b = LinSrgb::new(0.6, 0.6, 0.6);
///
/// assert_relative_eq!(a.lighten(0.1), b.darken(0.1));
/// ```
pub trait Shade: Sized {
/// The type of the lighten/darken amount.
type Scalar: Float;
/// Lighten the color by `amount`.
fn lighten(&self, amount: Self::Scalar) -> Self;
/// Darken the color by `amount`.
fn darken(&self, amount: Self::Scalar) -> Self {
self.lighten(-amount)
}
}
/// A trait for colors where a hue may be calculated.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{GetHue, LinSrgb};
///
/// let red = LinSrgb::new(1.0f32, 0.0, 0.0);
/// let green = LinSrgb::new(0.0f32, 1.0, 0.0);
/// let blue = LinSrgb::new(0.0f32, 0.0, 1.0);
/// let gray = LinSrgb::new(0.5f32, 0.5, 0.5);
///
/// assert_relative_eq!(red.get_hue().unwrap(), 0.0.into());
/// assert_relative_eq!(green.get_hue().unwrap(), 120.0.into());
/// assert_relative_eq!(blue.get_hue().unwrap(), 240.0.into());
/// assert_eq!(gray.get_hue(), None);
/// ```
pub trait GetHue {
/// The kind of hue unit this color space uses.
///
/// The hue is most commonly calculated as an angle around a color circle
/// and may not always be uniform between color spaces. It's therefore not
/// recommended to take one type of hue and apply it to a color space that
/// expects an other.
type Hue;
/// Calculate a hue if possible.
///
/// Colors in the gray scale has no well defined hue and should preferably
/// return `None`.
fn get_hue(&self) -> Option<Self::Hue>;
}
/// A trait for colors where the hue can be manipulated without conversion.
pub trait Hue: GetHue {
/// Return a new copy of `self`, but with a specific hue.
fn with_hue<H: Into<Self::Hue>>(&self, hue: H) -> Self;
/// Return a new copy of `self`, but with the hue shifted by `amount`.
fn shift_hue<H: Into<Self::Hue>>(&self, amount: H) -> Self;
}
/// A trait for colors where the saturation (or chroma) can be manipulated
/// without conversion.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{Hsv, Saturate};
///
/// let a = Hsv::new(0.0, 0.25, 1.0);
/// let b = Hsv::new(0.0, 1.0, 1.0);
///
/// assert_relative_eq!(a.saturate(1.0), b.desaturate(0.5));
/// ```
pub trait Saturate: Sized {
/// The type of the (de)saturation factor.
type Scalar: Float;
/// Increase the saturation by `factor`.
fn saturate(&self, factor: Self::Scalar) -> Self;
/// Decrease the saturation by `factor`.
fn desaturate(&self, factor: Self::Scalar) -> Self {
self.saturate(-factor)
}
}
/// Perform a unary or binary operation on each component of a color.
pub trait ComponentWise {
/// The scalar type for color components.
type Scalar;
/// Perform a binary operation on this and an other color.
fn component_wise<F: FnMut(Self::Scalar, Self::Scalar) -> Self::Scalar>(
&self,
other: &Self,
f: F,
) -> Self;
/// Perform a unary operation on this color.
fn component_wise_self<F: FnMut(Self::Scalar) -> Self::Scalar>(&self, f: F) -> Self;
}
/// A trait for infallible conversion from `f64`. The conversion may be lossy.
pub trait FromF64 {
/// Creates a value from an `f64` constant.
fn from_f64(c: f64) -> Self;
}
impl FromF64 for f32 {
#[inline]
fn from_f64(c: f64) -> Self {
c as f32
}
}
impl FromF64 for f64 {
#[inline]
fn from_f64(c: f64) -> Self {
c
}
}
/// A convenience function to convert a constant number to Float Type
#[inline]
fn from_f64<T: FromF64>(c: f64) -> T | {
T::from_f64(c)
} | identifier_body | |
chord.js | Bn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
"fill": "#ebebeb",
"stroke": "none"
//"r": 3
}).hide());
//色框
paper.rect(legendArea[0] + 10 + 3, legendArea[1] + 10 + (20 + 3) * i + 6, 16, 8).attr({
"fill": this.getColor(i),
"stroke": "none"
});
//文字
paper.text(legendArea[0] + 10 + 3 + 16 + 8, legendArea[1] + 10 + (20 + 3) * i + 10, this.groupNames[i]).attr({
"fill": "black",
"fill-opacity": 1,
"font-family": "Verdana",
"font-size": 12
}).attr({
"text-anchor": "start"
});
//选框
rectBn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
"fill": "white",
"fill-opacity": 0,
"stroke": "none"
//"r": 3
})).data("clicked", 0);
}
rectBn.forEach(function (d, i) {
d.mouseover(function () {
if (d.data("clicked") === 0) {
underBn[i].attr('opacity', 0.5);
underBn[i].show();
}
}).mouseout(function () {
if (d.data("clicked") === 0) {
underBn[i].hide();
}
});
d.click(function () {
for (j = 0; j < underBn.length; j++) {
if (j === i) {
underBn[j].show();
} else {
underBn[j].hide();
}
}
rectBn.forEach(function (eachBn) {
if (eachBn !== d) {
eachBn.data("clicked", 0);
}
});
if (d.data("clicked") === 0) {
underBn[i].attr('opacity', 1);
underBn[i].show();
that.chordGroups.forEach(function (d) {
if (d.data('source') !== i && d.data('target') !== i) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
d.data("clicked", 1);
} else if (d.data("clicked") === 1) {
underBn[i].hide();
d.data("clicked", 0);
that.chordGroups.forEach(function (d) {
d.attr({
'fill-opacity': 0.6
});
});
}
});
});
};
/**
*对原始数据进行处理
* @param {Array} table 将要被绘制成饼图的二维表数据
*/
Chord.prototype.setSource = function (table, map) {
map = this.map(map);
var that = this;
var index = {};
var groupNum = 0;
table = table.slice(1); // 从第一行开始,第0行舍去
table.forEach(function (d) {
var from = d[map.from];
if (typeof index[from] === 'undefined') {
index[from] = groupNum++;
that.groupNames.push(from);
}
});
table.forEach(function (d) {
var to = d[map.to];
if (typeof index[to] === 'undefined') {
index[to] = groupNum++;
that.groupNames.push(to);
}
});
this.groupNum = groupNum;
for (var i = 0; i < groupNum; i++) {
this.matrix[i] = [];
for (var j = 0; j < groupNum; j++) {
this.matrix[i][j] = 0;
};
};
table.forEach(function (d) {
that.matrix[index[d[map.from]]][index[d[map.to]]] += Number(d[map.value]);
});
};
/**
*创建chord布局
*/
Chord.prototype.layout = function () {
var floatTag = this.floatTag;
var that = this;
that.canvas.clear();
/*var see = [
[11975, 5871, 8916, 2868],
[1951, 10048, 2060, 6171],
[8010, 16145, 8090, 8045],
[1013, 990, 940, 6907]
];*/
var chordLayout = d3.layout.chord().padding(0.05) //chord segments之间的padding间隔
.sortSubgroups(d3.descending) //chord segments细分后的排序规则
.matrix(that.matrix);
/*var fillColor = d3.scale.ordinal()
.domain(d3.range(4))
.range(["#000000", "#FFDD89", "#957244", "#F26223"]);*/
//groups数组:获取每个组的起始角度、数值、索引等属性
var groups = chordLayout.groups();
//由内外半径、起始角度计算路径字符串
var pathCalc = d3.svg.arc().innerRadius(that.defaults.innerRadius).outerRadius(that.defaults.outerRadius).startAngle(function (d) {
return d.startAngle;
}).endAngle(function (d) {
return d.endAngle;
});
var chords = chordLayout.chords();
//计算弦的路径曲线
var chordCalc = d3.svg.chord().radius(that.defaults.innerRadius);
//Raphael: Paper.path()
var donutEle;
//获取每个环形的字符串表示
var spline;
//表示每条弦的element
var chordEle;
//每条弦的字符串表示
var belt;
var num; //每个group分割小格数
var unitAngle; //每个group所占的角度
var angle;
var radian;
var tickLine;
var tickStr; //每个tick的路径
var xTrans, yTrans;
var aX, aY, bX, bY; //每个tick起始端点的坐标
var anchor;
var rotateStr;
var wordStr;
var word;
var textEl;
var wXTrans, wYTrans;
var tips;
var minValue = 1000;
that.chordGroups = that.canvas.set();
that.donutGroups = that.canvas.set();
$(this.node).append(this.floatTag);
//计算某条弦被赋值为target或source的颜色
var colorCalc = function (index) {
var i = chords[index].target.value > chords[index].source.value ? chords[index].target.index : chords[index].source.index;
return i;
};
//添加透明效果
var mouseOverDonut = function () {
floatTag.html('<div style = "text-align: center;margin:auto;color:'
//+ jqNode.color
+
"#ffffff" + '">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
//fade(this.data("donutIndex"), 0.2);
that.underBn[index].attr('opacity', 0.5).show();
};
var mouseOutDonut = function () {
floatTag.css({
"visibility": "hidden"
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.6
});
}
});
| };
var mouseoverChord = function () {
floatTag.html('<div style="text-align: center;margin:auto;color:#ffffff">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
| //fade(this.data("donutIndex"), 0.6);
that.underBn[index].hide();
| conditional_block |
chord.js | Bn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
"fill": "#ebebeb",
"stroke": "none"
//"r": 3
}).hide());
//色框
paper.rect(legendArea[0] + 10 + 3, legendArea[1] + 10 + (20 + 3) * i + 6, 16, 8).attr({
"fill": this.getColor(i),
"stroke": "none"
});
//文字
paper.text(legendArea[0] + 10 + 3 + 16 + 8, legendArea[1] + 10 + (20 + 3) * i + 10, this.groupNames[i]).attr({
"fill": "black",
"fill-opacity": 1,
"font-family": "Verdana",
"font-size": 12
}).attr({
"text-anchor": "start"
});
//选框
rectBn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
"fill": "white",
"fill-opacity": 0,
"stroke": "none"
//"r": 3
})).data("clicked", 0);
}
rectBn.forEach(function (d, i) {
d.mouseover(function () {
if (d.data("clicked") === 0) {
underBn[i].attr('opacity', 0.5);
underBn[i].show();
}
}).mouseout(function () {
if (d.data("clicked") === 0) {
underBn[i].hide();
}
});
d.click(function () {
for (j = 0; j < underBn.length; j++) {
if (j === i) {
underBn[j].show();
} else {
underBn[j].hide();
}
}
rectBn.forEach(function (eachBn) {
if (eachBn !== d) {
eachBn.data("clicked", 0);
}
});
if (d.data("clicked") === 0) {
underBn[i].attr('opacity', 1);
underBn[i].show();
that.chordGroups.forEach(function (d) {
if (d.data('source') !== i && d.data('target') !== i) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
d.data("clicked", 1);
} else if (d.data("clicked") === 1) {
underBn[i].hide();
d.data("clicked", 0);
that.chordGroups.forEach(function (d) {
d.attr({
'fill-opacity': 0.6
});
});
}
});
});
};
/**
*对原始数据进行处理
* @param {Array} table 将要被绘制成饼图的二维表数据
*/
Chord.prototype.setSource = function (table, map) {
map = this.map(map);
var that = this;
var index = {};
var groupNum = 0;
table = table.slice(1); // 从第一行开始,第0行舍去
table.forEach(function (d) {
var from = d[map.from];
if (typeof index[from] === 'undefined') {
index[from] = groupNum++;
that.groupNames.push(from);
}
});
table.forEach(function (d) {
var to = d[map.to];
if (typeof index[to] === 'undefined') {
index[to] = groupNum++;
that.groupNames.push(to);
}
});
this.groupNum = groupNum;
for (var i = 0; i < groupNum; i++) {
this.matrix[i] = [];
for (var j = 0; j < groupNum; j++) {
this.matrix[i][j] = 0;
};
};
table.forEach(function (d) {
that.matrix[index[d[map.from]]][index[d[map.to]]] += Number(d[map.value]);
});
};
/**
*创建chord布局
*/
Chord.prototype.layout = function () {
var floatTag = this.floatTag;
var that = this;
that.canvas.clear();
/*var see = [
[11975, 5871, 8916, 2868],
[1951, 10048, 2060, 6171],
[8010, 16145, 8090, 8045],
[1013, 990, 940, 6907]
];*/
var chordLayout = d3.layout.chord().padding(0.05) //chord segments之间的padding间隔
.sortSubgroups(d3.descending) //chord segments细分后的排序规则
.matrix(that.matrix);
/*var fillColor = d3.scale.ordinal()
.domain(d3.range(4))
.range(["#000000", "#FFDD89", "#957244", "#F26223"]);*/
//groups数组:获取每个组的起始角度、数值、索引等属性
var groups = chordLayout.groups();
//由内外半径、起始角度计算路径字符串
var pathCalc = d3.svg.arc().innerRadius(that.defaults.innerRadius).outerRadius(that.defaults.outerRadius).startAngle(function (d) {
return d.startAngle;
}).endAngle(function (d) {
return d.endAngle;
});
var chords = chordLayout.chords();
//计算弦的路径曲线
var chordCalc = d3.svg.chord().radius(that.defaults.innerRadius);
//Raphael: Paper.path()
var donutEle;
//获取每个环形的字符串表示
var spline;
//表示每条弦的element
var chordEle;
//每条弦的字符串表示
var belt;
var num; //每个group分割小格数
var unitAngle; //每个group所占的角度
var angle;
var radian;
var tickLine;
var tickStr; //每个tick的路径
var xTrans, yTrans;
var aX, aY, bX, bY; //每个tick起始端点的坐标
var anchor;
var rotateStr;
var wordStr;
var word;
var textEl;
var wXTrans, wYTrans;
var tips;
var minValue = 1000;
that.chordGroups = that.canvas.set();
that.donutGroups = that.canvas.set();
$(this.node).append(this.floatTag);
//计算某条弦被赋值为target或source的颜色
var colorCalc = function (index) {
var i = chords[index].target.value > chords[index].source.value ? chords[index].target.index : chords[index].source.index;
return i;
};
|
//添加透明效果
var mouseOverDonut = function () {
floatTag.html('<div style = "text-align: center;margin:auto;color:'
//+ jqNode.color
+
"#ffffff" + '">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
//fade(this.data("donutIndex"), 0.2);
that.underBn[index].attr('opacity', 0.5).show();
};
var mouseOutDonut = function () {
floatTag.css({
"visibility": "hidden"
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.6
});
}
});
//fade(this.data("donutIndex"), 0.6);
that.underBn[index].hide();
};
var mouseoverChord = function () {
floatTag.html('<div style="text-align: center;margin:auto;color:#ffffff">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
| random_line_split | |
provision.go | provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil |
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func init() {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.")
f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.")
f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
}
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
Storage | {
SetError(err, 12)
return
} | conditional_block |
provision.go | provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil {
SetError(err, 12)
return
}
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func | () {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.")
f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.")
f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
}
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
StorageAccount | init | identifier_name |
provision.go | provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil {
SetError(err, 12)
return
}
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func init() {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.") | f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
}
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
StorageAccountKey | f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.") | random_line_split |
provision.go | provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil {
SetError(err, 12)
return
}
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func init() |
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
Storage | {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.")
f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.")
f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
} | identifier_body |
NumberPicker.js | (obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') {
event.preventDefault();
decrement(event);
} else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(p | _interopRequireWildcard | identifier_name | |
NumberPicker.js | ") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) |
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') {
event.preventDefault();
decrement(event);
} else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(precision != null ? parseFloat(round(nextValue, precision)) : nextValue, event);
return nextValue;
}
const clampedValue = clamp(value, min, max | { return obj && obj.__esModule ? obj : { default: obj }; } | identifier_body |
NumberPicker.js | function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') | else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(precision != null ? parseFloat(round(nextValue, precision)) : nextValue, event);
return nextValue;
}
const clampedValue = clamp(value, min, max | {
event.preventDefault();
decrement(event);
} | conditional_block |
NumberPicker.js | });
exports.default = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _uncontrollable = require("uncontrollable");
var _Button = _interopRequireDefault(require("./Button"));
var _Icon = require("./Icon");
var _Localization = require("./Localization");
var _NumberInput = _interopRequireDefault(require("./NumberInput"));
var _Widget = _interopRequireDefault(require("./Widget"));
var _WidgetPicker = _interopRequireDefault(require("./WidgetPicker"));
var CustomPropTypes = _interopRequireWildcard(require("./PropTypes"));
var _useFocusManager = _interopRequireDefault(require("./useFocusManager"));
var _WidgetHelpers = require("./WidgetHelpers");
var _useEventCallback = _interopRequireDefault(require("@restart/hooks/useEventCallback"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'Arrow | random_line_split | ||
imgur.js | canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id + "&response_type=token"
var auth_token = ""
var refreshForm = new FormData();
refreshForm.append("refresh_token", "4c771b1e1055e3eb3adee452a2155ebd258392d1");
refreshForm.append("client_id", "cffdbdcf9cb88c7");
refreshForm.append("client_secret", "4e806c50fb260cc521bfe11d4e7edfa22cfbf684");
refreshForm.append("grant_type", "refresh_token");
//adds drag and drop listeners to the drag element
$("#drag").on('dragover', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragover');
$(this).css('border', '2px solid #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.6)');
$(this).css('color', '#173848');
});
$("#drag").on('dragleave', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragleave');
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
});
$("#drag").on('drop', function(event) {
event.stopPropagation();
event.preventDefault();
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
var files = event.originalEvent.dataTransfer.files;
console.log(files.length === 0);
if (files.length !== 0) {
//We need to send dropped files to Server
console.log(files);
for (var i = 0; i < files.length; i++) {
readPic(files[i]);
}
} else {
var url = $(event.originalEvent.dataTransfer.getData('text/html')).filter('img').attr('src');
console.log(url);
if (isURL(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else
if (isBase64image(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else {
displayError("Not a valid URL");
}
}
});
$("#uploadBtn").on("change", function readFile(event) {
console.log(event);
console.log($(this));
for (var i = 0; i < this.files.length; i++) { |
readPic(this.files[i]);
}
| conditional_block | |
imgur.js | (image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is less than 4MB")
queryVisionAPI(base64result);
}
// Sends the image to Imgur to be stored and then Vision.
// Use if image is greater than 4MB.
// Requires a base64Image string with the prepending tags striped out.
function sendImageImgur(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is greater than 4MB, wait for Imgur");
var form = new FormData();
form.append("image", base64result);
var settings = {
"async": true,
"crossDomain": true,
"url": "https://api.imgur.com/3/image",
"method": "POST",
"headers": {
"authorization": `Client-ID ${apiKey.imgur_client_id}`
},
"processData": false,
"contentType": false,
"mimeType": "multipart/form-data",
"data": form
}
$.ajax(settings).done(function(response) {
var res = JSON.parse(response);
console.log(res);
queryVisionAPI(res.data.link);
// displayPic(res.data.link);
});
}
//from http://stackoverflow.com/a/32490603
//Gets the orientation alteration of an image
// based on this picture https://i.stack.imgur.com/VGsAj.gif
//file is a file blob
//call back is the function to perform after completion
// with the paramater of the orientation variable
function getOrientation(file, callback) {
var reader = new FileReader();
reader.onload = function(event) {
var view = new DataView(event.target.result);
if (view.getUint16(0, false) != 0xFFD8) return callback(-2);
var length = view.byteLength,
offset = 2;
while (offset < length) {
var marker = view.getUint16(offset, false);
offset += 2;
if (marker == 0xFFE1) {
if (view.getUint32(offset += 2, false) != 0x45786966) {
return callback(-1);
}
var little = view.getUint16(offset += 6, false) == 0x4949;
offset += view.getUint32(offset + 4, little);
var tags = view.getUint16(offset, little);
offset += 2;
for (var i = 0; i < tags; i++)
if (view.getUint16(offset + (i * 12), little) == 0x0112)
return callback(view.getUint16(offset + (i * 12) + 8, little));
} else if ((marker & 0xFF00) != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL | sendImageDirect | identifier_name | |
imgur.js | ": `Client-ID ${apiKey.imgur_client_id}`
},
"processData": false,
"contentType": false,
"mimeType": "multipart/form-data",
"data": form
}
$.ajax(settings).done(function(response) {
var res = JSON.parse(response);
console.log(res);
queryVisionAPI(res.data.link);
// displayPic(res.data.link);
});
}
//from http://stackoverflow.com/a/32490603
//Gets the orientation alteration of an image
// based on this picture https://i.stack.imgur.com/VGsAj.gif
//file is a file blob
//call back is the function to perform after completion
// with the paramater of the orientation variable
function getOrientation(file, callback) {
var reader = new FileReader();
reader.onload = function(event) {
var view = new DataView(event.target.result);
if (view.getUint16(0, false) != 0xFFD8) return callback(-2);
var length = view.byteLength,
offset = 2;
while (offset < length) {
var marker = view.getUint16(offset, false);
offset += 2;
if (marker == 0xFFE1) {
if (view.getUint32(offset += 2, false) != 0x45786966) {
return callback(-1);
}
var little = view.getUint16(offset += 6, false) == 0x4949;
offset += view.getUint32(offset + 4, little);
var tags = view.getUint16(offset, little);
offset += 2;
for (var i = 0; i < tags; i++)
if (view.getUint16(offset + (i * 12), little) == 0x0112)
return callback(view.getUint16(offset + (i * 12) + 8, little));
} else if ((marker & 0xFF00) != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) | switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id | {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image | identifier_body |
imgur.js | != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id + "&response_type=token"
var auth_token = ""
var refreshForm = new FormData();
refreshForm.append("refresh_token", "4c771b1e1055e3eb3adee452a2155ebd258392d1");
refreshForm.append("client_id", "cffdbdcf9cb88c7");
refreshForm.append("client_secret", "4e806c50fb260cc521bfe11d4e7edfa22cfbf684");
refreshForm.append("grant_type", "refresh_token");
//adds drag and drop listeners to the drag element
$("#drag").on('dragover', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragover');
$(this).css('border', '2px solid #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.6)');
$(this).css('color', '#173848');
});
$("#drag").on('dragleave', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragleave');
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
});
$("#drag").on('drop', function(event) {
event.stopPropagation();
event.preventDefault();
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)'); | $(this).css('color', '#31708f');
var files = event.originalEvent.dataTransfer.files;
console.log(files.length === 0); | random_line_split | |
Model.py | -59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
self.data.loc[(self.data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# Dependents特征处理
self.data['Dependents'].fillna(self.data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = self.data[self.data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
self.data.loc[self.data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = self.data[self.data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
self.data.loc[self.data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
self.data.drop(self.data[self.data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# 处理年龄特征异常值
self.data.drop(self.data[self.data['Age'] == 0].index, inplace=True) # 根据索引删除样本
self.data.drop(self.data[self.data['Age'] > 96].index, inplace=True)
| def train(self):
warnings.filterwarnings('ignore') # 忽略弹出的warnings
data = pd.read_csv('datasets/cs-training.csv')
data = data.iloc[:, 1:] # 舍弃Unnamed: 0列 | random_line_split | |
Model.py | # 根据传入的数据信息构造一个字典
dic = {"Label": 1, "90D": info_list[0], "RevolvingRatio": info_list[1], '30-59D': info_list[2],
'60-89D': info_list[3], 'Age': info_list[4]}
# print(self.data)
# 插入要预测的信息
self.data = self.data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
self.data = self.data.append(dic, ignore_index=True)
# print(self.data)
return self.get_score()
def get_score(self) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
return 0 # 表示错误
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
cut_new2 = [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程, | )
| identifier_name | |
Model.py | 边界值列表
:return: 统计值、woe值、iv值
"""
r = 0
total_bad = Y.sum()
total_good = Y.count() - total_bad
# 等距分箱
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.cut(X, binList)})
df2 = df1.groupby('bin', as_index=True)
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
woe = list(df3['woe'])
return df3, woe, iv
# 90D、RevolvingRatio、30-59D、60-89D、Age
class Model:
data = None
clf1 = None # 保存训练模型的情况
def __init__(self): # 初始化时候要导入数据进行训练,保存导入的数据以及模型参数以供使用
self.get_data()
self.train()
def predict(self, info_list: list) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
self.get_data()
# 根据传入的数据信息构造一个字典
dic = {"Label": 1, "90D": info_list[0], "RevolvingRatio": info_list[1], '30-59D': info_list[2],
'60-89D': info_list[3], 'Age': info_list[4]}
# print(self.data)
# 插入要预测的信息
self.data = self.data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
self.data = self.data.append(dic, ignore_index=True)
# print(self.data)
return self.get_score()
def get_score(self) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
return 0 # 表示错误
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
cut_new2 = [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = | "
:param Y: 目标变量
:param X: 待分箱特征
:param binList: 分箱 | conditional_block | |
Model.py | [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值 | 四舍五入并保留一位小数点
self.data.loc[(self.data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# Dependents特征处理
self.data['Dependents'].fillna(self.data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = self.data[self.data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
self.data.loc[self.data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = self.data[self.data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
self.data.loc[self.data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
self.data.drop(self.data[self.data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# 处理年龄特征异常值
self.data.drop(self.data[self.data['Age'] == 0].index, inplace=True) # 根据索引删除样本 | identifier_body | |
ledger.go | rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) | active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil {
SvcGridErrorReturn(w, err, funcname)
return
}
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s", | {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct) | identifier_body |
ledger.go | URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil {
SvcGridErrorReturn(w, err, funcname)
return
}
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s", d.ARID, err.Error())
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// SvcWriteSuccessResponseWithID(w, a.ARID)
// }
// // which fields needs to be fetched for SQL query for receipts grid
// var getARQuerySelectFields = rlib.SelectQueryFields{ | // "AR.ARID",
// "AR.Name",
// "AR.ARType", | random_line_split | |
ledger.go | (bid, lid int64, dt *time.Time) (float64, rlib.LedgerMarker) {
lm := rlib.GetRALedgerMarkerOnOrBeforeDeprecated(bid, lid, 0, dt) // find nearest ledgermarker, use it as a starting point
bal, _ := rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil {
SvcGridErrorReturn(w, err, funcname)
return
}
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// | GetAccountBalance | identifier_name | |
ledger.go | rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil |
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s | {
SvcGridErrorReturn(w, err, funcname)
return
} | conditional_block |
client.go | to start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() {
wp := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
| enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) start(conn protocol.Conn, requestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
}
| fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) { | random_line_split |
client.go | to start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() { | p := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) start(conn protocol.Conn, requestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
|
w | identifier_name |
client.go | start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() {
wp := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Clos | uestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
| e()
}
}
}
func (s *socketClient) start(conn protocol.Conn, req | conditional_block |
client.go | to start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() {
wp := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) start(conn protocol.Conn, requestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
| } else {
s.emit(m.Event, m.Payloads...)
}
| p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err) | identifier_body |
contfilter.go | .Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func LogArguments() {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC")))
}
func main() {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil |
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if | {
logger.Fatal(err)
} | conditional_block |
contfilter.go | .Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func LogArguments() {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC"))) |
func main() {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil {
logger.Fatal(err)
}
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if ! | } | random_line_split |
contfilter.go | .Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func LogArguments() {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC")))
}
func main() | if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil {
logger.Fatal(err)
}
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if ! | {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{} | identifier_body |
contfilter.go | .Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func | () {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC")))
}
func main() {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil {
logger.Fatal(err)
}
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if ! | LogArguments | identifier_name |
autogen.py | =[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg)
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
return self.name
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def | (self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name:
return d
raise KeyError(name)
def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (* | __init__ | identifier_name |
autogen.py | =[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg)
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
|
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def __init__(self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name:
return d
raise KeyError(name)
def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (*ctx | return self.name | identifier_body |
autogen.py | =[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
|
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
return self.name
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def __init__(self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name:
return d
raise KeyError(name)
def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (* | if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg) | conditional_block |
autogen.py | =[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg)
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
return self.name
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def __init__(self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name: | def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (*ctx | return d
raise KeyError(name)
| random_line_split |
appStore.ts | //喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(item => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, value] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
} | * 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
}
this.currentIndex = currentIndex
this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.playHistorys = JSON | /** | random_line_split |
appStore.ts | 的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(ite |
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, va
lue] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
}
/**
* 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
}
this.currentIndex = currentIndex
this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.play | m => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
} | identifier_body |
appStore.ts | 喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(item => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, value] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
}
/**
* 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
| this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.playHistorys = | }
this.currentIndex = currentIndex
| conditional_block |
appStore.ts | andSider: boolean //侧边栏是否展开
@observable playing: boolean //歌曲是否正在播放
@observable playlist: any[] //播放列表
@observable mode: number //播放模式
@observable currentIndex: number //当前播放歌曲索引
@observable isFullScreen: boolean //是否全屏播放音乐
@observable likeSongs: any[] //喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(item => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, value] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
}
/**
* 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
}
this.currentIndex = currentIndex
this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
| le isExp | identifier_name | |
qt_events.py |
#@+node:ekr.20110605121601.18540: *3* filter.eventFilter & helpers
def eventFilter(self, obj, event):
"""Return False if Qt should handle the event."""
c, k = self.c, self.c.k
#
# Handle non-key events first.
if not g.app:
return False # For unit tests, but g.unitTesting may be False!
if not self.c.p:
return False # Startup.
#
# Trace events.
if 'events' in g.app.debug:
if isinstance(event, QtGui.QKeyEvent):
self.traceKeys(obj, event)
else:
self.traceEvent(obj, event)
self.traceWidget(event)
#
# Let Qt handle the non-key events.
if self.doNonKeyEvent(event, obj):
return False
#
# Ignore incomplete key events.
if self.shouldIgnoreKeyEvent(event, obj):
return False
#
# Generate a g.KeyStroke for k.masterKeyHandler.
try:
binding, ch, lossage = self.toBinding(event)
if not binding:
return False # Let Qt handle the key.
#
# Pass the KeyStroke to masterKeyHandler.
key_event = self.createKeyEvent(event, c, self.w, ch, binding)
#
# #1933: Update the g.app.lossage
if len(g.app.lossage) > 99:
g.app.lossage.pop()
lossage.stroke = key_event.stroke
g.app.lossage.insert(0, lossage)
#
# Call masterKeyHandler!
k.masterKeyHandler(key_event)
c.outerUpdate()
except Exception:
g.es_exception()
return True # Whatever happens, suppress all other Qt key handling.
#@+node:ekr.20110605195119.16937: *4* filter.createKeyEvent
def createKeyEvent(self, event, c, w, ch, binding):
return leoGui.LeoKeyEvent(
c=self.c,
# char = None doesn't work at present.
# But really, the binding should suffice.
char=ch,
event=event,
binding=binding,
w=w,
x=getattr(event, 'x', None) or 0,
y=getattr(event, 'y', None) or 0,
x_root=getattr(event, 'x_root', None) or 0,
y_root=getattr(event, 'y_root', None) or 0,
)
#@+node:ekr.20180413180751.2: *4* filter.doNonKeyEvent
def doNonKeyEvent(self, event, obj):
"""Handle all non-key event. """
c = self.c
eventType = event.type()
if eventType == Type.WindowActivate:
g.app.gui.onActivateEvent(event, c, obj, self.tag)
elif eventType == Type.WindowDeactivate:
g.app.gui.onDeactivateEvent(event, c, obj, self.tag)
elif eventType == Type.FocusIn:
if self.tag == 'body':
c.frame.body.onFocusIn(obj)
if c.frame and c.frame.top and obj is c.frame.top.lineEdit:
if c.k.getStateKind() == 'getArg':
c.frame.top.lineEdit.restore_selection()
elif eventType == Type.FocusOut and self.tag == 'body':
c.frame.body.onFocusOut(obj)
# Return True unless we have a key event.
return eventType not in (Type.ShortcutOverride, Type.KeyPress, Type.KeyRelease)
#@+node:ekr.20180413180751.3: *4* filter.shouldIgnoreKeyEvent
def shouldIgnoreKeyEvent(self, event, obj):
"""
Return True if we should ignore the key event.
Alas, QLineEdit *only* generates ev.KeyRelease on Windows, Ubuntu,
so the following hack is required.
"""
c = self.c
t = event.type()
isEditWidget = (obj == c.frame.tree.edit_widget(c.p))
if isEditWidget:
# QLineEdit: ignore all key events except keyRelease events.
return t != Type.KeyRelease
if t == Type.KeyPress:
# Hack Alert!
# On some Linux systems (Kubuntu, Debian, the Win or SHIFT-Win keys
# insert garbage symbols into editing areas. Filter out these
# key events. NOTE - this is a *magic number* - who knows if
# it could change in the future?
if event.key() == 0x1000053 and sys.platform == 'linux':
return True
return False # Never ignore KeyPress events.
# This doesn't work. Two shortcut-override events are generated!
# if t == ev.ShortcutOverride and event.text():
# return False # Don't ignore shortcut overrides with a real value.
return True # Ignore everything else.
#@+node:ekr.20110605121601.18543: *4* filter.toBinding & helpers
def toBinding(self, event):
"""
Return (binding, actual_ch):
binding: A user binding, to create g.KeyStroke.
Spelling no longer fragile.
actual_ch: The insertable key, or ''.
"""
mods = self.qtMods(event)
keynum, text, toString, ch = self.qtKey(event)
actual_ch = text or toString
#
# Never allow empty chars, or chars in g.app.gui.ignoreChars
if toString in g.app.gui.ignoreChars:
return None, None, None
ch = ch or toString or ''
if not ch:
return None, None, None
#
# Check for AltGr and Alt+Ctrl keys *before* creating a binding.
actual_ch, ch, mods2 = self.doMacTweaks(actual_ch, ch, mods)
mods3 = self.doAltTweaks(actual_ch, keynum, mods2, toString)
#
# Use *ch* in the binding.
# Clearer w/o f-strings.
binding = '%s%s' % (''.join([f"{z}+" for z in mods3]), ch)
#
# Return the tweaked *actual* char.
binding, actual_ch = self.doLateTweaks(binding, actual_ch)
#
# #1933: Create lossage data.
lossage = LossageData(
actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString)
return binding, actual_ch, lossage
#@+node:ekr.20180419154543.1: *5* filter.doAltTweaks
def doAltTweaks(self, actual_ch, keynum, mods, toString):
"""Turn AltGr and some Alt-Ctrl keys into plain keys."""
def removeAltCtrl(mods):
for mod in ('Alt', 'Control'):
if mod in mods:
mods.remove(mod)
return mods
#
# Remove Alt, Ctrl for AltGr keys.
# See https://en.wikipedia.org/wiki/AltGr_key
if keynum == Key.Key_AltGr:
return removeAltCtrl(mods)
#
# Never alter complex characters.
if len(actual_ch) != 1:
return mods
#
# #1563: A hack for German and Spanish keyboards:
# Remove *plain* Shift modifier for colon and semicolon.
# https://en.m.wikipedia.org/wiki/German_keyboard_layout
kind = self.keyboard_kind.lower()
if (kind in ('german', 'spanish')
and actual_ch in ":;"
and 'Shift' in mods
and 'Alt' not in mods and 'Control' not in mods
):
mods.remove('Shift')
elif kind == 'us-international':
pass # To do.
#
# Handle Alt-Ctrl modifiers for chars whose that are not ascii.
# Testing: Alt-Ctrl-E is '€'.
if ord(actual_ch) > 127 and 'Alt' in mods and 'Control' in mods:
return removeAltCtrl(mods)
return mods
#@+node:ekr.20180417161548.1: *5* filter.do | """Ctor for LeoQtEventFilter class."""
super().__init__()
self.c = c
self.w = w # A leoQtX object, *not* a Qt object.
self.tag = tag
# Debugging.
self.keyIsActive = False
# Pretend there is a binding for these characters.
close_flashers = c.config.getString('close-flash-brackets') or ''
open_flashers = c.config.getString('open-flash-brackets') or ''
self.flashers = open_flashers + close_flashers
# #1563: Support alternate keyboards.
self.keyboard_kind = c.config.getString('keyboard-kind') or 'default-keyboard'
# Support for ctagscompleter.py plugin.
self.ctagscompleter_active = False
self.ctagscompleter_onKey = None | identifier_body | |
qt_events.py | ])
layout_events = [
(e.Type.ChildAdded, 'child-added'), # 68
(e.Type.ChildRemoved, 'child-removed'), # 71
(e.Type.DynamicPropertyChange, 'dynamic-property-change'), # 170
(e.Type.FontChange, 'font-change'), # 97
(e.Type.LayoutRequest, 'layout-request'), # 76
(e.Type.Move, 'move'), # 13 widget's position changed.
(e.Type.Resize, 'resize'), # 14
(e.Type.StyleChange, 'style-change'), # 100
(e.Type.ZOrderChange, 'z-order-change'), # 126
]
if hasattr(e, 'CloseSoftwareInputPanel'):
layout_events.extend([
(e.Type.CloseSoftwareInputPanel, 'close-sip'), # 200
])
mouse_events = (
(e.Type.MouseMove, 'mouse-move'), # 155
(e.Type.MouseButtonPress, 'mouse-press'), # 2
(e.Type.MouseButtonRelease, 'mouse-release'), # 3
(e.Type.Wheel, 'mouse-wheel'), # 31
)
paint_events = [
(e.Type.ChildPolished, 'child-polished'), # 69
(e.Type.PaletteChange, 'palette-change'), # 39
(e.Type.ParentChange, 'parent-change'), # 21
(e.Type.Paint, 'paint'), # 12
(e.Type.Polish, 'polish'), # 75
(e.Type.PolishRequest, 'polish-request'), # 74
]
if hasattr(e, 'RequestSoftwareInputPanel'):
paint_events.extend([
(e.Type.RequestSoftwareInputPanel, 'sip'), # 199
])
update_events = (
(e.Type.UpdateLater, 'update-later'), # 78
(e.Type.UpdateRequest, 'update'), # 77
)
option_table = (
(traceActivate, activate_events),
(traceFocus, focus_events),
(traceHide, hide_events),
(traceHover, hover_events),
(traceKey, key_events),
(traceLayout, layout_events),
(traceMouse, mouse_events),
(tracePaint, paint_events),
(traceUpdate, update_events),
)
for option, table in option_table:
if option:
show.extend(table)
else:
for n, tag in table:
ignore.append(n)
for val, kind in show:
if self.tag in exclude_names:
return
if eventType == val:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
if traceKey:
g.trace(
f"{kind:>25} {self.tag:25} "
f"in-state: {repr(c.k and c.k.inState()):5} obj: {tag}")
return
if eventType not in ignore:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
g.trace(f"{eventType:>25} {self.tag:25} {tag}")
#@+node:ekr.20131121050226.16331: *4* filter.traceWidget
def traceWidget(self, event):
"""Show unexpected events in unusual widgets."""
verbose = False # Not good for --trace-events
e = QtCore.QEvent
assert isinstance(event, QtCore.QEvent)
et = event.type()
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
ignore_d = {
e.Type.ChildAdded: 'child-added', # 68
e.Type.ChildPolished: 'child-polished', # 69
e.Type.ChildRemoved: 'child-removed', # 71
e.Type.Close: 'close', # 19
e.Type.CloseSoftwareInputPanel: 'close-software-input-panel', # 200
178: 'contents-rect-change', # 178
# e.Type.DeferredDelete:'deferred-delete', # 52 (let's trace this)
e.Type.DynamicPropertyChange: 'dynamic-property-change', # 170
e.Type.FocusOut: 'focus-out', # 9 (We don't care if we are leaving an unknown widget)
e.Type.FontChange: 'font-change', # 97
e.Type.Hide: 'hide', # 18
e.Type.HideToParent: 'hide-to-parent', # 27
e.Type.HoverEnter: 'hover-enter', # 127
e.Type.HoverLeave: 'hover-leave', # 128
e.Type.HoverMove: 'hover-move', # 129
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.LayoutRequest: 'layout-request', # 76
e.Type.Leave: 'leave', # 11 (We don't care if we are leaving an unknown widget)
# e.Type.LeaveEditFocus:'leave-edit-focus', # 151
e.Type.MetaCall: 'meta-call', # 43
e.Type.Move: 'move', # 13 widget's position changed.
e.Type.MouseButtonPress: 'mouse-button-press', # 2
e.Type.MouseButtonRelease: 'mouse-button-release', # 3
e.Type.MouseButtonDblClick: 'mouse-button-double-click', # 4
e.Type.MouseMove: 'mouse-move', # 5
e.Type.MouseTrackingChange: 'mouse-tracking-change', # 105
e.Type.Paint: 'paint', # 12
e.Type.PaletteChange: 'palette-change', # 39
e.Type.ParentChange: 'parent-change', # 21
e.Type.Polish: 'polish', # 75
e.Type.PolishRequest: 'polish-request', # 74
e.Type.RequestSoftwareInputPanel: 'request-software-input-panel', # 199
e.Type.Resize: 'resize', # 14
e.Type.ShortcutOverride: 'shortcut-override', # 51
e.Type.Show: 'show', # 17
e.Type.ShowToParent: 'show-to-parent', # 26
e.Type.StyleChange: 'style-change', # 100
e.Type.StatusTip: 'status-tip', # 112
e.Type.Timer: 'timer', # 1
e.Type.ToolTip: 'tool-tip', # 110
e.Type.WindowBlocked: 'window-blocked', # 103
e.Type.WindowUnblocked: 'window-unblocked', # 104
e.Type.ZOrderChange: 'z-order-change', # 126
}
focus_d = {
e.Type.DeferredDelete: 'deferred-delete', # 52
e.Type.Enter: 'enter', # 10
e.Type.FocusIn: 'focus-in', # 8
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
line_edit_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
none_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
}
if et in ignore_d:
return
w = QtWidgets.QApplication.focusWidget()
if verbose: # Too verbose for --trace-events.
for d in (ignore_d, focus_d, line_edit_ignore_d, none_ignore_d):
t = d.get(et)
if t:
break
else:
t = et
g.trace(f"{t:20} {w.__class__}")
return
if w is None:
if et not in none_ignore_d:
t = | focus_d.get(et) or et
g.trace(f"None {t}")
| conditional_block | |
qt_events.py | # pylint: disable=no-member
key_events.extend([
(e.Type.InputMethodQuery, 'input-method-query'), # 207
])
layout_events = [
(e.Type.ChildAdded, 'child-added'), # 68
(e.Type.ChildRemoved, 'child-removed'), # 71
(e.Type.DynamicPropertyChange, 'dynamic-property-change'), # 170
(e.Type.FontChange, 'font-change'), # 97
(e.Type.LayoutRequest, 'layout-request'), # 76
(e.Type.Move, 'move'), # 13 widget's position changed.
(e.Type.Resize, 'resize'), # 14
(e.Type.StyleChange, 'style-change'), # 100
(e.Type.ZOrderChange, 'z-order-change'), # 126
]
if hasattr(e, 'CloseSoftwareInputPanel'):
layout_events.extend([
(e.Type.CloseSoftwareInputPanel, 'close-sip'), # 200
])
mouse_events = (
(e.Type.MouseMove, 'mouse-move'), # 155
(e.Type.MouseButtonPress, 'mouse-press'), # 2
(e.Type.MouseButtonRelease, 'mouse-release'), # 3
(e.Type.Wheel, 'mouse-wheel'), # 31
)
paint_events = [
(e.Type.ChildPolished, 'child-polished'), # 69
(e.Type.PaletteChange, 'palette-change'), # 39
(e.Type.ParentChange, 'parent-change'), # 21
(e.Type.Paint, 'paint'), # 12
(e.Type.Polish, 'polish'), # 75
(e.Type.PolishRequest, 'polish-request'), # 74
]
if hasattr(e, 'RequestSoftwareInputPanel'):
paint_events.extend([
(e.Type.RequestSoftwareInputPanel, 'sip'), # 199
])
update_events = (
(e.Type.UpdateLater, 'update-later'), # 78
(e.Type.UpdateRequest, 'update'), # 77
)
option_table = (
(traceActivate, activate_events),
(traceFocus, focus_events),
(traceHide, hide_events),
(traceHover, hover_events),
(traceKey, key_events),
(traceLayout, layout_events),
(traceMouse, mouse_events),
(tracePaint, paint_events),
(traceUpdate, update_events),
)
for option, table in option_table:
if option:
show.extend(table)
else:
for n, tag in table:
ignore.append(n)
for val, kind in show:
if self.tag in exclude_names:
return
if eventType == val:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
if traceKey:
g.trace(
f"{kind:>25} {self.tag:25} "
f"in-state: {repr(c.k and c.k.inState()):5} obj: {tag}")
return
if eventType not in ignore:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
g.trace(f"{eventType:>25} {self.tag:25} {tag}")
#@+node:ekr.20131121050226.16331: *4* filter.traceWidget
def traceWidget(self, event):
"""Show unexpected events in unusual widgets."""
verbose = False # Not good for --trace-events
e = QtCore.QEvent
assert isinstance(event, QtCore.QEvent)
et = event.type()
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
ignore_d = {
e.Type.ChildAdded: 'child-added', # 68
e.Type.ChildPolished: 'child-polished', # 69
e.Type.ChildRemoved: 'child-removed', # 71
e.Type.Close: 'close', # 19
e.Type.CloseSoftwareInputPanel: 'close-software-input-panel', # 200
178: 'contents-rect-change', # 178
# e.Type.DeferredDelete:'deferred-delete', # 52 (let's trace this)
e.Type.DynamicPropertyChange: 'dynamic-property-change', # 170
e.Type.FocusOut: 'focus-out', # 9 (We don't care if we are leaving an unknown widget)
e.Type.FontChange: 'font-change', # 97
e.Type.Hide: 'hide', # 18
e.Type.HideToParent: 'hide-to-parent', # 27
e.Type.HoverEnter: 'hover-enter', # 127
e.Type.HoverLeave: 'hover-leave', # 128
e.Type.HoverMove: 'hover-move', # 129
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.LayoutRequest: 'layout-request', # 76
e.Type.Leave: 'leave', # 11 (We don't care if we are leaving an unknown widget)
# e.Type.LeaveEditFocus:'leave-edit-focus', # 151
e.Type.MetaCall: 'meta-call', # 43
e.Type.Move: 'move', # 13 widget's position changed.
e.Type.MouseButtonPress: 'mouse-button-press', # 2
e.Type.MouseButtonRelease: 'mouse-button-release', # 3
e.Type.MouseButtonDblClick: 'mouse-button-double-click', # 4
e.Type.MouseMove: 'mouse-move', # 5
e.Type.MouseTrackingChange: 'mouse-tracking-change', # 105
e.Type.Paint: 'paint', # 12
e.Type.PaletteChange: 'palette-change', # 39
e.Type.ParentChange: 'parent-change', # 21
e.Type.Polish: 'polish', # 75
e.Type.PolishRequest: 'polish-request', # 74
e.Type.RequestSoftwareInputPanel: 'request-software-input-panel', # 199
e.Type.Resize: 'resize', # 14
e.Type.ShortcutOverride: 'shortcut-override', # 51
e.Type.Show: 'show', # 17
e.Type.ShowToParent: 'show-to-parent', # 26
e.Type.StyleChange: 'style-change', # 100
e.Type.StatusTip: 'status-tip', # 112
e.Type.Timer: 'timer', # 1
e.Type.ToolTip: 'tool-tip', # 110
e.Type.WindowBlocked: 'window-blocked', # 103
e.Type.WindowUnblocked: 'window-unblocked', # 104
e.Type.ZOrderChange: 'z-order-change', # 126
}
focus_d = {
e.Type.DeferredDelete: 'deferred-delete', # 52
e.Type.Enter: 'enter', # 10
e.Type.FocusIn: 'focus-in', # 8
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
line_edit_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
none_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
}
if et in ignore_d:
return
w = QtWidgets.QApplication.focusWidget()
if verbose: # Too verbose for --trace-events.
for d in (ignore_d, focus_d, line_edit_ignore_d, none_ignore_d):
t = d.get(et)
if t:
break
else:
t = et | g.trace(f"{t:20} {w.__class__}")
return
if w is None: | random_line_split | |
qt_events.py | (self, obj, event):
"""Return False if Qt should handle the event."""
c, k = self.c, self.c.k
#
# Handle non-key events first.
if not g.app:
return False # For unit tests, but g.unitTesting may be False!
if not self.c.p:
return False # Startup.
#
# Trace events.
if 'events' in g.app.debug:
if isinstance(event, QtGui.QKeyEvent):
self.traceKeys(obj, event)
else:
self.traceEvent(obj, event)
self.traceWidget(event)
#
# Let Qt handle the non-key events.
if self.doNonKeyEvent(event, obj):
return False
#
# Ignore incomplete key events.
if self.shouldIgnoreKeyEvent(event, obj):
return False
#
# Generate a g.KeyStroke for k.masterKeyHandler.
try:
binding, ch, lossage = self.toBinding(event)
if not binding:
return False # Let Qt handle the key.
#
# Pass the KeyStroke to masterKeyHandler.
key_event = self.createKeyEvent(event, c, self.w, ch, binding)
#
# #1933: Update the g.app.lossage
if len(g.app.lossage) > 99:
g.app.lossage.pop()
lossage.stroke = key_event.stroke
g.app.lossage.insert(0, lossage)
#
# Call masterKeyHandler!
k.masterKeyHandler(key_event)
c.outerUpdate()
except Exception:
g.es_exception()
return True # Whatever happens, suppress all other Qt key handling.
#@+node:ekr.20110605195119.16937: *4* filter.createKeyEvent
def createKeyEvent(self, event, c, w, ch, binding):
return leoGui.LeoKeyEvent(
c=self.c,
# char = None doesn't work at present.
# But really, the binding should suffice.
char=ch,
event=event,
binding=binding,
w=w,
x=getattr(event, 'x', None) or 0,
y=getattr(event, 'y', None) or 0,
x_root=getattr(event, 'x_root', None) or 0,
y_root=getattr(event, 'y_root', None) or 0,
)
#@+node:ekr.20180413180751.2: *4* filter.doNonKeyEvent
def doNonKeyEvent(self, event, obj):
"""Handle all non-key event. """
c = self.c
eventType = event.type()
if eventType == Type.WindowActivate:
g.app.gui.onActivateEvent(event, c, obj, self.tag)
elif eventType == Type.WindowDeactivate:
g.app.gui.onDeactivateEvent(event, c, obj, self.tag)
elif eventType == Type.FocusIn:
if self.tag == 'body':
c.frame.body.onFocusIn(obj)
if c.frame and c.frame.top and obj is c.frame.top.lineEdit:
if c.k.getStateKind() == 'getArg':
c.frame.top.lineEdit.restore_selection()
elif eventType == Type.FocusOut and self.tag == 'body':
c.frame.body.onFocusOut(obj)
# Return True unless we have a key event.
return eventType not in (Type.ShortcutOverride, Type.KeyPress, Type.KeyRelease)
#@+node:ekr.20180413180751.3: *4* filter.shouldIgnoreKeyEvent
def shouldIgnoreKeyEvent(self, event, obj):
"""
Return True if we should ignore the key event.
Alas, QLineEdit *only* generates ev.KeyRelease on Windows, Ubuntu,
so the following hack is required.
"""
c = self.c
t = event.type()
isEditWidget = (obj == c.frame.tree.edit_widget(c.p))
if isEditWidget:
# QLineEdit: ignore all key events except keyRelease events.
return t != Type.KeyRelease
if t == Type.KeyPress:
# Hack Alert!
# On some Linux systems (Kubuntu, Debian, the Win or SHIFT-Win keys
# insert garbage symbols into editing areas. Filter out these
# key events. NOTE - this is a *magic number* - who knows if
# it could change in the future?
if event.key() == 0x1000053 and sys.platform == 'linux':
return True
return False # Never ignore KeyPress events.
# This doesn't work. Two shortcut-override events are generated!
# if t == ev.ShortcutOverride and event.text():
# return False # Don't ignore shortcut overrides with a real value.
return True # Ignore everything else.
#@+node:ekr.20110605121601.18543: *4* filter.toBinding & helpers
def toBinding(self, event):
"""
Return (binding, actual_ch):
binding: A user binding, to create g.KeyStroke.
Spelling no longer fragile.
actual_ch: The insertable key, or ''.
"""
mods = self.qtMods(event)
keynum, text, toString, ch = self.qtKey(event)
actual_ch = text or toString
#
# Never allow empty chars, or chars in g.app.gui.ignoreChars
if toString in g.app.gui.ignoreChars:
return None, None, None
ch = ch or toString or ''
if not ch:
return None, None, None
#
# Check for AltGr and Alt+Ctrl keys *before* creating a binding.
actual_ch, ch, mods2 = self.doMacTweaks(actual_ch, ch, mods)
mods3 = self.doAltTweaks(actual_ch, keynum, mods2, toString)
#
# Use *ch* in the binding.
# Clearer w/o f-strings.
binding = '%s%s' % (''.join([f"{z}+" for z in mods3]), ch)
#
# Return the tweaked *actual* char.
binding, actual_ch = self.doLateTweaks(binding, actual_ch)
#
# #1933: Create lossage data.
lossage = LossageData(
actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString)
return binding, actual_ch, lossage
#@+node:ekr.20180419154543.1: *5* filter.doAltTweaks
def doAltTweaks(self, actual_ch, keynum, mods, toString):
"""Turn AltGr and some Alt-Ctrl keys into plain keys."""
def removeAltCtrl(mods):
for mod in ('Alt', 'Control'):
if mod in mods:
mods.remove(mod)
return mods
#
# Remove Alt, Ctrl for AltGr keys.
# See https://en.wikipedia.org/wiki/AltGr_key
if keynum == Key.Key_AltGr:
return removeAltCtrl(mods)
#
# Never alter complex characters.
if len(actual_ch) != 1:
return mods
#
# #1563: A hack for German and Spanish keyboards:
# Remove *plain* Shift modifier for colon and semicolon.
# https://en.m.wikipedia.org/wiki/German_keyboard_layout
kind = self.keyboard_kind.lower()
if (kind in ('german', 'spanish')
and actual_ch in ":;"
and 'Shift' in mods
and 'Alt' not in mods and 'Control' not in mods
):
mods.remove('Shift')
elif kind == 'us-international':
pass # To do.
#
# Handle Alt-Ctrl modifiers for chars whose that are not ascii.
# Testing: Alt-Ctrl-E is '€'.
if ord(actual_ch) > 127 and 'Alt' in mods and 'Control' in mods:
return removeAltCtrl(mods)
return mods
#@+node:ekr.20180417161548.1: *5* filter.doLateTweaks
def doLateTweaks(self, binding, ch):
"""Make final tweaks. g.KeyStroke does other tweaks later."""
#
# These are needed because ch is separate from binding.
if ch == '\r':
ch = '\n'
if binding == 'Escape':
ch = 'Escape'
#
# Adjust the case of the binding string (for the minibuffer).
if len(ch) == 1 and len(binding) == 1 and ch.isalpha() and binding.isalpha():
if ch != binding:
binding = ch
return binding, ch
#@+node:ekr.20180419160958.1: *5* filter.doMacTweaks
def doMacTweaks(self, actual_ch, ch, mods):
"""Replace MacOS Alt characters."""
if not g.isMac:
return actual_ch, ch, mods
if ch == 'Backspace':
# On the Mac, the | eventFilter | identifier_name | |
list.rs | use kas::geom::Rect;
/// A generic row widget
///
/// See documentation of [`List`] type.
pub type Row<W> = List<Horizontal, W>;
/// A generic column widget
///
/// See documentation of [`List`] type.
pub type Column<W> = List<Vertical, W>;
/// A row of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxRow<M> = BoxList<Horizontal, M>;
/// A column of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxColumn<M> = BoxList<Vertical, M>;
/// A row/column of boxed widgets
///
/// This is parameterised over directionality and handler message type.
///
/// See documentation of [`List`] type.
pub type BoxList<D, M> = List<D, Box<dyn Handler<Msg = M>>>;
/// A generic row/column widget
///
/// This type is generic over both directionality and the type of child widgets.
/// Essentially, it is a [`Vec`] which also implements the [`Widget`] trait.
///
/// [`Row`] and [`Column`] are parameterisations with set directionality.
///
/// [`BoxList`] (and its derivatives [`BoxRow`], [`BoxColumn`]) parameterise
/// `W = Box<dyn Widget>`, thus supporting individually boxed child widgets.
/// This allows use of multiple types of child widget at the cost of extra
/// allocation, and requires dynamic dispatch of methods.
///
/// Configuring and resizing elements is O(n) in the number of children.
/// Drawing and event handling is O(log n) in the number of children (assuming
/// only a small number are visible at any one time).
///
/// For fixed configurations of child widgets, [`make_widget`] can be used
/// instead. [`make_widget`] has the advantage that it can support child widgets
/// of multiple types without allocation and via static dispatch, but the
/// disadvantage that drawing and event handling are O(n) in the number of
/// children.
///
/// [`make_widget`]: ../macros/index.html#the-make_widget-macro
#[derive(Clone, Default, Debug)]
pub struct List<D: Directional, W: Widget> {
core: CoreData,
widgets: Vec<W>,
data: layout::DynRowStorage,
direction: D,
}
// We implement this manually, because the derive implementation cannot handle
// vectors of child widgets.
impl<D: Directional, W: Widget> WidgetCore for List<D, W> {
#[inline]
fn core_data(&self) -> &CoreData {
&self.core
}
#[inline]
fn core_data_mut(&mut self) -> &mut CoreData {
&mut self.core
}
#[inline]
fn widget_name(&self) -> &'static str {
"List"
}
#[inline]
fn as_widget(&self) -> &dyn Widget {
self
}
#[inline]
fn as_widget_mut(&mut self) -> &mut dyn Widget {
self
}
#[inline]
fn len(&self) -> usize {
self.widgets.len()
}
#[inline]
fn get(&self, index: usize) -> Option<&dyn Widget> {
self.widgets.get(index).map(|w| w.as_widget())
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut dyn Widget> {
self.widgets.get_mut(index).map(|w| w.as_widget_mut())
}
fn walk(&self, f: &mut dyn FnMut(&dyn Widget)) {
for child in &self.widgets {
child.walk(f);
}
f(self)
}
fn walk_mut(&mut self, f: &mut dyn FnMut(&mut dyn Widget)) {
for child in &mut self.widgets {
child.walk_mut(f);
}
f(self)
}
}
impl<D: Directional, W: Widget> Widget for List<D, W> {}
impl<D: Directional, W: Widget> Layout for List<D, W> {
fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules {
let mut solver = layout::RowSolver::<Vec<u32>, _>::new(
axis,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
solver.for_child(&mut self.data, n, |axis| {
child.size_rules(size_handle, axis)
});
}
solver.finish(&mut self.data, iter::empty(), iter::empty())
}
fn set_rect(&mut self, size_handle: &mut dyn SizeHandle, rect: Rect, _: AlignHints) {
self.core.rect = rect;
let mut setter = layout::RowSetter::<D, Vec<u32>, _>::new(
rect,
Margins::ZERO,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
let align = AlignHints::default();
child.set_rect(size_handle, setter.child_rect(n), align);
}
}
fn find_id(&self, coord: Coord) -> Option<WidgetId> {
let solver = RowPositionSolver::new(self.direction);
if let Some(child) = solver.find_child(&self.widgets, coord) {
return child.find_id(coord);
}
// We should return Some(self), but hit a borrow check error.
// This should however be unreachable anyway.
None
}
fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &ManagerState) {
let solver = RowPositionSolver::new(self.direction);
solver.for_children(&self.widgets, draw_handle.target_rect(), |w| {
w.draw(draw_handle, mgr)
});
}
}
impl<D: Directional, W: Widget + Handler> Handler for List<D, W> {
type Msg = <W as Handler>::Msg;
fn handle(&mut self, mgr: &mut Manager, id: WidgetId, event: Event) -> Response<Self::Msg> {
for child in &mut self.widgets {
if id <= child.id() {
return child.handle(mgr, id, event);
}
}
debug_assert!(id == self.id(), "Handler::handle: bad WidgetId");
Response::Unhandled(event)
}
}
impl<D: Directional + Default, W: Widget> List<D, W> {
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`List::new_with_direction`].
pub fn new(widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction: Default::default(),
}
}
}
impl<D: Directional, W: Widget> List<D, W> {
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction,
}
}
/// True if there are no child widgets
pub fn is_empty(&self) -> bool {
self.widgets.is_empty()
}
/// Returns the number of child widgets
pub fn len(&self) -> usize {
self.widgets.len()
}
/// Returns the number of elements the vector can hold without reallocating.
pub fn capacity(&self) -> usize {
self.widgets.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// into the list. See documentation of [`Vec::reserve`].
pub fn reserve(&mut self, additional: usize) {
self.widgets.reserve(additional);
}
/// Remove all child widgets
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn clear(&mut self, mgr: &mut Manager) {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.clear();
}
/// Append a child widget
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn push(&mut self, mgr: &mut Manager, widget: W) {
self.widgets.push(widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Remove the last child widget
///
/// Returns `None` if there are no children. Otherwise, this
/// triggers a reconfigure before the next draw operation.
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn pop(&mut self, mgr: &mut Manager) -> Option<W> {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.pop()
}
/// Inserts a child widget position `index`
///
/// Panics if `index > len`.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn insert(& | use crate::{AlignHints, Directional, Horizontal, Vertical};
use crate::{CoreData, Layout, TkAction, Widget, WidgetCore, WidgetId}; | random_line_split | |
list.rs | /// A row of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxRow<M> = BoxList<Horizontal, M>;
/// A column of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxColumn<M> = BoxList<Vertical, M>;
/// A row/column of boxed widgets
///
/// This is parameterised over directionality and handler message type.
///
/// See documentation of [`List`] type.
pub type BoxList<D, M> = List<D, Box<dyn Handler<Msg = M>>>;
/// A generic row/column widget
///
/// This type is generic over both directionality and the type of child widgets.
/// Essentially, it is a [`Vec`] which also implements the [`Widget`] trait.
///
/// [`Row`] and [`Column`] are parameterisations with set directionality.
///
/// [`BoxList`] (and its derivatives [`BoxRow`], [`BoxColumn`]) parameterise
/// `W = Box<dyn Widget>`, thus supporting individually boxed child widgets.
/// This allows use of multiple types of child widget at the cost of extra
/// allocation, and requires dynamic dispatch of methods.
///
/// Configuring and resizing elements is O(n) in the number of children.
/// Drawing and event handling is O(log n) in the number of children (assuming
/// only a small number are visible at any one time).
///
/// For fixed configurations of child widgets, [`make_widget`] can be used
/// instead. [`make_widget`] has the advantage that it can support child widgets
/// of multiple types without allocation and via static dispatch, but the
/// disadvantage that drawing and event handling are O(n) in the number of
/// children.
///
/// [`make_widget`]: ../macros/index.html#the-make_widget-macro
#[derive(Clone, Default, Debug)]
pub struct List<D: Directional, W: Widget> {
core: CoreData,
widgets: Vec<W>,
data: layout::DynRowStorage,
direction: D,
}
// We implement this manually, because the derive implementation cannot handle
// vectors of child widgets.
impl<D: Directional, W: Widget> WidgetCore for List<D, W> {
#[inline]
fn core_data(&self) -> &CoreData {
&self.core
}
#[inline]
fn core_data_mut(&mut self) -> &mut CoreData {
&mut self.core
}
#[inline]
fn widget_name(&self) -> &'static str {
"List"
}
#[inline]
fn as_widget(&self) -> &dyn Widget {
self
}
#[inline]
fn as_widget_mut(&mut self) -> &mut dyn Widget {
self
}
#[inline]
fn len(&self) -> usize {
self.widgets.len()
}
#[inline]
fn get(&self, index: usize) -> Option<&dyn Widget> {
self.widgets.get(index).map(|w| w.as_widget())
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut dyn Widget> {
self.widgets.get_mut(index).map(|w| w.as_widget_mut())
}
fn walk(&self, f: &mut dyn FnMut(&dyn Widget)) {
for child in &self.widgets {
child.walk(f);
}
f(self)
}
fn walk_mut(&mut self, f: &mut dyn FnMut(&mut dyn Widget)) {
for child in &mut self.widgets {
child.walk_mut(f);
}
f(self)
}
}
impl<D: Directional, W: Widget> Widget for List<D, W> {}
impl<D: Directional, W: Widget> Layout for List<D, W> {
fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules {
let mut solver = layout::RowSolver::<Vec<u32>, _>::new(
axis,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
solver.for_child(&mut self.data, n, |axis| {
child.size_rules(size_handle, axis)
});
}
solver.finish(&mut self.data, iter::empty(), iter::empty())
}
fn set_rect(&mut self, size_handle: &mut dyn SizeHandle, rect: Rect, _: AlignHints) {
self.core.rect = rect;
let mut setter = layout::RowSetter::<D, Vec<u32>, _>::new(
rect,
Margins::ZERO,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
let align = AlignHints::default();
child.set_rect(size_handle, setter.child_rect(n), align);
}
}
fn find_id(&self, coord: Coord) -> Option<WidgetId> {
let solver = RowPositionSolver::new(self.direction);
if let Some(child) = solver.find_child(&self.widgets, coord) {
return child.find_id(coord);
}
// We should return Some(self), but hit a borrow check error.
// This should however be unreachable anyway.
None
}
fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &ManagerState) {
let solver = RowPositionSolver::new(self.direction);
solver.for_children(&self.widgets, draw_handle.target_rect(), |w| {
w.draw(draw_handle, mgr)
});
}
}
impl<D: Directional, W: Widget + Handler> Handler for List<D, W> {
type Msg = <W as Handler>::Msg;
fn handle(&mut self, mgr: &mut Manager, id: WidgetId, event: Event) -> Response<Self::Msg> {
for child in &mut self.widgets {
if id <= child.id() {
return child.handle(mgr, id, event);
}
}
debug_assert!(id == self.id(), "Handler::handle: bad WidgetId");
Response::Unhandled(event)
}
}
impl<D: Directional + Default, W: Widget> List<D, W> {
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`List::new_with_direction`].
pub fn new(widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction: Default::default(),
}
}
}
impl<D: Directional, W: Widget> List<D, W> {
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction,
}
}
/// True if there are no child widgets
pub fn is_empty(&self) -> bool {
self.widgets.is_empty()
}
/// Returns the number of child widgets
pub fn len(&self) -> usize {
self.widgets.len()
}
/// Returns the number of elements the vector can hold without reallocating.
pub fn capacity(&self) -> usize {
self.widgets.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// into the list. See documentation of [`Vec::reserve`].
pub fn reserve(&mut self, additional: usize) {
self.widgets.reserve(additional);
}
/// Remove all child widgets
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn | (&mut self, mgr: &mut Manager) {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.clear();
}
/// Append a child widget
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn push(&mut self, mgr: &mut Manager, widget: W) {
self.widgets.push(widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Remove the last child widget
///
/// Returns `None` if there are no children. Otherwise, this
/// triggers a reconfigure before the next draw operation.
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn pop(&mut self, mgr: &mut Manager) -> Option<W> {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.pop()
}
/// Inserts a child widget position `index`
///
/// Panics if `index > len`.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn insert(&mut self, mgr: &mut Manager, index: usize, widget: W) {
self.widgets.insert(index, widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Removes the child widget at position `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn remove(&mut self, mgr: &mut | clear | identifier_name |
list.rs | /// A row of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxRow<M> = BoxList<Horizontal, M>;
/// A column of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxColumn<M> = BoxList<Vertical, M>;
/// A row/column of boxed widgets
///
/// This is parameterised over directionality and handler message type.
///
/// See documentation of [`List`] type.
pub type BoxList<D, M> = List<D, Box<dyn Handler<Msg = M>>>;
/// A generic row/column widget
///
/// This type is generic over both directionality and the type of child widgets.
/// Essentially, it is a [`Vec`] which also implements the [`Widget`] trait.
///
/// [`Row`] and [`Column`] are parameterisations with set directionality.
///
/// [`BoxList`] (and its derivatives [`BoxRow`], [`BoxColumn`]) parameterise
/// `W = Box<dyn Widget>`, thus supporting individually boxed child widgets.
/// This allows use of multiple types of child widget at the cost of extra
/// allocation, and requires dynamic dispatch of methods.
///
/// Configuring and resizing elements is O(n) in the number of children.
/// Drawing and event handling is O(log n) in the number of children (assuming
/// only a small number are visible at any one time).
///
/// For fixed configurations of child widgets, [`make_widget`] can be used
/// instead. [`make_widget`] has the advantage that it can support child widgets
/// of multiple types without allocation and via static dispatch, but the
/// disadvantage that drawing and event handling are O(n) in the number of
/// children.
///
/// [`make_widget`]: ../macros/index.html#the-make_widget-macro
#[derive(Clone, Default, Debug)]
pub struct List<D: Directional, W: Widget> {
core: CoreData,
widgets: Vec<W>,
data: layout::DynRowStorage,
direction: D,
}
// We implement this manually, because the derive implementation cannot handle
// vectors of child widgets.
impl<D: Directional, W: Widget> WidgetCore for List<D, W> {
#[inline]
fn core_data(&self) -> &CoreData |
#[inline]
fn core_data_mut(&mut self) -> &mut CoreData {
&mut self.core
}
#[inline]
fn widget_name(&self) -> &'static str {
"List"
}
#[inline]
fn as_widget(&self) -> &dyn Widget {
self
}
#[inline]
fn as_widget_mut(&mut self) -> &mut dyn Widget {
self
}
#[inline]
fn len(&self) -> usize {
self.widgets.len()
}
#[inline]
fn get(&self, index: usize) -> Option<&dyn Widget> {
self.widgets.get(index).map(|w| w.as_widget())
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut dyn Widget> {
self.widgets.get_mut(index).map(|w| w.as_widget_mut())
}
fn walk(&self, f: &mut dyn FnMut(&dyn Widget)) {
for child in &self.widgets {
child.walk(f);
}
f(self)
}
fn walk_mut(&mut self, f: &mut dyn FnMut(&mut dyn Widget)) {
for child in &mut self.widgets {
child.walk_mut(f);
}
f(self)
}
}
impl<D: Directional, W: Widget> Widget for List<D, W> {}
impl<D: Directional, W: Widget> Layout for List<D, W> {
fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules {
let mut solver = layout::RowSolver::<Vec<u32>, _>::new(
axis,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
solver.for_child(&mut self.data, n, |axis| {
child.size_rules(size_handle, axis)
});
}
solver.finish(&mut self.data, iter::empty(), iter::empty())
}
fn set_rect(&mut self, size_handle: &mut dyn SizeHandle, rect: Rect, _: AlignHints) {
self.core.rect = rect;
let mut setter = layout::RowSetter::<D, Vec<u32>, _>::new(
rect,
Margins::ZERO,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
let align = AlignHints::default();
child.set_rect(size_handle, setter.child_rect(n), align);
}
}
fn find_id(&self, coord: Coord) -> Option<WidgetId> {
let solver = RowPositionSolver::new(self.direction);
if let Some(child) = solver.find_child(&self.widgets, coord) {
return child.find_id(coord);
}
// We should return Some(self), but hit a borrow check error.
// This should however be unreachable anyway.
None
}
fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &ManagerState) {
let solver = RowPositionSolver::new(self.direction);
solver.for_children(&self.widgets, draw_handle.target_rect(), |w| {
w.draw(draw_handle, mgr)
});
}
}
impl<D: Directional, W: Widget + Handler> Handler for List<D, W> {
type Msg = <W as Handler>::Msg;
fn handle(&mut self, mgr: &mut Manager, id: WidgetId, event: Event) -> Response<Self::Msg> {
for child in &mut self.widgets {
if id <= child.id() {
return child.handle(mgr, id, event);
}
}
debug_assert!(id == self.id(), "Handler::handle: bad WidgetId");
Response::Unhandled(event)
}
}
impl<D: Directional + Default, W: Widget> List<D, W> {
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`List::new_with_direction`].
pub fn new(widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction: Default::default(),
}
}
}
impl<D: Directional, W: Widget> List<D, W> {
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction,
}
}
/// True if there are no child widgets
pub fn is_empty(&self) -> bool {
self.widgets.is_empty()
}
/// Returns the number of child widgets
pub fn len(&self) -> usize {
self.widgets.len()
}
/// Returns the number of elements the vector can hold without reallocating.
pub fn capacity(&self) -> usize {
self.widgets.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// into the list. See documentation of [`Vec::reserve`].
pub fn reserve(&mut self, additional: usize) {
self.widgets.reserve(additional);
}
/// Remove all child widgets
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn clear(&mut self, mgr: &mut Manager) {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.clear();
}
/// Append a child widget
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn push(&mut self, mgr: &mut Manager, widget: W) {
self.widgets.push(widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Remove the last child widget
///
/// Returns `None` if there are no children. Otherwise, this
/// triggers a reconfigure before the next draw operation.
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn pop(&mut self, mgr: &mut Manager) -> Option<W> {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.pop()
}
/// Inserts a child widget position `index`
///
/// Panics if `index > len`.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn insert(&mut self, mgr: &mut Manager, index: usize, widget: W) {
self.widgets.insert(index, widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Removes the child widget at position `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn remove(&mut self, mgr: & | {
&self.core
} | identifier_body |
my-orders.js | { value: 'title', label: 'Título' },
{ value: 'responsible', label: 'Responsável' },
{ value: 'status', label: 'Status' },
{ value: 'order_date', label: 'Data do pedido' },
];
const itemsPerPage = 5;
const getTechnologyDataGrid = (order, openModal, setCurrentOrder) => {
const {
id,
status,
created_at,
technology: { title, users },
} = order;
const owner = users?.find((user) => user?.pivot?.role === 'OWNER');
const orderType = 'technology';
return {
id,
title,
institution: owner.institution.initials,
responsible: owner?.full_name,
status: {
status,
content: getDealStatusText(status),
},
orderDate: dateToString(created_at),
type: 'T',
actions: [
{
variant: 'gray',
ariaLabel: 'Order details',
icon: FiEye,
onClick: () => openModal('technologyOrderDetails', { id }),
},
{
variant: 'info',
ariaLabel: 'Send message to technology owner',
icon: FiMessageSquare,
onClick: () => setCurrentOrder({ ...order, owner }),
},
{
variant: 'remove',
ariaLabel: 'Cancel order',
icon: FiX,
onClick: () => openModal('cancelOrder', { id, orderType }),
disabled:
status === dealStatusEnum.DEAL_CANCELLED ||
status === dealStatusEnum.DEAL_STRUCK,
},
],
};
};
const getServiceDataGrid = (order, openModal, setCurrentOrder) => {
const {
id,
status,
created_at,
service: { name, user },
} = order;
const orderType = 'service';
return {
id,
title: name,
institution: user.institution.initials,
responsible: user.full_name,
status: { status, content: getDealStatusText(status) },
orderDate: dateToString(created_at),
type: 'S',
actions: [
{
variant: 'gray',
ariaLabel: 'Order details',
icon: FiEye,
onClick: () => openModal('serviceOrderDetails', { id }),
},
{
variant: 'info',
ariaLabel: 'Send message to service owner',
icon: FiMessageSquare,
onClick: () => setCurrentOrder({ ...order, owner: user }),
},
{
variant: 'remove',
ariaLabel: 'Cancel order',
icon: FiX,
onClick: () => openModal('cancelOrder', { id, orderType }),
disabled:
status === dealStatusEnum.DEAL_CANCELLED ||
status === dealStatusEnum.DEAL_STRUCK,
},
],
};
};
const solutionMapper = {
technology: getTechnologyDataGrid,
service: getServiceDataGrid,
};
const MyOrders = ({ currentPage, totalPages, totalItems, currentSort, orders }) => {
const { t } = useTranslation(['helper', 'account']);
const router = useRouter();
const { openModal } = useModal();
const [currentOrder, setCurrentOrder] = useState(null);
/**
* Pushes new page number to next/router
*
* @param {string} page Page number.
*/
const handlePagination = (page) => {
const { pathname, query } = router;
query.page = page;
router.push({
pathname,
query,
});
};
/**
* Pushes new sort options to next/router
*
* @param {string} orderBy Grid column to sort items.
* @param {('ASC'|'DESC')} order Sort order.
* @returns {Promise<boolean>} Next router push
*/
const handleSortBy = (orderBy, order = currentSort.order || orderEnum.ASC) => {
const { pathname, query } = router;
delete query.page;
|
return router.push({
pathname,
query,
});
};
return (
<Container>
<Protected>
<UserProfile />
{currentOrder ? (
<OrderMessages
isBuyer
currentOrder={currentOrder}
backToList={() => setCurrentOrder(null)}
/>
) : (
<MainContentContainer>
{orders.length ? (
<>
<Title align="left" noPadding noMargin>
{t('account:titles.myOrders')}
</Title>
<MainContent>
<DataGrid
data={orders.map((order) => {
const solutionData = solutionMapper[order.type](
order,
openModal,
setCurrentOrder,
);
return {
id: solutionData.id,
Título: solutionData.title,
Organização: solutionData.institution,
Responsável: solutionData.responsible,
Status: (
<DealStatus status={solutionData.status.status}>
{solutionData.status.content}
</DealStatus>
),
'Data do pedido': solutionData.orderDate,
Tipo: (
<SolutionType type={order.type}>
{solutionData.type}
</SolutionType>
),
Ações: (
<DealActions>
{solutionData.actions.map((action) => (
<IconButton
key={action.ariaLabel}
variant={action.variant}
aria-label={action.ariaLabel}
onClick={action.onClick}
disabled={action.disabled}
>
<action.icon />
</IconButton>
))}
</DealActions>
),
};
})}
hideItemsByKey={['id']}
currentPage={currentPage}
totalPages={totalPages}
totalItems={totalItems}
itemsPerPage={itemsPerPage}
currentOrder={currentSort.order}
sortOptions={sortOptions}
handlePagination={handlePagination}
handleSortBy={handleSortBy}
enablePagination
/>
</MainContent>
</>
) : (
<EmptyScreen message={t('account:messages.noOrdersToShow')} />
)}
</MainContentContainer>
)}
</Protected>
</Container>
);
};
MyOrders.propTypes = {
orders: PropTypes.arrayOf(PropTypes.shape({})).isRequired,
currentPage: PropTypes.number.isRequired,
totalPages: PropTypes.number.isRequired,
totalItems: PropTypes.number.isRequired,
currentSort: PropTypes.shape({
by: PropTypes.string,
order: PropTypes.string,
}),
};
MyOrders.defaultProps = {
currentSort: {},
};
MyOrders.getInitialProps = async (ctx) => {
const { query } = ctx;
const page = Number(query.page) || 1;
const { orders, totalPages, totalItems } = (await getOrders({ fromCurrentUser: true })) || [];
return {
orders,
currentPage: page,
totalPages,
totalItems,
currentSort: { by: query.orderBy, order: query.order },
sortOptions,
};
};
export const Container = styled.div`
display: flex;
margin: 0 auto;
background-color: ${({ theme }) => theme.colors.whiteSmoke};
padding: 3rem 4rem 6rem;
> section:first-child {
margin-right: 4rem;
}
@media screen and (max-width: 950px) {
flex-direction: column;
> section:first-child {
margin-bottom: 1rem;
}
}
`;
export const MainContentContainer = styled.section`
width: 100%;
`;
export const MainContent = styled.div`
min-height: 80vh;
background-color: ${({ theme }) => theme.colors.white};
padding: 2rem;
`;
export const InfoContainer = styled.div`
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: 1rem;
@media screen and (max-width: 950px) {
flex-direction: column;
button {
margin-bottom: 1rem;
}
}
`;
const statusModifiers = {
[dealStatusEnum.DEAL_STRUCK]: (colors) => css`
color: ${colors.secondary};
&::before {
background: ${colors.secondary};
}
`,
[dealStatusEnum.DEAL_ONGOING]: (colors) => css`
color: ${colors.lightGray2};
&::before {
background: ${colors.lightGray2};
}
`,
[dealStatusEnum.DEAL_CANCELLED]: (colors) => css`
color: ${colors.red};
&::before {
background: ${colors.red};
}
`,
[dealStatusEnum.DEAL_REQUESTED]: (colors) => css`
color: ${colors.lightGray2};
&::before {
background: ${colors.lightGray2};
}
`,
};
export const DealStatus = styled.div`
${({ theme: { colors }, status }) => css`
display: inline-block;
position: relative;
line-height: 2.4rem;
| const shouldOrderAsc = order === orderEnum.DESC && currentSort.by !== orderBy;
query.order = shouldOrderAsc ? orderEnum.ASC : order;
query.orderBy = orderBy; | random_line_split |
database.py | None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys)
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def batch_upsert_rows(model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows_length - rows_created)
update.save()
except Exception as e:
logger.info('Database - error upserting rows. Doing single row upsert. - Error: {}'.format(e))
upsert_single_rows(model, rows, update=update, ignore_conflict=ignore_conflict)
def upsert_single_rows(model, rows, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
rows_created = 0
rows_updated = 0
for row in rows:
try:
with connection.cursor() as curs:
with transaction.atomic():
curs.execute(upsert_query(table_name, row, primary_key, ignore_conflict=ignore_conflict),
build_row_values(row))
rows_updated = rows_updated + 1
rows_created = rows_created + 1
if rows_created % settings.BATCH_SIZE == 0:
logger.debug("{} - seeded {}".format(table_name, rows_created))
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
rows_updated = 0
rows_updated = 0
except Exception as e:
logger.error("Database Error * - unable to upsert single record. Error: {}".format(e))
continue
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
# https://djangosnippets.org/snippets/1400/
import time
import traceback
import logging
import sys
class Status(object):
def __init__(self):
self.num_successful = 0
self.failed_ids = []
self.done = False
self.cur_idx = 0
def __repr__(self):
return u'<Status: %s/%s, %s failed>' % (
getattr(self, 'cur_idx', '-'),
getattr(self, 'total', '-'),
self.num_failed)
@property
def num_failed(self): return len(self.failed_ids)
def start(self):
self.start_time = time.time()
def finished(self):
self.cur_idx = self.total
self.done = True
self.end_time = time.time()
@property
def rate(self):
if self.done:
end_time = self.end_time
else:
end_time = time.time()
return self.cur_idx / (end_time - self.start_time)
@property
def time_left(self):
rate = self.rate
if rate == 0:
return 0
return (self.total - self.cur_idx) / self.rate
def progress_callback(status):
message = '%d/%d failed=%d, rate~%.2f per second, left~%.2f sec \r' % (
status.cur_idx, status.total, status.num_failed, status.rate, status.time_left)
if status.done:
message = "DONE! - {}".format(message)
print(message)
logger.debug(message)
else:
message = "Progress - {}".format(message)
print(message)
logger.debug(message)
def queryset_foreach(queryset, f, batch_size=1000,
progress_callback=progress_callback, transaction=True):
'''
Call a function for each element in a queryset (actually, any list).
Features:
* stable memory usage (thanks to Django paginators)
* progress indicators
* wraps batches in transactions
* can take managers or even models (e.g., Assertion.objects)
* warns about DEBUG.
* handles failures of single items without dying in general.
* stable even if items are added or removed during processing
(gets a list of ids at the start)
Returns a Status object, with the following interesting attributes
total: number of items in the queryset
num_successful: count of successful items
failed_ids: list of ids of items that failed
'''
from django.conf import settings
if settings.DEBUG:
logger.debug('Warning: DEBUG is on. django.db.connection.queries may use up a lot of memory.')
# Get querysets corresponding to managers
from django.shortcuts import _get_queryset
queryset = _get_queryset(queryset)
# Get a snapshot of all the ids that match the query
logger.debug('qs4e: Getting list of objects')
ids = list(queryset.values_list(queryset.model._meta.pk.name, flat=True))
# Initialize status
status = Status()
status.total = len(ids)
def do_all_objects(objects):
from django.db import transaction
with transaction.atomic():
for id, obj in objects.items():
try:
f(obj)
status.num_successful += 1
except Exception as e: # python 2.5+: doesn't catch KeyboardInterrupt or SystemExit
logger.error(e)
status.failed_ids.append(id)
# if transaction:
# # Wrap each batch in a transaction
# with transaction.atomic():
# do_all_objects = transaction.commit_on_success(do_all_objects)
from django.core.paginator import Paginator
paginator = Paginator(ids, batch_size)
status.start()
progress_callback(status)
for page_num in paginator.page_range: | status.page = page = paginator.page(page_num)
status.cur_idx = page.start_index() - 1
progress_callback(status) | random_line_split | |
database.py | cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
|
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def batch_upsert_rows(model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows | fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys) | identifier_body |
database.py | cursor = cursor + 1
# if cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys)
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True | if count == -1:
count = count + 1
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
continue
found = False
# search for csv row in old file
# original_reader = csv.reader(open(original_file_path, 'r'), delimiter=',', quotechar='"',
# doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True)
# for original_row in original_reader:
#
with open(original_file_path, 'r') as of:
original_content = of.readlines()
for original_row in original_content:
if new_row == original_row:
found = True
break
| conditional_block | |
database.py | cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys)
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def | (model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows | batch_upsert_rows | identifier_name |
image_processor_2.0.py | _color = (255,0,0)
possible_target_color = (0,255,0)
#used to judge whether a polygon side is near vertical or near horizontal, for filtering out shapes that don't match expected target characteristics
vert_threshold = math.tan(math.radians(90-20))
horiz_threshold = math.tan(math.radians(20))
#used to look for only horizontal or vertical rectangles of an aspect ratio that matches the targets.
#currently open wide to find both horizontal and vertical targets
max_target_aspect_ratio = 10 # 1.0 # top target is expected to be 24.5 in x 4 in.
min_target_aspect_ratio = 0.1 #0.01# 3# 0.5
angle_to_robot = 0 #camera's 0 bearing to robot's 0 bearing
camera_offset_position = 0
morph_close_iterations = 9
angle_to_shooter = 0 #camera's 0 bearing to shooter's 0 bearing
camera_color_intensity = 0 #value subject to change
camera_saturation = 0 #value subject to change
camera_contrast = 0 #value subject to change
camera_color_hue = 0 #value subject to change
camera_brightness = 20 #value subject to change
camera_gain = 0 #value subject to change
camera_exposure = 20
robot_heading = 0.0 #input from SmartDashboard if enabled, else hard coded here.
x_resolution = 640 #needs to match the camera.
y_resolution = 480
#theta = math.radians(49.165) #half of field of view of the camera
# field_of_view_degrees = 53.0 horizontal field of view
field_of_view_degrees = 26.4382 # vertical field of view
theta = math.radians(field_of_view_degrees/2.0) #half of field of view of the camera, in radians to work with math.tan function.
# real_target_width = 24.5 #inches #24 * 0.0254 #1 inch / 0.254 meters target is 24 inches wide
real_target_height = 28.5 #using these constants and may not be correct for current robot configuration.
angle_to_shooter = 0
#not currently using these constants and may not be correct for current robot configuration.
# target_min_width = 20
# target_max_width = 200
# degrees_horiz_field_of_view = 47.0
# degrees_vert_field_of_view = 480.0/640*degrees_horiz_field_of_view
# inches_camera_height = 54.0
# inches_top_target_height = 98 + 2 + 98
# degrees_camera_pitch = 21.0
# degrees_sighting_offset = -1.55
def __init__(self, img_path):
self.img_path = img_path
self.layout_result_windows(self.h,self.s,self.v)
self.vc = VideoCapture(0)
SmartDashboard.PutNumber(angle_to_robot_title, self.angle_to_robot)
SmartDashboard.PutNumber(camera_offset_position_title, self.camera_offset_position)
SmartDashboard.PutNumber(morph_close_iterations_title, self.morph_close_iterations)
SmartDashboard.PutNumber(angle_to_shooter_title, self.angle_to_shooter)
SmartDashboard.PutNumber(camera_color_intensity_title, self.camera_color_intensity)
SmartDashboard.PutNumber(camera_exposure_title, self.camera_exposure)
SmartDashboard.PutNumber(camera_saturation.title, self.saturation)
SmartDashboard.PutNumber(camera_contrast_title, self.contrast)
SmartDashboard.PutNumber(camera_color_hue_title, self.camera_color_hue)
SmartDashboard.PutNumber(camera_brihtness_title, self.camera_brightness)
def video_feed(self):
|
def process(self):
if enable_dashboard:
self.camera_saturation = int(SmartDashboard.GetNumber(camera_saturation_title)
self.angle_to_robot = int(SmartDashboard.GetNumber(angle_to_robot_title)
self.camera_offset_postion = int(SmartDashboard.GetNumber(camera_offset_position_title)
self.morph_close_iterations = int(SmartDashboard.GetNumber(morph_close_iterations_title)
self.angle_to_shooter = int(SmartDashboard.GetNumber(angle_to_shooter_title)
self.camera_color_intensity = int(SmartDashboard.GetNumber(camera_color_intensity_title)
self.camera_contrast = int(SmartDashboard.GetNumber(camera_contrast_title)
self.camera_color_hue = int(SmartDashboard.GetNumber(camera_color_hue_title)
self.camera_brightness = int(SmartDashboard.GetNumber(camera_brightness_title)
self.camera_exposure = int(SmartDashboard.GetNumber(camera_exposure_title)
self.camera_gain = int(SmartDashboard.GetNumber(camera_gain_title)
if self.img_path is None:
commands.getoutput(" yavta --set-control '0x009a0901 1' /dev/video0")
#print(commands.getoutput(" yavta --get-control '0x009a0901' /dev/video0") )
commands.getoutput("yavta --set-control '0x009a0902 %s' /dev/video0" % self.camera_exposure)
#print(commands.getoutput(" yavta --get-control '0x009a0902' /dev/video0"))
drawing = np.zeros(self.img.shape, dtype=np.uint8)
self.hsv = cvtColor(self.img, cv.CV_BGR2HSV)
self.h, self.s, self.v = split(self.hsv)
self.h_clipped = self.threshold_in_range(self.h, self.hue_thresh-self.hue_delta, self.hue_thresh+self.hue_delta)
self.s_clipped = self.threshold_in_range(self.s, self.sat_thresh-self.sat_delta, self.sat_thresh+self.sat_delta)
self.v_clipped = self.threshold_in_range(self.v, self.val_thresh-self.val_delta, self.val_thresh+self.val_delta)
if show_windows:
h_scaled = resize(self.h_clipped, window_size)
s_scaled = resize(self.s_clipped, window_size)
v_scaled = resize(self.v_clipped, window_size)
imshow(self.h_title, h_scaled)
imshow(self.s_title, s_scaled)
imshow(self.v_title, v_scaled)
self.find_targets()
if waitKey(self.video_pause) == ord('q'):
exit(1)
def layout_result_windows(self, h, s, v):
if show_windows:
pos_x, pos_y = 500,500
# imshow(self.img_path, self.img)
h_scaled = resize(h, window_size)
s_scaled = resize(s, window_size)
v_scaled = resize(v, window_size)
combined_scaled = resize(self.combined, window_size)
img_scaled = resize(self.img, window_size)
imshow(self.h_title , h_scaled)
imshow(self.s_title , s_scaled)
imshow(self.v_title , v_scaled)
imshow(self.combined_title, combined_scaled)
imshow(self.targets_title , img_scaled)
#moveWindow(self.h_title, pos_x*1, pos_y*0);
#moveWindow(self.s_title, pos_x*0, pos_y*1);
#moveWindow(self.v_title, pos_x*1, pos_y*1);
#moveWindow(self.combined_title, pos_x*2, pos_y*0);
#moveWindow(self.targets_title, pos_x*2, pos_y*1);
#these seem to be placed alphabetically....
# createTrackbar( "Hue High Threshold:", self.source_title, self.hue_high_thresh, self.max_thresh, self.update_hue_high_threshold);
# createTrackbar( "Hue Low Threshold:", self.source_title, self.hue_low_thresh, self.max_thresh, self.update_hue_low_threshold);
# createTrackbar( "Sat High Threshold:", self.source_title, self.sat_high_thresh, self.max_thresh, self.update_sat_high_threshold);
# createTrackbar( "Sat Low Threshold:", self.source_title, self.sat_low_thresh, self.max_thresh, self.update_sat_low_threshold);
# createTrackbar( "Val High Threshold:", self.source_title, self.val_high_thresh, self.max_thresh, self.update_val_high_threshold);
# createTrackbar( "Val Low Threshold:", self.source_title, self.val_low_thresh, self.max_thresh, self.update_val_low_threshold);
def update_hue_threshold(self, thresh):
delta = 15
self.h_clipped = self.threshold_in_range(self.h, thresh-delta, thresh+delta)
imshow(self.h_title, self.h_clipped)
self.find_targets()
def update_sat_threshold(self, thresh):
| while True:
if self.img is not None:
self.process()
if self.img_path is None:
rval, self.img = self.vc.read() #might set to None
else:
self.img = imread(self.img_path) | identifier_body |
image_processor_2.0.py | _color = (255,0,0)
possible_target_color = (0,255,0)
#used to judge whether a polygon side is near vertical or near horizontal, for filtering out shapes that don't match expected target characteristics
vert_threshold = math.tan(math.radians(90-20))
horiz_threshold = math.tan(math.radians(20))
#used to look for only horizontal or vertical rectangles of an aspect ratio that matches the targets.
#currently open wide to find both horizontal and vertical targets
max_target_aspect_ratio = 10 # 1.0 # top target is expected to be 24.5 in x 4 in.
min_target_aspect_ratio = 0.1 #0.01# 3# 0.5
angle_to_robot = 0 #camera's 0 bearing to robot's 0 bearing
camera_offset_position = 0
morph_close_iterations = 9
angle_to_shooter = 0 #camera's 0 bearing to shooter's 0 bearing
camera_color_intensity = 0 #value subject to change
camera_saturation = 0 #value subject to change
camera_contrast = 0 #value subject to change
camera_color_hue = 0 #value subject to change
camera_brightness = 20 #value subject to change
camera_gain = 0 #value subject to change
camera_exposure = 20
robot_heading = 0.0 #input from SmartDashboard if enabled, else hard coded here.
x_resolution = 640 #needs to match the camera.
y_resolution = 480
#theta = math.radians(49.165) #half of field of view of the camera
# field_of_view_degrees = 53.0 horizontal field of view
field_of_view_degrees = 26.4382 # vertical field of view
theta = math.radians(field_of_view_degrees/2.0) #half of field of view of the camera, in radians to work with math.tan function.
# real_target_width = 24.5 #inches #24 * 0.0254 #1 inch / 0.254 meters target is 24 inches wide
real_target_height = 28.5 #using these constants and may not be correct for current robot configuration.
angle_to_shooter = 0
#not currently using these constants and may not be correct for current robot configuration.
# target_min_width = 20
# target_max_width = 200
# degrees_horiz_field_of_view = 47.0
# degrees_vert_field_of_view = 480.0/640*degrees_horiz_field_of_view
# inches_camera_height = 54.0
# inches_top_target_height = 98 + 2 + 98
# degrees_camera_pitch = 21.0
# degrees_sighting_offset = -1.55
def | (self, img_path):
self.img_path = img_path
self.layout_result_windows(self.h,self.s,self.v)
self.vc = VideoCapture(0)
SmartDashboard.PutNumber(angle_to_robot_title, self.angle_to_robot)
SmartDashboard.PutNumber(camera_offset_position_title, self.camera_offset_position)
SmartDashboard.PutNumber(morph_close_iterations_title, self.morph_close_iterations)
SmartDashboard.PutNumber(angle_to_shooter_title, self.angle_to_shooter)
SmartDashboard.PutNumber(camera_color_intensity_title, self.camera_color_intensity)
SmartDashboard.PutNumber(camera_exposure_title, self.camera_exposure)
SmartDashboard.PutNumber(camera_saturation.title, self.saturation)
SmartDashboard.PutNumber(camera_contrast_title, self.contrast)
SmartDashboard.PutNumber(camera_color_hue_title, self.camera_color_hue)
SmartDashboard.PutNumber(camera_brihtness_title, self.camera_brightness)
def video_feed(self):
while True:
if self.img is not None:
self.process()
if self.img_path is None:
rval, self.img = self.vc.read() #might set to None
else:
self.img = imread(self.img_path)
def process(self):
if enable_dashboard:
self.camera_saturation = int(SmartDashboard.GetNumber(camera_saturation_title)
self.angle_to_robot = int(SmartDashboard.GetNumber(angle_to_robot_title)
self.camera_offset_postion = int(SmartDashboard.GetNumber(camera_offset_position_title)
self.morph_close_iterations = int(SmartDashboard.GetNumber(morph_close_iterations_title)
self.angle_to_shooter = int(SmartDashboard.GetNumber(angle_to_shooter_title)
self.camera_color_intensity = int(SmartDashboard.GetNumber(camera_color_intensity_title)
self.camera_contrast = int(SmartDashboard.GetNumber(camera_contrast_title)
self.camera_color_hue = int(SmartDashboard.GetNumber(camera_color_hue_title)
self.camera_brightness = int(SmartDashboard.GetNumber(camera_brightness_title)
self.camera_exposure = int(SmartDashboard.GetNumber(camera_exposure_title)
self.camera_gain = int(SmartDashboard.GetNumber(camera_gain_title)
if self.img_path is None:
commands.getoutput(" yavta --set-control '0x009a0901 1' /dev/video0")
#print(commands.getoutput(" yavta --get-control '0x009a0901' /dev/video0") )
commands.getoutput("yavta --set-control '0x009a0902 %s' /dev/video0" % self.camera_exposure)
#print(commands.getoutput(" yavta --get-control '0x009a0902' /dev/video0"))
drawing = np.zeros(self.img.shape, dtype=np.uint8)
self.hsv = cvtColor(self.img, cv.CV_BGR2HSV)
self.h, self.s, self.v = split(self.hsv)
self.h_clipped = self.threshold_in_range(self.h, self.hue_thresh-self.hue_delta, self.hue_thresh+self.hue_delta)
self.s_clipped = self.threshold_in_range(self.s, self.sat_thresh-self.sat_delta, self.sat_thresh+self.sat_delta)
self.v_clipped = self.threshold_in_range(self.v, self.val_thresh-self.val_delta, self.val_thresh+self.val_delta)
if show_windows:
h_scaled = resize(self.h_clipped, window_size)
s_scaled = resize(self.s_clipped, window_size)
v_scaled = resize(self.v_clipped, window_size)
imshow(self.h_title, h_scaled)
imshow(self.s_title, s_scaled)
imshow(self.v_title, v_scaled)
self.find_targets()
if waitKey(self.video_pause) == ord('q'):
exit(1)
def layout_result_windows(self, h, s, v):
if show_windows:
pos_x, pos_y = 500,500
# imshow(self.img_path, self.img)
h_scaled = resize(h, window_size)
s_scaled = resize(s, window_size)
v_scaled = resize(v, window_size)
combined_scaled = resize(self.combined, window_size)
img_scaled = resize(self.img, window_size)
imshow(self.h_title , h_scaled)
imshow(self.s_title , s_scaled)
imshow(self.v_title , v_scaled)
imshow(self.combined_title, combined_scaled)
imshow(self.targets_title , img_scaled)
#moveWindow(self.h_title, pos_x*1, pos_y*0);
#moveWindow(self.s_title, pos_x*0, pos_y*1);
#moveWindow(self.v_title, pos_x*1, pos_y*1);
#moveWindow(self.combined_title, pos_x*2, pos_y*0);
#moveWindow(self.targets_title, pos_x*2, pos_y*1);
#these seem to be placed alphabetically....
# createTrackbar( "Hue High Threshold:", self.source_title, self.hue_high_thresh, self.max_thresh, self.update_hue_high_threshold);
# createTrackbar( "Hue Low Threshold:", self.source_title, self.hue_low_thresh, self.max_thresh, self.update_hue_low_threshold);
# createTrackbar( "Sat High Threshold:", self.source_title, self.sat_high_thresh, self.max_thresh, self.update_sat_high_threshold);
# createTrackbar( "Sat Low Threshold:", self.source_title, self.sat_low_thresh, self.max_thresh, self.update_sat_low_threshold);
# createTrackbar( "Val High Threshold:", self.source_title, self.val_high_thresh, self.max_thresh, self.update_val_high_threshold);
# createTrackbar( "Val Low Threshold:", self.source_title, self.val_low_thresh, self.max_thresh, self.update_val_low_threshold);
def update_hue_threshold(self, thresh):
delta = 15
self.h_clipped = self.threshold_in_range(self.h, thresh-delta, thresh+delta)
imshow(self.h_title, self.h_clipped)
self.find_targets()
def update_sat_threshold(self, thresh):
| __init__ | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.